/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "VideoUtils.h"

#include "CubebUtils.h"
#include "ImageContainer.h"
#include "MediaContainerType.h"
#include "MediaResource.h"
#include "TimeUnits.h"
#include "VorbisUtils.h"
#include "mozilla/Base64.h"
#include "mozilla/SharedThreadPool.h"
#include "mozilla/StaticPrefs.h"
#include "mozilla/SystemGroup.h"
#include "mozilla/TaskCategory.h"
#include "mozilla/TaskQueue.h"
#include "mozilla/Telemetry.h"
#include "nsCharSeparatedTokenizer.h"
#include "nsContentTypeParser.h"
#include "nsIConsoleService.h"
#include "nsIRandomGenerator.h"
#include "nsIServiceManager.h"
#include "nsMathUtils.h"
#include "nsServiceManagerUtils.h"
#include "nsThreadUtils.h"

namespace mozilla {

NS_NAMED_LITERAL_CSTRING(kEMEKeySystemClearkey, "org.w3.clearkey");
NS_NAMED_LITERAL_CSTRING(kEMEKeySystemWidevine, "com.widevine.alpha");

using layers::PlanarYCbCrImage;
using media::TimeUnit;

CheckedInt64 SaferMultDiv(int64_t aValue, uint64_t aMul, uint64_t aDiv) {
  if (aMul > INT64_MAX || aDiv > INT64_MAX) {
    return CheckedInt64(INT64_MAX) + 1;  // Return an invalid checked int.
  }
  int64_t mul = aMul;
  int64_t div = aDiv;
  int64_t major = aValue / div;
  int64_t remainder = aValue % div;
  return CheckedInt64(remainder) * mul / div + CheckedInt64(major) * mul;
}
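
// Illustration of the decomposition above: for aValue = 3, aMul = USECS_PER_S
// (1000000) and aDiv = 48000 we get major = 0 and remainder = 3, so the result
// is 3 * 1000000 / 48000 + 0 * 1000000 = 62. The remainder term stays below
// aDiv before being multiplied, and any overflow in the major term is flagged
// by the CheckedInt64 arithmetic.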

// Converts from number of audio frames to microseconds, given the specified
// audio sample rate.
CheckedInt64 FramesToUsecs(int64_t aFrames, uint32_t aRate) {
  return SaferMultDiv(aFrames, USECS_PER_S, aRate);
}
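
// For illustration: FramesToUsecs(48000, 48000) is exactly one second
// (1,000,000 us), and FramesToUsecs(441, 44100) is 10,000 us.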

TimeUnit FramesToTimeUnit(int64_t aFrames, uint32_t aRate) {
  int64_t major = aFrames / aRate;
  int64_t remainder = aFrames % aRate;
  return TimeUnit::FromMicroseconds(major) * USECS_PER_S +
         (TimeUnit::FromMicroseconds(remainder) * USECS_PER_S) / aRate;
}

// Converts from microseconds to number of audio frames, given the specified
// audio sample rate.
CheckedInt64 UsecsToFrames(int64_t aUsecs, uint32_t aRate) {
  return SaferMultDiv(aUsecs, aRate, USECS_PER_S);
}

// Format TimeUnit as number of frames at given rate.
CheckedInt64 TimeUnitToFrames(const TimeUnit& aTime, uint32_t aRate) {
  return UsecsToFrames(aTime.ToMicroseconds(), aRate);
}

nsresult SecondsToUsecs(double aSeconds, int64_t& aOutUsecs) {
  if (aSeconds * double(USECS_PER_S) > INT64_MAX) {
    return NS_ERROR_FAILURE;
  }
  aOutUsecs = int64_t(aSeconds * double(USECS_PER_S));
  return NS_OK;
}

static int32_t ConditionDimension(float aValue) {
  // This will exclude NaNs and too-big values.
  if (aValue > 1.0 && aValue <= INT32_MAX) {
    return int32_t(NS_round(aValue));
  }
  return 0;
}

void ScaleDisplayByAspectRatio(gfx::IntSize& aDisplay, float aAspectRatio) {
  if (aAspectRatio > 1.0) {
    // Increase the intrinsic width
    aDisplay.width = ConditionDimension(aAspectRatio * aDisplay.width);
  } else {
    // Increase the intrinsic height
    aDisplay.height = ConditionDimension(aDisplay.height / aAspectRatio);
  }
}
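
// For illustration: with aAspectRatio = 2.0 a 320x240 display becomes 640x240
// (width grows), while with aAspectRatio = 0.5 it becomes 320x480 (height
// grows), so the scaled dimension is never shrunk below its intrinsic size.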

static int64_t BytesToTime(int64_t offset, int64_t length,
                           int64_t durationUs) {
  NS_ASSERTION(length > 0, "Must have positive length");
  double r = double(offset) / double(length);
  if (r > 1.0) r = 1.0;
  return int64_t(double(durationUs) * r);
}
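
// For illustration: this assumes a roughly constant bitrate, so with
// length = 1,000,000 bytes and durationUs = 10,000,000 us an offset of
// 250,000 bytes maps to 2,500,000 us (25% of the way through the stream).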

media::TimeIntervals GetEstimatedBufferedTimeRanges(
    mozilla::MediaResource* aStream, int64_t aDurationUsecs) {
  media::TimeIntervals buffered;
  // Nothing to cache if the media takes 0us to play.
  if (aDurationUsecs <= 0 || !aStream) return buffered;

  // Special case completely cached files. This also handles local files.
  if (aStream->IsDataCachedToEndOfResource(0)) {
    buffered += media::TimeInterval(TimeUnit::Zero(),
                                    TimeUnit::FromMicroseconds(aDurationUsecs));
    return buffered;
  }

  int64_t totalBytes = aStream->GetLength();

  // If we can't determine the total size, pretend that we have nothing
  // buffered. This will put us in a state of eternally-low-on-undecoded-data
  // which is not great, but about the best we can do.
  if (totalBytes <= 0) return buffered;

  int64_t startOffset = aStream->GetNextCachedData(0);
  while (startOffset >= 0) {
    int64_t endOffset = aStream->GetCachedDataEnd(startOffset);
    // Bytes [startOffset..endOffset] are cached.
    NS_ASSERTION(startOffset >= 0, "Integer underflow in GetBuffered");
    NS_ASSERTION(endOffset >= 0, "Integer underflow in GetBuffered");

    int64_t startUs = BytesToTime(startOffset, totalBytes, aDurationUsecs);
    int64_t endUs = BytesToTime(endOffset, totalBytes, aDurationUsecs);
    if (startUs != endUs) {
      buffered += media::TimeInterval(TimeUnit::FromMicroseconds(startUs),
                                      TimeUnit::FromMicroseconds(endUs));
    }
    startOffset = aStream->GetNextCachedData(endOffset);
  }

  return buffered;
}

void DownmixStereoToMono(mozilla::AudioDataValue* aBuffer, uint32_t aFrames) {
  const int channels = 2;
  for (uint32_t fIdx = 0; fIdx < aFrames; ++fIdx) {
#ifdef MOZ_SAMPLE_TYPE_FLOAT32
    float sample = 0.0;
#else
    int sample = 0;
#endif
    // The samples of the buffer are interleaved.
    sample = (aBuffer[fIdx * channels] + aBuffer[fIdx * channels + 1]) * 0.5;
    aBuffer[fIdx * channels] = aBuffer[fIdx * channels + 1] = sample;
  }
}
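
// For illustration: with interleaved stereo input [L0, R0, L1, R1, ...] each
// pair is replaced by its average, so both channel slots of frame n end up
// holding (Ln + Rn) / 2 while the buffer keeps its interleaved stereo layout.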

uint32_t DecideAudioPlaybackChannels(const AudioInfo& info) {
  if (StaticPrefs::accessibility_monoaudio_enable()) {
    return 1;
  }

  if (StaticPrefs::MediaForcestereoEnabled()) {
    return 2;
  }

  return info.mChannels;
}

bool IsDefaultPlaybackDeviceMono() {
  return CubebUtils::MaxNumberOfChannels() == 1;
}

bool IsVideoContentType(const nsCString& aContentType) {
  NS_NAMED_LITERAL_CSTRING(video, "video");
  if (FindInReadable(video, aContentType)) {
    return true;
  }
  return false;
}

bool IsValidVideoRegion(const gfx::IntSize& aFrame,
                        const gfx::IntRect& aPicture,
                        const gfx::IntSize& aDisplay) {
  return aFrame.width <= PlanarYCbCrImage::MAX_DIMENSION &&
         aFrame.height <= PlanarYCbCrImage::MAX_DIMENSION &&
         aFrame.width * aFrame.height <= MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
         aFrame.width * aFrame.height != 0 &&
         aPicture.width <= PlanarYCbCrImage::MAX_DIMENSION &&
         aPicture.x < PlanarYCbCrImage::MAX_DIMENSION &&
         aPicture.x + aPicture.width < PlanarYCbCrImage::MAX_DIMENSION &&
         aPicture.height <= PlanarYCbCrImage::MAX_DIMENSION &&
         aPicture.y < PlanarYCbCrImage::MAX_DIMENSION &&
         aPicture.y + aPicture.height < PlanarYCbCrImage::MAX_DIMENSION &&
         aPicture.width * aPicture.height <=
             MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
         aPicture.width * aPicture.height != 0 &&
         aDisplay.width <= PlanarYCbCrImage::MAX_DIMENSION &&
         aDisplay.height <= PlanarYCbCrImage::MAX_DIMENSION &&
         aDisplay.width * aDisplay.height <=
             MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
         aDisplay.width * aDisplay.height != 0;
}

already_AddRefed<SharedThreadPool> GetMediaThreadPool(MediaThreadType aType) {
  const char* name;
  switch (aType) {
    case MediaThreadType::PLATFORM_DECODER:
      name = "MediaPDecoder";
      break;
    case MediaThreadType::MSG_CONTROL:
      name = "MSGControl";
      break;
    case MediaThreadType::WEBRTC_DECODER:
      name = "WebRTCPD";
      break;
    default:
      MOZ_FALLTHROUGH_ASSERT("Unexpected MediaThreadType");
    case MediaThreadType::PLAYBACK:
      name = "MediaPlayback";
      break;
  }

  static const uint32_t kMediaThreadPoolDefaultCount = 4;
  RefPtr<SharedThreadPool> pool = SharedThreadPool::Get(
      nsDependentCString(name), kMediaThreadPoolDefaultCount);

  // Ensure a larger stack for platform decoder threads
  if (aType == MediaThreadType::PLATFORM_DECODER) {
    const uint32_t minStackSize = 512 * 1024;
    uint32_t stackSize;
    MOZ_ALWAYS_SUCCEEDS(pool->GetThreadStackSize(&stackSize));
    if (stackSize < minStackSize) {
      MOZ_ALWAYS_SUCCEEDS(pool->SetThreadStackSize(minStackSize));
    }
  }

  return pool.forget();
}

bool ExtractVPXCodecDetails(const nsAString& aCodec, uint8_t& aProfile,
                            uint8_t& aLevel, uint8_t& aBitDepth) {
  uint8_t dummyChromaSubsampling = 1;
  VideoColorSpace dummyColorspace;
  return ExtractVPXCodecDetails(aCodec, aProfile, aLevel, aBitDepth,
                                dummyChromaSubsampling, dummyColorspace);
}

bool ExtractVPXCodecDetails(const nsAString& aCodec, uint8_t& aProfile,
                            uint8_t& aLevel, uint8_t& aBitDepth,
                            uint8_t& aChromaSubsampling,
                            VideoColorSpace& aColorSpace) {
  // Assign default value.
  aChromaSubsampling = 1;
  auto splitter = aCodec.Split(u'.');
  auto fieldsItr = splitter.begin();
  auto fourCC = *fieldsItr;

  if (!fourCC.EqualsLiteral("vp09") && !fourCC.EqualsLiteral("vp08")) {
    // Invalid 4CC.
    return false;
  }
  ++fieldsItr;
  uint8_t* fields[] = {&aProfile,
                       &aLevel,
                       &aBitDepth,
                       &aChromaSubsampling,
                       &aColorSpace.mPrimaryId,
                       &aColorSpace.mTransferId,
                       &aColorSpace.mMatrixId,
                       &aColorSpace.mRangeId};
  int fieldsCount = 0;
  nsresult rv;
  for (; fieldsItr != splitter.end(); ++fieldsItr, ++fieldsCount) {
    if (fieldsCount > 7) {
      // No more than 8 fields are expected.
      return false;
    }
    *(fields[fieldsCount]) = static_cast<uint8_t>(
        PromiseFlatString((*fieldsItr)).ToInteger(&rv, 10));
    // We got an invalid field value; parsing error.
    NS_ENSURE_SUCCESS(rv, false);
  }
  // Mandatory fields:
  // <sample entry 4CC>.<profile>.<level>.<bitDepth>.
  // Optional fields:
  // <chromaSubsampling>.<colourPrimaries>.<transferCharacteristics>.
  // <matrixCoefficients>.<videoFullRangeFlag>
  // First three fields are mandatory (we have already parsed the 4CC).
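  // For illustration: "vp09.00.10.08" carries only the mandatory fields
  // (profile 0, level 1.0, 8-bit) and leaves the optional colour fields at
  // their defaults, while a string with all nine fields also pins down the
  // colour description.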
  if (fieldsCount < 3) {
    // Invalid number of fields.
    return false;
  }
  // Start to validate the parsed values.

  // profile should be 0, 1, 2 or 3.
  // See https://www.webmproject.org/vp9/profiles/
  // We don't support more than profile 2.
  if (aProfile > 2) {
    // Invalid profile.
    return false;
  }

  // level, see https://www.webmproject.org/vp9/mp4/#semantics_1

  if (aBitDepth != 8 && aBitDepth != 10 && aBitDepth != 12) {
    // Invalid bit depth.
    return false;
  }

  if (fieldsCount == 3) {
    // No optional fields present.
    return true;
  }

  // chromaSubsampling should be 0, 1, 2 or 3; 4~7 are reserved.
  if (aChromaSubsampling > 3) {
    return false;
  }

  if (fieldsCount == 4) {
    // No more optional fields present.
    return true;
  }

  // It is an integer that is defined by the "Colour primaries"
  // section of ISO/IEC 23001-8:2016 Table 2.
  // We treat reserved values as the false case.
  const auto& primaryId = aColorSpace.mPrimaryId;
  if (primaryId == 0 || primaryId == 3 || primaryId > 22) {
    return false;
  }
  if (primaryId > 12 && primaryId < 22) {
    // 13~21 are reserved values.
    return false;
  }

  if (fieldsCount == 5) {
    // No more optional fields present.
    return true;
  }

  // It is an integer that is defined by the
  // "Transfer characteristics" section of ISO/IEC 23001-8:2016 Table 3.
  // We treat reserved values as the false case.
  const auto& transferId = aColorSpace.mTransferId;
  if (transferId == 0 || transferId == 3 || transferId > 18) {
    return false;
  }

  if (fieldsCount == 6) {
    // No more optional fields present.
    return true;
  }

  // It is an integer that is defined by the
  // "Matrix coefficients" section of ISO/IEC 23001-8:2016 Table 4.
  // We treat reserved values as the false case.
  const auto& matrixId = aColorSpace.mMatrixId;
  if (matrixId == 3 || matrixId > 11) {
    return false;
  }

  // If matrixCoefficients is 0 (RGB), then chroma subsampling MUST be 3
  // (4:4:4).
  if (matrixId == 0 && aChromaSubsampling != 3) {
    return false;
  }

  if (fieldsCount == 7) {
    // No more optional fields present.
    return true;
  }

  // videoFullRangeFlag indicates the black level and range of the luma and
  // chroma signals. 0 = legal range (e.g. 16-235 for 8-bit sample depth);
  // 1 = full range (e.g. 0-255 for 8-bit sample depth).
  const auto& rangeId = aColorSpace.mRangeId;
  return rangeId <= 1;
}

bool ExtractH264CodecDetails(const nsAString& aCodec, uint8_t& aProfile,
                             uint8_t& aConstraint, uint8_t& aLevel) {
  // H.264 codecs parameters have a type defined as avcN.PPCCLL, where
  // N = avc type. avc3 is avcc with SPS & PPS implicit (within stream).
  // PP = profile_idc, CC = constraint_set flags, LL = level_idc.
  // We ignore the constraint_set flags, as it's not clear from any
  // documentation what constraints the platform decoders support.
  // See
  // http://blog.pearce.org.nz/2013/11/what-does-h264avc1-codecs-parameters.html
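  // For illustration: "avc1.42E01E" parses as profile_idc 0x42 (66, Baseline),
  // constraint_set flags 0xE0 and level_idc 0x1E (30, i.e. level 3.0).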

  if (aCodec.Length() != strlen("avc1.PPCCLL")) {
    return false;
  }

  // Verify the codec starts with "avc1." or "avc3.".
  const nsAString& sample = Substring(aCodec, 0, 5);
  if (!sample.EqualsASCII("avc1.") && !sample.EqualsASCII("avc3.")) {
    return false;
  }

  // Extract the profile_idc, constraint_flags and level_idc.
  nsresult rv = NS_OK;
  aProfile = PromiseFlatString(Substring(aCodec, 5, 2)).ToInteger(&rv, 16);
  NS_ENSURE_SUCCESS(rv, false);

  // Constraint flags are stored on the 6 most significant bits, first two bits
  // are reserved_zero_2bits.
  aConstraint = PromiseFlatString(Substring(aCodec, 7, 2)).ToInteger(&rv, 16);
  NS_ENSURE_SUCCESS(rv, false);

  aLevel = PromiseFlatString(Substring(aCodec, 9, 2)).ToInteger(&rv, 16);
  NS_ENSURE_SUCCESS(rv, false);

  if (aLevel == 9) {
    aLevel = H264_LEVEL_1_b;
  } else if (aLevel <= 5) {
    aLevel *= 10;
  }

  // We only make sure constraints is above 4 for collection perspective,
  // otherwise collect 0 for unknown.
  Telemetry::Accumulate(Telemetry::VIDEO_CANPLAYTYPE_H264_CONSTRAINT_SET_FLAG,
                        aConstraint >= 4 ? aConstraint : 0);

  // 244 is the highest meaningful profile value (High 4:4:4 Intra Profile)
  // that can be represented as a single hex byte, otherwise collect 0 for
  // unknown.
  Telemetry::Accumulate(Telemetry::VIDEO_CANPLAYTYPE_H264_PROFILE,
                        aProfile <= 244 ? aProfile : 0);

  // Make sure aLevel represents a value between levels 1 and 5.2,
  // otherwise collect 0 for unknown.
  Telemetry::Accumulate(Telemetry::VIDEO_CANPLAYTYPE_H264_LEVEL,
                        (aLevel >= 10 && aLevel <= 52) ? aLevel : 0);

  return true;
}

nsresult GenerateRandomName(nsCString& aOutSalt, uint32_t aLength) {
  nsresult rv;
  nsCOMPtr<nsIRandomGenerator> rg =
      do_GetService("@mozilla.org/security/random-generator;1", &rv);
  if (NS_FAILED(rv)) return rv;

  // For each three bytes of random data we will get four bytes of ASCII.
  const uint32_t requiredBytesLength =
      static_cast<uint32_t>((aLength + 3) / 4 * 3);
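  // For illustration: with aLength = 16 this is (16 + 3) / 4 * 3 = 12 random
  // bytes, and Base64-encoding 12 bytes yields exactly 16 ASCII characters.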

  uint8_t* buffer;
  rv = rg->GenerateRandomBytes(requiredBytesLength, &buffer);
  if (NS_FAILED(rv)) return rv;

  nsAutoCString temp;
  nsDependentCSubstring randomData(reinterpret_cast<const char*>(buffer),
                                   requiredBytesLength);
  rv = Base64Encode(randomData, temp);
  free(buffer);
  buffer = nullptr;
  if (NS_FAILED(rv)) return rv;

  aOutSalt = temp;
  return NS_OK;
}

nsresult GenerateRandomPathName(nsCString& aOutSalt, uint32_t aLength) {
  nsresult rv = GenerateRandomName(aOutSalt, aLength);
  if (NS_FAILED(rv)) return rv;

  // Base64 characters are alphanumeric (a-zA-Z0-9) and '+' and '/', so we need
  // to replace illegal characters -- notably '/'.
  aOutSalt.ReplaceChar(FILE_PATH_SEPARATOR FILE_ILLEGAL_CHARACTERS, '_');
  return NS_OK;
}

already_AddRefed<TaskQueue> CreateMediaDecodeTaskQueue(const char* aName) {
  RefPtr<TaskQueue> queue = new TaskQueue(
      GetMediaThreadPool(MediaThreadType::PLATFORM_DECODER), aName);
  return queue.forget();
}

void SimpleTimer::Cancel() {
  if (mTimer) {
#ifdef DEBUG
    nsCOMPtr<nsIEventTarget> target;
    mTimer->GetTarget(getter_AddRefs(target));
    bool onCurrent;
    nsresult rv = target->IsOnCurrentThread(&onCurrent);
    MOZ_ASSERT(NS_SUCCEEDED(rv) && onCurrent);
#endif
    mTimer->Cancel();
    mTimer = nullptr;
  }
  mTask = nullptr;
}

NS_IMETHODIMP
SimpleTimer::Notify(nsITimer* timer) {
  RefPtr<SimpleTimer> deathGrip(this);
  if (mTask) {
    mTask->Run();
    mTask = nullptr;
  }
  return NS_OK;
}

NS_IMETHODIMP
SimpleTimer::GetName(nsACString& aName) {
  aName.AssignLiteral("SimpleTimer");
  return NS_OK;
}

nsresult SimpleTimer::Init(nsIRunnable* aTask, uint32_t aTimeoutMs,
                           nsIEventTarget* aTarget) {
  nsresult rv;

  // Get target thread first, so we don't have to cancel the timer if it fails.
  nsCOMPtr<nsIEventTarget> target;
  if (aTarget) {
    target = aTarget;
  } else {
    target = GetMainThreadEventTarget();
    if (!target) {
      return NS_ERROR_NOT_AVAILABLE;
    }
  }

  rv = NS_NewTimerWithCallback(getter_AddRefs(mTimer), this, aTimeoutMs,
                               nsITimer::TYPE_ONE_SHOT, target);
  if (NS_FAILED(rv)) {
    return rv;
  }

  mTask = aTask;
  return NS_OK;
}

NS_IMPL_ISUPPORTS(SimpleTimer, nsITimerCallback, nsINamed)

already_AddRefed<SimpleTimer> SimpleTimer::Create(nsIRunnable* aTask,
                                                  uint32_t aTimeoutMs,
                                                  nsIEventTarget* aTarget) {
  RefPtr<SimpleTimer> t(new SimpleTimer());
  if (NS_FAILED(t->Init(aTask, aTimeoutMs, aTarget))) {
    return nullptr;
  }
  return t.forget();
}

void LogToBrowserConsole(const nsAString& aMsg) {
  if (!NS_IsMainThread()) {
    nsString msg(aMsg);
    nsCOMPtr<nsIRunnable> task = NS_NewRunnableFunction(
        "LogToBrowserConsole", [msg]() { LogToBrowserConsole(msg); });
    SystemGroup::Dispatch(TaskCategory::Other, task.forget());
    return;
  }

  nsCOMPtr<nsIConsoleService> console(
      do_GetService("@mozilla.org/consoleservice;1"));
  if (!console) {
    NS_WARNING("Failed to log message to console.");
    return;
  }

  nsAutoString msg(aMsg);
  console->LogStringMessage(msg.get());
}

bool ParseCodecsString(const nsAString& aCodecs,
                       nsTArray<nsString>& aOutCodecs) {
  aOutCodecs.Clear();
  bool expectMoreTokens = false;
  nsCharSeparatedTokenizer tokenizer(aCodecs, ',');
  while (tokenizer.hasMoreTokens()) {
    const nsAString& token = tokenizer.nextToken();
    expectMoreTokens = tokenizer.separatorAfterCurrentToken();
    aOutCodecs.AppendElement(token);
  }
  if (expectMoreTokens) {
    // Last codec name was empty.
    return false;
  }
  return true;
}

bool ParseMIMETypeString(const nsAString& aMIMEType,
                         nsString& aOutContainerType,
                         nsTArray<nsString>& aOutCodecs) {
  nsContentTypeParser parser(aMIMEType);
  nsresult rv = parser.GetType(aOutContainerType);
  if (NS_FAILED(rv)) {
    return false;
  }

  nsString codecsStr;
  parser.GetParameter("codecs", codecsStr);
  return ParseCodecsString(codecsStr, aOutCodecs);
}
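
// For illustration: given an aMIMEType of video/webm; codecs="vp9, opus", the
// container type comes back as "video/webm" and the codecs parameter is split
// on ',' into two codec entries.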

template <int N>
static bool StartsWith(const nsACString& string, const char (&prefix)[N]) {
  if (N - 1 > string.Length()) {
    return false;
  }
  return memcmp(string.Data(), prefix, N - 1) == 0;
}

bool IsH264CodecString(const nsAString& aCodec) {
  uint8_t profile = 0;
  uint8_t constraint = 0;
  uint8_t level = 0;
  return ExtractH264CodecDetails(aCodec, profile, constraint, level);
}

bool IsAACCodecString(const nsAString& aCodec) {
  return aCodec.EqualsLiteral("mp4a.40.2") ||   // MPEG4 AAC-LC
         aCodec.EqualsLiteral("mp4a.40.02") ||  // MPEG4 AAC-LC (for compatibility)
         aCodec.EqualsLiteral("mp4a.40.5") ||   // MPEG4 HE-AAC
         aCodec.EqualsLiteral("mp4a.40.05") ||  // MPEG4 HE-AAC (for compatibility)
         aCodec.EqualsLiteral("mp4a.67") ||     // MPEG2 AAC-LC
         aCodec.EqualsLiteral("mp4a.40.29");    // MPEG4 HE-AACv2
}

bool IsVP8CodecString(const nsAString& aCodec) {
  uint8_t profile = 0;
  uint8_t level = 0;
  uint8_t bitDepth = 0;
  return aCodec.EqualsLiteral("vp8") || aCodec.EqualsLiteral("vp8.0") ||
         (StartsWith(NS_ConvertUTF16toUTF8(aCodec), "vp08") &&
          ExtractVPXCodecDetails(aCodec, profile, level, bitDepth));
}

bool IsVP9CodecString(const nsAString& aCodec) {
  uint8_t profile = 0;
  uint8_t level = 0;
  uint8_t bitDepth = 0;
  return aCodec.EqualsLiteral("vp9") || aCodec.EqualsLiteral("vp9.0") ||
         (StartsWith(NS_ConvertUTF16toUTF8(aCodec), "vp09") &&
          ExtractVPXCodecDetails(aCodec, profile, level, bitDepth));
}

bool IsAV1CodecString(const nsAString& aCodec) {
  return aCodec.EqualsLiteral("av1") ||
         StartsWith(NS_ConvertUTF16toUTF8(aCodec), "av01");
}

UniquePtr<TrackInfo> CreateTrackInfoWithMIMEType(
    const nsACString& aCodecMIMEType) {
  UniquePtr<TrackInfo> trackInfo;
  if (StartsWith(aCodecMIMEType, "audio/")) {
    trackInfo.reset(new AudioInfo());
    trackInfo->mMimeType = aCodecMIMEType;
  } else if (StartsWith(aCodecMIMEType, "video/")) {
    trackInfo.reset(new VideoInfo());
    trackInfo->mMimeType = aCodecMIMEType;
  }
  return trackInfo;
}

UniquePtr<TrackInfo> CreateTrackInfoWithMIMETypeAndContainerTypeExtraParameters(
    const nsACString& aCodecMIMEType,
    const MediaContainerType& aContainerType) {
  UniquePtr<TrackInfo> trackInfo = CreateTrackInfoWithMIMEType(aCodecMIMEType);
  if (trackInfo) {
    VideoInfo* videoInfo = trackInfo->GetAsVideoInfo();
    if (videoInfo) {
      // Optional video dimensions from the container type.
      Maybe<int32_t> maybeWidth = aContainerType.ExtendedType().GetWidth();
      if (maybeWidth && *maybeWidth > 0) {
        videoInfo->mImage.width = *maybeWidth;
        videoInfo->mDisplay.width = *maybeWidth;
      }
      Maybe<int32_t> maybeHeight = aContainerType.ExtendedType().GetHeight();
      if (maybeHeight && *maybeHeight > 0) {
        videoInfo->mImage.height = *maybeHeight;
        videoInfo->mDisplay.height = *maybeHeight;
      }
    } else if (trackInfo->GetAsAudioInfo()) {
      AudioInfo* audioInfo = trackInfo->GetAsAudioInfo();
      // Optional audio parameters from the container type.
      Maybe<int32_t> maybeChannels =
          aContainerType.ExtendedType().GetChannels();
      if (maybeChannels && *maybeChannels > 0) {
        audioInfo->mChannels = *maybeChannels;
      }
      Maybe<int32_t> maybeSamplerate =
          aContainerType.ExtendedType().GetSamplerate();
      if (maybeSamplerate && *maybeSamplerate > 0) {
        audioInfo->mRate = *maybeSamplerate;
      }
    }
  }

  return trackInfo;
}

}  // end namespace mozilla