2 * routines (with C-linkage) that interface between MPlayer
3 * and the "LIVE555 Streaming Media" libraries
5 * This file is part of MPlayer.
7 * MPlayer is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation; either version 2 of the License, or
10 * (at your option) any later version.
12 * MPlayer is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 * GNU General Public License for more details.
17 * You should have received a copy of the GNU General Public License along
18 * with MPlayer; if not, write to the Free Software Foundation, Inc.,
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23 // on MinGW, we must include windows.h before the things it conflicts
24 #ifdef __MINGW32__ // with. they are each protected from
25 #include <windows.h> // windows.h, but not the other way around.
27 #include "demux_rtp.h"
31 #include "demux_rtp_internal.h"
33 #include "BasicUsageEnvironment.hh"
34 #include "liveMedia.hh"
35 #include "GroupsockHelper.hh"
38 // A data structure representing input data for each stream:
// A per-stream ("audio"/"video") input queue: holds the single in-flight
// demux_packet_t being filled from the LIVE555 read source, plus a list of
// already-read packets that MPlayer has not asked for yet.
39 class ReadBufferQueue
{
41 ReadBufferQueue(MediaSubsession
* subsession
, demuxer_t
* demuxer
,
43 virtual ~ReadBufferQueue();
// Accessors for the cached LIVE555 source objects and the owning demuxer:
45 FramedSource
* readSource() const { return fReadSource
; }
46 RTPSource
* rtpSource() const { return fRTPSource
; }
47 demuxer_t
* ourDemuxer() const { return fOurDemuxer
; }
48 char const* tag() const { return fTag
; }
50 char blockingFlag
; // used to implement synchronous reads
52 // For A/V synchronization:
53 Boolean prevPacketWasSynchronized
;
// Points at the companion (other-medium) queue's slot in RTPState, so each
// queue can inspect the other's timing state:
55 ReadBufferQueue
** otherQueue
;
57 // The 'queue' actually consists of just a single "demux_packet_t"
58 // (because the underlying OS does the actual queueing/buffering):
61 // However, we sometimes inspect buffers before delivering them.
62 // For this, we maintain a queue of pending buffers:
63 void savePendingBuffer(demux_packet_t
* dp
);
64 demux_packet_t
* getPendingBuffer();
66 // For H264 over rtsp using AVParser, the next packet has to be saved
67 demux_packet_t
* nextpacket
;
// Head/tail of the singly-linked pending-buffer list (linked via dp->next):
70 demux_packet_t
* pendingDPHead
;
71 demux_packet_t
* pendingDPTail
;
// Cached handles, set once in the constructor:
73 FramedSource
* fReadSource
;
74 RTPSource
* fRTPSource
;
75 demuxer_t
* fOurDemuxer
;
76 char const* fTag
; // used for debugging
79 // A structure of RTP-specific state, kept so that we can cleanly
// reclaim it in "demux_close_rtp()".  Stored in demuxer->priv.
// SDP text describing the session; freed with delete[] on close:
82 char const* sdpDescription
;
83 RTSPClient
* rtspClient
;
// NOTE(review): a "sipClient" pointer and an int "flags" member are
// assigned elsewhere in this file but their declarations are not visible
// in this chunk.
85 MediaSession
* mediaSession
;
// One queue per elementary stream; NULL until codec initialization:
86 ReadBufferQueue
* audioBufferQueue
;
87 ReadBufferQueue
* videoBufferQueue
;
// Wall-clock time of the first RTCP-synchronized packet; used as the zero
// point for computed "pts" values (see afterReading()):
89 struct timeval firstSyncTime
;
92 extern "C" char* network_username
;
93 extern "C" char* network_password
;
94 static char* openURL_rtsp(RTSPClient
* client
, char const* url
) {
95 // If we were given a user name (and optional password), then use them:
96 if (network_username
!= NULL
) {
97 char const* password
= network_password
== NULL
? "" : network_password
;
98 return client
->describeWithPassword(url
, network_username
, password
);
100 return client
->describeURL(url
);
104 static char* openURL_sip(SIPClient
* client
, char const* url
) {
105 // If we were given a user name (and optional password), then use them:
106 if (network_username
!= NULL
) {
107 char const* password
= network_password
== NULL
? "" : network_password
;
108 return client
->inviteWithPassword(url
, network_username
, password
);
110 return client
->invite(url
);
114 #ifdef CONFIG_LIBNEMESI
115 extern int rtsp_transport_tcp
;
116 extern int rtsp_transport_http
;
118 int rtsp_transport_tcp
= 0;
119 int rtsp_transport_http
= 0;
122 extern int rtsp_port
;
124 extern AVCodecContext
*avcctx
;
// Demuxer "open" entry point.  Builds the LIVE555 scheduler/environment,
// obtains a SDP description (taken directly from stream->priv, or fetched
// via a RTSP "DESCRIBE" / SIP "INVITE"), creates a MediaSession plus one
// RTP receiver per audio/video subsession, and records everything in a
// freshly-allocated RTPState stored in demuxer->priv.
// NOTE(review): the bare 'break's below imply an enclosing do{}while(0)
// error-cleanup construct whose lines are not visible in this chunk.
127 extern "C" demuxer_t
* demux_open_rtp(demuxer_t
* demuxer
) {
128 struct MPOpts
*opts
= demuxer
->opts
;
129 Boolean success
= False
;
// Create the LIVE555 task scheduler and usage environment first; every
// other LIVE555 object below is created within this environment:
131 TaskScheduler
* scheduler
= BasicTaskScheduler::createNew();
132 if (scheduler
== NULL
) break;
133 UsageEnvironment
* env
= BasicUsageEnvironment::createNew(*scheduler
);
134 if (env
== NULL
) break;
// At most one of these two ends up non-NULL, depending on the protocol:
136 RTSPClient
* rtspClient
= NULL
;
137 SIPClient
* sipClient
= NULL
;
139 if (demuxer
== NULL
|| demuxer
->stream
== NULL
) break; // shouldn't happen
140 demuxer
->stream
->eof
= 0; // just in case
142 // Look at the stream's 'priv' field to see if we were initiated
143 // via a SDP description:
144 char* sdpDescription
= (char*)(demuxer
->stream
->priv
);
145 if (sdpDescription
== NULL
) {
146 // We weren't given a SDP description directly, so assume that
147 // we were given a RTSP or SIP URL:
148 char const* protocol
= demuxer
->stream
->streaming_ctrl
->url
->protocol
;
149 char const* url
= demuxer
->stream
->streaming_ctrl
->url
->url
;
151 if (strcmp(protocol
, "rtsp") == 0) {
// RTSP-over-HTTP tunneling: reuse the URL's port as the tunnel port and
// force TCP transport:
152 if (rtsp_transport_http
== 1) {
153 rtsp_transport_http
= demuxer
->stream
->streaming_ctrl
->url
->port
;
154 rtsp_transport_tcp
= 1;
156 rtspClient
= RTSPClient::createNew(*env
, verbose
, "MPlayer", rtsp_transport_http
);
157 if (rtspClient
== NULL
) {
158 fprintf(stderr
, "Failed to create RTSP client: %s\n",
159 env
->getResultMsg());
162 sdpDescription
= openURL_rtsp(rtspClient
, url
);
// Otherwise, treat the URL as SIP:
164 unsigned char desiredAudioType
= 0; // PCMU (use 3 for GSM)
165 sipClient
= SIPClient::createNew(*env
, desiredAudioType
, NULL
,
167 if (sipClient
== NULL
) {
168 fprintf(stderr
, "Failed to create SIP client: %s\n",
169 env
->getResultMsg());
172 sipClient
->setClientStartPortNum(8000);
173 sdpDescription
= openURL_sip(sipClient
, url
);
176 if (sdpDescription
== NULL
) {
177 fprintf(stderr
, "Failed to get a SDP description from URL \"%s\": %s\n",
178 url
, env
->getResultMsg());
183 // Now that we have a SDP description, create a MediaSession from it:
184 MediaSession
* mediaSession
= MediaSession::createNew(*env
, sdpDescription
);
185 if (mediaSession
== NULL
) break;
188 // Create a 'RTPState' structure containing the state that we just created,
189 // and store it in the demuxer's 'priv' field, for future reference:
190 RTPState
* rtpState
= new RTPState
;
191 rtpState
->sdpDescription
= sdpDescription
;
192 rtpState
->rtspClient
= rtspClient
;
193 rtpState
->sipClient
= sipClient
;
194 rtpState
->mediaSession
= mediaSession
;
195 rtpState
->audioBufferQueue
= rtpState
->videoBufferQueue
= NULL
;
// Zero the first-sync timestamp; afterReading() fills it in on the first
// RTCP-synchronized packet:
197 rtpState
->firstSyncTime
.tv_sec
= rtpState
->firstSyncTime
.tv_usec
= 0;
198 demuxer
->priv
= rtpState
;
200 int audiofound
= 0, videofound
= 0;
201 // Create RTP receivers (sources) for each subsession:
202 MediaSubsessionIterator
iter(*mediaSession
);
203 MediaSubsession
* subsession
;
204 unsigned desiredReceiveBufferSize
;
205 while ((subsession
= iter
.next()) != NULL
) {
206 // Ignore any subsession that's not audio or video:
207 if (strcmp(subsession
->mediumName(), "audio") == 0) {
209 fprintf(stderr
, "Additional subsession \"audio/%s\" skipped\n", subsession
->codecName());
212 desiredReceiveBufferSize
= 100000;
213 } else if (strcmp(subsession
->mediumName(), "video") == 0) {
215 fprintf(stderr
, "Additional subsession \"video/%s\" skipped\n", subsession
->codecName());
218 desiredReceiveBufferSize
= 2000000;
// Honor a user-requested client port, if any:
224 subsession
->setClientPortNum (rtsp_port
);
226 if (!subsession
->initiate()) {
227 fprintf(stderr
, "Failed to initiate \"%s/%s\" RTP subsession: %s\n", subsession
->mediumName(), subsession
->codecName(), env
->getResultMsg());
229 fprintf(stderr
, "Initiated \"%s/%s\" RTP subsession on port %d\n", subsession
->mediumName(), subsession
->codecName(), subsession
->clientPortNum());
231 // Set the OS's socket receive buffer sufficiently large to avoid
232 // incoming packets getting dropped between successive reads from this
233 // subsession's demuxer. Depending on the bitrate(s) that you expect,
234 // you may wish to tweak the "desiredReceiveBufferSize" values above.
235 int rtpSocketNum
= subsession
->rtpSource()->RTPgs()->socketNum();
236 int receiveBufferSize
237 = increaseReceiveBufferTo(*env
, rtpSocketNum
,
238 desiredReceiveBufferSize
);
240 fprintf(stderr
, "Increased %s socket receive buffer to %d bytes \n",
241 subsession
->mediumName(), receiveBufferSize
);
244 if (rtspClient
!= NULL
) {
245 // Issue a RTSP "SETUP" command on the chosen subsession:
246 if (!rtspClient
->setupMediaSubsession(*subsession
, False
,
247 rtsp_transport_tcp
)) break;
248 if (!strcmp(subsession
->mediumName(), "audio"))
250 if (!strcmp(subsession
->mediumName(), "video"))
// Start the media flowing: RTSP aggregate "PLAY", or SIP "ACK":
256 if (rtspClient
!= NULL
) {
257 // Issue a RTSP aggregate "PLAY" command on the whole session:
258 if (!rtspClient
->playMediaSession(*mediaSession
)) break;
259 } else if (sipClient
!= NULL
) {
260 sipClient
->sendACK(); // to start the stream flowing
263 // Now that the session is ready to be read, do additional
264 // MPlayer codec-specific initialization on each subsession:
266 while ((subsession
= iter
.next()) != NULL
) {
267 if (subsession
->readSource() == NULL
) continue; // not reading this
// Each initiated subsession gets a ReadBufferQueue; the two queues are
// cross-linked via "otherQueue" for A/V-sync checks in getBuffer():
270 if (strcmp(subsession
->mediumName(), "audio") == 0) {
271 rtpState
->audioBufferQueue
272 = new ReadBufferQueue(subsession
, demuxer
, "audio");
273 rtpState
->audioBufferQueue
->otherQueue
= &(rtpState
->videoBufferQueue
);
274 rtpCodecInitialize_audio(demuxer
, subsession
, flags
);
275 } else if (strcmp(subsession
->mediumName(), "video") == 0) {
276 rtpState
->videoBufferQueue
277 = new ReadBufferQueue(subsession
, demuxer
, "video");
278 rtpState
->videoBufferQueue
->otherQueue
= &(rtpState
->audioBufferQueue
);
279 rtpCodecInitialize_video(demuxer
, subsession
, flags
);
281 rtpState
->flags
|= flags
;
// NOTE(review): "success" is set to True in lines not visible in this
// chunk; a NULL return tells the caller that the open failed.
285 if (!success
) return NULL
; // an error occurred
287 // Hack: If audio and video are demuxed together on a single RTP stream,
288 // then create a new "demuxer_t" structure to allow the higher-level
289 // code to recognize this:
290 if (demux_is_multiplexed_rtp_stream(demuxer
)) {
291 stream_t
* s
= new_ds_stream(demuxer
->video
);
292 demuxer_t
* od
= demux_open(opts
, s
, DEMUXER_TYPE_UNKNOWN
,
293 opts
->audio_id
, opts
->video_id
, opts
->sub_id
,
295 demuxer
= new_demuxers_demuxer(od
, od
, od
);
301 extern "C" int demux_is_mpeg_rtp_stream(demuxer_t
* demuxer
) {
302 // Get the RTP state that was stored in the demuxer's 'priv' field:
303 RTPState
* rtpState
= (RTPState
*)(demuxer
->priv
);
305 return (rtpState
->flags
&RTPSTATE_IS_MPEG12_VIDEO
) != 0;
308 extern "C" int demux_is_multiplexed_rtp_stream(demuxer_t
* demuxer
) {
309 // Get the RTP state that was stored in the demuxer's 'priv' field:
310 RTPState
* rtpState
= (RTPState
*)(demuxer
->priv
);
312 return (rtpState
->flags
&RTPSTATE_IS_MULTIPLEXED
) != 0;
315 static demux_packet_t
* getBuffer(demuxer_t
* demuxer
, demux_stream_t
* ds
,
316 Boolean mustGetNewData
,
317 float& ptsBehind
); // forward
// Demuxer "fill buffer" entry point: synchronously obtains one packet from
// the RTP source and delivers it to "ds" via ds_add_packet() — unless the
// packet lags the other stream too far (see the comment block below), in
// which case it is discarded.
// NOTE(review): "ptsBehind"'s declaration and the retry loop wrapping this
// logic are in lines not visible in this chunk.
319 extern "C" int demux_rtp_fill_buffer(demuxer_t
* demuxer
, demux_stream_t
* ds
) {
320 // Get a filled-in "demux_packet" from the RTP source, and deliver it.
321 // Note that this is called as a synchronous read operation, so it needs
322 // to block in the (hopefully infrequent) case where no packet is
323 // immediately available.
327 demux_packet_t
* dp
= getBuffer(demuxer
, ds
, False
, ptsBehind
); // blocking
328 if (dp
== NULL
) return 0;
330 if (demuxer
->stream
->eof
) return 0; // source stream has closed down
332 // Before using this packet, check to make sure that its presentation
333 // time is not far behind the other stream (if any). If it is,
334 // then we discard this packet, and get another instead. (The rest of
335 // MPlayer doesn't always do a good job of synchronizing when the
336 // audio and video streams get this far apart.)
337 // (We don't do this when streaming over TCP, because then the audio and
338 // video streams are interleaved.)
339 // (Also, if the stream is *excessively* far behind, then we allow
340 // the packet, because in this case it probably means that there was
341 // an error in the source's timestamp synchronization.)
342 const float ptsBehindThreshold
= 1.0; // seconds
343 const float ptsBehindLimit
= 60.0; // seconds
344 if (ptsBehind
< ptsBehindThreshold
||
345 ptsBehind
> ptsBehindLimit
||
346 rtsp_transport_tcp
) { // packet's OK
347 ds_add_packet(ds
, dp
);
// Packet rejected: optionally log the discard, then recycle it:
351 #ifdef DEBUG_PRINT_DISCARDED_PACKETS
352 RTPState
* rtpState
= (RTPState
*)(demuxer
->priv
);
353 ReadBufferQueue
* bufferQueue
= ds
== demuxer
->video
? rtpState
->videoBufferQueue
: rtpState
->audioBufferQueue
;
354 fprintf(stderr
, "Discarding %s packet (%fs behind)\n", bufferQueue
->tag(), ptsBehind
);
356 free_demux_packet(dp
); // give back this packet, and get another one
// Give the caller a peek at the next RTP packet's raw data/length without
// delivering it to the demux_stream; getBuffer() saves the packet as
// 'pending' (mustGetNewData == True) so a later read still sees it.
// NOTE(review): a trailing "float& pts" parameter and the final
// "pts = dp->pts; return True;" lines appear to be in lines not visible
// in this chunk — confirm against the full file.
362 Boolean
awaitRTPPacket(demuxer_t
* demuxer
, demux_stream_t
* ds
,
363 unsigned char*& packetData
, unsigned& packetDataLen
,
365 // Similar to "demux_rtp_fill_buffer()", except that the "demux_packet"
366 // is not delivered to the "demux_stream".
368 demux_packet_t
* dp
= getBuffer(demuxer
, ds
, True
, ptsBehind
); // blocking
369 if (dp
== NULL
) return False
;
// Expose the packet's payload to the caller via the reference parameters:
371 packetData
= dp
->buffer
;
372 packetDataLen
= dp
->len
;
378 static void teardownRTSPorSIPSession(RTPState
* rtpState
); // forward
// Demuxer "close" entry point: tears down the RTSP/SIP session on the
// server, then releases the LIVE555 objects, the buffer queues, and the
// RTPState allocated in demux_open_rtp().
380 extern "C" void demux_close_rtp(demuxer_t
* demuxer
) {
381 // Reclaim all RTP-related state:
383 // Get the RTP state that was stored in the demuxer's 'priv' field:
384 RTPState
* rtpState
= (RTPState
*)(demuxer
->priv
);
385 if (rtpState
== NULL
) return;
387 teardownRTSPorSIPSession(rtpState
);
// Grab the environment/scheduler before the session is destroyed, so they
// can be reclaimed last:
389 UsageEnvironment
* env
= NULL
;
390 TaskScheduler
* scheduler
= NULL
;
391 if (rtpState
->mediaSession
!= NULL
) {
392 env
= &(rtpState
->mediaSession
->envir());
393 scheduler
= &(env
->taskScheduler());
395 Medium::close(rtpState
->mediaSession
);
396 Medium::close(rtpState
->rtspClient
);
397 Medium::close(rtpState
->sipClient
);
398 delete rtpState
->audioBufferQueue
;
399 delete rtpState
->videoBufferQueue
;
400 delete[] rtpState
->sdpDescription
;
// NOTE(review): env/scheduler stay NULL when mediaSession was NULL, yet
// env->reclaim() below is unconditional — this relies on every RTPState
// having a non-NULL mediaSession (true as set up in demux_open_rtp());
// confirm against the lines not visible in this chunk.
406 env
->reclaim(); delete scheduler
;
409 ////////// Extra routines that help implement the above interface functions:
411 #define MAX_RTP_FRAME_SIZE 5000000
412 // >= the largest conceivable frame composed from one or more RTP packets
// LIVE555 'afterGetting' callback, invoked on the event loop once a frame
// has been read into the queue's current packet.  Trims the packet to the
// received size, derives its "pts" from RTCP synchronization state, and
// unblocks the getBuffer() call waiting on blockingFlag.
// NOTE(review): "headersize" is read here but declared in lines not
// visible in this chunk.
414 static void afterReading(void* clientData
, unsigned frameSize
,
415 unsigned /*numTruncatedBytes*/,
416 struct timeval presentationTime
,
417 unsigned /*durationInMicroseconds*/) {
419 if (frameSize
>= MAX_RTP_FRAME_SIZE
) {
420 fprintf(stderr
, "Saw an input frame too large (>=%d). Increase MAX_RTP_FRAME_SIZE in \"demux_rtp.cpp\".\n",
// Recover our queue and demuxer from the opaque callback pointer:
423 ReadBufferQueue
* bufferQueue
= (ReadBufferQueue
*)clientData
;
424 demuxer_t
* demuxer
= bufferQueue
->ourDemuxer();
425 RTPState
* rtpState
= (RTPState
*)(demuxer
->priv
);
// Receiving any data clears a previously-signaled EOF:
427 if (frameSize
> 0) demuxer
->stream
->eof
= 0;
429 demux_packet_t
* dp
= bufferQueue
->dp
;
// Codec-specific header handling (AMR / H.264) — branch bodies are in
// lines not visible in this chunk:
431 if (bufferQueue
->readSource()->isAMRAudioSource())
433 else if (bufferQueue
== rtpState
->videoBufferQueue
&&
434 ((sh_video_t
*)demuxer
->video
->sh
)->format
== mmioFOURCC('H','2','6','4')) {
// Shrink the packet to the bytes actually received (plus any header):
441 resize_demux_packet(dp
, frameSize
+ headersize
);
443 // Set the packet's presentation time stamp, depending on whether or
444 // not our RTP source's timestamps have been synchronized yet:
445 Boolean hasBeenSynchronized
446 = bufferQueue
->rtpSource()->hasBeenSynchronizedUsingRTCP();
447 if (hasBeenSynchronized
) {
448 if (verbose
> 0 && !bufferQueue
->prevPacketWasSynchronized
) {
449 fprintf(stderr
, "%s stream has been synchronized using RTCP \n",
// Record the very first synchronized time as the session's pts origin:
453 struct timeval
* fst
= &(rtpState
->firstSyncTime
); // abbrev
454 if (fst
->tv_sec
== 0 && fst
->tv_usec
== 0) {
455 *fst
= presentationTime
;
458 // For the "pts" field, use the time differential from the first
459 // synchronized time, rather than absolute time, in order to avoid
460 // round-off errors when converting to a float:
461 dp
->pts
= presentationTime
.tv_sec
- fst
->tv_sec
462 + (presentationTime
.tv_usec
- fst
->tv_usec
)/1000000.0;
463 bufferQueue
->prevPacketPTS
= dp
->pts
;
// Not RTCP-synchronized (or synchronization was lost):
465 if (verbose
> 0 && bufferQueue
->prevPacketWasSynchronized
) {
466 fprintf(stderr
, "%s stream is no longer RTCP-synchronized \n",
470 // use the previous packet's "pts" once again:
471 dp
->pts
= bufferQueue
->prevPacketPTS
;
473 bufferQueue
->prevPacketWasSynchronized
= hasBeenSynchronized
;
// Maintain a running byte position for the demuxer:
475 dp
->pos
= demuxer
->filepos
;
476 demuxer
->filepos
+= frameSize
+ headersize
;
478 // Signal any pending 'doEventLoop()' call on this queue:
479 bufferQueue
->blockingFlag
= ~0;
482 static void onSourceClosure(void* clientData
) {
483 ReadBufferQueue
* bufferQueue
= (ReadBufferQueue
*)clientData
;
484 demuxer_t
* demuxer
= bufferQueue
->ourDemuxer();
486 demuxer
->stream
->eof
= 1;
488 // Signal any pending 'doEventLoop()' call on this queue:
489 bufferQueue
->blockingFlag
= ~0;
// Core blocking read used by demux_rtp_fill_buffer()/awaitRTPPacket():
// returns the next demux_packet for "ds", running the LIVE555 event loop
// until afterReading()/onSourceClosure() sets blockingFlag.  Also computes
// how far this stream's pts lags the other stream ("ptsBehind").
// NOTE(review): "ptsBehind" (out-parameter), "headersize", "task", and the
// opening of the do{...}while(!poutbuf_size) loop are declared/opened in
// lines not visible in this chunk.
492 static demux_packet_t
* getBuffer(demuxer_t
* demuxer
, demux_stream_t
* ds
,
493 Boolean mustGetNewData
,
495 // Begin by finding the buffer queue that we want to read from:
496 // (Get this from the RTP state, which we stored in
497 // the demuxer's 'priv' field)
498 RTPState
* rtpState
= (RTPState
*)(demuxer
->priv
);
499 ReadBufferQueue
* bufferQueue
= NULL
;
503 if (demuxer
->stream
->eof
) return NULL
;
// Pick the audio or video queue to match "ds"; H.264/AMR sources use a
// nonzero "headersize" (assignments are in lines not visible here):
505 if (ds
== demuxer
->video
) {
506 bufferQueue
= rtpState
->videoBufferQueue
;
507 if (((sh_video_t
*)ds
->sh
)->format
== mmioFOURCC('H','2','6','4'))
509 } else if (ds
== demuxer
->audio
) {
510 bufferQueue
= rtpState
->audioBufferQueue
;
511 if (bufferQueue
->readSource()->isAMRAudioSource())
514 fprintf(stderr
, "(demux_rtp)getBuffer: internal error: unknown stream\n");
518 if (bufferQueue
== NULL
|| bufferQueue
->readSource() == NULL
) {
519 fprintf(stderr
, "(demux_rtp)getBuffer failed: no appropriate RTP subsession has been set up\n");
523 demux_packet_t
* dp
= NULL
;
524 if (!mustGetNewData
) {
525 // Check whether we have a previously-saved buffer that we can use:
526 dp
= bufferQueue
->getPendingBuffer();
528 ptsBehind
= 0.0; // so that we always accept this data
533 // Allocate a new packet buffer, and arrange to read into it:
534 if (!bufferQueue
->nextpacket
) {
535 dp
= new_demux_packet(MAX_RTP_FRAME_SIZE
);
536 bufferQueue
->dp
= dp
;
537 if (dp
== NULL
) return NULL
;
// Per-call H.264 parser state (used by the h264parserctx branch below):
541 extern AVCodecParserContext
* h264parserctx
;
542 int consumed
, poutbuf_size
= 1;
543 const uint8_t *poutbuf
= NULL
;
547 if (!bufferQueue
->nextpacket
) {
549 // Schedule the read operation:
550 bufferQueue
->blockingFlag
= 0;
551 bufferQueue
->readSource()->getNextFrame(&dp
->buffer
[headersize
], MAX_RTP_FRAME_SIZE
- headersize
,
552 afterReading
, bufferQueue
,
553 onSourceClosure
, bufferQueue
);
554 // Block ourselves until data becomes available:
555 TaskScheduler
& scheduler
556 = bufferQueue
->readSource()->envir().taskScheduler();
// Watchdog: wake up via onSourceClosure after "delay" microseconds so a
// dead source cannot block us forever:
557 int delay
= 10000000;
558 if (bufferQueue
->prevPacketPTS
* 1.05 > rtpState
->mediaSession
->playEndTime())
560 task
= scheduler
.scheduleDelayedTask(delay
, onSourceClosure
, bufferQueue
);
561 scheduler
.doEventLoop(&bufferQueue
->blockingFlag
);
562 scheduler
.unscheduleDelayedTask(task
);
563 if (demuxer
->stream
->eof
) {
564 free_demux_packet(dp
);
// AMR: fetch the one-byte frame header from the source:
568 if (headersize
== 1) // amr
570 ((AMRAudioSource
*)bufferQueue
->readSource())->lastFrameHeader();
// A packet left over from the previous H.264 parse becomes current:
573 bufferQueue
->dp
= dp
= bufferQueue
->nextpacket
;
574 bufferQueue
->nextpacket
= NULL
;
// H.264: run the frame through the AVParser to re-split it on frame
// boundaries; unconsumed data is stashed in "nextpacket" for later:
576 if (headersize
== 3 && h264parserctx
) { // h264
577 consumed
= h264parserctx
->parser
->parser_parse(h264parserctx
,
579 &poutbuf
, &poutbuf_size
,
580 dp
->buffer
, dp
->len
);
582 if (!consumed
&& !poutbuf_size
)
587 free_demux_packet(dp
);
588 bufferQueue
->dp
= dp
= new_demux_packet(MAX_RTP_FRAME_SIZE
);
590 bufferQueue
->nextpacket
= dp
;
591 bufferQueue
->dp
= dp
= new_demux_packet(poutbuf_size
);
592 memcpy(dp
->buffer
, poutbuf
, poutbuf_size
);
596 } while (!poutbuf_size
);
599 // Set the "ptsBehind" result parameter:
600 if (bufferQueue
->prevPacketPTS
!= 0.0
601 && bufferQueue
->prevPacketWasSynchronized
602 && *(bufferQueue
->otherQueue
) != NULL
603 && (*(bufferQueue
->otherQueue
))->prevPacketPTS
!= 0.0
604 && (*(bufferQueue
->otherQueue
))->prevPacketWasSynchronized
) {
605 ptsBehind
= (*(bufferQueue
->otherQueue
))->prevPacketPTS
606 - bufferQueue
->prevPacketPTS
;
// Peek mode: keep the packet queued so a later real read still gets it:
611 if (mustGetNewData
) {
612 // Save this buffer for future reads:
613 bufferQueue
->savePendingBuffer(dp
);
619 static void teardownRTSPorSIPSession(RTPState
* rtpState
) {
620 MediaSession
* mediaSession
= rtpState
->mediaSession
;
621 if (mediaSession
== NULL
) return;
622 if (rtpState
->rtspClient
!= NULL
) {
623 rtpState
->rtspClient
->teardownMediaSession(*mediaSession
);
624 } else if (rtpState
->sipClient
!= NULL
) {
625 rtpState
->sipClient
->sendBYE();
629 ////////// "ReadBuffer" and "ReadBufferQueue" implementation:
// Construct the queue for one subsession: caches the subsession's read/RTP
// sources (NULL-safely), records the owning demuxer, and duplicates "tag"
// ("audio"/"video") for debug output; all queue state starts empty.
631 ReadBufferQueue::ReadBufferQueue(MediaSubsession
* subsession
,
632 demuxer_t
* demuxer
, char const* tag
)
633 : prevPacketWasSynchronized(False
), prevPacketPTS(0.0), otherQueue(NULL
),
634 dp(NULL
), nextpacket(NULL
),
635 pendingDPHead(NULL
), pendingDPTail(NULL
),
636 fReadSource(subsession
== NULL
? NULL
: subsession
->readSource()),
637 fRTPSource(subsession
== NULL
? NULL
: subsession
->rtpSource()),
638 fOurDemuxer(demuxer
), fTag(strdup(tag
)) {
// Destructor: releases every packet still sitting on the pending list.
// NOTE(review): the loop header walking the list (and, presumably, the
// free of the strdup'd fTag) are in lines not visible in this chunk.
641 ReadBufferQueue::~ReadBufferQueue() {
644 // Free any pending buffers (that never got delivered):
645 demux_packet_t
* dp
= pendingDPHead
;
// Remember the successor before freeing the current node:
647 demux_packet_t
* dpNext
= dp
->next
;
649 free_demux_packet(dp
);
// Append "dp" to the pending-packet list (tail insert; head and tail both
// point at "dp" when the list was empty).
654 void ReadBufferQueue::savePendingBuffer(demux_packet_t
* dp
) {
655 // Keep this buffer around, until MPlayer asks for it later:
656 if (pendingDPTail
== NULL
) {
657 pendingDPHead
= pendingDPTail
= dp
;
// Non-empty list: link after the current tail:
659 pendingDPTail
->next
= dp
;
// Pop and return the oldest pending packet (the list head); the tail is
// reset once the list drains.  Returns NULL when nothing is pending
// (guard lines are not visible in this chunk).
665 demux_packet_t
* ReadBufferQueue::getPendingBuffer() {
666 demux_packet_t
* dp
= pendingDPHead
;
// Advance the head past the popped packet:
668 pendingDPHead
= dp
->next
;
669 if (pendingDPHead
== NULL
) pendingDPTail
= NULL
;
// Demuxer "control" entry point: answers duration and percent-position
// queries from the session's SDP play-end time.
// NOTE(review): the switch(cmd) header, the guards before the two
// DEMUXER_CTRL_DONTKNOW returns (presumably "if (endpts <= 0)"), and the
// default label are in lines not visible in this chunk.
677 static int demux_rtp_control(struct demuxer
*demuxer
, int cmd
, void *arg
) {
678 double endpts
= ((RTPState
*)demuxer
->priv
)->mediaSession
->playEndTime();
681 case DEMUXER_CTRL_GET_TIME_LENGTH
:
683 return DEMUXER_CTRL_DONTKNOW
;
// Report the total duration in seconds:
684 *((double *)arg
) = endpts
;
685 return DEMUXER_CTRL_OK
;
687 case DEMUXER_CTRL_GET_PERCENT_POS
:
689 return DEMUXER_CTRL_DONTKNOW
;
// Percent position = last video pts relative to the play-end time:
690 *((int *)arg
) = (int)(((RTPState
*)demuxer
->priv
)->videoBufferQueue
->prevPacketPTS
*100/endpts
);
691 return DEMUXER_CTRL_OK
;
694 return DEMUXER_CTRL_NOTIMPL
;
698 demuxer_desc_t demuxer_desc_rtp
= {
699 "LIVE555 RTP demuxer",
703 "requires LIVE555 Streaming Media library",
707 demux_rtp_fill_buffer
,