From 4f3110de8a2843a84b905a298b1731d68c5cbd47 Mon Sep 17 00:00:00 2001 From: Eric Date: Wed, 14 Jun 2017 21:04:50 +0800 Subject: [PATCH] Add librtsp library. --- AnyCore/lib_rtsp/.vs/XRtspLive/v14/.suo | Bin 0 -> 18944 bytes .../BasicUsageEnvironment/BasicHashTable.cpp | 277 ++ .../BasicTaskScheduler.cpp | 250 ++ .../BasicTaskScheduler0.cpp | 240 ++ .../BasicUsageEnvironment.cpp | 107 + .../BasicUsageEnvironment.mak | 94 + .../BasicUsageEnvironment.vcproj | 226 ++ ...cUsageEnvironment.vcproj.Eric-PC.Eric.user | 65 + .../BasicUsageEnvironment.vcxproj | 108 + .../BasicUsageEnvironment.vcxproj.filters | 60 + .../BasicUsageEnvironment0.cpp | 88 + .../lib_rtsp/BasicUsageEnvironment/COPYING | 1 + .../BasicUsageEnvironment/DelayQueue.cpp | 233 ++ .../lib_rtsp/BasicUsageEnvironment/Makefile | 81 + .../BasicUsageEnvironment/Makefile.head | 4 + .../BasicUsageEnvironment/Makefile.tail | 43 + .../include/BasicHashTable.hh | 104 + .../include/BasicUsageEnvironment.hh | 86 + .../include/BasicUsageEnvironment0.hh | 115 + .../include/BasicUsageEnvironment_version.hh | 10 + .../include/DelayQueue.hh | 182 ++ .../include/HandlerSet.hh | 77 + AnyCore/lib_rtsp/groupsock/COPYING | 1 + AnyCore/lib_rtsp/groupsock/GroupEId.cpp | 104 + AnyCore/lib_rtsp/groupsock/Groupsock.cpp | 635 +++++ .../lib_rtsp/groupsock/GroupsockHelper.cpp | 820 ++++++ AnyCore/lib_rtsp/groupsock/IOHandlers.cpp | 46 + AnyCore/lib_rtsp/groupsock/Makefile | 83 + AnyCore/lib_rtsp/groupsock/Makefile.head | 4 + AnyCore/lib_rtsp/groupsock/Makefile.tail | 45 + AnyCore/lib_rtsp/groupsock/NetAddress.cpp | 312 +++ AnyCore/lib_rtsp/groupsock/NetInterface.cpp | 174 ++ AnyCore/lib_rtsp/groupsock/groupsock.mak | 96 + AnyCore/lib_rtsp/groupsock/groupsock.vcproj | 239 ++ .../groupsock.vcproj.Eric-PC.Eric.user | 65 + AnyCore/lib_rtsp/groupsock/groupsock.vcxproj | 109 + .../groupsock/groupsock.vcxproj.filters | 72 + .../lib_rtsp/groupsock/include/GroupEId.hh | 98 + .../lib_rtsp/groupsock/include/Groupsock.hh | 203 ++ 
.../groupsock/include/GroupsockHelper.hh | 142 + .../lib_rtsp/groupsock/include/IOHandlers.hh | 31 + .../lib_rtsp/groupsock/include/NetAddress.hh | 162 ++ .../lib_rtsp/groupsock/include/NetCommon.h | 131 + .../groupsock/include/NetInterface.hh | 149 ++ .../groupsock/include/TunnelEncaps.hh | 101 + .../groupsock/include/groupsock_version.hh | 10 + AnyCore/lib_rtsp/groupsock/inet.c | 451 ++++ .../AC3AudioFileServerMediaSubsession.cpp | 61 + .../lib_rtsp/liveMedia/AC3AudioRTPSink.cpp | 97 + .../lib_rtsp/liveMedia/AC3AudioRTPSource.cpp | 66 + .../liveMedia/AC3AudioStreamFramer.cpp | 340 +++ .../ADTSAudioFileServerMediaSubsession.cpp | 60 + .../liveMedia/ADTSAudioFileSource.cpp | 171 ++ .../AMRAudioFileServerMediaSubsession.cpp | 59 + .../lib_rtsp/liveMedia/AMRAudioFileSink.cpp | 101 + .../lib_rtsp/liveMedia/AMRAudioFileSource.cpp | 174 ++ .../lib_rtsp/liveMedia/AMRAudioRTPSink.cpp | 134 + .../lib_rtsp/liveMedia/AMRAudioRTPSource.cpp | 747 ++++++ AnyCore/lib_rtsp/liveMedia/AMRAudioSource.cpp | 38 + AnyCore/lib_rtsp/liveMedia/AVIFileSink.cpp | 784 ++++++ .../lib_rtsp/liveMedia/AudioInputDevice.cpp | 45 + AnyCore/lib_rtsp/liveMedia/AudioRTPSink.cpp | 37 + AnyCore/lib_rtsp/liveMedia/Base64.cpp | 122 + AnyCore/lib_rtsp/liveMedia/BasicUDPSink.cpp | 100 + AnyCore/lib_rtsp/liveMedia/BasicUDPSource.cpp | 73 + AnyCore/lib_rtsp/liveMedia/BitVector.cpp | 174 ++ .../liveMedia/ByteStreamFileSource.cpp | 184 ++ .../ByteStreamMemoryBufferSource.cpp | 118 + .../liveMedia/ByteStreamMultiFileSource.cpp | 133 + AnyCore/lib_rtsp/liveMedia/COPYING | 1 + .../DVVideoFileServerMediaSubsession.cpp | 103 + AnyCore/lib_rtsp/liveMedia/DVVideoRTPSink.cpp | 95 + .../lib_rtsp/liveMedia/DVVideoRTPSource.cpp | 65 + .../liveMedia/DVVideoStreamFramer.cpp | 220 ++ AnyCore/lib_rtsp/liveMedia/DarwinInjector.cpp | 349 +++ AnyCore/lib_rtsp/liveMedia/DeviceSource.cpp | 156 ++ .../liveMedia/DigestAuthentication.cpp | 157 ++ AnyCore/lib_rtsp/liveMedia/EBMLNumber.cpp | 150 ++ 
AnyCore/lib_rtsp/liveMedia/EBMLNumber.hh | 142 + .../liveMedia/FileServerMediaSubsession.cpp | 34 + AnyCore/lib_rtsp/liveMedia/FileSink.cpp | 151 ++ .../lib_rtsp/liveMedia/FramedFileSource.cpp | 30 + AnyCore/lib_rtsp/liveMedia/FramedFilter.cpp | 59 + AnyCore/lib_rtsp/liveMedia/FramedSource.cpp | 125 + .../lib_rtsp/liveMedia/GSMAudioRTPSink.cpp | 40 + .../lib_rtsp/liveMedia/H261VideoRTPSource.cpp | 67 + ...H263plusVideoFileServerMediaSubsession.cpp | 64 + .../liveMedia/H263plusVideoRTPSink.cpp | 91 + .../liveMedia/H263plusVideoRTPSource.cpp | 106 + .../liveMedia/H263plusVideoStreamFramer.cpp | 129 + .../liveMedia/H263plusVideoStreamParser.cpp | 859 ++++++ .../liveMedia/H263plusVideoStreamParser.hh | 127 + .../H264VideoFileServerMediaSubsession.cpp | 119 + .../lib_rtsp/liveMedia/H264VideoFileSink.cpp | 59 + .../lib_rtsp/liveMedia/H264VideoRTPSink.cpp | 131 + .../lib_rtsp/liveMedia/H264VideoRTPSource.cpp | 199 ++ .../H264VideoStreamDiscreteFramer.cpp | 41 + .../liveMedia/H264VideoStreamFramer.cpp | 38 + .../liveMedia/H264or5VideoFileSink.cpp | 65 + .../liveMedia/H264or5VideoRTPSink.cpp | 286 ++ .../H264or5VideoStreamDiscreteFramer.cpp | 94 + .../liveMedia/H264or5VideoStreamFramer.cpp | 1071 ++++++++ .../H265VideoFileServerMediaSubsession.cpp | 120 + .../lib_rtsp/liveMedia/H265VideoFileSink.cpp | 63 + .../lib_rtsp/liveMedia/H265VideoRTPSink.cpp | 182 ++ .../lib_rtsp/liveMedia/H265VideoRTPSource.cpp | 218 ++ .../H265VideoStreamDiscreteFramer.cpp | 41 + .../liveMedia/H265VideoStreamFramer.cpp | 38 + AnyCore/lib_rtsp/liveMedia/InputFile.cpp | 112 + .../lib_rtsp/liveMedia/JPEGVideoRTPSink.cpp | 145 ++ .../lib_rtsp/liveMedia/JPEGVideoRTPSource.cpp | 465 ++++ .../lib_rtsp/liveMedia/JPEGVideoSource.cpp | 45 + AnyCore/lib_rtsp/liveMedia/Locale.cpp | 60 + AnyCore/lib_rtsp/liveMedia/MP3ADU.cpp | 634 +++++ AnyCore/lib_rtsp/liveMedia/MP3ADURTPSink.cpp | 119 + .../lib_rtsp/liveMedia/MP3ADURTPSource.cpp | 80 + .../lib_rtsp/liveMedia/MP3ADUTranscoder.cpp | 92 + 
.../lib_rtsp/liveMedia/MP3ADUdescriptor.cpp | 65 + .../lib_rtsp/liveMedia/MP3ADUdescriptor.hh | 43 + .../lib_rtsp/liveMedia/MP3ADUinterleaving.cpp | 517 ++++ .../MP3AudioFileServerMediaSubsession.cpp | 179 ++ ...AudioMatroskaFileServerMediaSubsession.cpp | 57 + ...3AudioMatroskaFileServerMediaSubsession.hh | 57 + AnyCore/lib_rtsp/liveMedia/MP3FileSource.cpp | 178 ++ AnyCore/lib_rtsp/liveMedia/MP3Internals.cpp | 808 ++++++ AnyCore/lib_rtsp/liveMedia/MP3Internals.hh | 143 + .../liveMedia/MP3InternalsHuffman.cpp | 976 +++++++ .../lib_rtsp/liveMedia/MP3InternalsHuffman.hh | 82 + .../liveMedia/MP3InternalsHuffmanTable.cpp | 1548 +++++++++++ AnyCore/lib_rtsp/liveMedia/MP3StreamState.cpp | 438 ++++ AnyCore/lib_rtsp/liveMedia/MP3StreamState.hh | 90 + AnyCore/lib_rtsp/liveMedia/MP3Transcoder.cpp | 52 + .../liveMedia/MPEG1or2AudioRTPSink.cpp | 63 + .../liveMedia/MPEG1or2AudioRTPSource.cpp | 62 + .../liveMedia/MPEG1or2AudioStreamFramer.cpp | 210 ++ AnyCore/lib_rtsp/liveMedia/MPEG1or2Demux.cpp | 756 ++++++ .../MPEG1or2DemuxedElementaryStream.cpp | 88 + .../MPEG1or2DemuxedServerMediaSubsession.cpp | 134 + .../liveMedia/MPEG1or2FileServerDemux.cpp | 264 ++ ...MPEG1or2VideoFileServerMediaSubsession.cpp | 69 + .../liveMedia/MPEG1or2VideoRTPSink.cpp | 175 ++ .../liveMedia/MPEG1or2VideoRTPSource.cpp | 82 + .../MPEG1or2VideoStreamDiscreteFramer.cpp | 203 ++ .../liveMedia/MPEG1or2VideoStreamFramer.cpp | 478 ++++ .../MPEG2IndexFromTransportStream.cpp | 683 +++++ ...PEG2TransportFileServerMediaSubsession.cpp | 352 +++ .../liveMedia/MPEG2TransportStreamFramer.cpp | 290 +++ .../MPEG2TransportStreamFromESSource.cpp | 260 ++ .../MPEG2TransportStreamFromPESSource.cpp | 74 + .../MPEG2TransportStreamIndexFile.cpp | 349 +++ .../MPEG2TransportStreamMultiplexor.cpp | 441 ++++ .../MPEG2TransportStreamTrickModeFilter.cpp | 266 ++ ...MPEG2TransportUDPServerMediaSubsession.cpp | 75 + .../liveMedia/MPEG4ESVideoRTPSink.cpp | 142 + .../liveMedia/MPEG4ESVideoRTPSource.cpp | 65 + 
.../liveMedia/MPEG4GenericRTPSink.cpp | 142 + .../liveMedia/MPEG4GenericRTPSource.cpp | 234 ++ .../liveMedia/MPEG4LATMAudioRTPSink.cpp | 95 + .../liveMedia/MPEG4LATMAudioRTPSource.cpp | 264 ++ .../MPEG4VideoFileServerMediaSubsession.cpp | 125 + .../MPEG4VideoStreamDiscreteFramer.cpp | 252 ++ .../liveMedia/MPEG4VideoStreamFramer.cpp | 681 +++++ .../liveMedia/MPEGVideoStreamFramer.cpp | 180 ++ .../liveMedia/MPEGVideoStreamParser.cpp | 45 + .../liveMedia/MPEGVideoStreamParser.hh | 122 + AnyCore/lib_rtsp/liveMedia/Makefile | 433 +++ AnyCore/lib_rtsp/liveMedia/Makefile.head | 4 + AnyCore/lib_rtsp/liveMedia/Makefile.tail | 395 +++ .../liveMedia/MatroskaDemuxedTrack.cpp | 47 + .../liveMedia/MatroskaDemuxedTrack.hh | 64 + AnyCore/lib_rtsp/liveMedia/MatroskaFile.cpp | 879 +++++++ .../lib_rtsp/liveMedia/MatroskaFileParser.cpp | 1433 ++++++++++ .../lib_rtsp/liveMedia/MatroskaFileParser.hh | 134 + .../liveMedia/MatroskaFileServerDemux.cpp | 121 + .../MatroskaFileServerMediaSubsession.cpp | 65 + .../MatroskaFileServerMediaSubsession.hh | 55 + AnyCore/lib_rtsp/liveMedia/Media.cpp | 167 ++ AnyCore/lib_rtsp/liveMedia/MediaSession.cpp | 1446 ++++++++++ AnyCore/lib_rtsp/liveMedia/MediaSink.cpp | 225 ++ AnyCore/lib_rtsp/liveMedia/MediaSource.cpp | 88 + .../lib_rtsp/liveMedia/MultiFramedRTPSink.cpp | 423 +++ .../liveMedia/MultiFramedRTPSource.cpp | 627 +++++ .../lib_rtsp/liveMedia/OggDemuxedTrack.cpp | 43 + AnyCore/lib_rtsp/liveMedia/OggDemuxedTrack.hh | 58 + AnyCore/lib_rtsp/liveMedia/OggFile.cpp | 328 +++ AnyCore/lib_rtsp/liveMedia/OggFileParser.cpp | 1029 ++++++++ AnyCore/lib_rtsp/liveMedia/OggFileParser.hh | 91 + .../lib_rtsp/liveMedia/OggFileServerDemux.cpp | 109 + .../OggFileServerMediaSubsession.cpp | 54 + .../liveMedia/OggFileServerMediaSubsession.hh | 53 + AnyCore/lib_rtsp/liveMedia/OggFileSink.cpp | 273 ++ .../OnDemandServerMediaSubsession.cpp | 570 ++++ AnyCore/lib_rtsp/liveMedia/OutputFile.cpp | 60 + .../PassiveServerMediaSubsession.cpp | 221 ++ 
.../liveMedia/ProxyServerMediaSession.cpp | 793 ++++++ .../liveMedia/QCELPAudioRTPSource.cpp | 504 ++++ .../lib_rtsp/liveMedia/QuickTimeFileSink.cpp | 2317 +++++++++++++++++ .../liveMedia/QuickTimeGenericRTPSource.cpp | 274 ++ AnyCore/lib_rtsp/liveMedia/RTCP.cpp | 1073 ++++++++ .../AC3AudioFileServerMediaSubsession.hh | 48 + .../liveMedia/include/AC3AudioRTPSink.hh | 57 + .../liveMedia/include/AC3AudioRTPSource.hh | 51 + .../liveMedia/include/AC3AudioStreamFramer.hh | 70 + .../ADTSAudioFileServerMediaSubsession.hh | 48 + .../liveMedia/include/ADTSAudioFileSource.hh | 56 + .../AMRAudioFileServerMediaSubsession.hh | 48 + .../liveMedia/include/AMRAudioFileSink.hh | 51 + .../liveMedia/include/AMRAudioFileSource.hh | 48 + .../liveMedia/include/AMRAudioRTPSink.hh | 65 + .../liveMedia/include/AMRAudioRTPSource.hh | 53 + .../liveMedia/include/AMRAudioSource.hh | 52 + .../lib_rtsp/liveMedia/include/AVIFileSink.hh | 115 + .../liveMedia/include/AudioInputDevice.hh | 71 + .../liveMedia/include/AudioRTPSink.hh | 42 + AnyCore/lib_rtsp/liveMedia/include/Base64.hh | 43 + .../liveMedia/include/BasicUDPSink.hh | 62 + .../liveMedia/include/BasicUDPSource.hh | 55 + .../lib_rtsp/liveMedia/include/BitVector.hh | 66 + .../liveMedia/include/ByteStreamFileSource.hh | 82 + .../include/ByteStreamMemoryBufferSource.hh | 70 + .../include/ByteStreamMultiFileSource.hh | 67 + .../DVVideoFileServerMediaSubsession.hh | 51 + .../liveMedia/include/DVVideoRTPSink.hh | 57 + .../liveMedia/include/DVVideoRTPSource.hh | 51 + .../liveMedia/include/DVVideoStreamFramer.hh | 72 + .../liveMedia/include/DarwinInjector.hh | 106 + .../liveMedia/include/DeviceSource.hh | 66 + .../liveMedia/include/DigestAuthentication.hh | 74 + .../include/FileServerMediaSubsession.hh | 40 + .../lib_rtsp/liveMedia/include/FileSink.hh | 71 + .../liveMedia/include/FramedFileSource.hh | 37 + .../liveMedia/include/FramedFilter.hh | 52 + .../liveMedia/include/FramedSource.hh | 95 + .../liveMedia/include/GSMAudioRTPSink.hh | 44 + 
.../liveMedia/include/H261VideoRTPSource.hh | 56 + .../H263plusVideoFileServerMediaSubsession.hh | 48 + .../liveMedia/include/H263plusVideoRTPSink.hh | 54 + .../include/H263plusVideoRTPSource.hh | 60 + .../include/H263plusVideoStreamFramer.hh | 64 + .../H264VideoFileServerMediaSubsession.hh | 61 + .../liveMedia/include/H264VideoFileSink.hh | 47 + .../liveMedia/include/H264VideoRTPSink.hh | 59 + .../liveMedia/include/H264VideoRTPSource.hh | 70 + .../include/H264VideoStreamDiscreteFramer.hh | 46 + .../include/H264VideoStreamFramer.hh | 43 + .../liveMedia/include/H264or5VideoFileSink.hh | 46 + .../liveMedia/include/H264or5VideoRTPSink.hh | 60 + .../H264or5VideoStreamDiscreteFramer.hh | 52 + .../include/H264or5VideoStreamFramer.hh | 87 + .../H265VideoFileServerMediaSubsession.hh | 61 + .../liveMedia/include/H265VideoFileSink.hh | 51 + .../liveMedia/include/H265VideoRTPSink.hh | 62 + .../liveMedia/include/H265VideoRTPSource.hh | 67 + .../include/H265VideoStreamDiscreteFramer.hh | 46 + .../include/H265VideoStreamFramer.hh | 42 + .../lib_rtsp/liveMedia/include/InputFile.hh | 66 + .../liveMedia/include/JPEGVideoRTPSink.hh | 52 + .../liveMedia/include/JPEGVideoRTPSource.hh | 59 + .../liveMedia/include/JPEGVideoSource.hh | 55 + AnyCore/lib_rtsp/liveMedia/include/Locale.hh | 70 + AnyCore/lib_rtsp/liveMedia/include/MP3ADU.hh | 97 + .../liveMedia/include/MP3ADURTPSink.hh | 55 + .../liveMedia/include/MP3ADURTPSource.hh | 49 + .../liveMedia/include/MP3ADUTranscoder.hh | 64 + .../liveMedia/include/MP3ADUinterleaving.hh | 129 + .../MP3AudioFileServerMediaSubsession.hh | 73 + .../liveMedia/include/MP3FileSource.hh | 69 + .../liveMedia/include/MP3Transcoder.hh | 44 + .../liveMedia/include/MPEG1or2AudioRTPSink.hh | 48 + .../include/MPEG1or2AudioRTPSource.hh | 51 + .../include/MPEG1or2AudioStreamFramer.hh | 70 + .../liveMedia/include/MPEG1or2Demux.hh | 150 ++ .../MPEG1or2DemuxedElementaryStream.hh | 69 + .../MPEG1or2DemuxedServerMediaSubsession.hh | 63 + 
.../include/MPEG1or2FileServerDemux.hh | 67 + .../MPEG1or2VideoFileServerMediaSubsession.hh | 59 + .../liveMedia/include/MPEG1or2VideoRTPSink.hh | 69 + .../include/MPEG1or2VideoRTPSource.hh | 53 + .../MPEG1or2VideoStreamDiscreteFramer.hh | 76 + .../include/MPEG1or2VideoStreamFramer.hh | 56 + .../include/MPEG2IndexFromTransportStream.hh | 95 + ...MPEG2TransportFileServerMediaSubsession.hh | 131 + .../include/MPEG2TransportStreamFramer.hh | 78 + .../MPEG2TransportStreamFromESSource.hh | 60 + .../MPEG2TransportStreamFromPESSource.hh | 62 + .../include/MPEG2TransportStreamIndexFile.hh | 96 + .../MPEG2TransportStreamMultiplexor.hh | 88 + .../MPEG2TransportStreamTrickModeFilter.hh | 99 + .../MPEG2TransportUDPServerMediaSubsession.hh | 55 + .../liveMedia/include/MPEG4ESVideoRTPSink.hh | 72 + .../include/MPEG4ESVideoRTPSource.hh | 51 + .../liveMedia/include/MPEG4GenericRTPSink.hh | 70 + .../include/MPEG4GenericRTPSource.hh | 80 + .../include/MPEG4LATMAudioRTPSink.hh | 69 + .../include/MPEG4LATMAudioRTPSource.hh | 101 + .../MPEG4VideoFileServerMediaSubsession.hh | 61 + .../include/MPEG4VideoStreamDiscreteFramer.hh | 73 + .../include/MPEG4VideoStreamFramer.hh | 75 + .../include/MPEGVideoStreamFramer.hh | 84 + .../liveMedia/include/MatroskaFile.hh | 181 ++ .../include/MatroskaFileServerDemux.hh | 84 + AnyCore/lib_rtsp/liveMedia/include/Media.hh | 138 + .../liveMedia/include/MediaSession.hh | 337 +++ .../lib_rtsp/liveMedia/include/MediaSink.hh | 135 + .../lib_rtsp/liveMedia/include/MediaSource.hh | 58 + .../liveMedia/include/MultiFramedRTPSink.hh | 140 + .../liveMedia/include/MultiFramedRTPSource.hh | 159 ++ AnyCore/lib_rtsp/liveMedia/include/OggFile.hh | 177 ++ .../liveMedia/include/OggFileServerDemux.hh | 81 + .../lib_rtsp/liveMedia/include/OggFileSink.hh | 79 + .../include/OnDemandServerMediaSubsession.hh | 199 ++ .../lib_rtsp/liveMedia/include/OutputFile.hh | 31 + .../include/PassiveServerMediaSubsession.hh | 82 + .../include/ProxyServerMediaSession.hh | 211 ++ 
.../liveMedia/include/QCELPAudioRTPSource.hh | 39 + .../liveMedia/include/QuickTimeFileSink.hh | 188 ++ .../include/QuickTimeGenericRTPSource.hh | 68 + AnyCore/lib_rtsp/liveMedia/include/RTCP.hh | 206 ++ .../liveMedia/include/RTPInterface.hh | 114 + AnyCore/lib_rtsp/liveMedia/include/RTPSink.hh | 232 ++ .../lib_rtsp/liveMedia/include/RTPSource.hh | 266 ++ .../lib_rtsp/liveMedia/include/RTSPClient.hh | 369 +++ .../lib_rtsp/liveMedia/include/RTSPCommon.hh | 67 + .../liveMedia/include/RTSPRegisterSender.hh | 77 + .../lib_rtsp/liveMedia/include/RTSPServer.hh | 411 +++ .../RTSPServerSupportingHTTPStreaming.hh | 72 + .../lib_rtsp/liveMedia/include/SIPClient.hh | 149 ++ .../liveMedia/include/ServerMediaSession.hh | 201 ++ .../liveMedia/include/SimpleRTPSink.hh | 76 + .../liveMedia/include/SimpleRTPSource.hh | 67 + .../liveMedia/include/StreamReplicator.hh | 84 + .../liveMedia/include/T140TextRTPSink.hh | 103 + .../liveMedia/include/TCPStreamSink.hh | 67 + .../lib_rtsp/liveMedia/include/TextRTPSink.hh | 41 + .../liveMedia/include/TheoraVideoRTPSink.hh | 72 + .../liveMedia/include/TheoraVideoRTPSource.hh | 53 + .../liveMedia/include/VP8VideoRTPSink.hh | 50 + .../liveMedia/include/VP8VideoRTPSource.hh | 50 + .../liveMedia/include/VideoRTPSink.hh | 41 + .../liveMedia/include/VorbisAudioRTPSink.hh | 85 + .../liveMedia/include/VorbisAudioRTPSource.hh | 66 + .../WAVAudioFileServerMediaSubsession.hh | 68 + .../liveMedia/include/WAVAudioFileSource.hh | 86 + .../lib_rtsp/liveMedia/include/liveMedia.hh | 129 + .../liveMedia/include/liveMedia_version.hh | 10 + AnyCore/lib_rtsp/liveMedia/include/ourMD5.hh | 38 + .../liveMedia/include/uLawAudioFilter.hh | 208 ++ AnyCore/lib_rtsp/liveMedia/liveMedia.mak | 446 ++++ AnyCore/lib_rtsp/liveMedia/liveMedia.vcproj | 894 +++++++ .../liveMedia.vcproj.Eric-PC.Eric.user | 65 + AnyCore/lib_rtsp/liveMedia/liveMedia.vcxproj | 275 ++ .../liveMedia/liveMedia.vcxproj.filters | 559 ++++ AnyCore/lib_rtsp/liveMedia/ourMD5.cpp | 325 +++ 
AnyCore/lib_rtsp/liveMedia/rtcp_from_spec.c | 289 ++ README.md | 20 +- 355 files changed, 61139 insertions(+), 9 deletions(-) create mode 100644 AnyCore/lib_rtsp/.vs/XRtspLive/v14/.suo create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/BasicHashTable.cpp create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/BasicTaskScheduler.cpp create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/BasicTaskScheduler0.cpp create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.cpp create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.mak create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.vcproj create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.vcproj.Eric-PC.Eric.user create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.vcxproj create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.vcxproj.filters create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment0.cpp create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/COPYING create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/DelayQueue.cpp create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/Makefile create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/Makefile.head create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/Makefile.tail create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/include/BasicHashTable.hh create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/include/BasicUsageEnvironment.hh create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/include/BasicUsageEnvironment0.hh create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/include/BasicUsageEnvironment_version.hh create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/include/DelayQueue.hh create mode 100644 AnyCore/lib_rtsp/BasicUsageEnvironment/include/HandlerSet.hh create mode 100644 AnyCore/lib_rtsp/groupsock/COPYING create mode 
100644 AnyCore/lib_rtsp/groupsock/GroupEId.cpp create mode 100644 AnyCore/lib_rtsp/groupsock/Groupsock.cpp create mode 100644 AnyCore/lib_rtsp/groupsock/GroupsockHelper.cpp create mode 100644 AnyCore/lib_rtsp/groupsock/IOHandlers.cpp create mode 100644 AnyCore/lib_rtsp/groupsock/Makefile create mode 100644 AnyCore/lib_rtsp/groupsock/Makefile.head create mode 100644 AnyCore/lib_rtsp/groupsock/Makefile.tail create mode 100644 AnyCore/lib_rtsp/groupsock/NetAddress.cpp create mode 100644 AnyCore/lib_rtsp/groupsock/NetInterface.cpp create mode 100644 AnyCore/lib_rtsp/groupsock/groupsock.mak create mode 100644 AnyCore/lib_rtsp/groupsock/groupsock.vcproj create mode 100644 AnyCore/lib_rtsp/groupsock/groupsock.vcproj.Eric-PC.Eric.user create mode 100644 AnyCore/lib_rtsp/groupsock/groupsock.vcxproj create mode 100644 AnyCore/lib_rtsp/groupsock/groupsock.vcxproj.filters create mode 100644 AnyCore/lib_rtsp/groupsock/include/GroupEId.hh create mode 100644 AnyCore/lib_rtsp/groupsock/include/Groupsock.hh create mode 100644 AnyCore/lib_rtsp/groupsock/include/GroupsockHelper.hh create mode 100644 AnyCore/lib_rtsp/groupsock/include/IOHandlers.hh create mode 100644 AnyCore/lib_rtsp/groupsock/include/NetAddress.hh create mode 100644 AnyCore/lib_rtsp/groupsock/include/NetCommon.h create mode 100644 AnyCore/lib_rtsp/groupsock/include/NetInterface.hh create mode 100644 AnyCore/lib_rtsp/groupsock/include/TunnelEncaps.hh create mode 100644 AnyCore/lib_rtsp/groupsock/include/groupsock_version.hh create mode 100644 AnyCore/lib_rtsp/groupsock/inet.c create mode 100644 AnyCore/lib_rtsp/liveMedia/AC3AudioFileServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/AC3AudioRTPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/AC3AudioRTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/AC3AudioStreamFramer.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/ADTSAudioFileServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/ADTSAudioFileSource.cpp create 
mode 100644 AnyCore/lib_rtsp/liveMedia/AMRAudioFileServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/AMRAudioFileSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/AMRAudioFileSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/AMRAudioRTPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/AMRAudioRTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/AMRAudioSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/AVIFileSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/AudioInputDevice.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/AudioRTPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/Base64.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/BasicUDPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/BasicUDPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/BitVector.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/ByteStreamFileSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/ByteStreamMemoryBufferSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/ByteStreamMultiFileSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/COPYING create mode 100644 AnyCore/lib_rtsp/liveMedia/DVVideoFileServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/DVVideoRTPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/DVVideoRTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/DVVideoStreamFramer.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/DarwinInjector.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/DeviceSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/DigestAuthentication.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/EBMLNumber.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/EBMLNumber.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/FileServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/FileSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/FramedFileSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/FramedFilter.cpp 
create mode 100644 AnyCore/lib_rtsp/liveMedia/FramedSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/GSMAudioRTPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H261VideoRTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H263plusVideoFileServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H263plusVideoRTPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H263plusVideoRTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H263plusVideoStreamFramer.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H263plusVideoStreamParser.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H263plusVideoStreamParser.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/H264VideoFileServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H264VideoFileSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H264VideoRTPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H264VideoRTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H264VideoStreamDiscreteFramer.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H264VideoStreamFramer.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H264or5VideoFileSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H264or5VideoRTPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H264or5VideoStreamDiscreteFramer.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H264or5VideoStreamFramer.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H265VideoFileServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H265VideoFileSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H265VideoRTPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H265VideoRTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H265VideoStreamDiscreteFramer.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/H265VideoStreamFramer.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/InputFile.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/JPEGVideoRTPSink.cpp create mode 100644 
AnyCore/lib_rtsp/liveMedia/JPEGVideoRTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/JPEGVideoSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/Locale.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3ADU.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3ADURTPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3ADURTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3ADUTranscoder.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3ADUdescriptor.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3ADUdescriptor.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3ADUinterleaving.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3AudioFileServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3AudioMatroskaFileServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3AudioMatroskaFileServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3FileSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3Internals.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3Internals.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3InternalsHuffman.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3InternalsHuffman.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3InternalsHuffmanTable.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3StreamState.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3StreamState.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/MP3Transcoder.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG1or2AudioRTPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG1or2AudioRTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG1or2AudioStreamFramer.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG1or2Demux.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG1or2DemuxedElementaryStream.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG1or2DemuxedServerMediaSubsession.cpp create mode 100644 
AnyCore/lib_rtsp/liveMedia/MPEG1or2FileServerDemux.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoFileServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoRTPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoRTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoStreamDiscreteFramer.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoStreamFramer.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG2IndexFromTransportStream.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG2TransportFileServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamFramer.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamFromESSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamFromPESSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamIndexFile.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamMultiplexor.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamTrickModeFilter.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG2TransportUDPServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG4ESVideoRTPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG4ESVideoRTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG4GenericRTPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG4GenericRTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG4LATMAudioRTPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG4LATMAudioRTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG4VideoFileServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG4VideoStreamDiscreteFramer.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEG4VideoStreamFramer.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEGVideoStreamFramer.cpp create mode 100644 
AnyCore/lib_rtsp/liveMedia/MPEGVideoStreamParser.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MPEGVideoStreamParser.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/Makefile create mode 100644 AnyCore/lib_rtsp/liveMedia/Makefile.head create mode 100644 AnyCore/lib_rtsp/liveMedia/Makefile.tail create mode 100644 AnyCore/lib_rtsp/liveMedia/MatroskaDemuxedTrack.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MatroskaDemuxedTrack.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/MatroskaFile.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MatroskaFileParser.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MatroskaFileParser.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/MatroskaFileServerDemux.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MatroskaFileServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MatroskaFileServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/Media.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MediaSession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MediaSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MediaSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MultiFramedRTPSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/MultiFramedRTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/OggDemuxedTrack.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/OggDemuxedTrack.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/OggFile.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/OggFileParser.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/OggFileParser.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/OggFileServerDemux.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/OggFileServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/OggFileServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/OggFileSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/OnDemandServerMediaSubsession.cpp create mode 100644 
AnyCore/lib_rtsp/liveMedia/OutputFile.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/PassiveServerMediaSubsession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/ProxyServerMediaSession.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/QCELPAudioRTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/QuickTimeFileSink.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/QuickTimeGenericRTPSource.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/RTCP.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/include/AC3AudioFileServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/AC3AudioRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/AC3AudioRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/AC3AudioStreamFramer.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/ADTSAudioFileServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/ADTSAudioFileSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/AMRAudioFileServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/AMRAudioFileSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/AMRAudioFileSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/AMRAudioRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/AMRAudioRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/AMRAudioSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/AVIFileSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/AudioInputDevice.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/AudioRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/Base64.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/BasicUDPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/BasicUDPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/BitVector.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/ByteStreamFileSource.hh create mode 100644 
AnyCore/lib_rtsp/liveMedia/include/ByteStreamMemoryBufferSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/ByteStreamMultiFileSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/DVVideoFileServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/DVVideoRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/DVVideoRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/DVVideoStreamFramer.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/DarwinInjector.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/DeviceSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/DigestAuthentication.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/FileServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/FileSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/FramedFileSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/FramedFilter.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/FramedSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/GSMAudioRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H261VideoRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H263plusVideoFileServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H263plusVideoRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H263plusVideoRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H263plusVideoStreamFramer.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H264VideoFileServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H264VideoFileSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H264VideoRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H264VideoRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H264VideoStreamDiscreteFramer.hh create mode 100644 
AnyCore/lib_rtsp/liveMedia/include/H264VideoStreamFramer.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H264or5VideoFileSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H264or5VideoRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H264or5VideoStreamDiscreteFramer.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H264or5VideoStreamFramer.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H265VideoFileServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H265VideoFileSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H265VideoRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H265VideoRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H265VideoStreamDiscreteFramer.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/H265VideoStreamFramer.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/InputFile.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/JPEGVideoRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/JPEGVideoRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/JPEGVideoSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/Locale.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MP3ADU.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MP3ADURTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MP3ADURTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MP3ADUTranscoder.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MP3ADUinterleaving.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MP3AudioFileServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MP3FileSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MP3Transcoder.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG1or2AudioRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG1or2AudioRTPSource.hh create mode 100644 
AnyCore/lib_rtsp/liveMedia/include/MPEG1or2AudioStreamFramer.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG1or2Demux.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG1or2DemuxedElementaryStream.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG1or2DemuxedServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG1or2FileServerDemux.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoFileServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoStreamDiscreteFramer.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoStreamFramer.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG2IndexFromTransportStream.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportFileServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamFramer.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamFromESSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamFromPESSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamIndexFile.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamMultiplexor.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamTrickModeFilter.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportUDPServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG4ESVideoRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG4ESVideoRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG4GenericRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG4GenericRTPSource.hh create mode 100644 
AnyCore/lib_rtsp/liveMedia/include/MPEG4LATMAudioRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG4LATMAudioRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG4VideoFileServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG4VideoStreamDiscreteFramer.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEG4VideoStreamFramer.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MPEGVideoStreamFramer.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MatroskaFile.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MatroskaFileServerDemux.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/Media.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MediaSession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MediaSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MediaSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MultiFramedRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/MultiFramedRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/OggFile.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/OggFileServerDemux.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/OggFileSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/OnDemandServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/OutputFile.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/PassiveServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/ProxyServerMediaSession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/QCELPAudioRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/QuickTimeFileSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/QuickTimeGenericRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/RTCP.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/RTPInterface.hh create mode 100644 
AnyCore/lib_rtsp/liveMedia/include/RTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/RTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/RTSPClient.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/RTSPCommon.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/RTSPRegisterSender.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/RTSPServer.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/RTSPServerSupportingHTTPStreaming.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/SIPClient.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/ServerMediaSession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/SimpleRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/SimpleRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/StreamReplicator.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/T140TextRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/TCPStreamSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/TextRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/TheoraVideoRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/TheoraVideoRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/VP8VideoRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/VP8VideoRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/VideoRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/VorbisAudioRTPSink.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/VorbisAudioRTPSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/WAVAudioFileServerMediaSubsession.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/WAVAudioFileSource.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/liveMedia.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/liveMedia_version.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/include/ourMD5.hh create mode 100644 
AnyCore/lib_rtsp/liveMedia/include/uLawAudioFilter.hh create mode 100644 AnyCore/lib_rtsp/liveMedia/liveMedia.mak create mode 100644 AnyCore/lib_rtsp/liveMedia/liveMedia.vcproj create mode 100644 AnyCore/lib_rtsp/liveMedia/liveMedia.vcproj.Eric-PC.Eric.user create mode 100644 AnyCore/lib_rtsp/liveMedia/liveMedia.vcxproj create mode 100644 AnyCore/lib_rtsp/liveMedia/liveMedia.vcxproj.filters create mode 100644 AnyCore/lib_rtsp/liveMedia/ourMD5.cpp create mode 100644 AnyCore/lib_rtsp/liveMedia/rtcp_from_spec.c diff --git a/AnyCore/lib_rtsp/.vs/XRtspLive/v14/.suo b/AnyCore/lib_rtsp/.vs/XRtspLive/v14/.suo new file mode 100644 index 0000000000000000000000000000000000000000..1059dd3d29d851bd4ffb1b53ec2ab2f4a24de756 GIT binary patch literal 18944 zcmeI4YiwLc701VUP^a%l2+)Rf)1(htd+~a0$8MSwdu_)d&(=;-h=aS{y>>QU@0#6p z9&J-1BviyprQ-1gP=Ek}4^)*goq3xtzsYxPZZziI+$-<( zOP4O`k*>L1NpBzyJZ#eZ=Yb29hq%7PXA_{pxH{r<(K>+~?- zmN*E-v^imRLw$m)0dv}YQ08%iSv5yrY%k}vk5CBpwaKIJm>9k9H;&1ggqfr@4lOI4 z0%ldyT8do-%BAm(7uD|3SHEXW5)Nc6-kDr zJ`ODB_&@)fvvcGB2U04Mr~fsK|0e0M$1z)JiNljTUm5={lRUny;Z}A)woG4K?_8$ATF(c5;LU%8H&BBRcbKK6s zIs5@}CYvv}*h&4@RC+Bam;U3F)!0%x422QPgQOYfe3}->^EgR+VkKvPWA32;@~1R@ z%ccKVzC0)BN3g+DcsygK*ZM#SXgk&qbN!(iS~zAxCTQC9!o1W*|79Dr7ATj#Y=~El z{2qBZ8T2oT9uCs)B=_05nZfnUBJf83sbh}?soW2JzklM@yVTiFzn^*~>FIpUN1&fF zadJ=5f;8<&Ks8~h7pN9$SN}b~r`-O_!}jK%rRX1mx;WEsJ;7*!sP8HWefeOr(dE+j z_kWn4C9S5reRTOeL60ZN%~;Cr6CY9EWefVJzDS?TrGJpJ8`WUa0kr;v*$1@@npiC3 z!=~FUw8RnpZ<7Bhf3#fs`zZU-e4P<`FAI=mBqZSWsmofCFSy958vonR|3he-y#6RU z=K#+iaiA|#CrT2U(1%KttjAG7vX>)=^yd-mq_VX1NNwM+c3 zwEw-l{IPrm3y*)r0b~~ruqIu~`Wa@0%K863XnXdrSl>sWI}6uybMb(T^$ff=45j(>`)Gqh8>ZlV8r{^Kw^Tds zfqsB`w7xD^{~w_2mw?iI`Wo-@J4(sjecKrH&#wKo&M%k#cFKDG&wTot|26+gFE#&^ z@_{`4SB#-t`r`dF3T%)Tr+)^`lSh_x3ode0|0|Wh-~P>~zYpF|LsfA&zb~?w4pd|R zG+&P7`!k>Z!uscLNiicjs?dM=D~rkBr6ddBM#6g0#2Nepc?R;tlDnsful2&H7EB~RXZJDg4{?4N3FPh>w?WA(SL%V~AvQEZpe@kHTf`x`u!r5?Wd?MW4*%gh4 zyE>e3=dojw*t*pm*66UppvxE}wp^9h;^s(tdM282GR;HL?D65mV-B&?<_&pWIgp7b 
zQqkmG8D2E&JYQvvE*2>%-MwZkF_}tdoJch5m?j&R)BFF|C7&zci%JHVl}nOP?-dFrBj^)?<@bQMXA;$cXvu*iQ!`m_qU&Wuy6~db=tFr7B z(v}?lfy{yfw0*0|q4r{I#}UX2>zo_xGfr6@@93gOU6v`1Q$p+yN+ERx&j8ZI}r zN)cm4NfV4VcMe_t@utX068aOgr(hMm`$kxRg_9qR^wn%2Le#4g~_J>{X;q9=~|9_fL_&m-@c<;BWVQ{`>vc`Jesk)s5@7pa06>7gAep>rL-} z*8l8xf4kxQ*S@)D&nGu;dg({^{`BkqXSTKVIXp!*+P^^cYs|g+8mG8tj5tn~YeizU zFIKxTul3&d=^qX~ z_p9$*yyY85ZrE$C|H-Y%|E}9|;OR?G{rlnz7Zg$?cKFBjcfR=SmMy!U|HCV1?;fA- za0w2Z?uqp;y#B(by%)Y5|Lzwz{k5mE_p5C7+-q9*?n8-_)|P3ts(n~mv(C)zMKETE z-QQ)*b{k`T9KUUlNNa>+n4AG@+W>!Eh`*Kl2w&A?`MP0iyS?fl4%@~X1C$8yRv%>t z$T`buI8Lmxn6CTP*-urwJBrH`e>YayW*axIXWOm*$0qz$jZH)oN)fCpC_&K zLOFw6^;52c`yRBQkDLzv+Q=w$vgC@x2XB4qDBY*7w5UUN|nR zD2_`e-5lHPhzY@YjgtrxX{Vg*W-r&3jfr|vtyL2BY)dVY&}`HRXZzr(Ym+ql$l^#6 z(ie?{Zg{Ox+)62p9d~9LL3}n@ywM@J0%8?NH=A=DIuE{sZbL2q*|*a>e-g+Bylw}wtsJT1W!ZXm*q2Q z?Xz&L+ND)G-AECA`?ub>9P(vOk6cvms!vj#xg@q$q2gs7Qub$o$_W1c{)zzmieh-2LLC8g^E(|5U&Kt@XQs+j{pXxqHc< zzg*WIZJjHnIiJtsuKp841;77lk`E|fpb3{|6HByUPFo literal 0 HcmV?d00001 diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicHashTable.cpp b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicHashTable.cpp new file mode 100644 index 0000000..6a86612 --- /dev/null +++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicHashTable.cpp @@ -0,0 +1,277 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Basic Hash Table implementation +// Implementation + +#include "BasicHashTable.hh" +#include "strDup.hh" + +#if defined(__WIN32__) || defined(_WIN32) +#else +#include +#endif +#include +#include + +// When there are this many entries per bucket, on average, rebuild +// the table to increase the number of buckets +#define REBUILD_MULTIPLIER 3 + +BasicHashTable::BasicHashTable(int keyType) + : fBuckets(fStaticBuckets), fNumBuckets(SMALL_HASH_TABLE_SIZE), + fNumEntries(0), fRebuildSize(SMALL_HASH_TABLE_SIZE*REBUILD_MULTIPLIER), + fDownShift(28), fMask(0x3), fKeyType(keyType) { + for (unsigned i = 0; i < SMALL_HASH_TABLE_SIZE; ++i) { + fStaticBuckets[i] = NULL; + } +} + +BasicHashTable::~BasicHashTable() { + // Free all the entries in the table: + for (unsigned i = 0; i < fNumBuckets; ++i) { + TableEntry* entry; + while ((entry = fBuckets[i]) != NULL) { + deleteEntry(i, entry); + } + } + + // Also free the bucket array, if it was dynamically allocated: + if (fBuckets != fStaticBuckets) delete[] fBuckets; +} + +void* BasicHashTable::Add(char const* key, void* value) { + void* oldValue; + unsigned index; + TableEntry* entry = lookupKey(key, index); + if (entry != NULL) { + // There's already an item with this key + oldValue = entry->value; + } else { + // There's no existing entry; create a new one: + entry = insertNewEntry(index, key); + oldValue = NULL; + } + entry->value = value; + + // If the table has become too large, rebuild it with more buckets: + if (fNumEntries >= fRebuildSize) rebuild(); + + return oldValue; +} + +Boolean BasicHashTable::Remove(char const* key) { + unsigned index; + TableEntry* entry = lookupKey(key, index); + if (entry == NULL) return False; // no 
such entry + + deleteEntry(index, entry); + + return True; +} + +void* BasicHashTable::Lookup(char const* key) const { + unsigned index; + TableEntry* entry = lookupKey(key, index); + if (entry == NULL) return NULL; // no such entry + + return entry->value; +} + +unsigned BasicHashTable::numEntries() const { + return fNumEntries; +} + +BasicHashTable::Iterator::Iterator(BasicHashTable const& table) + : fTable(table), fNextIndex(0), fNextEntry(NULL) { +} + +void* BasicHashTable::Iterator::next(char const*& key) { + while (fNextEntry == NULL) { + if (fNextIndex >= fTable.fNumBuckets) return NULL; + + fNextEntry = fTable.fBuckets[fNextIndex++]; + } + + BasicHashTable::TableEntry* entry = fNextEntry; + fNextEntry = entry->fNext; + + key = entry->key; + return entry->value; +} + +////////// Implementation of HashTable creation functions ////////// + +HashTable* HashTable::create(int keyType) { + return new BasicHashTable(keyType); +} + +HashTable::Iterator* HashTable::Iterator::create(HashTable const& hashTable) { + // "hashTable" is assumed to be a BasicHashTable + return new BasicHashTable::Iterator((BasicHashTable const&)hashTable); +} + +////////// Implementation of internal member functions ////////// + +BasicHashTable::TableEntry* BasicHashTable +::lookupKey(char const* key, unsigned& index) const { + TableEntry* entry; + index = hashIndexFromKey(key); + + for (entry = fBuckets[index]; entry != NULL; entry = entry->fNext) { + if (keyMatches(key, entry->key)) break; + } + + return entry; +} + +Boolean BasicHashTable +::keyMatches(char const* key1, char const* key2) const { + // The way we check the keys for a match depends upon their type: + if (fKeyType == STRING_HASH_KEYS) { + return (strcmp(key1, key2) == 0); + } else if (fKeyType == ONE_WORD_HASH_KEYS) { + return (key1 == key2); + } else { + unsigned* k1 = (unsigned*)key1; + unsigned* k2 = (unsigned*)key2; + + for (int i = 0; i < fKeyType; ++i) { + if (k1[i] != k2[i]) return False; // keys differ + } + return 
True; + } +} + +BasicHashTable::TableEntry* BasicHashTable +::insertNewEntry(unsigned index, char const* key) { + TableEntry* entry = new TableEntry(); + entry->fNext = fBuckets[index]; + fBuckets[index] = entry; + + ++fNumEntries; + assignKey(entry, key); + + return entry; +} + +void BasicHashTable::assignKey(TableEntry* entry, char const* key) { + // The way we assign the key depends upon its type: + if (fKeyType == STRING_HASH_KEYS) { + entry->key = strDup(key); + } else if (fKeyType == ONE_WORD_HASH_KEYS) { + entry->key = key; + } else if (fKeyType > 0) { + unsigned* keyFrom = (unsigned*)key; + unsigned* keyTo = new unsigned[fKeyType]; + for (int i = 0; i < fKeyType; ++i) keyTo[i] = keyFrom[i]; + + entry->key = (char const*)keyTo; + } +} + +void BasicHashTable::deleteEntry(unsigned index, TableEntry* entry) { + TableEntry** ep = &fBuckets[index]; + + Boolean foundIt = False; + while (*ep != NULL) { + if (*ep == entry) { + foundIt = True; + *ep = entry->fNext; + break; + } + ep = &((*ep)->fNext); + } + + if (!foundIt) { // shouldn't happen +#ifdef DEBUG + fprintf(stderr, "BasicHashTable[%p]::deleteEntry(%d,%p): internal error - not found (first entry %p", this, index, entry, fBuckets[index]); + if (fBuckets[index] != NULL) fprintf(stderr, ", next entry %p", fBuckets[index]->fNext); + fprintf(stderr, ")\n"); +#endif + } + + --fNumEntries; + deleteKey(entry); + delete entry; +} + +void BasicHashTable::deleteKey(TableEntry* entry) { + // The way we delete the key depends upon its type: + if (fKeyType == ONE_WORD_HASH_KEYS) { + entry->key = NULL; + } else { + delete[] (char*)entry->key; + entry->key = NULL; + } +} + +void BasicHashTable::rebuild() { + // Remember the existing table size: + unsigned oldSize = fNumBuckets; + TableEntry** oldBuckets = fBuckets; + + // Create the new sized table: + fNumBuckets *= 4; + fBuckets = new TableEntry*[fNumBuckets]; + for (unsigned i = 0; i < fNumBuckets; ++i) { + fBuckets[i] = NULL; + } + fRebuildSize *= 4; + fDownShift -= 2; 
+ fMask = (fMask<<2)|0x3; + + // Rehash the existing entries into the new table: + for (TableEntry** oldChainPtr = oldBuckets; oldSize > 0; + --oldSize, ++oldChainPtr) { + for (TableEntry* hPtr = *oldChainPtr; hPtr != NULL; + hPtr = *oldChainPtr) { + *oldChainPtr = hPtr->fNext; + + unsigned index = hashIndexFromKey(hPtr->key); + + hPtr->fNext = fBuckets[index]; + fBuckets[index] = hPtr; + } + } + + // Free the old bucket array, if it was dynamically allocated: + if (oldBuckets != fStaticBuckets) delete[] oldBuckets; +} + +unsigned BasicHashTable::hashIndexFromKey(char const* key) const { + unsigned result = 0; + + if (fKeyType == STRING_HASH_KEYS) { + while (1) { + char c = *key++; + if (c == 0) break; + result += (result<<3) + (unsigned)c; + } + result &= fMask; + } else if (fKeyType == ONE_WORD_HASH_KEYS) { + result = randomIndex((uintptr_t)key); + } else { + unsigned* k = (unsigned*)key; + uintptr_t sum = 0; + for (int i = 0; i < fKeyType; ++i) { + sum += k[i]; + } + result = randomIndex(sum); + } + + return result; +} diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicTaskScheduler.cpp b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicTaskScheduler.cpp new file mode 100644 index 0000000..057ef90 --- /dev/null +++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicTaskScheduler.cpp @@ -0,0 +1,250 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Basic Usage Environment: for a simple, non-scripted, console application +// Implementation + + +#include "BasicUsageEnvironment.hh" +#include "HandlerSet.hh" +#include +#if defined(_QNX4) +#include +#include +#endif + +////////// BasicTaskScheduler ////////// + +BasicTaskScheduler* BasicTaskScheduler::createNew(unsigned maxSchedulerGranularity) { + return new BasicTaskScheduler(maxSchedulerGranularity); +} + +BasicTaskScheduler::BasicTaskScheduler(unsigned maxSchedulerGranularity) + : fMaxSchedulerGranularity(maxSchedulerGranularity), fMaxNumSockets(0) +#if defined(__WIN32__) || defined(_WIN32) + , fDummySocketNum(-1) +#endif +{ + FD_ZERO(&fReadSet); + FD_ZERO(&fWriteSet); + FD_ZERO(&fExceptionSet); + + if (maxSchedulerGranularity > 0) schedulerTickTask(); // ensures that we handle events frequently +} + +BasicTaskScheduler::~BasicTaskScheduler() { +#if defined(__WIN32__) || defined(_WIN32) + if (fDummySocketNum >= 0) closeSocket(fDummySocketNum); +#endif +} + +void BasicTaskScheduler::schedulerTickTask(void* clientData) { + ((BasicTaskScheduler*)clientData)->schedulerTickTask(); +} + +void BasicTaskScheduler::schedulerTickTask() { + scheduleDelayedTask(fMaxSchedulerGranularity, schedulerTickTask, this); +} + +#ifndef MILLION +#define MILLION 1000000 +#endif + +void BasicTaskScheduler::SingleStep(unsigned maxDelayTime) { + fd_set readSet = fReadSet; // make a copy for this select() call + fd_set writeSet = fWriteSet; // ditto + fd_set exceptionSet = fExceptionSet; // ditto + + DelayInterval const& timeToDelay = fDelayQueue.timeToNextAlarm(); + struct timeval tv_timeToDelay; + tv_timeToDelay.tv_sec = timeToDelay.seconds(); + tv_timeToDelay.tv_usec = 
timeToDelay.useconds(); + // Very large "tv_sec" values cause select() to fail. + // Don't make it any larger than 1 million seconds (11.5 days) + const long MAX_TV_SEC = MILLION; + if (tv_timeToDelay.tv_sec > MAX_TV_SEC) { + tv_timeToDelay.tv_sec = MAX_TV_SEC; + } + // Also check our "maxDelayTime" parameter (if it's > 0): + if (maxDelayTime > 0 && + (tv_timeToDelay.tv_sec > (long)maxDelayTime/MILLION || + (tv_timeToDelay.tv_sec == (long)maxDelayTime/MILLION && + tv_timeToDelay.tv_usec > (long)maxDelayTime%MILLION))) { + tv_timeToDelay.tv_sec = maxDelayTime/MILLION; + tv_timeToDelay.tv_usec = maxDelayTime%MILLION; + } + + int selectResult = select(fMaxNumSockets, &readSet, &writeSet, &exceptionSet, &tv_timeToDelay); + if (selectResult < 0) { +#if defined(__WIN32__) || defined(_WIN32) + int err = WSAGetLastError(); + // For some unknown reason, select() in Windoze sometimes fails with WSAEINVAL if + // it was called with no entries set in "readSet". If this happens, ignore it: + if (err == WSAEINVAL && readSet.fd_count == 0) { + err = EINTR; + // To stop this from happening again, create a dummy socket: + if (fDummySocketNum >= 0) closeSocket(fDummySocketNum); + fDummySocketNum = socket(AF_INET, SOCK_DGRAM, 0); + FD_SET((unsigned)fDummySocketNum, &fReadSet); + } + if (err != EINTR) { +#else + if (errno != EINTR && errno != EAGAIN) { +#endif + // Unexpected error - treat this as fatal: +#if !defined(_WIN32_WCE) + perror("BasicTaskScheduler::SingleStep(): select() fails"); + // Because this failure is often "Bad file descriptor" - which is caused by an invalid socket number (i.e., a socket number + // that had already been closed) being used in "select()" - we print out the sockets that were being used in "select()", + // to assist in debugging: + fprintf(stderr, "socket numbers used in the select() call:"); + for (int i = 0; i < 10000; ++i) { + if (FD_ISSET(i, &fReadSet) || FD_ISSET(i, &fWriteSet) || FD_ISSET(i, &fExceptionSet)) { + fprintf(stderr, " %d(", i); + if 
(FD_ISSET(i, &fReadSet)) fprintf(stderr, "r"); + if (FD_ISSET(i, &fWriteSet)) fprintf(stderr, "w"); + if (FD_ISSET(i, &fExceptionSet)) fprintf(stderr, "e"); + fprintf(stderr, ")"); + } + } + fprintf(stderr, "\n"); +#endif + internalError(); + } + } + + // Call the handler function for one readable socket: + HandlerIterator iter(*fHandlers); + HandlerDescriptor* handler; + // To ensure forward progress through the handlers, begin past the last + // socket number that we handled: + if (fLastHandledSocketNum >= 0) { + while ((handler = iter.next()) != NULL) { + if (handler->socketNum == fLastHandledSocketNum) break; + } + if (handler == NULL) { + fLastHandledSocketNum = -1; + iter.reset(); // start from the beginning instead + } + } + while ((handler = iter.next()) != NULL) { + int sock = handler->socketNum; // alias + int resultConditionSet = 0; + if (FD_ISSET(sock, &readSet) && FD_ISSET(sock, &fReadSet)/*sanity check*/) resultConditionSet |= SOCKET_READABLE; + if (FD_ISSET(sock, &writeSet) && FD_ISSET(sock, &fWriteSet)/*sanity check*/) resultConditionSet |= SOCKET_WRITABLE; + if (FD_ISSET(sock, &exceptionSet) && FD_ISSET(sock, &fExceptionSet)/*sanity check*/) resultConditionSet |= SOCKET_EXCEPTION; + if ((resultConditionSet&handler->conditionSet) != 0 && handler->handlerProc != NULL) { + fLastHandledSocketNum = sock; + // Note: we set "fLastHandledSocketNum" before calling the handler, + // in case the handler calls "doEventLoop()" reentrantly. 
+ (*handler->handlerProc)(handler->clientData, resultConditionSet); + break; + } + } + if (handler == NULL && fLastHandledSocketNum >= 0) { + // We didn't call a handler, but we didn't get to check all of them, + // so try again from the beginning: + iter.reset(); + while ((handler = iter.next()) != NULL) { + int sock = handler->socketNum; // alias + int resultConditionSet = 0; + if (FD_ISSET(sock, &readSet) && FD_ISSET(sock, &fReadSet)/*sanity check*/) resultConditionSet |= SOCKET_READABLE; + if (FD_ISSET(sock, &writeSet) && FD_ISSET(sock, &fWriteSet)/*sanity check*/) resultConditionSet |= SOCKET_WRITABLE; + if (FD_ISSET(sock, &exceptionSet) && FD_ISSET(sock, &fExceptionSet)/*sanity check*/) resultConditionSet |= SOCKET_EXCEPTION; + if ((resultConditionSet&handler->conditionSet) != 0 && handler->handlerProc != NULL) { + fLastHandledSocketNum = sock; + // Note: we set "fLastHandledSocketNum" before calling the handler, + // in case the handler calls "doEventLoop()" reentrantly. + (*handler->handlerProc)(handler->clientData, resultConditionSet); + break; + } + } + if (handler == NULL) fLastHandledSocketNum = -1;//because we didn't call a handler + } + + // Also handle any newly-triggered event (Note that we do this *after* calling a socket handler, + // in case the triggered event handler modifies The set of readable sockets.) 
+ if (fTriggersAwaitingHandling != 0) { + if (fTriggersAwaitingHandling == fLastUsedTriggerMask) { + // Common-case optimization for a single event trigger: + fTriggersAwaitingHandling = 0; + if (fTriggeredEventHandlers[fLastUsedTriggerNum] != NULL) { + (*fTriggeredEventHandlers[fLastUsedTriggerNum])(fTriggeredEventClientDatas[fLastUsedTriggerNum]); + } + } else { + // Look for an event trigger that needs handling (making sure that we make forward progress through all possible triggers): + unsigned i = fLastUsedTriggerNum; + EventTriggerId mask = fLastUsedTriggerMask; + + do { + i = (i+1)%MAX_NUM_EVENT_TRIGGERS; + mask >>= 1; + if (mask == 0) mask = 0x80000000; + + if ((fTriggersAwaitingHandling&mask) != 0) { + fTriggersAwaitingHandling &=~ mask; + if (fTriggeredEventHandlers[i] != NULL) { + (*fTriggeredEventHandlers[i])(fTriggeredEventClientDatas[i]); + } + + fLastUsedTriggerMask = mask; + fLastUsedTriggerNum = i; + break; + } + } while (i != fLastUsedTriggerNum); + } + } + + // Also handle any delayed event that may have come due. 
+ fDelayQueue.handleAlarm(); +} + +void BasicTaskScheduler + ::setBackgroundHandling(int socketNum, int conditionSet, BackgroundHandlerProc* handlerProc, void* clientData) { + if (socketNum < 0) return; + FD_CLR((unsigned)socketNum, &fReadSet); + FD_CLR((unsigned)socketNum, &fWriteSet); + FD_CLR((unsigned)socketNum, &fExceptionSet); + if (conditionSet == 0) { + fHandlers->clearHandler(socketNum); + if (socketNum+1 == fMaxNumSockets) { + --fMaxNumSockets; + } + } else { + fHandlers->assignHandler(socketNum, conditionSet, handlerProc, clientData); + if (socketNum+1 > fMaxNumSockets) { + fMaxNumSockets = socketNum+1; + } + if (conditionSet&SOCKET_READABLE) FD_SET((unsigned)socketNum, &fReadSet); + if (conditionSet&SOCKET_WRITABLE) FD_SET((unsigned)socketNum, &fWriteSet); + if (conditionSet&SOCKET_EXCEPTION) FD_SET((unsigned)socketNum, &fExceptionSet); + } +} + +void BasicTaskScheduler::moveSocketHandling(int oldSocketNum, int newSocketNum) { + if (oldSocketNum < 0 || newSocketNum < 0) return; // sanity check + if (FD_ISSET(oldSocketNum, &fReadSet)) {FD_CLR((unsigned)oldSocketNum, &fReadSet); FD_SET((unsigned)newSocketNum, &fReadSet);} + if (FD_ISSET(oldSocketNum, &fWriteSet)) {FD_CLR((unsigned)oldSocketNum, &fWriteSet); FD_SET((unsigned)newSocketNum, &fWriteSet);} + if (FD_ISSET(oldSocketNum, &fExceptionSet)) {FD_CLR((unsigned)oldSocketNum, &fExceptionSet); FD_SET((unsigned)newSocketNum, &fExceptionSet);} + fHandlers->moveHandler(oldSocketNum, newSocketNum); + + if (oldSocketNum+1 == fMaxNumSockets) { + --fMaxNumSockets; + } + if (newSocketNum+1 > fMaxNumSockets) { + fMaxNumSockets = newSocketNum+1; + } +} diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicTaskScheduler0.cpp b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicTaskScheduler0.cpp new file mode 100644 index 0000000..fe6e3e5 --- /dev/null +++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicTaskScheduler0.cpp @@ -0,0 +1,240 @@ +/********** +This library is free software; you can redistribute it and/or modify 
+the terms of the GNU Lesser General Public License as published by the
+Free Software Foundation; either version 2.1 of the License, or (at your
+option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
+
+This library is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
+more details.
+
+You should have received a copy of the GNU Lesser General Public License
+along with this library; if not, write to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
+**********/
+// Copyright (c) 1996-2014 Live Networks, Inc.  All rights reserved.
+// Basic Usage Environment: for a simple, non-scripted, console application
+// Implementation
+
+#include "BasicUsageEnvironment0.hh"
+#include "HandlerSet.hh"
+
+////////// A subclass of DelayQueueEntry,
+////////// used to implement BasicTaskScheduler0::scheduleDelayedTask()
+
+class AlarmHandler: public DelayQueueEntry {
+public:
+  AlarmHandler(TaskFunc* proc, void* clientData, DelayInterval timeToDelay)
+    : DelayQueueEntry(timeToDelay), fProc(proc), fClientData(clientData) {
+  }
+
+private: // redefined virtual functions
+  virtual void handleTimeout() {
+    (*fProc)(fClientData);
+    DelayQueueEntry::handleTimeout();
+  }
+
+private:
+  TaskFunc* fProc;
+  void* fClientData;
+};
+
+
+////////// BasicTaskScheduler0 //////////
+
+BasicTaskScheduler0::BasicTaskScheduler0()
+  : fLastHandledSocketNum(-1), fTriggersAwaitingHandling(0), fLastUsedTriggerMask(1), fLastUsedTriggerNum(MAX_NUM_EVENT_TRIGGERS-1) {
+  fHandlers = new HandlerSet;
+  for (unsigned i = 0; i < MAX_NUM_EVENT_TRIGGERS; ++i) {
+    fTriggeredEventHandlers[i] = NULL;
+    fTriggeredEventClientDatas[i] = NULL;
+  }
+}
+
+BasicTaskScheduler0::~BasicTaskScheduler0() {
+  delete fHandlers;
+}
+
+TaskToken BasicTaskScheduler0::scheduleDelayedTask(int64_t microseconds,
+                                                   TaskFunc* proc,
+                                                   void* clientData) {
+  if (microseconds < 0) microseconds = 0;
+  DelayInterval timeToDelay((long)(microseconds/1000000), (long)(microseconds%1000000));
+  AlarmHandler* alarmHandler = new AlarmHandler(proc, clientData, timeToDelay);
+  fDelayQueue.addEntry(alarmHandler);
+
+  return (void*)(alarmHandler->token());
+}
+
+void BasicTaskScheduler0::unscheduleDelayedTask(TaskToken& prevTask) {
+  DelayQueueEntry* alarmHandler = fDelayQueue.removeEntry((intptr_t)prevTask);
+  prevTask = NULL;
+  delete alarmHandler;
+}
+
+void BasicTaskScheduler0::doEventOnce()
+{
+  SingleStep();
+}
+
+void BasicTaskScheduler0::doEventLoop(char* watchVariable) {
+  // Repeatedly loop, handling readable sockets and timed events:
+  while (1) {
+    if (watchVariable != NULL && *watchVariable != 0) break;
+    SingleStep();
+  }
+}
+
+EventTriggerId BasicTaskScheduler0::createEventTrigger(TaskFunc* eventHandlerProc) {
+  unsigned i = fLastUsedTriggerNum;
+  EventTriggerId mask = fLastUsedTriggerMask;
+
+  do {
+    i = (i+1)%MAX_NUM_EVENT_TRIGGERS;
+    mask >>= 1;
+    if (mask == 0) mask = 0x80000000;
+
+    if (fTriggeredEventHandlers[i] == NULL) {
+      // This trigger number is free; use it:
+      fTriggeredEventHandlers[i] = eventHandlerProc;
+      fTriggeredEventClientDatas[i] = NULL; // sanity
+
+      fLastUsedTriggerMask = mask;
+      fLastUsedTriggerNum = i;
+
+      return mask;
+    }
+  } while (i != fLastUsedTriggerNum);
+
+  // All available event triggers are allocated; return 0 instead:
+  return 0;
+}
+
+void BasicTaskScheduler0::deleteEventTrigger(EventTriggerId eventTriggerId) {
+  fTriggersAwaitingHandling &=~ eventTriggerId;
+
+  if (eventTriggerId == fLastUsedTriggerMask) { // common-case optimization:
+    fTriggeredEventHandlers[fLastUsedTriggerNum] = NULL;
+    fTriggeredEventClientDatas[fLastUsedTriggerNum] = NULL;
+  } else {
+    // "eventTriggerId" should have just one bit set.
+    // However, we do the reasonable thing if the user happened to 'or' together two or more "EventTriggerId"s:
+    EventTriggerId mask = 0x80000000;
+    for (unsigned i = 0; i < MAX_NUM_EVENT_TRIGGERS; ++i) {
+      if ((eventTriggerId&mask) != 0) {
+        fTriggeredEventHandlers[i] = NULL;
+        fTriggeredEventClientDatas[i] = NULL;
+      }
+      mask >>= 1;
+    }
+  }
+}
+
+void BasicTaskScheduler0::triggerEvent(EventTriggerId eventTriggerId, void* clientData) {
+  // First, record the "clientData".  (Note that we allow "eventTriggerId" to be a combination of bits for multiple events.)
+  EventTriggerId mask = 0x80000000;
+  for (unsigned i = 0; i < MAX_NUM_EVENT_TRIGGERS; ++i) {
+    if ((eventTriggerId&mask) != 0) {
+      fTriggeredEventClientDatas[i] = clientData;
+    }
+    mask >>= 1;
+  }
+
+  // Then, note this event as being ready to be handled.
+  // (Note that because this function (unlike others in the library) can be called from an external thread, we do this last, to
+  //  reduce the risk of a race condition.)
+  fTriggersAwaitingHandling |= eventTriggerId;
+}
+
+
+////////// HandlerSet (etc.) implementation //////////
+
+HandlerDescriptor::HandlerDescriptor(HandlerDescriptor* nextHandler)
+  : conditionSet(0), handlerProc(NULL) {
+  // Link this descriptor into a doubly-linked list:
+  if (nextHandler == this) { // initialization
+    fNextHandler = fPrevHandler = this;
+  } else {
+    fNextHandler = nextHandler;
+    fPrevHandler = nextHandler->fPrevHandler;
+    nextHandler->fPrevHandler = this;
+    fPrevHandler->fNextHandler = this;
+  }
+}
+
+HandlerDescriptor::~HandlerDescriptor() {
+  // Unlink this descriptor from a doubly-linked list:
+  fNextHandler->fPrevHandler = fPrevHandler;
+  fPrevHandler->fNextHandler = fNextHandler;
+}
+
+HandlerSet::HandlerSet()
+  : fHandlers(&fHandlers) {
+  fHandlers.socketNum = -1; // shouldn't ever get looked at, but in case...
+}
+
+HandlerSet::~HandlerSet() {
+  // Delete each handler descriptor:
+  while (fHandlers.fNextHandler != &fHandlers) {
+    delete fHandlers.fNextHandler; // changes fHandlers->fNextHandler
+  }
+}
+
+void HandlerSet
+::assignHandler(int socketNum, int conditionSet, TaskScheduler::BackgroundHandlerProc* handlerProc, void* clientData) {
+  // First, see if there's already a handler for this socket:
+  HandlerDescriptor* handler = lookupHandler(socketNum);
+  if (handler == NULL) { // No existing handler, so create a new descr:
+    handler = new HandlerDescriptor(fHandlers.fNextHandler);
+    handler->socketNum = socketNum;
+  }
+
+  handler->conditionSet = conditionSet;
+  handler->handlerProc = handlerProc;
+  handler->clientData = clientData;
+}
+
+void HandlerSet::clearHandler(int socketNum) {
+  HandlerDescriptor* handler = lookupHandler(socketNum);
+  delete handler;
+}
+
+void HandlerSet::moveHandler(int oldSocketNum, int newSocketNum) {
+  HandlerDescriptor* handler = lookupHandler(oldSocketNum);
+  if (handler != NULL) {
+    handler->socketNum = newSocketNum;
+  }
+}
+
+HandlerDescriptor* HandlerSet::lookupHandler(int socketNum) {
+  HandlerDescriptor* handler;
+  HandlerIterator iter(*this);
+  while ((handler = iter.next()) != NULL) {
+    if (handler->socketNum == socketNum) break;
+  }
+  return handler;
+}
+
+HandlerIterator::HandlerIterator(HandlerSet& handlerSet)
+  : fOurSet(handlerSet) {
+  reset();
+}
+
+HandlerIterator::~HandlerIterator() {
+}
+
+void HandlerIterator::reset() {
+  fNextPtr = fOurSet.fHandlers.fNextHandler;
+}
+
+HandlerDescriptor* HandlerIterator::next() {
+  HandlerDescriptor* result = fNextPtr;
+  if (result == &fOurSet.fHandlers) { // no more
+    result = NULL;
+  } else {
+    fNextPtr = fNextPtr->fNextHandler;
+  }
+
+  return result;
+}
diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.cpp b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.cpp
new file mode 100644
index 0000000..010ca75
--- /dev/null
+++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.cpp
@@ -0,0 +1,107 @@
+/**********
+This library is free software; you can redistribute it and/or modify it under
+the terms of the GNU Lesser General Public License as published by the
+Free Software Foundation; either version 2.1 of the License, or (at your
+option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
+
+This library is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
+more details.
+
+You should have received a copy of the GNU Lesser General Public License
+along with this library; if not, write to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
+**********/
+// Copyright (c) 1996-2014 Live Networks, Inc.  All rights reserved.
+// Basic Usage Environment: for a simple, non-scripted, console application
+// Implementation
+
+#include "BasicUsageEnvironment.hh"
+#include <stdio.h>
+#ifdef ANDROID
+#include <android/log.h>
+#define TAG "XRTSP"
+#define LGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
+#define LGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
+#define LGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
+#endif
+
+////////// BasicUsageEnvironment //////////
+
+#if defined(__WIN32__) || defined(_WIN32)
+extern "C" int initializeWinsockIfNecessary();
+#endif
+
+BasicUsageEnvironment::BasicUsageEnvironment(TaskScheduler& taskScheduler)
+: BasicUsageEnvironment0(taskScheduler) {
+#if defined(__WIN32__) || defined(_WIN32)
+  if (!initializeWinsockIfNecessary()) {
+    setResultErrMsg("Failed to initialize 'winsock': ");
+    reportBackgroundError();
+    internalError();
+  }
+#endif
+}
+
+BasicUsageEnvironment::~BasicUsageEnvironment() {
+}
+
+BasicUsageEnvironment*
+BasicUsageEnvironment::createNew(TaskScheduler& taskScheduler) {
+  return new BasicUsageEnvironment(taskScheduler);
+}
+
+int BasicUsageEnvironment::getErrno() const {
+#if defined(__WIN32__) || defined(_WIN32) || defined(_WIN32_WCE)
+  return WSAGetLastError();
+#else
+  return errno;
+#endif
+}
+
+UsageEnvironment& BasicUsageEnvironment::operator<<(char const* str) {
+  if (str == NULL) str = "(NULL)"; // sanity check
+#ifdef ANDROID
+  LGI("%s", str);
+#else
+  fprintf(stderr, "%s", str);
+#endif
+  return *this;
+}
+
+UsageEnvironment& BasicUsageEnvironment::operator<<(int i) {
+#ifdef ANDROID
+  LGI("%d", i);
+#else
+  fprintf(stderr, "%d", i);
+#endif
+  return *this;
+}
+
+UsageEnvironment& BasicUsageEnvironment::operator<<(unsigned u) {
+#ifdef ANDROID
+  LGI("%u", u);
+#else
+  fprintf(stderr, "%u", u);
+#endif
+  return *this;
+}
+
+UsageEnvironment& BasicUsageEnvironment::operator<<(double d) {
+#ifdef ANDROID
+  LGI("%f", d);
+#else
+  fprintf(stderr, "%f", d);
+#endif
+  return *this;
+}
+
+UsageEnvironment& BasicUsageEnvironment::operator<<(void* p) {
+#ifdef ANDROID
+  LGI("%p", p);
+#else
+  fprintf(stderr, "%p", p);
+#endif
+  return *this;
+}
diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.mak b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.mak
new file mode 100644
index 0000000..c9c76ef
--- /dev/null
+++
b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.mak @@ -0,0 +1,94 @@ +INCLUDES = -Iinclude -I../UsageEnvironment/include -I../groupsock/include +PREFIX = /usr/local +LIBDIR = $(PREFIX)/lib +##### Change the following for your environment: +# Comment out the following line to produce Makefiles that generate debuggable code: +NODEBUG=1 + +# The following definition ensures that we are properly matching +# the WinSock2 library file with the correct header files. +# (will link with "ws2_32.lib" and include "winsock2.h" & "Ws2tcpip.h") +TARGETOS = WINNT + +# If for some reason you wish to use WinSock1 instead, uncomment the +# following two definitions. +# (will link with "wsock32.lib" and include "winsock.h") +#TARGETOS = WIN95 +#APPVER = 4.0 + +!include + +UI_OPTS = $(guilflags) $(guilibsdll) +# Use the following to get a console (e.g., for debugging): +CONSOLE_UI_OPTS = $(conlflags) $(conlibsdll) +CPU=i386 + +TOOLS32 = c:\Program Files\DevStudio\Vc +COMPILE_OPTS = $(INCLUDES) $(cdebug) $(cflags) $(cvarsdll) -I. 
-I"$(TOOLS32)\include" +C = c +C_COMPILER = "$(TOOLS32)\bin\cl" +C_FLAGS = $(COMPILE_OPTS) +CPP = cpp +CPLUSPLUS_COMPILER = $(C_COMPILER) +CPLUSPLUS_FLAGS = $(COMPILE_OPTS) +OBJ = obj +LINK = $(link) -out: +LIBRARY_LINK = lib -out: +LINK_OPTS_0 = $(linkdebug) msvcirt.lib +LIBRARY_LINK_OPTS = +LINK_OPTS = $(LINK_OPTS_0) $(UI_OPTS) +CONSOLE_LINK_OPTS = $(LINK_OPTS_0) $(CONSOLE_UI_OPTS) +SERVICE_LINK_OPTS = kernel32.lib advapi32.lib shell32.lib -subsystem:console,$(APPVER) +LIB_SUFFIX = lib +LIBS_FOR_CONSOLE_APPLICATION = +LIBS_FOR_GUI_APPLICATION = +MULTIMEDIA_LIBS = winmm.lib +EXE = .exe +PLATFORM = Windows + +rc32 = "$(TOOLS32)\bin\rc" +.rc.res: + $(rc32) $< +##### End of variables to change + +NAME = libBasicUsageEnvironment +LIB = $(NAME).$(LIB_SUFFIX) +ALL = $(LIB) +all: $(ALL) + +OBJS = BasicUsageEnvironment0.$(OBJ) BasicUsageEnvironment.$(OBJ) \ + BasicTaskScheduler0.$(OBJ) BasicTaskScheduler.$(OBJ) \ + DelayQueue.$(OBJ) BasicHashTable.$(OBJ) + +libBasicUsageEnvironment.$(LIB_SUFFIX): $(OBJS) + $(LIBRARY_LINK)$@ $(LIBRARY_LINK_OPTS) \ + $(OBJS) + +.$(C).$(OBJ): + $(C_COMPILER) -c $(C_FLAGS) $< + +.$(CPP).$(OBJ): + $(CPLUSPLUS_COMPILER) -c $(CPLUSPLUS_FLAGS) $< + +BasicUsageEnvironment0.$(CPP): include/BasicUsageEnvironment0.hh +include/BasicUsageEnvironment0.hh: include/BasicUsageEnvironment_version.hh include/DelayQueue.hh +BasicUsageEnvironment.$(CPP): include/BasicUsageEnvironment.hh +include/BasicUsageEnvironment.hh: include/BasicUsageEnvironment0.hh +BasicTaskScheduler0.$(CPP): include/BasicUsageEnvironment0.hh include/HandlerSet.hh +BasicTaskScheduler.$(CPP): include/BasicUsageEnvironment.hh include/HandlerSet.hh +DelayQueue.$(CPP): include/DelayQueue.hh +BasicHashTable.$(CPP): include/BasicHashTable.hh + +clean: + -rm -rf *.$(OBJ) $(ALL) core *.core *~ include/*~ + +install: install1 $(INSTALL2) +install1: libBasicUsageEnvironment.$(LIB_SUFFIX) + install -d $(DESTDIR)$(PREFIX)/include/BasicUsageEnvironment $(DESTDIR)$(LIBDIR) + install -m 644 
include/*.hh $(DESTDIR)$(PREFIX)/include/BasicUsageEnvironment + install -m 644 libBasicUsageEnvironment.$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR) +install_shared_libraries: libBasicUsageEnvironment.$(LIB_SUFFIX) + ln -s $(NAME).$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/$(NAME).$(SHORT_LIB_SUFFIX) + ln -s $(NAME).$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/$(NAME).so + +##### Any additional, platform-specific rules come here: diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.vcproj b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.vcproj new file mode 100644 index 0000000..9fa82b9 --- /dev/null +++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.vcproj @@ -0,0 +1,226 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.vcproj.Eric-PC.Eric.user b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.vcproj.Eric-PC.Eric.user new file mode 100644 index 0000000..c3c730f --- /dev/null +++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.vcproj.Eric-PC.Eric.user @@ -0,0 +1,65 @@ + + + + + + + + + + + diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.vcxproj b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.vcxproj new file mode 100644 index 0000000..e67c51d --- /dev/null +++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.vcxproj @@ -0,0 +1,108 @@ +锘 + + + + Debug + Win32 + + + Release + Win32 + + + + {E9417ED1-4243-42D7-B43F-DE4B95BF440D} + BasicUsageEnvironment + Win32Proj + + + + StaticLibrary + v140 + Unicode + true + + + StaticLibrary + v140 + Unicode + + + + + + + + + + + + + <_ProjectFileVersion>14.0.25431.1 + + + $(SolutionDir)$(Configuration)\ + $(Configuration)\ + + + $(SolutionDir)$(Configuration)\ + $(Configuration)\ + + + + Disabled + 
./include;../UsageEnvironment/include;../groupsock/include;%(AdditionalIncludeDirectories) + WIN32;_DEBUG;_LIB;%(PreprocessorDefinitions) + true + EnableFastChecks + MultiThreadedDebugDLL + + $(IntDir) + $(IntDir)vc90.pdb + $(IntDir) + Level3 + EditAndContinue + + + + + + + + MaxSpeed + true + ./include;../UsageEnvironment/include;../groupsock/include;%(AdditionalIncludeDirectories) + WIN32;NDEBUG;_LIB;%(PreprocessorDefinitions) + MultiThreadedDLL + true + + Level3 + ProgramDatabase + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.vcxproj.filters b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.vcxproj.filters new file mode 100644 index 0000000..f8eb730 --- /dev/null +++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment.vcxproj.filters @@ -0,0 +1,60 @@ +锘 + + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + {93995380-89BD-4b04-88EB-625FBE52EBFB} + h;hpp;hxx;hm;inl;inc;xsd + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav + + + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + + + + \ No newline at end of file diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment0.cpp b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment0.cpp new file mode 100644 index 0000000..473b409 --- /dev/null +++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/BasicUsageEnvironment0.cpp @@ -0,0 +1,88 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+
+This library is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
+more details.
+
+You should have received a copy of the GNU Lesser General Public License
+along with this library; if not, write to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
+**********/
+// Copyright (c) 1996-2014 Live Networks, Inc.  All rights reserved.
+// Basic Usage Environment: for a simple, non-scripted, console application
+// Implementation
+
+#include "BasicUsageEnvironment0.hh"
+#include <stdio.h>
+
+////////// BasicUsageEnvironment //////////
+
+BasicUsageEnvironment0::BasicUsageEnvironment0(TaskScheduler& taskScheduler)
+  : UsageEnvironment(taskScheduler),
+    fBufferMaxSize(RESULT_MSG_BUFFER_MAX) {
+  reset();
+}
+
+BasicUsageEnvironment0::~BasicUsageEnvironment0() {
+}
+
+void BasicUsageEnvironment0::reset() {
+  fCurBufferSize = 0;
+  fResultMsgBuffer[fCurBufferSize] = '\0';
+}
+
+
+// Implementation of virtual functions:
+
+char const* BasicUsageEnvironment0::getResultMsg() const {
+  return fResultMsgBuffer;
+}
+
+void BasicUsageEnvironment0::setResultMsg(MsgString msg) {
+  reset();
+  appendToResultMsg(msg);
+}
+
+void BasicUsageEnvironment0::setResultMsg(MsgString msg1, MsgString msg2) {
+  setResultMsg(msg1);
+  appendToResultMsg(msg2);
+}
+
+void BasicUsageEnvironment0::setResultMsg(MsgString msg1, MsgString msg2,
+                                          MsgString msg3) {
+  setResultMsg(msg1, msg2);
+  appendToResultMsg(msg3);
+}
+
+void BasicUsageEnvironment0::setResultErrMsg(MsgString msg, int err) {
+  setResultMsg(msg);
+
+#ifndef _WIN32_WCE
+  appendToResultMsg(strerror(err == 0 ? getErrno() : err));
+#endif
+}
+
+void BasicUsageEnvironment0::appendToResultMsg(MsgString msg) {
+  char* curPtr = &fResultMsgBuffer[fCurBufferSize];
+  unsigned spaceAvailable = fBufferMaxSize - fCurBufferSize;
+  unsigned msgLength = strlen(msg);
+
+  // Copy only enough of "msg" as will fit:
+  if (msgLength > spaceAvailable-1) {
+    msgLength = spaceAvailable-1;
+  }
+
+  memmove(curPtr, (char*)msg, msgLength);
+  fCurBufferSize += msgLength;
+  fResultMsgBuffer[fCurBufferSize] = '\0';
+}
+
+void BasicUsageEnvironment0::reportBackgroundError() {
+  fputs(getResultMsg(), stderr);
+}
+
diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/COPYING b/AnyCore/lib_rtsp/BasicUsageEnvironment/COPYING
new file mode 100644
index 0000000..012065c
--- /dev/null
+++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/COPYING
@@ -0,0 +1 @@
+../COPYING
\ No newline at end of file
diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/DelayQueue.cpp b/AnyCore/lib_rtsp/BasicUsageEnvironment/DelayQueue.cpp
new file mode 100644
index 0000000..8e5c366
--- /dev/null
+++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/DelayQueue.cpp
@@ -0,0 +1,233 @@
+/**********
+This library is free software; you can redistribute it and/or modify it under
+the terms of the GNU Lesser General Public License as published by the
+Free Software Foundation; either version 2.1 of the License, or (at your
+option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
+
+This library is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
+more details.
+
+You should have received a copy of the GNU Lesser General Public License
+along with this library; if not, write to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
+**********/
+// Copyright (c) 1996-2014, Live Networks, Inc.  All rights reserved
+// Help by Carlo Bonamico to get working for Windows
+// Delay queue
+// Implementation
+
+#include "DelayQueue.hh"
+#include "GroupsockHelper.hh"
+
+static const int MILLION = 1000000;
+
+///// Timeval /////
+
+int Timeval::operator>=(const Timeval& arg2) const {
+  return seconds() > arg2.seconds()
+    || (seconds() == arg2.seconds()
+        && useconds() >= arg2.useconds());
+}
+
+void Timeval::operator+=(const DelayInterval& arg2) {
+  secs() += arg2.seconds(); usecs() += arg2.useconds();
+  if (useconds() >= MILLION) {
+    usecs() -= MILLION;
+    ++secs();
+  }
+}
+
+void Timeval::operator-=(const DelayInterval& arg2) {
+  secs() -= arg2.seconds(); usecs() -= arg2.useconds();
+  if ((int)useconds() < 0) {
+    usecs() += MILLION;
+    --secs();
+  }
+  if ((int)seconds() < 0)
+    secs() = usecs() = 0;
+
+}
+
+DelayInterval operator-(const Timeval& arg1, const Timeval& arg2) {
+  time_base_seconds secs = arg1.seconds() - arg2.seconds();
+  time_base_seconds usecs = arg1.useconds() - arg2.useconds();
+
+  if ((int)usecs < 0) {
+    usecs += MILLION;
+    --secs;
+  }
+  if ((int)secs < 0)
+    return DELAY_ZERO;
+  else
+    return DelayInterval(secs, usecs);
+}
+
+
+///// DelayInterval /////
+
+DelayInterval operator*(short arg1, const DelayInterval& arg2) {
+  time_base_seconds result_seconds = arg1*arg2.seconds();
+  time_base_seconds result_useconds = arg1*arg2.useconds();
+
+  time_base_seconds carry = result_useconds/MILLION;
+  result_useconds -= carry*MILLION;
+  result_seconds += carry;
+
+  return DelayInterval(result_seconds, result_useconds);
+}
+
+#ifndef INT_MAX
+#define INT_MAX 0x7FFFFFFF
+#endif
+const DelayInterval DELAY_ZERO(0, 0);
+const DelayInterval DELAY_SECOND(1, 0);
+const DelayInterval DELAY_MINUTE = 60*DELAY_SECOND;
+const DelayInterval DELAY_HOUR = 60*DELAY_MINUTE;
+const DelayInterval DELAY_DAY = 24*DELAY_HOUR;
+const DelayInterval ETERNITY(INT_MAX, MILLION-1);
+// used internally to make the implementation work
+
+
+///// DelayQueueEntry /////
+
+intptr_t DelayQueueEntry::tokenCounter = 0;
+
+DelayQueueEntry::DelayQueueEntry(DelayInterval delay)
+  : fDeltaTimeRemaining(delay) {
+  fNext = fPrev = this;
+  fToken = ++tokenCounter;
+}
+
+DelayQueueEntry::~DelayQueueEntry() {
+}
+
+void DelayQueueEntry::handleTimeout() {
+  delete this;
+}
+
+
+///// DelayQueue /////
+
+DelayQueue::DelayQueue()
+  : DelayQueueEntry(ETERNITY) {
+  fLastSyncTime = TimeNow();
+}
+
+DelayQueue::~DelayQueue() {
+  while (fNext != this) {
+    DelayQueueEntry* entryToRemove = fNext;
+    removeEntry(entryToRemove);
+    delete entryToRemove;
+  }
+}
+
+void DelayQueue::addEntry(DelayQueueEntry* newEntry) {
+  synchronize();
+
+  DelayQueueEntry* cur = head();
+  while (newEntry->fDeltaTimeRemaining >= cur->fDeltaTimeRemaining) {
+    newEntry->fDeltaTimeRemaining -= cur->fDeltaTimeRemaining;
+    cur = cur->fNext;
+  }
+
+  cur->fDeltaTimeRemaining -= newEntry->fDeltaTimeRemaining;
+
+  // Add "newEntry" to the queue, just before "cur":
+  newEntry->fNext = cur;
+  newEntry->fPrev = cur->fPrev;
+  cur->fPrev = newEntry->fPrev->fNext = newEntry;
+}
+
+void DelayQueue::updateEntry(DelayQueueEntry* entry, DelayInterval newDelay) {
+  if (entry == NULL) return;
+
+  removeEntry(entry);
+  entry->fDeltaTimeRemaining = newDelay;
+  addEntry(entry);
+}
+
+void DelayQueue::updateEntry(intptr_t tokenToFind, DelayInterval newDelay) {
+  DelayQueueEntry* entry = findEntryByToken(tokenToFind);
+  updateEntry(entry, newDelay);
+}
+
+void DelayQueue::removeEntry(DelayQueueEntry* entry) {
+  if (entry == NULL || entry->fNext == NULL) return;
+
+  entry->fNext->fDeltaTimeRemaining += entry->fDeltaTimeRemaining;
+  entry->fPrev->fNext = entry->fNext;
+  entry->fNext->fPrev = entry->fPrev;
+  entry->fNext = entry->fPrev = NULL;
+  // in case we should try to remove it again
+}
+
+DelayQueueEntry* DelayQueue::removeEntry(intptr_t tokenToFind) {
+  DelayQueueEntry* entry = findEntryByToken(tokenToFind);
+  removeEntry(entry);
+  return entry;
+}
+
+DelayInterval const& DelayQueue::timeToNextAlarm() {
+  if (head()->fDeltaTimeRemaining == DELAY_ZERO) return DELAY_ZERO; // a common case
+
+  synchronize();
+  return head()->fDeltaTimeRemaining;
+}
+
+void DelayQueue::handleAlarm() {
+  if (head()->fDeltaTimeRemaining != DELAY_ZERO) synchronize();
+
+  if (head()->fDeltaTimeRemaining == DELAY_ZERO) {
+    // This event is due to be handled:
+    DelayQueueEntry* toRemove = head();
+    removeEntry(toRemove); // do this first, in case handler accesses queue
+
+    toRemove->handleTimeout();
+  }
+}
+
+DelayQueueEntry* DelayQueue::findEntryByToken(intptr_t tokenToFind) {
+  DelayQueueEntry* cur = head();
+  while (cur != this) {
+    if (cur->token() == tokenToFind) return cur;
+    cur = cur->fNext;
+  }
+
+  return NULL;
+}
+
+void DelayQueue::synchronize() {
+  // First, figure out how much time has elapsed since the last sync:
+  EventTime timeNow = TimeNow();
+  if (timeNow < fLastSyncTime) {
+    // The system clock has apparently gone back in time; reset our sync time and return:
+    fLastSyncTime = timeNow;
+    return;
+  }
+  DelayInterval timeSinceLastSync = timeNow - fLastSyncTime;
+  fLastSyncTime = timeNow;
+
+  // Then, adjust the delay queue for any entries whose time is up:
+  DelayQueueEntry* curEntry = head();
+  while (timeSinceLastSync >= curEntry->fDeltaTimeRemaining) {
+    timeSinceLastSync -= curEntry->fDeltaTimeRemaining;
+    curEntry->fDeltaTimeRemaining = DELAY_ZERO;
+    curEntry = curEntry->fNext;
+  }
+  curEntry->fDeltaTimeRemaining -= timeSinceLastSync;
+}
+
+
+///// EventTime /////
+
+EventTime TimeNow() {
+  struct timeval tvNow;
+
+  gettimeofday(&tvNow, NULL);
+
+  return EventTime(tvNow.tv_sec, tvNow.tv_usec);
+}
+
+const EventTime THE_END_OF_TIME(INT_MAX);
diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/Makefile b/AnyCore/lib_rtsp/BasicUsageEnvironment/Makefile
new file mode 100644
index 0000000..86fcbcc
--- /dev/null
+++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/Makefile
@@ -0,0 +1,81 @@
+INCLUDES = -Iinclude -I../UsageEnvironment/include -I../groupsock/include
+PREFIX = /usr/local
+LIBDIR = $(PREFIX)/lib
+##### Change the following for your environment:
+#-------------------------------------------------------------------------------------------------
+#
+# Create BY zhang zhiwei 2014.07.03
+# sunfrank2012@gmail.com
+#
+#-------------------------------------------------------------------------------------------------
+# NDK ROOT
+ANDROID_NDK_ROOT=/cygdrive/c/Android/NDK/android-ndk-r8e/
+# For sysroot | arm-linux-androideabi-gcc can't find cygwin path
+NDK_ROOT=c:/Android/NDK/android-ndk-r8e/
+# For x86
+PREBUILT=$(ANDROID_NDK_ROOT)/toolchains/arm-linux-androideabi-4.6/prebuilt/windows
+# For x86_64
+#PREBUILT=$(ANDROID_NDK_ROOT)/toolchains/arm-linux-androideabi-4.6/prebuilt/windows-x86_64
+PLATFORM=android-14
+
+CROSS_COMPILE= $(PREBUILT)/bin/arm-linux-androideabi-
+COMPILE_OPTS = $(INCLUDES) -fPIC -DANDROID -std=c99 -mfpu=neon -mfloat-abi=softfp -I. -O2 -DXLOCALE_NOT_USED -DANDROID -DSOCKLEN_T=socklen_t -DNO_SSTREAM=1 -D_LARGEFILE_SOURCE=1 -D_FILE_OFFSET_BITS=64 -DNULL=0 --sysroot=${NDK_ROOT}/platforms/${PLATFORM}/arch-arm/ -I${ANDROID_NDK_ROOT}/platforms/${PLATFORM}/arch-arm/usr/include
+C = c
+C_COMPILER = $(CROSS_COMPILE)gcc
+C_FLAGS = $(COMPILE_OPTS)
+CPP = cpp
+CPLUSPLUS_COMPILER = $(CROSS_COMPILE)g++
+CPLUSPLUS_FLAGS = $(COMPILE_OPTS) -Wall -DBSD=1 -fexceptions
+OBJ = o
+LINK = $(CROSS_COMPILE)g++ -o
+LINK_OPTS =
+CONSOLE_LINK_OPTS = $(LINK_OPTS)
+LIBRARY_LINK = $(CROSS_COMPILE)ar cr
+LIBRARY_LINK_OPTS = $(LINK_OPTS)
+LIB_SUFFIX = a
+LIBS_FOR_CONSOLE_APPLICATION =-lc -lm -lz -L./ -lsupc++ -fexceptions -L${ANDROID_NDK_ROOT}/platforms/${PLATFORM}/arch-arm/usr/lib -lstdc++
+LIBS_FOR_GUI_APPLICATION =
+EXE =
+##### End of variables to change
+
+NAME = libBasicUsageEnvironment
+LIB = $(NAME).$(LIB_SUFFIX)
+ALL = $(LIB)
+all: $(ALL)
+
+OBJS = BasicUsageEnvironment0.$(OBJ) BasicUsageEnvironment.$(OBJ) \
+	BasicTaskScheduler0.$(OBJ) BasicTaskScheduler.$(OBJ) \
+	DelayQueue.$(OBJ) BasicHashTable.$(OBJ)
+
+libBasicUsageEnvironment.$(LIB_SUFFIX): $(OBJS)
+	$(LIBRARY_LINK)$@ $(LIBRARY_LINK_OPTS) \
+		$(OBJS)
+
+.$(C).$(OBJ):
+	$(C_COMPILER) -c $(C_FLAGS) $<
+
+.$(CPP).$(OBJ):
+	$(CPLUSPLUS_COMPILER) -c $(CPLUSPLUS_FLAGS) $<
+
+BasicUsageEnvironment0.$(CPP): include/BasicUsageEnvironment0.hh
+include/BasicUsageEnvironment0.hh: include/BasicUsageEnvironment_version.hh include/DelayQueue.hh
+BasicUsageEnvironment.$(CPP): include/BasicUsageEnvironment.hh
+include/BasicUsageEnvironment.hh: include/BasicUsageEnvironment0.hh
+BasicTaskScheduler0.$(CPP): include/BasicUsageEnvironment0.hh include/HandlerSet.hh
+BasicTaskScheduler.$(CPP): include/BasicUsageEnvironment.hh include/HandlerSet.hh
+DelayQueue.$(CPP): include/DelayQueue.hh
+BasicHashTable.$(CPP): include/BasicHashTable.hh
+
+clean:
+	-rm -rf *.$(OBJ) $(ALL) core *.core *~ include/*~
+
+install: install1 $(INSTALL2)
+install1: libBasicUsageEnvironment.$(LIB_SUFFIX)
+	install -d $(DESTDIR)$(PREFIX)/include/BasicUsageEnvironment $(DESTDIR)$(LIBDIR)
+	install -m 644 include/*.hh $(DESTDIR)$(PREFIX)/include/BasicUsageEnvironment
+	install -m 644 libBasicUsageEnvironment.$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)
+install_shared_libraries: libBasicUsageEnvironment.$(LIB_SUFFIX)
+	ln -s $(NAME).$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/$(NAME).$(SHORT_LIB_SUFFIX)
+	ln -s $(NAME).$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/$(NAME).so
+
+##### Any additional, platform-specific rules come here:
diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/Makefile.head b/AnyCore/lib_rtsp/BasicUsageEnvironment/Makefile.head
new file mode 100644
index 0000000..f4e4414
--- /dev/null
+++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/Makefile.head
@@ -0,0 +1,4 @@
+INCLUDES = -Iinclude -I../UsageEnvironment/include -I../groupsock/include
+PREFIX = /usr/local
+LIBDIR = $(PREFIX)/lib
+##### Change the following for your environment:
diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/Makefile.tail b/AnyCore/lib_rtsp/BasicUsageEnvironment/Makefile.tail
new file mode 100644
index 0000000..84de5df
--- /dev/null
+++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/Makefile.tail
@@ -0,0 +1,43 @@
+##### End of variables to change
+
+NAME = libBasicUsageEnvironment
+LIB = $(NAME).$(LIB_SUFFIX)
+ALL = $(LIB)
+all: $(ALL)
+
+OBJS = BasicUsageEnvironment0.$(OBJ) BasicUsageEnvironment.$(OBJ) \
+	BasicTaskScheduler0.$(OBJ) BasicTaskScheduler.$(OBJ) \
+	DelayQueue.$(OBJ) BasicHashTable.$(OBJ)
+
+libBasicUsageEnvironment.$(LIB_SUFFIX): $(OBJS)
+	$(LIBRARY_LINK)$@ $(LIBRARY_LINK_OPTS) \
+		$(OBJS)
+
+.$(C).$(OBJ):
+	$(C_COMPILER) -c $(C_FLAGS) $<
+
+.$(CPP).$(OBJ):
+	$(CPLUSPLUS_COMPILER) -c $(CPLUSPLUS_FLAGS) $<
+
+BasicUsageEnvironment0.$(CPP): include/BasicUsageEnvironment0.hh
+include/BasicUsageEnvironment0.hh: include/BasicUsageEnvironment_version.hh include/DelayQueue.hh
+BasicUsageEnvironment.$(CPP): include/BasicUsageEnvironment.hh
+include/BasicUsageEnvironment.hh: include/BasicUsageEnvironment0.hh
+BasicTaskScheduler0.$(CPP): include/BasicUsageEnvironment0.hh include/HandlerSet.hh
+BasicTaskScheduler.$(CPP): include/BasicUsageEnvironment.hh include/HandlerSet.hh
+DelayQueue.$(CPP): include/DelayQueue.hh
+BasicHashTable.$(CPP): include/BasicHashTable.hh
+
+clean:
+	-rm -rf *.$(OBJ) $(ALL) core *.core *~ include/*~
+
+install: install1 $(INSTALL2)
+install1: libBasicUsageEnvironment.$(LIB_SUFFIX)
+	install -d $(DESTDIR)$(PREFIX)/include/BasicUsageEnvironment $(DESTDIR)$(LIBDIR)
+	install -m 644 include/*.hh $(DESTDIR)$(PREFIX)/include/BasicUsageEnvironment
+	install -m 644 libBasicUsageEnvironment.$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)
+install_shared_libraries: libBasicUsageEnvironment.$(LIB_SUFFIX)
+	ln -s $(NAME).$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/$(NAME).$(SHORT_LIB_SUFFIX)
+	ln -s $(NAME).$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/$(NAME).so
+
+##### Any additional, platform-specific rules come here:
diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/include/BasicHashTable.hh b/AnyCore/lib_rtsp/BasicUsageEnvironment/include/BasicHashTable.hh
new file mode 100644
index 0000000..8e56a23
--- /dev/null
+++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/include/BasicHashTable.hh
@@ -0,0 +1,104 @@
+/**********
+This library is free software; you can redistribute it and/or modify it under
+the terms of the GNU Lesser General Public License as published by the
+Free Software Foundation; either version 2.1 of the License, or (at your
+option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
+
+This library is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
+more details.
+
+You should have received a copy of the GNU Lesser General Public License
+along with this library; if not, write to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
+**********/
+// Copyright (c) 1996-2014 Live Networks, Inc.  All rights reserved.
+// Basic Hash Table implementation
+// C++ header
+
+#ifndef _BASIC_HASH_TABLE_HH
+#define _BASIC_HASH_TABLE_HH
+
+#ifndef _HASH_TABLE_HH
+#include "HashTable.hh"
+#endif
+#ifndef _NET_COMMON_H
+#include <NetCommon.h> // to ensure that "uintptr_t" is defined
+#endif
+
+// A simple hash table implementation, inspired by the hash table
+// implementation used in Tcl 7.6: <http://www.tcl.tk/>
+
+#define SMALL_HASH_TABLE_SIZE 4
+
+class BasicHashTable: public HashTable {
+private:
+	class TableEntry; // forward
+
+public:
+  BasicHashTable(int keyType);
+  virtual ~BasicHashTable();
+
+  // Used to iterate through the members of the table:
+  class Iterator; friend class Iterator; // to make Sun's C++ compiler happy
+  class Iterator: public HashTable::Iterator {
+  public:
+    Iterator(BasicHashTable const& table);
+
+  private: // implementation of inherited pure virtual functions
+    void* next(char const*& key); // returns 0 if none
+
+  private:
+    BasicHashTable const& fTable;
+    unsigned fNextIndex; // index of next bucket to be enumerated after this
+    TableEntry* fNextEntry; // next entry in the current bucket
+  };
+
+private: // implementation of inherited pure virtual functions
+  virtual void* Add(char const* key, void* value);
+  // Returns the old value if different, otherwise 0
+  virtual Boolean Remove(char const* key);
+  virtual void* Lookup(char const* key) const;
+  // Returns 0 if not found
+  virtual unsigned numEntries() const;
+
+private:
+  class TableEntry {
+  public:
+    TableEntry* fNext;
+    char const* key;
+    void* value;
+  };
+
+  TableEntry* lookupKey(char const* key, unsigned& index) const;
+    // returns entry matching "key", or NULL if none
+  Boolean keyMatches(char const* key1, char const* key2) const;
+    // used to implement "lookupKey()"
+
+  TableEntry* insertNewEntry(unsigned index, char const* key);
+    // creates a new entry, and inserts it in the table
+  void assignKey(TableEntry* entry, char const* key);
+    // used to implement "insertNewEntry()"
+
+  void deleteEntry(unsigned index, TableEntry* entry);
+  void deleteKey(TableEntry* entry);
+    // used to implement "deleteEntry()"
+
+  void rebuild(); // rebuilds the table as its size increases
+
+  unsigned hashIndexFromKey(char const* key) const;
+    // used to implement many of the routines above
+
+  unsigned randomIndex(uintptr_t i) const {
+    return (unsigned)(((i*1103515245) >> fDownShift) & fMask);
+  }
+
+private:
+  TableEntry** fBuckets; // pointer to bucket array
+  TableEntry* fStaticBuckets[SMALL_HASH_TABLE_SIZE];// used for small tables
+  unsigned fNumBuckets, fNumEntries, fRebuildSize, fDownShift, fMask;
+  int fKeyType;
+};
+
+#endif
diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/include/BasicUsageEnvironment.hh b/AnyCore/lib_rtsp/BasicUsageEnvironment/include/BasicUsageEnvironment.hh
new file mode 100644
index 0000000..9c6874b
--- /dev/null
+++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/include/BasicUsageEnvironment.hh
@@ -0,0 +1,86 @@
+/**********
+This library is free software; you can redistribute it and/or modify it under
+the terms of the GNU Lesser General
Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Basic Usage Environment: for a simple, non-scripted, console application +// C++ header + +#ifndef _BASIC_USAGE_ENVIRONMENT_HH +#define _BASIC_USAGE_ENVIRONMENT_HH + +#ifndef _BASIC_USAGE_ENVIRONMENT0_HH +#include "BasicUsageEnvironment0.hh" +#endif + +class BasicUsageEnvironment: public BasicUsageEnvironment0 { +public: + static BasicUsageEnvironment* createNew(TaskScheduler& taskScheduler); + + // redefined virtual functions: + virtual int getErrno() const; + + virtual UsageEnvironment& operator<<(char const* str); + virtual UsageEnvironment& operator<<(int i); + virtual UsageEnvironment& operator<<(unsigned u); + virtual UsageEnvironment& operator<<(double d); + virtual UsageEnvironment& operator<<(void* p); + +protected: + BasicUsageEnvironment(TaskScheduler& taskScheduler); + // called only by "createNew()" (or subclass constructors) + virtual ~BasicUsageEnvironment(); +}; + + +class BasicTaskScheduler: public BasicTaskScheduler0 { +public: + static BasicTaskScheduler* createNew(unsigned maxSchedulerGranularity = 10000/*microseconds*/); + // "maxSchedulerGranularity" (default value: 10 ms) specifies the maximum time that we wait (in "select()") before + // returning to the event loop to handle non-socket or non-timer-based events, such as 'triggered events'. 
+ // You can change this is you wish (but only if you know what you're doing!), or set it to 0, to specify no such maximum time. + // (You should set it to 0 only if you know that you will not be using 'event triggers'.) + virtual ~BasicTaskScheduler(); + +protected: + BasicTaskScheduler(unsigned maxSchedulerGranularity); + // called only by "createNew()" + + static void schedulerTickTask(void* clientData); + void schedulerTickTask(); + +protected: + // Redefined virtual functions: + virtual void SingleStep(unsigned maxDelayTime); + + virtual void setBackgroundHandling(int socketNum, int conditionSet, BackgroundHandlerProc* handlerProc, void* clientData); + virtual void moveSocketHandling(int oldSocketNum, int newSocketNum); + +protected: + unsigned fMaxSchedulerGranularity; + + // To implement background operations: + int fMaxNumSockets; + fd_set fReadSet; + fd_set fWriteSet; + fd_set fExceptionSet; + +private: +#if defined(__WIN32__) || defined(_WIN32) + // Hack to work around a bug in Windows' "select()" implementation: + int fDummySocketNum; +#endif +}; + +#endif diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/include/BasicUsageEnvironment0.hh b/AnyCore/lib_rtsp/BasicUsageEnvironment/include/BasicUsageEnvironment0.hh new file mode 100644 index 0000000..4979a54 --- /dev/null +++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/include/BasicUsageEnvironment0.hh @@ -0,0 +1,115 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Basic Usage Environment: for a simple, non-scripted, console application +// C++ header + +#ifndef _BASIC_USAGE_ENVIRONMENT0_HH +#define _BASIC_USAGE_ENVIRONMENT0_HH + +#ifndef _BASICUSAGEENVIRONMENT_VERSION_HH +#include "BasicUsageEnvironment_version.hh" +#endif + +#ifndef _USAGE_ENVIRONMENT_HH +#include "UsageEnvironment.hh" +#endif + +#ifndef _DELAY_QUEUE_HH +#include "DelayQueue.hh" +#endif + +#define RESULT_MSG_BUFFER_MAX 1000 + +// An abstract base class, useful for subclassing +// (e.g., to redefine the implementation of "operator<<") +class BasicUsageEnvironment0: public UsageEnvironment { +public: + // redefined virtual functions: + virtual MsgString getResultMsg() const; + + virtual void setResultMsg(MsgString msg); + virtual void setResultMsg(MsgString msg1, + MsgString msg2); + virtual void setResultMsg(MsgString msg1, + MsgString msg2, + MsgString msg3); + virtual void setResultErrMsg(MsgString msg, int err = 0); + + virtual void appendToResultMsg(MsgString msg); + + virtual void reportBackgroundError(); + +protected: + BasicUsageEnvironment0(TaskScheduler& taskScheduler); + virtual ~BasicUsageEnvironment0(); + +private: + void reset(); + + char fResultMsgBuffer[RESULT_MSG_BUFFER_MAX]; + unsigned fCurBufferSize; + unsigned fBufferMaxSize; +}; + +class HandlerSet; // forward + +#define MAX_NUM_EVENT_TRIGGERS 32 + +// An abstract base class, useful for subclassing +// (e.g., to redefine the implementation of socket event handling) +class BasicTaskScheduler0: public TaskScheduler { +public: + virtual ~BasicTaskScheduler0(); + + virtual void SingleStep(unsigned maxDelayTime = 0) = 0; + // "maxDelayTime" is in microseconds. 
It allows a subclass to impose a limit + // on how long "select()" can delay, in case it wants to also do polling. + // 0 (the default value) means: There's no maximum; just look at the delay queue + +public: + // Redefined virtual functions: + virtual TaskToken scheduleDelayedTask(int64_t microseconds, TaskFunc* proc, + void* clientData); + virtual void unscheduleDelayedTask(TaskToken& prevTask); + + virtual void doEventOnce(); + + virtual void doEventLoop(char* watchVariable); + + virtual EventTriggerId createEventTrigger(TaskFunc* eventHandlerProc); + virtual void deleteEventTrigger(EventTriggerId eventTriggerId); + virtual void triggerEvent(EventTriggerId eventTriggerId, void* clientData = NULL); + +protected: + BasicTaskScheduler0(); + +protected: + // To implement delayed operations: + DelayQueue fDelayQueue; + + // To implement background reads: + HandlerSet* fHandlers; + int fLastHandledSocketNum; + + // To implement event triggers: + EventTriggerId fTriggersAwaitingHandling, fLastUsedTriggerMask; // implemented as 32-bit bitmaps + TaskFunc* fTriggeredEventHandlers[MAX_NUM_EVENT_TRIGGERS]; + void* fTriggeredEventClientDatas[MAX_NUM_EVENT_TRIGGERS]; + unsigned fLastUsedTriggerNum; // in the range [0,MAX_NUM_EVENT_TRIGGERS) +}; + +#endif diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/include/BasicUsageEnvironment_version.hh b/AnyCore/lib_rtsp/BasicUsageEnvironment/include/BasicUsageEnvironment_version.hh new file mode 100644 index 0000000..474f8c9 --- /dev/null +++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/include/BasicUsageEnvironment_version.hh @@ -0,0 +1,10 @@ +// Version information for the "BasicUsageEnvironment" library +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+ +#ifndef _BASICUSAGEENVIRONMENT_VERSION_HH +#define _BASICUSAGEENVIRONMENT_VERSION_HH + +#define BASICUSAGEENVIRONMENT_LIBRARY_VERSION_STRING "2014.09.22" +#define BASICUSAGEENVIRONMENT_LIBRARY_VERSION_INT 1411344000 + +#endif diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/include/DelayQueue.hh b/AnyCore/lib_rtsp/BasicUsageEnvironment/include/DelayQueue.hh new file mode 100644 index 0000000..f6bb2ed --- /dev/null +++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/include/DelayQueue.hh @@ -0,0 +1,182 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ + // Copyright (c) 1996-2014, Live Networks, Inc. 
All rights reserved +// Delay queue +// C++ header + +#ifndef _DELAY_QUEUE_HH +#define _DELAY_QUEUE_HH + +#ifndef _NET_COMMON_H +#include "NetCommon.h" +#endif + +#ifdef TIME_BASE +typedef TIME_BASE time_base_seconds; +#else +typedef long time_base_seconds; +#endif + +///// A "Timeval" can be either an absolute time, or a time interval ///// + +class Timeval { +public: + time_base_seconds seconds() const { + return fTv.tv_sec; + } + time_base_seconds seconds() { + return fTv.tv_sec; + } + time_base_seconds useconds() const { + return fTv.tv_usec; + } + time_base_seconds useconds() { + return fTv.tv_usec; + } + + int operator>=(Timeval const& arg2) const; + int operator<=(Timeval const& arg2) const { + return arg2 >= *this; + } + int operator<(Timeval const& arg2) const { + return !(*this >= arg2); + } + int operator>(Timeval const& arg2) const { + return arg2 < *this; + } + int operator==(Timeval const& arg2) const { + return *this >= arg2 && arg2 >= *this; + } + int operator!=(Timeval const& arg2) const { + return !(*this == arg2); + } + + void operator+=(class DelayInterval const& arg2); + void operator-=(class DelayInterval const& arg2); + // returns ZERO iff arg2 >= arg1 + +protected: + Timeval(time_base_seconds seconds, time_base_seconds useconds) { + fTv.tv_sec = seconds; fTv.tv_usec = useconds; + } + +private: + time_base_seconds& secs() { + return (time_base_seconds&)fTv.tv_sec; + } + time_base_seconds& usecs() { + return (time_base_seconds&)fTv.tv_usec; + } + + struct timeval fTv; +}; + +#ifndef max +inline Timeval max(Timeval const& arg1, Timeval const& arg2) { + return arg1 >= arg2 ? arg1 : arg2; +} +#endif +#ifndef min +inline Timeval min(Timeval const& arg1, Timeval const& arg2) { + return arg1 <= arg2 ? 
arg1 : arg2; +} +#endif + +class DelayInterval operator-(Timeval const& arg1, Timeval const& arg2); +// returns ZERO iff arg2 >= arg1 + + +///// DelayInterval ///// + +class DelayInterval: public Timeval { +public: + DelayInterval(time_base_seconds seconds, time_base_seconds useconds) + : Timeval(seconds, useconds) {} +}; + +DelayInterval operator*(short arg1, DelayInterval const& arg2); + +extern DelayInterval const DELAY_ZERO; +extern DelayInterval const DELAY_SECOND; +extern DelayInterval const DELAY_MINUTE; +extern DelayInterval const DELAY_HOUR; +extern DelayInterval const DELAY_DAY; + +///// EventTime ///// + +class EventTime: public Timeval { +public: + EventTime(unsigned secondsSinceEpoch = 0, + unsigned usecondsSinceEpoch = 0) + // We use the Unix standard epoch: January 1, 1970 + : Timeval(secondsSinceEpoch, usecondsSinceEpoch) {} +}; + +EventTime TimeNow(); + +extern EventTime const THE_END_OF_TIME; + + +///// DelayQueueEntry ///// + +class DelayQueueEntry { +public: + virtual ~DelayQueueEntry(); + + intptr_t token() { + return fToken; + } + +protected: // abstract base class + DelayQueueEntry(DelayInterval delay); + + virtual void handleTimeout(); + +private: + friend class DelayQueue; + DelayQueueEntry* fNext; + DelayQueueEntry* fPrev; + DelayInterval fDeltaTimeRemaining; + + intptr_t fToken; + static intptr_t tokenCounter; +}; + +///// DelayQueue ///// + +class DelayQueue: public DelayQueueEntry { +public: + DelayQueue(); + virtual ~DelayQueue(); + + void addEntry(DelayQueueEntry* newEntry); // returns a token for the entry + void updateEntry(DelayQueueEntry* entry, DelayInterval newDelay); + void updateEntry(intptr_t tokenToFind, DelayInterval newDelay); + void removeEntry(DelayQueueEntry* entry); // but doesn't delete it + DelayQueueEntry* removeEntry(intptr_t tokenToFind); // but doesn't delete it + + DelayInterval const& timeToNextAlarm(); + void handleAlarm(); + +private: + DelayQueueEntry* head() { return fNext; } + DelayQueueEntry* 
findEntryByToken(intptr_t token); + void synchronize(); // bring the 'time remaining' fields up-to-date + + EventTime fLastSyncTime; +}; + +#endif diff --git a/AnyCore/lib_rtsp/BasicUsageEnvironment/include/HandlerSet.hh b/AnyCore/lib_rtsp/BasicUsageEnvironment/include/HandlerSet.hh new file mode 100644 index 0000000..dbd00e3 --- /dev/null +++ b/AnyCore/lib_rtsp/BasicUsageEnvironment/include/HandlerSet.hh @@ -0,0 +1,77 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Basic Usage Environment: for a simple, non-scripted, console application +// C++ header + +#ifndef _HANDLER_SET_HH +#define _HANDLER_SET_HH + +#ifndef _BOOLEAN_HH +#include "Boolean.hh" +#endif + +////////// HandlerSet (etc.) 
definition ////////// + +class HandlerDescriptor { + HandlerDescriptor(HandlerDescriptor* nextHandler); + virtual ~HandlerDescriptor(); + +public: + int socketNum; + int conditionSet; + TaskScheduler::BackgroundHandlerProc* handlerProc; + void* clientData; + +private: + // Descriptors are linked together in a doubly-linked list: + friend class HandlerSet; + friend class HandlerIterator; + HandlerDescriptor* fNextHandler; + HandlerDescriptor* fPrevHandler; +}; + +class HandlerSet { +public: + HandlerSet(); + virtual ~HandlerSet(); + + void assignHandler(int socketNum, int conditionSet, TaskScheduler::BackgroundHandlerProc* handlerProc, void* clientData); + void clearHandler(int socketNum); + void moveHandler(int oldSocketNum, int newSocketNum); + +private: + HandlerDescriptor* lookupHandler(int socketNum); + +private: + friend class HandlerIterator; + HandlerDescriptor fHandlers; +}; + +class HandlerIterator { +public: + HandlerIterator(HandlerSet& handlerSet); + virtual ~HandlerIterator(); + + HandlerDescriptor* next(); // returns NULL if none + void reset(); + +private: + HandlerSet& fOurSet; + HandlerDescriptor* fNextPtr; +}; + +#endif diff --git a/AnyCore/lib_rtsp/groupsock/COPYING b/AnyCore/lib_rtsp/groupsock/COPYING new file mode 100644 index 0000000..012065c --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/COPYING @@ -0,0 +1 @@ +../COPYING \ No newline at end of file diff --git a/AnyCore/lib_rtsp/groupsock/GroupEId.cpp b/AnyCore/lib_rtsp/groupsock/GroupEId.cpp new file mode 100644 index 0000000..686e547 --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/GroupEId.cpp @@ -0,0 +1,104 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// Copyright (c) 1996-2014, Live Networks, Inc. All rights reserved +// "Group Endpoint Id" +// Implementation + +#include "GroupEId.hh" +#include "strDup.hh" +#include + +////////// Scope ////////// + +void Scope::assign(u_int8_t ttl, const char* publicKey) { + fTTL = ttl; + + fPublicKey = strDup(publicKey == NULL ? "nokey" : publicKey); +} + +void Scope::clean() { + delete[] fPublicKey; + fPublicKey = NULL; +} + + +Scope::Scope(u_int8_t ttl, const char* publicKey) { + assign(ttl, publicKey); +} + +Scope::Scope(const Scope& orig) { + assign(orig.ttl(), orig.publicKey()); +} + +Scope& Scope::operator=(const Scope& rightSide) { + if (&rightSide != this) { + if (publicKey() == NULL + || strcmp(publicKey(), rightSide.publicKey()) != 0) { + clean(); + assign(rightSide.ttl(), rightSide.publicKey()); + } else { // need to assign TTL only + fTTL = rightSide.ttl(); + } + } + + return *this; +} + +Scope::~Scope() { + clean(); +} + +unsigned Scope::publicKeySize() const { + return fPublicKey == NULL ? 
0 : strlen(fPublicKey); +} + +////////// GroupEId ////////// + +GroupEId::GroupEId(struct in_addr const& groupAddr, + portNumBits portNum, Scope const& scope, + unsigned numSuccessiveGroupAddrs) { + struct in_addr sourceFilterAddr; + sourceFilterAddr.s_addr = ~0; // indicates no source filter + + init(groupAddr, sourceFilterAddr, portNum, scope, numSuccessiveGroupAddrs); +} + +GroupEId::GroupEId(struct in_addr const& groupAddr, + struct in_addr const& sourceFilterAddr, + portNumBits portNum, + unsigned numSuccessiveGroupAddrs) { + init(groupAddr, sourceFilterAddr, portNum, 255, numSuccessiveGroupAddrs); +} + +GroupEId::GroupEId() { +} + +Boolean GroupEId::isSSM() const { + return fSourceFilterAddress.s_addr != netAddressBits(~0); +} + + +void GroupEId::init(struct in_addr const& groupAddr, + struct in_addr const& sourceFilterAddr, + portNumBits portNum, + Scope const& scope, + unsigned numSuccessiveGroupAddrs) { + fGroupAddress = groupAddr; + fSourceFilterAddress = sourceFilterAddr; + fNumSuccessiveGroupAddrs = numSuccessiveGroupAddrs; + fPortNum = portNum; + fScope = scope; +} diff --git a/AnyCore/lib_rtsp/groupsock/Groupsock.cpp b/AnyCore/lib_rtsp/groupsock/Groupsock.cpp new file mode 100644 index 0000000..d3c67d7 --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/Groupsock.cpp @@ -0,0 +1,635 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// 'Group sockets' +// Implementation + +#include "Groupsock.hh" +#include "GroupsockHelper.hh" +//##### Eventually fix the following #include; we shouldn't know about tunnels +#include "TunnelEncaps.hh" + +#ifndef NO_SSTREAM +#include +#endif +#include + +///////// OutputSocket ////////// + +OutputSocket::OutputSocket(UsageEnvironment& env) + : Socket(env, 0 /* let kernel choose port */), + fSourcePort(0), fLastSentTTL(256/*hack: a deliberately invalid value*/) { +} + +OutputSocket::OutputSocket(UsageEnvironment& env, Port port) + : Socket(env, port), + fSourcePort(0), fLastSentTTL(256/*hack: a deliberately invalid value*/) { +} + +OutputSocket::~OutputSocket() { +} + +Boolean OutputSocket::write(netAddressBits address, Port port, u_int8_t ttl, + unsigned char* buffer, unsigned bufferSize) { + struct in_addr destAddr; destAddr.s_addr = address; + if ((unsigned)ttl == fLastSentTTL) { + // Optimization: Don't do a 'set TTL' system call again + if (!writeSocket(env(), socketNum(), destAddr, port, buffer, bufferSize)) return False; + } else { + if (!writeSocket(env(), socketNum(), destAddr, port, ttl, buffer, bufferSize)) return False; + fLastSentTTL = (unsigned)ttl; + } + + if (sourcePortNum() == 0) { + // Now that we've sent a packet, we can find out what the + // kernel chose as our ephemeral source port number: + if (!getSourcePort(env(), socketNum(), fSourcePort)) { + if (DebugLevel >= 1) + env() << *this + << ": failed to get source port: " + << env().getResultMsg() << "\n"; + return False; + } + } + + return True; +} + +// By default, we don't do reads: +Boolean OutputSocket +::handleRead(unsigned char* /*buffer*/, unsigned /*bufferMaxSize*/, + unsigned& /*bytesRead*/, 
struct sockaddr_in& /*fromAddress*/) { + return True; +} + + +///////// destRecord ////////// + +destRecord +::destRecord(struct in_addr const& addr, Port const& port, u_int8_t ttl, + destRecord* next) + : fNext(next), fGroupEId(addr, port.num(), ttl), fPort(port) { +} + +destRecord::~destRecord() { + delete fNext; +} + + +///////// Groupsock ////////// + +NetInterfaceTrafficStats Groupsock::statsIncoming; +NetInterfaceTrafficStats Groupsock::statsOutgoing; +NetInterfaceTrafficStats Groupsock::statsRelayedIncoming; +NetInterfaceTrafficStats Groupsock::statsRelayedOutgoing; + +// Constructor for a source-independent multicast group +Groupsock::Groupsock(UsageEnvironment& env, struct in_addr const& groupAddr, + Port port, u_int8_t ttl) + : OutputSocket(env, port), + deleteIfNoMembers(False), isSlave(False), + fIncomingGroupEId(groupAddr, port.num(), ttl), fDests(NULL), fTTL(ttl) { + addDestination(groupAddr, port); + + if (!socketJoinGroup(env, socketNum(), groupAddr.s_addr)) { + if (DebugLevel >= 1) { + env << *this << ": failed to join group: " + << env.getResultMsg() << "\n"; + } + } + + // Make sure we can get our source address: + if (ourIPAddress(env) == 0) { + if (DebugLevel >= 0) { // this is a fatal error + env << "Unable to determine our source address: " + << env.getResultMsg() << "\n"; + } + } + + if (DebugLevel >= 2) env << *this << ": created\n"; +} + +// Constructor for a source-specific multicast group +Groupsock::Groupsock(UsageEnvironment& env, struct in_addr const& groupAddr, + struct in_addr const& sourceFilterAddr, + Port port) + : OutputSocket(env, port), + deleteIfNoMembers(False), isSlave(False), + fIncomingGroupEId(groupAddr, sourceFilterAddr, port.num()), + fDests(NULL), fTTL(255) { + addDestination(groupAddr, port); + + // First try a SSM join. 
If that fails, try a regular join: + if (!socketJoinGroupSSM(env, socketNum(), groupAddr.s_addr, + sourceFilterAddr.s_addr)) { + if (DebugLevel >= 3) { + env << *this << ": SSM join failed: " + << env.getResultMsg(); + env << " - trying regular join instead\n"; + } + if (!socketJoinGroup(env, socketNum(), groupAddr.s_addr)) { + if (DebugLevel >= 1) { + env << *this << ": failed to join group: " + << env.getResultMsg() << "\n"; + } + } + } + + if (DebugLevel >= 2) env << *this << ": created\n"; +} + +Groupsock::~Groupsock() { + if (isSSM()) { + if (!socketLeaveGroupSSM(env(), socketNum(), groupAddress().s_addr, + sourceFilterAddress().s_addr)) { + socketLeaveGroup(env(), socketNum(), groupAddress().s_addr); + } + } else { + socketLeaveGroup(env(), socketNum(), groupAddress().s_addr); + } + + delete fDests; + + if (DebugLevel >= 2) env() << *this << ": deleting\n"; +} + +void +Groupsock::changeDestinationParameters(struct in_addr const& newDestAddr, + Port newDestPort, int newDestTTL) { + if (fDests == NULL) return; + + struct in_addr destAddr = fDests->fGroupEId.groupAddress(); + if (newDestAddr.s_addr != 0) { + if (newDestAddr.s_addr != destAddr.s_addr + && IsMulticastAddress(newDestAddr.s_addr)) { + // If the new destination is a multicast address, then we assume that + // we want to join it also. (If this is not in fact the case, then + // call "multicastSendOnly()" afterwards.) 
+ socketLeaveGroup(env(), socketNum(), destAddr.s_addr); + socketJoinGroup(env(), socketNum(), newDestAddr.s_addr); + } + destAddr.s_addr = newDestAddr.s_addr; + } + + portNumBits destPortNum = fDests->fGroupEId.portNum(); + if (newDestPort.num() != 0) { + if (newDestPort.num() != destPortNum + && IsMulticastAddress(destAddr.s_addr)) { + // Also bind to the new port number: + changePort(newDestPort); + // And rejoin the multicast group: + socketJoinGroup(env(), socketNum(), destAddr.s_addr); + } + destPortNum = newDestPort.num(); + fDests->fPort = newDestPort; + } + + u_int8_t destTTL = ttl(); + if (newDestTTL != ~0) destTTL = (u_int8_t)newDestTTL; + + fDests->fGroupEId = GroupEId(destAddr, destPortNum, destTTL); +} + +void Groupsock::addDestination(struct in_addr const& addr, Port const& port) { + // Check whether this destination is already known: + for (destRecord* dests = fDests; dests != NULL; dests = dests->fNext) { + if (addr.s_addr == dests->fGroupEId.groupAddress().s_addr + && port.num() == dests->fPort.num()) { + return; + } + } + + fDests = new destRecord(addr, port, ttl(), fDests); +} + +void Groupsock::removeDestination(struct in_addr const& addr, Port const& port) { + for (destRecord** destsPtr = &fDests; *destsPtr != NULL; + destsPtr = &((*destsPtr)->fNext)) { + if (addr.s_addr == (*destsPtr)->fGroupEId.groupAddress().s_addr + && port.num() == (*destsPtr)->fPort.num()) { + // Remove the record pointed to by *destsPtr : + destRecord* next = (*destsPtr)->fNext; + (*destsPtr)->fNext = NULL; + delete (*destsPtr); + *destsPtr = next; + return; + } + } +} + +void Groupsock::removeAllDestinations() { + delete fDests; fDests = NULL; +} + +void Groupsock::multicastSendOnly() { + // We disable this code for now, because - on some systems - leaving the multicast group seems to cause sent packets + // to not be received by other applications (at least, on the same host). 
+#if 0 + socketLeaveGroup(env(), socketNum(), fIncomingGroupEId.groupAddress().s_addr); + for (destRecord* dests = fDests; dests != NULL; dests = dests->fNext) { + socketLeaveGroup(env(), socketNum(), dests->fGroupEId.groupAddress().s_addr); + } +#endif +} + +Boolean Groupsock::output(UsageEnvironment& env, u_int8_t ttlToSend, + unsigned char* buffer, unsigned bufferSize, + DirectedNetInterface* interfaceNotToFwdBackTo) { + do { + // First, do the datagram send, to each destination: + Boolean writeSuccess = True; + for (destRecord* dests = fDests; dests != NULL; dests = dests->fNext) { + if (!write(dests->fGroupEId.groupAddress().s_addr, dests->fPort, ttlToSend, + buffer, bufferSize)) { + writeSuccess = False; + break; + } + } + if (!writeSuccess) break; + statsOutgoing.countPacket(bufferSize); + statsGroupOutgoing.countPacket(bufferSize); + + // Then, forward to our members: + int numMembers = 0; + if (!members().IsEmpty()) { + numMembers = + outputToAllMembersExcept(interfaceNotToFwdBackTo, + ttlToSend, buffer, bufferSize, + ourIPAddress(env)); + if (numMembers < 0) break; + } + + if (DebugLevel >= 3) { + env << *this << ": wrote " << bufferSize << " bytes, ttl " + << (unsigned)ttlToSend; + if (numMembers > 0) { + env << "; relayed to " << numMembers << " members"; + } + env << "\n"; + } + return True; + } while (0); + + if (DebugLevel >= 0) { // this is a fatal error + env.setResultMsg("Groupsock write failed: ", env.getResultMsg()); + } + return False; +} + +Boolean Groupsock::handleRead(unsigned char* buffer, unsigned bufferMaxSize, + unsigned& bytesRead, + struct sockaddr_in& fromAddress) { + // Read data from the socket, and relay it across any attached tunnels + //##### later make this code more general - independent of tunnels + + bytesRead = 0; + + int maxBytesToRead = bufferMaxSize - TunnelEncapsulationTrailerMaxSize; + int numBytes = readSocket(env(), socketNum(), + buffer, maxBytesToRead, fromAddress); + if (numBytes < 0) { + if (DebugLevel >= 0) { // 
this is a fatal error + env().setResultMsg("Groupsock read failed: ", + env().getResultMsg()); + } + return False; + } + + // If we're a SSM group, make sure the source address matches: + if (isSSM() + && fromAddress.sin_addr.s_addr != sourceFilterAddress().s_addr) { + return True; + } + + // We'll handle this data. + // Also write it (with the encapsulation trailer) to each member, + // unless the packet was originally sent by us to begin with. + bytesRead = numBytes; + + int numMembers = 0; + if (!wasLoopedBackFromUs(env(), fromAddress)) { + statsIncoming.countPacket(numBytes); + statsGroupIncoming.countPacket(numBytes); + numMembers = + outputToAllMembersExcept(NULL, ttl(), + buffer, bytesRead, + fromAddress.sin_addr.s_addr); + if (numMembers > 0) { + statsRelayedIncoming.countPacket(numBytes); + statsGroupRelayedIncoming.countPacket(numBytes); + } + } + if (DebugLevel >= 3) { + env() << *this << ": read " << bytesRead << " bytes from " << AddressString(fromAddress).val(); + if (numMembers > 0) { + env() << "; relayed to " << numMembers << " members"; + } + env() << "\n"; + } + + return True; +} + +Boolean Groupsock::wasLoopedBackFromUs(UsageEnvironment& env, + struct sockaddr_in& fromAddress) { + if (fromAddress.sin_addr.s_addr + == ourIPAddress(env)) { + if (fromAddress.sin_port == sourcePortNum()) { +#ifdef DEBUG_LOOPBACK_CHECKING + if (DebugLevel >= 3) { + env() << *this << ": got looped-back packet\n"; + } +#endif + return True; + } + } + + return False; +} + +int Groupsock::outputToAllMembersExcept(DirectedNetInterface* exceptInterface, + u_int8_t ttlToFwd, + unsigned char* data, unsigned size, + netAddressBits sourceAddr) { + // Don't forward TTL-0 packets + if (ttlToFwd == 0) return 0; + + DirectedNetInterfaceSet::Iterator iter(members()); + unsigned numMembers = 0; + DirectedNetInterface* interf; + while ((interf = iter.next()) != NULL) { + // Check whether we've asked to exclude this interface: + if (interf == exceptInterface) + continue; + + // Check 
that the packet's source address makes it OK to + // be relayed across this interface: + UsageEnvironment& saveEnv = env(); + // because the following call may delete "this" + if (!interf->SourceAddrOKForRelaying(saveEnv, sourceAddr)) { + if (strcmp(saveEnv.getResultMsg(), "") != 0) { + // Treat this as a fatal error + return -1; + } else { + continue; + } + } + + if (numMembers == 0) { + // We know that we're going to forward to at least one + // member, so fill in the tunnel encapsulation trailer. + // (Note: Allow for it not being 4-byte-aligned.) + TunnelEncapsulationTrailer* trailerInPacket + = (TunnelEncapsulationTrailer*)&data[size]; + TunnelEncapsulationTrailer* trailer; + + Boolean misaligned = ((uintptr_t)trailerInPacket & 3) != 0; + unsigned trailerOffset; + u_int8_t tunnelCmd; + if (isSSM()) { + // add an 'auxilliary address' before the trailer + trailerOffset = TunnelEncapsulationTrailerAuxSize; + tunnelCmd = TunnelDataAuxCmd; + } else { + trailerOffset = 0; + tunnelCmd = TunnelDataCmd; + } + unsigned trailerSize = TunnelEncapsulationTrailerSize + trailerOffset; + unsigned tmpTr[TunnelEncapsulationTrailerMaxSize]; + if (misaligned) { + trailer = (TunnelEncapsulationTrailer*)&tmpTr; + } else { + trailer = trailerInPacket; + } + trailer += trailerOffset; + + if (fDests != NULL) { + trailer->address() = fDests->fGroupEId.groupAddress().s_addr; + trailer->port() = fDests->fPort; // structure copy, outputs in network order + } + trailer->ttl() = ttlToFwd; + trailer->command() = tunnelCmd; + + if (isSSM()) { + trailer->auxAddress() = sourceFilterAddress().s_addr; + } + + if (misaligned) { + memmove(trailerInPacket, trailer-trailerOffset, trailerSize); + } + + size += trailerSize; + } + + interf->write(data, size); + ++numMembers; + } + + return numMembers; +} + +UsageEnvironment& operator<<(UsageEnvironment& s, const Groupsock& g) { + UsageEnvironment& s1 = s << timestampString() << " Groupsock(" + << g.socketNum() << ": " + << 
AddressString(g.groupAddress()).val() + << ", " << g.port() << ", "; + if (g.isSSM()) { + return s1 << "SSM source: " + << AddressString(g.sourceFilterAddress()).val() << ")"; + } else { + return s1 << (unsigned)(g.ttl()) << ")"; + } +} + + +////////// GroupsockLookupTable ////////// + + +// A hash table used to index Groupsocks by socket number. + +static HashTable*& getSocketTable(UsageEnvironment& env) { + _groupsockPriv* priv = groupsockPriv(env); + if (priv->socketTable == NULL) { // We need to create it + priv->socketTable = HashTable::create(ONE_WORD_HASH_KEYS); + } + return priv->socketTable; +} + +static Boolean unsetGroupsockBySocket(Groupsock const* groupsock) { + do { + if (groupsock == NULL) break; + + int sock = groupsock->socketNum(); + // Make sure "sock" is in bounds: + if (sock < 0) break; + + HashTable*& sockets = getSocketTable(groupsock->env()); + + Groupsock* gs = (Groupsock*)sockets->Lookup((char*)(long)sock); + if (gs == NULL || gs != groupsock) break; + sockets->Remove((char*)(long)sock); + + if (sockets->IsEmpty()) { + // We can also delete the table (to reclaim space): + delete sockets; sockets = NULL; + reclaimGroupsockPriv(gs->env()); + } + + return True; + } while (0); + + return False; +} + +static Boolean setGroupsockBySocket(UsageEnvironment& env, int sock, + Groupsock* groupsock) { + do { + // Make sure the "sock" parameter is in bounds: + if (sock < 0) { + char buf[100]; + sprintf(buf, "trying to use bad socket (%d)", sock); + env.setResultMsg(buf); + break; + } + + HashTable* sockets = getSocketTable(env); + + // Make sure we're not replacing an existing Groupsock (although that shouldn't happen) + Boolean alreadyExists + = (sockets->Lookup((char*)(long)sock) != 0); + if (alreadyExists) { + char buf[100]; + sprintf(buf, + "Attempting to replace an existing socket (%d", + sock); + env.setResultMsg(buf); + break; + } + + sockets->Add((char*)(long)sock, groupsock); + return True; + } while (0); + + return False; +} + +static 
Groupsock* getGroupsockBySocket(UsageEnvironment& env, int sock) { + do { + // Make sure the "sock" parameter is in bounds: + if (sock < 0) break; + + HashTable* sockets = getSocketTable(env); + return (Groupsock*)sockets->Lookup((char*)(long)sock); + } while (0); + + return NULL; +} + +Groupsock* +GroupsockLookupTable::Fetch(UsageEnvironment& env, + netAddressBits groupAddress, + Port port, u_int8_t ttl, + Boolean& isNew) { + isNew = False; + Groupsock* groupsock; + do { + groupsock = (Groupsock*) fTable.Lookup(groupAddress, (~0), port); + if (groupsock == NULL) { // we need to create one: + groupsock = AddNew(env, groupAddress, (~0), port, ttl); + if (groupsock == NULL) break; + isNew = True; + } + } while (0); + + return groupsock; +} + +Groupsock* +GroupsockLookupTable::Fetch(UsageEnvironment& env, + netAddressBits groupAddress, + netAddressBits sourceFilterAddr, Port port, + Boolean& isNew) { + isNew = False; + Groupsock* groupsock; + do { + groupsock + = (Groupsock*) fTable.Lookup(groupAddress, sourceFilterAddr, port); + if (groupsock == NULL) { // we need to create one: + groupsock = AddNew(env, groupAddress, sourceFilterAddr, port, 0); + if (groupsock == NULL) break; + isNew = True; + } + } while (0); + + return groupsock; +} + +Groupsock* +GroupsockLookupTable::Lookup(netAddressBits groupAddress, Port port) { + return (Groupsock*) fTable.Lookup(groupAddress, (~0), port); +} + +Groupsock* +GroupsockLookupTable::Lookup(netAddressBits groupAddress, + netAddressBits sourceFilterAddr, Port port) { + return (Groupsock*) fTable.Lookup(groupAddress, sourceFilterAddr, port); +} + +Groupsock* GroupsockLookupTable::Lookup(UsageEnvironment& env, int sock) { + return getGroupsockBySocket(env, sock); +} + +Boolean GroupsockLookupTable::Remove(Groupsock const* groupsock) { + unsetGroupsockBySocket(groupsock); + return fTable.Remove(groupsock->groupAddress().s_addr, + groupsock->sourceFilterAddress().s_addr, + groupsock->port()); +} + +Groupsock* 
GroupsockLookupTable::AddNew(UsageEnvironment& env, + netAddressBits groupAddress, + netAddressBits sourceFilterAddress, + Port port, u_int8_t ttl) { + Groupsock* groupsock; + do { + struct in_addr groupAddr; groupAddr.s_addr = groupAddress; + if (sourceFilterAddress == netAddressBits(~0)) { + // regular, ISM groupsock + groupsock = new Groupsock(env, groupAddr, port, ttl); + } else { + // SSM groupsock + struct in_addr sourceFilterAddr; + sourceFilterAddr.s_addr = sourceFilterAddress; + groupsock = new Groupsock(env, groupAddr, sourceFilterAddr, port); + } + + if (groupsock == NULL || groupsock->socketNum() < 0) break; + + if (!setGroupsockBySocket(env, groupsock->socketNum(), groupsock)) break; + + fTable.Add(groupAddress, sourceFilterAddress, port, (void*)groupsock); + } while (0); + + return groupsock; +} + +GroupsockLookupTable::Iterator::Iterator(GroupsockLookupTable& groupsocks) + : fIter(AddressPortLookupTable::Iterator(groupsocks.fTable)) { +} + +Groupsock* GroupsockLookupTable::Iterator::next() { + return (Groupsock*) fIter.next(); +}; diff --git a/AnyCore/lib_rtsp/groupsock/GroupsockHelper.cpp b/AnyCore/lib_rtsp/groupsock/GroupsockHelper.cpp new file mode 100644 index 0000000..10210db --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/GroupsockHelper.cpp @@ -0,0 +1,820 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "mTunnel" multicast access service +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Helper routines to implement 'group sockets' +// Implementation + +#include "GroupsockHelper.hh" + +#if defined(__WIN32__) || defined(_WIN32) +#include +extern "C" int initializeWinsockIfNecessary(); +#else +#include +#include +#include +#define initializeWinsockIfNecessary() 1 +#endif +#include + +// By default, use INADDR_ANY for the sending and receiving interfaces: +netAddressBits SendingInterfaceAddr = INADDR_ANY; +netAddressBits ReceivingInterfaceAddr = INADDR_ANY; + +static void socketErr(UsageEnvironment& env, char const* errorMsg) { + env.setResultErrMsg(errorMsg); +} + +NoReuse::NoReuse(UsageEnvironment& env) + : fEnv(env) { + groupsockPriv(fEnv)->reuseFlag = 0; +} + +NoReuse::~NoReuse() { + groupsockPriv(fEnv)->reuseFlag = 1; + reclaimGroupsockPriv(fEnv); +} + + +_groupsockPriv* groupsockPriv(UsageEnvironment& env) { + if (env.groupsockPriv == NULL) { // We need to create it + _groupsockPriv* result = new _groupsockPriv; + result->socketTable = NULL; + result->reuseFlag = 1; // default value => allow reuse of socket numbers + env.groupsockPriv = result; + } + return (_groupsockPriv*)(env.groupsockPriv); +} + +void reclaimGroupsockPriv(UsageEnvironment& env) { + _groupsockPriv* priv = (_groupsockPriv*)(env.groupsockPriv); + if (priv->socketTable == NULL && priv->reuseFlag == 1/*default value*/) { + // We can delete the structure (to save space); it will get created again, if needed: + delete priv; + env.groupsockPriv = NULL; + } +} + +static int createSocket(int type) { + // Call "socket()" to create a (IPv4) socket of the specified type. 
+ // But also set it to have the 'close on exec' property (if we can) + int sock; + +#ifdef SOCK_CLOEXEC + sock = socket(AF_INET, type|SOCK_CLOEXEC, 0); + if (sock != -1 || errno != EINVAL) return sock; + // An "errno" of EINVAL likely means that the system wasn't happy with the SOCK_CLOEXEC; fall through and try again without it: +#endif + + sock = socket(AF_INET, type, 0); +#ifdef FD_CLOEXEC + if (sock != -1) fcntl(sock, F_SETFD, FD_CLOEXEC); +#endif + return sock; +} + +int setupDatagramSocket(UsageEnvironment& env, Port port) { + if (!initializeWinsockIfNecessary()) { + socketErr(env, "Failed to initialize 'winsock': "); + return -1; + } + + int newSocket = createSocket(SOCK_DGRAM); + if (newSocket < 0) { + socketErr(env, "unable to create datagram socket: "); + return newSocket; + } + + int reuseFlag = groupsockPriv(env)->reuseFlag; + reclaimGroupsockPriv(env); + if (setsockopt(newSocket, SOL_SOCKET, SO_REUSEADDR, + (const char*)&reuseFlag, sizeof reuseFlag) < 0) { + socketErr(env, "setsockopt(SO_REUSEADDR) error: "); + closeSocket(newSocket); + return -1; + } + +#if defined(__WIN32__) || defined(_WIN32) + // Windoze doesn't properly handle SO_REUSEPORT or IP_MULTICAST_LOOP +#else +#ifdef SO_REUSEPORT + if (setsockopt(newSocket, SOL_SOCKET, SO_REUSEPORT, + (const char*)&reuseFlag, sizeof reuseFlag) < 0) { + socketErr(env, "setsockopt(SO_REUSEPORT) error: "); + closeSocket(newSocket); + return -1; + } +#endif + +#ifdef IP_MULTICAST_LOOP + const u_int8_t loop = 1; + if (setsockopt(newSocket, IPPROTO_IP, IP_MULTICAST_LOOP, + (const char*)&loop, sizeof loop) < 0) { + socketErr(env, "setsockopt(IP_MULTICAST_LOOP) error: "); + closeSocket(newSocket); + return -1; + } +#endif +#endif + + // Note: Windoze requires binding, even if the port number is 0 + netAddressBits addr = INADDR_ANY; +#if defined(__WIN32__) || defined(_WIN32) +#else + if (port.num() != 0 || ReceivingInterfaceAddr != INADDR_ANY) { +#endif + if (port.num() == 0) addr = ReceivingInterfaceAddr; + 
MAKE_SOCKADDR_IN(name, addr, port.num()); + if (bind(newSocket, (struct sockaddr*)&name, sizeof name) != 0) { + char tmpBuffer[100]; + sprintf(tmpBuffer, "bind() error (port number: %d): ", + ntohs(port.num())); + socketErr(env, tmpBuffer); + closeSocket(newSocket); + return -1; + } +#if defined(__WIN32__) || defined(_WIN32) +#else + } +#endif + + // Set the sending interface for multicasts, if it's not the default: + if (SendingInterfaceAddr != INADDR_ANY) { + struct in_addr addr; + addr.s_addr = SendingInterfaceAddr; + + if (setsockopt(newSocket, IPPROTO_IP, IP_MULTICAST_IF, + (const char*)&addr, sizeof addr) < 0) { + socketErr(env, "error setting outgoing multicast interface: "); + closeSocket(newSocket); + return -1; + } + } + + return newSocket; +} + +Boolean makeSocketNonBlocking(int sock) { +#if defined(__WIN32__) || defined(_WIN32) + unsigned long arg = 1; + return ioctlsocket(sock, FIONBIO, &arg) == 0; +#elif defined(VXWORKS) + int arg = 1; + return ioctl(sock, FIONBIO, (int)&arg) == 0; +#else + int curFlags = fcntl(sock, F_GETFL, 0); + return fcntl(sock, F_SETFL, curFlags|O_NONBLOCK) >= 0; +#endif +} + +Boolean makeSocketBlocking(int sock, unsigned writeTimeoutInMilliseconds) { + Boolean result; +#if defined(__WIN32__) || defined(_WIN32) + unsigned long arg = 0; + result = ioctlsocket(sock, FIONBIO, &arg) == 0; +#elif defined(VXWORKS) + int arg = 0; + result = ioctl(sock, FIONBIO, (int)&arg) == 0; +#else + int curFlags = fcntl(sock, F_GETFL, 0); + result = fcntl(sock, F_SETFL, curFlags&(~O_NONBLOCK)) >= 0; +#endif + + if (writeTimeoutInMilliseconds > 0) { +#ifdef SO_SNDTIMEO + struct timeval tv; + tv.tv_sec = writeTimeoutInMilliseconds/1000; + tv.tv_usec = (writeTimeoutInMilliseconds%1000)*1000; + setsockopt(sock, SOL_SOCKET, SO_SNDTIMEO, (char *)&tv, sizeof tv); +#endif + } + + return result; +} + +int setupStreamSocket(UsageEnvironment& env, + Port port, Boolean makeNonBlocking) { + if (!initializeWinsockIfNecessary()) { + socketErr(env, "Failed to 
initialize 'winsock': "); + return -1; + } + + int newSocket = createSocket(SOCK_STREAM); + if (newSocket < 0) { + socketErr(env, "unable to create stream socket: "); + return newSocket; + } + + int reuseFlag = groupsockPriv(env)->reuseFlag; + reclaimGroupsockPriv(env); + if (setsockopt(newSocket, SOL_SOCKET, SO_REUSEADDR, + (const char*)&reuseFlag, sizeof reuseFlag) < 0) { + socketErr(env, "setsockopt(SO_REUSEADDR) error: "); + closeSocket(newSocket); + return -1; + } + + // SO_REUSEPORT doesn't really make sense for TCP sockets, so we + // normally don't set them. However, if you really want to do this + // #define REUSE_FOR_TCP +#ifdef REUSE_FOR_TCP +#if defined(__WIN32__) || defined(_WIN32) + // Windoze doesn't properly handle SO_REUSEPORT +#else +#ifdef SO_REUSEPORT + if (setsockopt(newSocket, SOL_SOCKET, SO_REUSEPORT, + (const char*)&reuseFlag, sizeof reuseFlag) < 0) { + socketErr(env, "setsockopt(SO_REUSEPORT) error: "); + closeSocket(newSocket); + return -1; + } +#endif +#endif +#endif + + // Note: Windoze requires binding, even if the port number is 0 +#if defined(__WIN32__) || defined(_WIN32) +#else + if (port.num() != 0 || ReceivingInterfaceAddr != INADDR_ANY) { +#endif + MAKE_SOCKADDR_IN(name, ReceivingInterfaceAddr, port.num()); + if (bind(newSocket, (struct sockaddr*)&name, sizeof name) != 0) { + char tmpBuffer[100]; + sprintf(tmpBuffer, "bind() error (port number: %d): ", + ntohs(port.num())); + socketErr(env, tmpBuffer); + closeSocket(newSocket); + return -1; + } +#if defined(__WIN32__) || defined(_WIN32) +#else + } +#endif + + if (makeNonBlocking) { + if (!makeSocketNonBlocking(newSocket)) { + socketErr(env, "failed to make non-blocking: "); + closeSocket(newSocket); + return -1; + } + } + + return newSocket; +} + +int readSocket(UsageEnvironment& env, + int socket, unsigned char* buffer, unsigned bufferSize, + struct sockaddr_in& fromAddress) { + SOCKLEN_T addressSize = sizeof fromAddress; + int bytesRead = recvfrom(socket, (char*)buffer, 
bufferSize, 0, + (struct sockaddr*)&fromAddress, + &addressSize); + if (bytesRead < 0) { + //##### HACK to work around bugs in Linux and Windows: + int err = env.getErrno(); + if (err == 111 /*ECONNREFUSED (Linux)*/ +#if defined(__WIN32__) || defined(_WIN32) + // What a piece of crap Windows is. Sometimes + // recvfrom() returns -1, but with an 'errno' of 0. + // This appears not to be a real error; just treat + // it as if it were a read of zero bytes, and hope + // we don't have to do anything else to 'reset' + // this alleged error: + || err == 0 || err == EWOULDBLOCK +#else + || err == EAGAIN +#endif + || err == 113 /*EHOSTUNREACH (Linux)*/) { // Why does Linux return this for datagram sock? + fromAddress.sin_addr.s_addr = 0; + return 0; + } + //##### END HACK + socketErr(env, "recvfrom() error: "); + } else if (bytesRead == 0) { + // "recvfrom()" on a stream socket can return 0 if the remote end has closed the connection. Treat this as an error: + return -1; + } + + return bytesRead; +} + +Boolean writeSocket(UsageEnvironment& env, + int socket, struct in_addr address, Port port, + u_int8_t ttlArg, + unsigned char* buffer, unsigned bufferSize) { + // Before sending, set the socket's TTL: +#if defined(__WIN32__) || defined(_WIN32) +#define TTL_TYPE int +#else +#define TTL_TYPE u_int8_t +#endif + TTL_TYPE ttl = (TTL_TYPE)ttlArg; + if (setsockopt(socket, IPPROTO_IP, IP_MULTICAST_TTL, + (const char*)&ttl, sizeof ttl) < 0) { + socketErr(env, "setsockopt(IP_MULTICAST_TTL) error: "); + return False; + } + + return writeSocket(env, socket, address, port, buffer, bufferSize); +} + +Boolean writeSocket(UsageEnvironment& env, + int socket, struct in_addr address, Port port, + unsigned char* buffer, unsigned bufferSize) { + do { + MAKE_SOCKADDR_IN(dest, address.s_addr, port.num()); + int bytesSent = sendto(socket, (char*)buffer, bufferSize, 0, + (struct sockaddr*)&dest, sizeof dest); + if (bytesSent != (int)bufferSize) { + char tmpBuf[100]; + sprintf(tmpBuf, 
"writeSocket(%d), sendTo() error: wrote %d bytes instead of %u: ", socket, bytesSent, bufferSize); + socketErr(env, tmpBuf); + break; + } + + return True; + } while (0); + + return False; +} + +static unsigned getBufferSize(UsageEnvironment& env, int bufOptName, + int socket) { + unsigned curSize; + SOCKLEN_T sizeSize = sizeof curSize; + if (getsockopt(socket, SOL_SOCKET, bufOptName, + (char*)&curSize, &sizeSize) < 0) { + socketErr(env, "getBufferSize() error: "); + return 0; + } + + return curSize; +} +unsigned getSendBufferSize(UsageEnvironment& env, int socket) { + return getBufferSize(env, SO_SNDBUF, socket); +} +unsigned getReceiveBufferSize(UsageEnvironment& env, int socket) { + return getBufferSize(env, SO_RCVBUF, socket); +} + +static unsigned setBufferTo(UsageEnvironment& env, int bufOptName, + int socket, unsigned requestedSize) { + SOCKLEN_T sizeSize = sizeof requestedSize; + setsockopt(socket, SOL_SOCKET, bufOptName, (char*)&requestedSize, sizeSize); + + // Get and return the actual, resulting buffer size: + return getBufferSize(env, bufOptName, socket); +} +unsigned setSendBufferTo(UsageEnvironment& env, + int socket, unsigned requestedSize) { + return setBufferTo(env, SO_SNDBUF, socket, requestedSize); +} +unsigned setReceiveBufferTo(UsageEnvironment& env, + int socket, unsigned requestedSize) { + return setBufferTo(env, SO_RCVBUF, socket, requestedSize); +} + +static unsigned increaseBufferTo(UsageEnvironment& env, int bufOptName, + int socket, unsigned requestedSize) { + // First, get the current buffer size. If it's already at least + // as big as what we're requesting, do nothing. 
+ unsigned curSize = getBufferSize(env, bufOptName, socket); + + // Next, try to increase the buffer to the requested size, + // or to some smaller size, if that's not possible: + while (requestedSize > curSize) { + SOCKLEN_T sizeSize = sizeof requestedSize; + if (setsockopt(socket, SOL_SOCKET, bufOptName, + (char*)&requestedSize, sizeSize) >= 0) { + // success + return requestedSize; + } + requestedSize = (requestedSize+curSize)/2; + } + + return getBufferSize(env, bufOptName, socket); +} +unsigned increaseSendBufferTo(UsageEnvironment& env, + int socket, unsigned requestedSize) { + return increaseBufferTo(env, SO_SNDBUF, socket, requestedSize); +} +unsigned increaseReceiveBufferTo(UsageEnvironment& env, + int socket, unsigned requestedSize) { + return increaseBufferTo(env, SO_RCVBUF, socket, requestedSize); +} + +static void clearMulticastAllSocketOption(int socket) { +#ifdef IP_MULTICAST_ALL + // This option is defined in modern versions of Linux to overcome a bug in the Linux kernel's default behavior. + // When set to 0, it ensures that we receive only packets that were sent to the specified IP multicast address, + // even if some other process on the same system has joined a different multicast group with the same port number. + int multicastAll = 0; + (void)setsockopt(socket, IPPROTO_IP, IP_MULTICAST_ALL, (void*)&multicastAll, sizeof multicastAll); + // Ignore the call's result. 
Should it fail, we'll still receive packets (just perhaps more than intended) +#endif +} + +Boolean socketJoinGroup(UsageEnvironment& env, int socket, + netAddressBits groupAddress){ + if (!IsMulticastAddress(groupAddress)) return True; // ignore this case + + struct ip_mreq imr; + imr.imr_multiaddr.s_addr = groupAddress; + imr.imr_interface.s_addr = ReceivingInterfaceAddr; + if (setsockopt(socket, IPPROTO_IP, IP_ADD_MEMBERSHIP, + (const char*)&imr, sizeof (struct ip_mreq)) < 0) { +#if defined(__WIN32__) || defined(_WIN32) + if (env.getErrno() != 0) { + // That piece-of-shit toy operating system (Windows) sometimes lies + // about setsockopt() failing! +#endif + socketErr(env, "setsockopt(IP_ADD_MEMBERSHIP) error: "); + return False; +#if defined(__WIN32__) || defined(_WIN32) + } +#endif + } + + clearMulticastAllSocketOption(socket); + + return True; +} + +Boolean socketLeaveGroup(UsageEnvironment&, int socket, + netAddressBits groupAddress) { + if (!IsMulticastAddress(groupAddress)) return True; // ignore this case + + struct ip_mreq imr; + imr.imr_multiaddr.s_addr = groupAddress; + imr.imr_interface.s_addr = ReceivingInterfaceAddr; + if (setsockopt(socket, IPPROTO_IP, IP_DROP_MEMBERSHIP, + (const char*)&imr, sizeof (struct ip_mreq)) < 0) { + return False; + } + + return True; +} + +// The source-specific join/leave operations require special setsockopt() +// commands, and a special structure (ip_mreq_source). 
If the include files +// didn't define these, we do so here: +#if !defined(IP_ADD_SOURCE_MEMBERSHIP) +struct ip_mreq_source { + struct in_addr imr_multiaddr; /* IP multicast address of group */ + struct in_addr imr_sourceaddr; /* IP address of source */ + struct in_addr imr_interface; /* local IP address of interface */ +}; +#endif + +#ifndef IP_ADD_SOURCE_MEMBERSHIP + +#ifdef LINUX +#define IP_ADD_SOURCE_MEMBERSHIP 39 +#define IP_DROP_SOURCE_MEMBERSHIP 40 +#else +#define IP_ADD_SOURCE_MEMBERSHIP 25 +#define IP_DROP_SOURCE_MEMBERSHIP 26 +#endif + +#endif + +Boolean socketJoinGroupSSM(UsageEnvironment& env, int socket, + netAddressBits groupAddress, + netAddressBits sourceFilterAddr) { + if (!IsMulticastAddress(groupAddress)) return True; // ignore this case + + struct ip_mreq_source imr; +#ifdef __ANDROID__ + imr.imr_multiaddr = groupAddress; + imr.imr_sourceaddr = sourceFilterAddr; + imr.imr_interface = ReceivingInterfaceAddr; +#else + imr.imr_multiaddr.s_addr = groupAddress; + imr.imr_sourceaddr.s_addr = sourceFilterAddr; + imr.imr_interface.s_addr = ReceivingInterfaceAddr; +#endif + if (setsockopt(socket, IPPROTO_IP, IP_ADD_SOURCE_MEMBERSHIP, + (const char*)&imr, sizeof (struct ip_mreq_source)) < 0) { + socketErr(env, "setsockopt(IP_ADD_SOURCE_MEMBERSHIP) error: "); + return False; + } + + clearMulticastAllSocketOption(socket); + + return True; +} + +Boolean socketLeaveGroupSSM(UsageEnvironment& /*env*/, int socket, + netAddressBits groupAddress, + netAddressBits sourceFilterAddr) { + if (!IsMulticastAddress(groupAddress)) return True; // ignore this case + + struct ip_mreq_source imr; +#ifdef __ANDROID__ + imr.imr_multiaddr = groupAddress; + imr.imr_sourceaddr = sourceFilterAddr; + imr.imr_interface = ReceivingInterfaceAddr; +#else + imr.imr_multiaddr.s_addr = groupAddress; + imr.imr_sourceaddr.s_addr = sourceFilterAddr; + imr.imr_interface.s_addr = ReceivingInterfaceAddr; +#endif + if (setsockopt(socket, IPPROTO_IP, IP_DROP_SOURCE_MEMBERSHIP, + (const 
char*)&imr, sizeof (struct ip_mreq_source)) < 0) { + return False; + } + + return True; +} + +static Boolean getSourcePort0(int socket, portNumBits& resultPortNum/*host order*/) { + sockaddr_in test; test.sin_port = 0; + SOCKLEN_T len = sizeof test; + if (getsockname(socket, (struct sockaddr*)&test, &len) < 0) return False; + + resultPortNum = ntohs(test.sin_port); + return True; +} + +Boolean getSourcePort(UsageEnvironment& env, int socket, Port& port) { + portNumBits portNum = 0; + if (!getSourcePort0(socket, portNum) || portNum == 0) { + // Hack - call bind(), then try again: + MAKE_SOCKADDR_IN(name, INADDR_ANY, 0); + bind(socket, (struct sockaddr*)&name, sizeof name); + + if (!getSourcePort0(socket, portNum) || portNum == 0) { + socketErr(env, "getsockname() error: "); + return False; + } + } + + port = Port(portNum); + return True; +} + +static Boolean badAddressForUs(netAddressBits addr) { + // Check for some possible erroneous addresses: + netAddressBits nAddr = htonl(addr); + return (nAddr == 0x7F000001 /* 127.0.0.1 */ + || nAddr == 0 + || nAddr == (netAddressBits)(~0)); +} + +Boolean loopbackWorks = 1; + +netAddressBits ourIPAddress(UsageEnvironment& env) { + static netAddressBits ourAddress = 0; + int sock = -1; + struct in_addr testAddr; + + if (ReceivingInterfaceAddr != INADDR_ANY) { + // Hack: If we were told to receive on a specific interface address, then + // define this to be our ip address: + ourAddress = ReceivingInterfaceAddr; + } + + if (ourAddress == 0) { + // We need to find our source address + struct sockaddr_in fromAddr; + fromAddr.sin_addr.s_addr = 0; + + // Get our address by sending a (0-TTL) multicast packet, + // receiving it, and looking at the source address used. + // (This is kinda bogus, but it provides the best guarantee + // that other nodes will think our address is the same as we do.) 
+ do { + loopbackWorks = 0; // until we learn otherwise + + testAddr.s_addr = our_inet_addr("228.67.43.91"); // arbitrary + Port testPort(15947); // ditto + + sock = setupDatagramSocket(env, testPort); + if (sock < 0) break; + + if (!socketJoinGroup(env, sock, testAddr.s_addr)) break; + + unsigned char testString[] = "hostIdTest"; + unsigned testStringLength = sizeof testString; + + if (!writeSocket(env, sock, testAddr, testPort, 0, + testString, testStringLength)) break; + + // Block until the socket is readable (with a 5-second timeout): + fd_set rd_set; + FD_ZERO(&rd_set); + FD_SET((unsigned)sock, &rd_set); + const unsigned numFds = sock+1; + struct timeval timeout; + timeout.tv_sec = 5; + timeout.tv_usec = 0; + int result = select(numFds, &rd_set, NULL, NULL, &timeout); + if (result <= 0) break; + + unsigned char readBuffer[20]; + int bytesRead = readSocket(env, sock, + readBuffer, sizeof readBuffer, + fromAddr); + if (bytesRead != (int)testStringLength + || strncmp((char*)readBuffer, (char*)testString, testStringLength) != 0) { + break; + } + + // We use this packet's source address, if it's good: + loopbackWorks = !badAddressForUs(fromAddr.sin_addr.s_addr); + } while (0); + + if (sock >= 0) { + socketLeaveGroup(env, sock, testAddr.s_addr); + closeSocket(sock); + } + + if (!loopbackWorks) do { + // We couldn't find our address using multicast loopback, + // so try instead to look it up directly - by first getting our host name, and then resolving this host name + char hostname[100]; + hostname[0] = '\0'; + int result = gethostname(hostname, sizeof hostname); + if (result != 0 || hostname[0] == '\0') { + env.setResultErrMsg("initial gethostname() failed"); + break; + } + + // Try to resolve "hostname" to an IP address: + NetAddressList addresses(hostname); + NetAddressList::Iterator iter(addresses); + NetAddress const* address; + + // Take the first address that's not bad: + netAddressBits addr = 0; + while ((address = iter.nextAddress()) != NULL) { + 
netAddressBits a = *(netAddressBits*)(address->data()); + if (!badAddressForUs(a)) { + addr = a; + break; + } + } + + // Assign the address that we found to "fromAddr" (as if the 'loopback' method had worked), to simplify the code below: + fromAddr.sin_addr.s_addr = addr; + } while (0); + + // Make sure we have a good address: + netAddressBits from = fromAddr.sin_addr.s_addr; + if (badAddressForUs(from)) { + char tmp[100]; + sprintf(tmp, "This computer has an invalid IP address: %s", AddressString(from).val()); + env.setResultMsg(tmp); + from = 0; + } + + ourAddress = from; + + // Use our newly-discovered IP address, and the current time, + // to initialize the random number generator's seed: + struct timeval timeNow; + gettimeofday(&timeNow, NULL); + unsigned seed = ourAddress^timeNow.tv_sec^timeNow.tv_usec; + our_srandom(seed); + } + return ourAddress; +} + +netAddressBits chooseRandomIPv4SSMAddress(UsageEnvironment& env) { + // First, a hack to ensure that our random number generator is seeded: + (void) ourIPAddress(env); + + // Choose a random address in the range [232.0.1.0, 232.255.255.255) + // i.e., [0xE8000100, 0xE8FFFFFF) + netAddressBits const first = 0xE8000100, lastPlus1 = 0xE8FFFFFF; + netAddressBits const range = lastPlus1 - first; + + return ntohl(first + ((netAddressBits)our_random())%range); +} + +char const* timestampString() { + struct timeval tvNow; + gettimeofday(&tvNow, NULL); + +#if !defined(_WIN32_WCE) + static char timeString[9]; // holds hh:mm:ss plus trailing '\0' + char const* ctimeResult = ctime((time_t*)&tvNow.tv_sec); + if (ctimeResult == NULL) { + sprintf(timeString, "??:??:??"); + } else { + char const* from = &ctimeResult[11]; + int i; + for (i = 0; i < 8; ++i) { + timeString[i] = from[i]; + } + timeString[i] = '\0'; + } +#else + // WinCE apparently doesn't have "ctime()", so instead, construct + // a timestamp string just using the integer and fractional parts + // of "tvNow": + static char timeString[50]; + sprintf(timeString, 
"%lu.%06ld", tvNow.tv_sec, tvNow.tv_usec); +#endif + + return (char const*)&timeString; +} + +#if defined(__WIN32__) || defined(_WIN32) +// For Windoze, we need to implement our own gettimeofday() + +// used to make sure that static variables in gettimeofday() aren't initialized simultaneously by multiple threads +static LONG initializeLock_gettimeofday = 0; + +#if !defined(_WIN32_WCE) +#include +#endif + +int gettimeofday(struct timeval* tp, int* /*tz*/) { + static LARGE_INTEGER tickFrequency, epochOffset; + + static Boolean isInitialized = False; + + LARGE_INTEGER tickNow; + +#if !defined(_WIN32_WCE) + QueryPerformanceCounter(&tickNow); +#else + tickNow.QuadPart = GetTickCount(); +#endif + + if (!isInitialized) { + if(1 == InterlockedIncrement(&initializeLock_gettimeofday)) { +#if !defined(_WIN32_WCE) + // For our first call, use "ftime()", so that we get a time with a proper epoch. + // For subsequent calls, use "QueryPerformanceCount()", because it's more fine-grain. + struct timeb tb; + ftime(&tb); + tp->tv_sec = tb.time; + tp->tv_usec = 1000*tb.millitm; + + // Also get our counter frequency: + QueryPerformanceFrequency(&tickFrequency); +#else + /* FILETIME of Jan 1 1970 00:00:00. 
*/ + const LONGLONG epoch = 116444736000000000LL; + FILETIME fileTime; + LARGE_INTEGER time; + GetSystemTimeAsFileTime(&fileTime); + + time.HighPart = fileTime.dwHighDateTime; + time.LowPart = fileTime.dwLowDateTime; + + // convert to from 100ns time to unix timestamp in seconds, 1000*1000*10 + tp->tv_sec = (long)((time.QuadPart - epoch) / 10000000L); + + /* + GetSystemTimeAsFileTime has just a seconds resolution, + thats why wince-version of gettimeofday is not 100% accurate, usec accuracy would be calculated like this: + // convert 100 nanoseconds to usec + tp->tv_usec= (long)((time.QuadPart - epoch)%10000000L) / 10L; + */ + tp->tv_usec = 0; + + // resolution of GetTickCounter() is always milliseconds + tickFrequency.QuadPart = 1000; +#endif + // compute an offset to add to subsequent counter times, so we get a proper epoch: + epochOffset.QuadPart + = tp->tv_sec * tickFrequency.QuadPart + (tp->tv_usec * tickFrequency.QuadPart) / 1000000L - tickNow.QuadPart; + + // next caller can use ticks for time calculation + isInitialized = True; + return 0; + } else { + InterlockedDecrement(&initializeLock_gettimeofday); + // wait until first caller has initialized static values + while(!isInitialized){ + Sleep(1); + } + } + } + + // adjust our tick count so that we get a proper epoch: + tickNow.QuadPart += epochOffset.QuadPart; + + tp->tv_sec = (long)(tickNow.QuadPart / tickFrequency.QuadPart); + tp->tv_usec = (long)(((tickNow.QuadPart % tickFrequency.QuadPart) * 1000000L) / tickFrequency.QuadPart); + + return 0; +} +#endif diff --git a/AnyCore/lib_rtsp/groupsock/IOHandlers.cpp b/AnyCore/lib_rtsp/groupsock/IOHandlers.cpp new file mode 100644 index 0000000..abd8664 --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/IOHandlers.cpp @@ -0,0 +1,46 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at 
your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "mTunnel" multicast access service +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// IO event handlers +// Implementation + +#include "IOHandlers.hh" +#include "TunnelEncaps.hh" + +//##### TEMP: Use a single buffer, sized for UDP tunnels: +//##### This assumes that the I/O handlers are non-reentrant +static unsigned const maxPacketLength = 50*1024; // bytes + // This is usually overkill, because UDP packets are usually no larger + // than the typical Ethernet MTU (1500 bytes). However, I've seen + // reports of Windows Media Servers sending UDP packets as large as + // 27 kBytes. These will probably undego lots of IP-level + // fragmentation, but that occurs below us. We just have to hope that + // fragments don't get lost. 
+static unsigned const ioBufferSize + = maxPacketLength + TunnelEncapsulationTrailerMaxSize; +static unsigned char ioBuffer[ioBufferSize]; + + +void socketReadHandler(Socket* sock, int /*mask*/) { + unsigned bytesRead; + struct sockaddr_in fromAddress; + UsageEnvironment& saveEnv = sock->env(); + // because handleRead(), if it fails, may delete "sock" + if (!sock->handleRead(ioBuffer, ioBufferSize, bytesRead, fromAddress)) { + saveEnv.reportBackgroundError(); + } +} diff --git a/AnyCore/lib_rtsp/groupsock/Makefile b/AnyCore/lib_rtsp/groupsock/Makefile new file mode 100644 index 0000000..0f7febb --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/Makefile @@ -0,0 +1,83 @@ +INCLUDES = -Iinclude -I../UsageEnvironment/include +PREFIX = /usr/local +LIBDIR = $(PREFIX)/lib +##### Change the following for your environment: +#------------------------------------------------------------------------------------------------- +# +# Create BY zhang zhiwei 2014.07.03 +# sunfrank2012@gmail.com +# +#------------------------------------------------------------------------------------------------- +# NDK ROOT +ANDROID_NDK_ROOT=/cygdrive/c/Android/NDK/android-ndk-r8e/ +# For sysroot | arm-linux-androideabi-gcc can't find cygwin path +NDK_ROOT=c:/Android/NDK/android-ndk-r8e/ +# For x86 +PREBUILT=$(ANDROID_NDK_ROOT)/toolchains/arm-linux-androideabi-4.6/prebuilt/windows +# Fro x86_64 +#PREBUILT=$(ANDROID_NDK_ROOT)/toolchains/arm-linux-androideabi-4.6/prebuilt/windows-x86_64 +PLATFORM=android-14 + +CROSS_COMPILE= $(PREBUILT)/bin/arm-linux-androideabi- +COMPILE_OPTS = $(INCLUDES) -fPIC -DANDROID -std=c99 -mfpu=neon -mfloat-abi=softfp -I. 
-O2 -DXLOCALE_NOT_USED -DANDROID -DSOCKLEN_T=socklen_t -DNO_SSTREAM=1 -D_LARGEFILE_SOURCE=1 -D_FILE_OFFSET_BITS=64 -DNULL=0 --sysroot=${NDK_ROOT}/platforms/${PLATFORM}/arch-arm/ -I${ANDROID_NDK_ROOT}/platforms/${PLATFORM}/arch-arm/usr/include +C = c +C_COMPILER = $(CROSS_COMPILE)gcc +C_FLAGS = $(COMPILE_OPTS) +CPP = cpp +CPLUSPLUS_COMPILER = $(CROSS_COMPILE)g++ +CPLUSPLUS_FLAGS = $(COMPILE_OPTS) -Wall -DBSD=1 -fexceptions +OBJ = o +LINK = $(CROSS_COMPILE)g++ -o +LINK_OPTS = +CONSOLE_LINK_OPTS = $(LINK_OPTS) +LIBRARY_LINK = $(CROSS_COMPILE)ar cr +LIBRARY_LINK_OPTS = $(LINK_OPTS) +LIB_SUFFIX = a +LIBS_FOR_CONSOLE_APPLICATION =-lc -lm -lz -L./ -lsupc++ -fexceptions -L${ANDROID_NDK_ROOT}/platforms/${PLATFORM}/arch-arm/usr/lib -lstdc++ +LIBS_FOR_GUI_APPLICATION = +EXE = +##### End of variables to change + +NAME = libgroupsock +ALL = $(NAME).$(LIB_SUFFIX) +all: $(ALL) + +.$(C).$(OBJ): + $(C_COMPILER) -c $(C_FLAGS) $< +.$(CPP).$(OBJ): + $(CPLUSPLUS_COMPILER) -c $(CPLUSPLUS_FLAGS) $< + +GROUPSOCK_LIB_OBJS = GroupsockHelper.$(OBJ) GroupEId.$(OBJ) inet.$(OBJ) Groupsock.$(OBJ) NetInterface.$(OBJ) NetAddress.$(OBJ) IOHandlers.$(OBJ) + +GroupsockHelper.$(CPP): include/GroupsockHelper.hh +include/GroupsockHelper.hh: include/NetAddress.hh +include/NetAddress.hh: include/NetCommon.h +GroupEId.$(CPP): include/GroupEId.hh +include/GroupEId.hh: include/NetAddress.hh +inet.$(C): include/NetCommon.h +Groupsock.$(CPP): include/Groupsock.hh include/GroupsockHelper.hh include/TunnelEncaps.hh +include/Groupsock.hh: include/groupsock_version.hh include/NetInterface.hh include/GroupEId.hh +include/NetInterface.hh: include/NetAddress.hh +include/TunnelEncaps.hh: include/NetAddress.hh +NetInterface.$(CPP): include/NetInterface.hh include/GroupsockHelper.hh +NetAddress.$(CPP): include/NetAddress.hh include/GroupsockHelper.hh +IOHandlers.$(CPP): include/IOHandlers.hh include/TunnelEncaps.hh + +libgroupsock.$(LIB_SUFFIX): $(GROUPSOCK_LIB_OBJS) \ + $(PLATFORM_SPECIFIC_LIB_OBJS) + $(LIBRARY_LINK)$@ 
$(LIBRARY_LINK_OPTS) \ + $(GROUPSOCK_LIB_OBJS) + +clean: + -rm -rf *.$(OBJ) $(ALL) core *.core *~ include/*~ + +install: install1 $(INSTALL2) +install1: libgroupsock.$(LIB_SUFFIX) + install -d $(DESTDIR)$(PREFIX)/include/groupsock $(DESTDIR)$(LIBDIR) + install -m 644 include/*.hh include/*.h $(DESTDIR)$(PREFIX)/include/groupsock + install -m 644 libgroupsock.$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR) +install_shared_libraries: libgroupsock.$(LIB_SUFFIX) + ln -s libgroupsock.$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/libgroupsock.$(SHORT_LIB_SUFFIX) + ln -s libgroupsock.$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/libgroupsock.so + +##### Any additional, platform-specific rules come here: diff --git a/AnyCore/lib_rtsp/groupsock/Makefile.head b/AnyCore/lib_rtsp/groupsock/Makefile.head new file mode 100644 index 0000000..219f685 --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/Makefile.head @@ -0,0 +1,4 @@ +INCLUDES = -Iinclude -I../UsageEnvironment/include +PREFIX = /usr/local +LIBDIR = $(PREFIX)/lib +##### Change the following for your environment: diff --git a/AnyCore/lib_rtsp/groupsock/Makefile.tail b/AnyCore/lib_rtsp/groupsock/Makefile.tail new file mode 100644 index 0000000..23d98dc --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/Makefile.tail @@ -0,0 +1,45 @@ +##### End of variables to change + +NAME = libgroupsock +ALL = $(NAME).$(LIB_SUFFIX) +all: $(ALL) + +.$(C).$(OBJ): + $(C_COMPILER) -c $(C_FLAGS) $< +.$(CPP).$(OBJ): + $(CPLUSPLUS_COMPILER) -c $(CPLUSPLUS_FLAGS) $< + +GROUPSOCK_LIB_OBJS = GroupsockHelper.$(OBJ) GroupEId.$(OBJ) inet.$(OBJ) Groupsock.$(OBJ) NetInterface.$(OBJ) NetAddress.$(OBJ) IOHandlers.$(OBJ) + +GroupsockHelper.$(CPP): include/GroupsockHelper.hh +include/GroupsockHelper.hh: include/NetAddress.hh +include/NetAddress.hh: include/NetCommon.h +GroupEId.$(CPP): include/GroupEId.hh +include/GroupEId.hh: include/NetAddress.hh +inet.$(C): include/NetCommon.h +Groupsock.$(CPP): include/Groupsock.hh include/GroupsockHelper.hh include/TunnelEncaps.hh +include/Groupsock.hh: 
include/groupsock_version.hh include/NetInterface.hh include/GroupEId.hh +include/NetInterface.hh: include/NetAddress.hh +include/TunnelEncaps.hh: include/NetAddress.hh +NetInterface.$(CPP): include/NetInterface.hh include/GroupsockHelper.hh +NetAddress.$(CPP): include/NetAddress.hh include/GroupsockHelper.hh +IOHandlers.$(CPP): include/IOHandlers.hh include/TunnelEncaps.hh + +libgroupsock.$(LIB_SUFFIX): $(GROUPSOCK_LIB_OBJS) \ + $(PLATFORM_SPECIFIC_LIB_OBJS) + $(LIBRARY_LINK)$@ $(LIBRARY_LINK_OPTS) \ + $(GROUPSOCK_LIB_OBJS) + +clean: + -rm -rf *.$(OBJ) $(ALL) core *.core *~ include/*~ + +install: install1 $(INSTALL2) +install1: libgroupsock.$(LIB_SUFFIX) + install -d $(DESTDIR)$(PREFIX)/include/groupsock $(DESTDIR)$(LIBDIR) + install -m 644 include/*.hh include/*.h $(DESTDIR)$(PREFIX)/include/groupsock + install -m 644 libgroupsock.$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR) +install_shared_libraries: libgroupsock.$(LIB_SUFFIX) + ln -s libgroupsock.$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/libgroupsock.$(SHORT_LIB_SUFFIX) + ln -s libgroupsock.$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/libgroupsock.so + +##### Any additional, platform-specific rules come here: diff --git a/AnyCore/lib_rtsp/groupsock/NetAddress.cpp b/AnyCore/lib_rtsp/groupsock/NetAddress.cpp new file mode 100644 index 0000000..d250643 --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/NetAddress.cpp @@ -0,0 +1,312 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "mTunnel" multicast access service +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Network Addresses +// Implementation + +#include "NetAddress.hh" +#include "GroupsockHelper.hh" + +#include +#include +#if defined(__WIN32__) || defined(_WIN32) +#define USE_GETHOSTBYNAME 1 /*because at least some Windows don't have getaddrinfo()*/ +#else +#ifndef INADDR_NONE +#define INADDR_NONE 0xFFFFFFFF +#endif +#endif + +////////// NetAddress ////////// + +NetAddress::NetAddress(u_int8_t const* data, unsigned length) { + assign(data, length); +} + +NetAddress::NetAddress(unsigned length) { + fData = new u_int8_t[length]; + if (fData == NULL) { + fLength = 0; + return; + } + + for (unsigned i = 0; i < length; ++i) fData[i] = 0; + fLength = length; +} + +NetAddress::NetAddress(NetAddress const& orig) { + assign(orig.data(), orig.length()); +} + +NetAddress& NetAddress::operator=(NetAddress const& rightSide) { + if (&rightSide != this) { + clean(); + assign(rightSide.data(), rightSide.length()); + } + return *this; +} + +NetAddress::~NetAddress() { + clean(); +} + +void NetAddress::assign(u_int8_t const* data, unsigned length) { + fData = new u_int8_t[length]; + if (fData == NULL) { + fLength = 0; + return; + } + + for (unsigned i = 0; i < length; ++i) fData[i] = data[i]; + fLength = length; +} + +void NetAddress::clean() { + delete[] fData; fData = NULL; + fLength = 0; +} + + +////////// NetAddressList ////////// + +NetAddressList::NetAddressList(char const* hostname) + : fNumAddresses(0), fAddressArray(NULL) { + // First, check whether "hostname" is an IP address string: + netAddressBits addr = our_inet_addr((char*)hostname); + if (addr != INADDR_NONE) { + // Yes, it was an IP address string. 
Return a 1-element list with this address: + fNumAddresses = 1; + fAddressArray = new NetAddress*[fNumAddresses]; + if (fAddressArray == NULL) return; + + fAddressArray[0] = new NetAddress((u_int8_t*)&addr, sizeof (netAddressBits)); + return; + } + + // "hostname" is not an IP address string; try resolving it as a real host name instead: +#if defined(USE_GETHOSTBYNAME) || defined(VXWORKS) + struct hostent* host; +#if defined(VXWORKS) + char hostentBuf[512]; + + host = (struct hostent*)resolvGetHostByName((char*)hostname, (char*)&hostentBuf, sizeof hostentBuf); +#else + host = gethostbyname((char*)hostname); +#endif + if (host == NULL || host->h_length != 4 || host->h_addr_list == NULL) return; // no luck + + u_int8_t const** const hAddrPtr = (u_int8_t const**)host->h_addr_list; + // First, count the number of addresses: + u_int8_t const** hAddrPtr1 = hAddrPtr; + while (*hAddrPtr1 != NULL) { + ++fNumAddresses; + ++hAddrPtr1; + } + + // Next, set up the list: + fAddressArray = new NetAddress*[fNumAddresses]; + if (fAddressArray == NULL) return; + + for (unsigned i = 0; i < fNumAddresses; ++i) { + fAddressArray[i] = new NetAddress(hAddrPtr[i], host->h_length); + } +#else + // Use "getaddrinfo()" (rather than the older, deprecated "gethostbyname()"): + struct addrinfo addrinfoHints; + memset(&addrinfoHints, 0, sizeof addrinfoHints); + addrinfoHints.ai_family = AF_INET; // For now, we're interested in IPv4 addresses only + struct addrinfo* addrinfoResultPtr = NULL; + int result = getaddrinfo(hostname, NULL, &addrinfoHints, &addrinfoResultPtr); + if (result != 0 || addrinfoResultPtr == NULL) return; // no luck + + // First, count the number of addresses: + const struct addrinfo* p = addrinfoResultPtr; + while (p != NULL) { + if (p->ai_addrlen < 4) continue; // sanity check: skip over addresses that are too small + ++fNumAddresses; + p = p->ai_next; + } + + // Next, set up the list: + fAddressArray = new NetAddress*[fNumAddresses]; + if (fAddressArray == NULL) return; + + 
unsigned i = 0; + p = addrinfoResultPtr; + while (p != NULL) { + if (p->ai_addrlen < 4) continue; + fAddressArray[i++] = new NetAddress((u_int8_t const*)&(((struct sockaddr_in*)p->ai_addr)->sin_addr.s_addr), 4); + p = p->ai_next; + } + + // Finally, free the data that we had allocated by calling "getaddrinfo()": + freeaddrinfo(addrinfoResultPtr); +#endif +} + +NetAddressList::NetAddressList(NetAddressList const& orig) { + assign(orig.numAddresses(), orig.fAddressArray); +} + +NetAddressList& NetAddressList::operator=(NetAddressList const& rightSide) { + if (&rightSide != this) { + clean(); + assign(rightSide.numAddresses(), rightSide.fAddressArray); + } + return *this; +} + +NetAddressList::~NetAddressList() { + clean(); +} + +void NetAddressList::assign(unsigned numAddresses, NetAddress** addressArray) { + fAddressArray = new NetAddress*[numAddresses]; + if (fAddressArray == NULL) { + fNumAddresses = 0; + return; + } + + for (unsigned i = 0; i < numAddresses; ++i) { + fAddressArray[i] = new NetAddress(*addressArray[i]); + } + fNumAddresses = numAddresses; +} + +void NetAddressList::clean() { + while (fNumAddresses-- > 0) { + delete fAddressArray[fNumAddresses]; + } + delete[] fAddressArray; fAddressArray = NULL; +} + +NetAddress const* NetAddressList::firstAddress() const { + if (fNumAddresses == 0) return NULL; + + return fAddressArray[0]; +} + +////////// NetAddressList::Iterator ////////// +NetAddressList::Iterator::Iterator(NetAddressList const& addressList) + : fAddressList(addressList), fNextIndex(0) {} + +NetAddress const* NetAddressList::Iterator::nextAddress() { + if (fNextIndex >= fAddressList.numAddresses()) return NULL; // no more + return fAddressList.fAddressArray[fNextIndex++]; +} + + +////////// Port ////////// + +Port::Port(portNumBits num /* in host byte order */) { + fPortNum = htons(num); +} + +UsageEnvironment& operator<<(UsageEnvironment& s, const Port& p) { + return s << ntohs(p.num()); +} + + +////////// AddressPortLookupTable ////////// + 
+AddressPortLookupTable::AddressPortLookupTable() + : fTable(HashTable::create(3)) { // three-word keys are used +} + +AddressPortLookupTable::~AddressPortLookupTable() { + delete fTable; +} + +void* AddressPortLookupTable::Add(netAddressBits address1, + netAddressBits address2, + Port port, void* value) { + int key[3]; + key[0] = (int)address1; + key[1] = (int)address2; + key[2] = (int)port.num(); + return fTable->Add((char*)key, value); +} + +void* AddressPortLookupTable::Lookup(netAddressBits address1, + netAddressBits address2, + Port port) { + int key[3]; + key[0] = (int)address1; + key[1] = (int)address2; + key[2] = (int)port.num(); + return fTable->Lookup((char*)key); +} + +Boolean AddressPortLookupTable::Remove(netAddressBits address1, + netAddressBits address2, + Port port) { + int key[3]; + key[0] = (int)address1; + key[1] = (int)address2; + key[2] = (int)port.num(); + return fTable->Remove((char*)key); +} + +AddressPortLookupTable::Iterator::Iterator(AddressPortLookupTable& table) + : fIter(HashTable::Iterator::create(*(table.fTable))) { +} + +AddressPortLookupTable::Iterator::~Iterator() { + delete fIter; +} + +void* AddressPortLookupTable::Iterator::next() { + char const* key; // dummy + return fIter->next(key); +} + + +////////// isMulticastAddress() implementation ////////// + +Boolean IsMulticastAddress(netAddressBits address) { + // Note: We return False for addresses in the range 224.0.0.0 + // through 224.0.0.255, because these are non-routable + // Note: IPv4-specific ##### + netAddressBits addressInNetworkOrder = htonl(address); + return addressInNetworkOrder > 0xE00000FF && + addressInNetworkOrder <= 0xEFFFFFFF; +} + + +////////// AddressString implementation ////////// + +AddressString::AddressString(struct sockaddr_in const& addr) { + init(addr.sin_addr.s_addr); +} + +AddressString::AddressString(struct in_addr const& addr) { + init(addr.s_addr); +} + +AddressString::AddressString(netAddressBits addr) { + init(addr); +} + +void 
AddressString::init(netAddressBits addr) { + fVal = new char[16]; // large enough for "abc.def.ghi.jkl" + netAddressBits addrNBO = htonl(addr); // make sure we have a value in a known byte order: big endian + sprintf(fVal, "%u.%u.%u.%u", (addrNBO>>24)&0xFF, (addrNBO>>16)&0xFF, (addrNBO>>8)&0xFF, addrNBO&0xFF); +} + +AddressString::~AddressString() { + delete[] fVal; +} diff --git a/AnyCore/lib_rtsp/groupsock/NetInterface.cpp b/AnyCore/lib_rtsp/groupsock/NetInterface.cpp new file mode 100644 index 0000000..a1d4592 --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/NetInterface.cpp @@ -0,0 +1,174 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "mTunnel" multicast access service +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Network Interfaces +// Implementation + +#include "NetInterface.hh" +#include "GroupsockHelper.hh" + +#ifndef NO_SSTREAM +#include +#endif +#include + +////////// NetInterface ////////// + +UsageEnvironment* NetInterface::DefaultUsageEnvironment = NULL; + +NetInterface::NetInterface() { +} + +NetInterface::~NetInterface() { +} + + +////////// NetInterface ////////// + +DirectedNetInterface::DirectedNetInterface() { +} + +DirectedNetInterface::~DirectedNetInterface() { +} + + +////////// DirectedNetInterfaceSet ////////// + +DirectedNetInterfaceSet::DirectedNetInterfaceSet() + : fTable(HashTable::create(ONE_WORD_HASH_KEYS)) { +} + +DirectedNetInterfaceSet::~DirectedNetInterfaceSet() { + delete fTable; +} + +DirectedNetInterface* +DirectedNetInterfaceSet::Add(DirectedNetInterface const* interf) { + return (DirectedNetInterface*) fTable->Add((char*)interf, (void*)interf); +} + +Boolean +DirectedNetInterfaceSet::Remove(DirectedNetInterface const* interf) { + return fTable->Remove((char*)interf); +} + +DirectedNetInterfaceSet::Iterator:: +Iterator(DirectedNetInterfaceSet& interfaces) + : fIter(HashTable::Iterator::create(*(interfaces.fTable))) { +} + +DirectedNetInterfaceSet::Iterator::~Iterator() { + delete fIter; +} + +DirectedNetInterface* DirectedNetInterfaceSet::Iterator::next() { + char const* key; // dummy + return (DirectedNetInterface*) fIter->next(key); +}; + + +////////// Socket ////////// + +int Socket::DebugLevel = 1; // default value + +Socket::Socket(UsageEnvironment& env, Port port) + : fEnv(DefaultUsageEnvironment != NULL ? 
*DefaultUsageEnvironment : env), fPort(port) { + fSocketNum = setupDatagramSocket(fEnv, port); +} + +void Socket::reset() { + closeSocket(fSocketNum); + fSocketNum = -1; +} + +Socket::~Socket() { + reset(); +} + +Boolean Socket::changePort(Port newPort) { + int oldSocketNum = fSocketNum; + unsigned oldReceiveBufferSize = getReceiveBufferSize(fEnv, fSocketNum); + unsigned oldSendBufferSize = getSendBufferSize(fEnv, fSocketNum); + closeSocket(fSocketNum); + + fSocketNum = setupDatagramSocket(fEnv, newPort); + if (fSocketNum < 0) { + fEnv.taskScheduler().turnOffBackgroundReadHandling(oldSocketNum); + return False; + } + + setReceiveBufferTo(fEnv, fSocketNum, oldReceiveBufferSize); + setSendBufferTo(fEnv, fSocketNum, oldSendBufferSize); + if (fSocketNum != oldSocketNum) { // the socket number has changed, so move any event handling for it: + fEnv.taskScheduler().moveSocketHandling(oldSocketNum, fSocketNum); + } + return True; +} + +UsageEnvironment& operator<<(UsageEnvironment& s, const Socket& sock) { + return s << timestampString() << " Socket(" << sock.socketNum() << ")"; +} + +////////// SocketLookupTable ////////// + +SocketLookupTable::SocketLookupTable() + : fTable(HashTable::create(ONE_WORD_HASH_KEYS)) { +} + +SocketLookupTable::~SocketLookupTable() { + delete fTable; +} + +Socket* SocketLookupTable::Fetch(UsageEnvironment& env, Port port, + Boolean& isNew) { + isNew = False; + Socket* sock; + do { + sock = (Socket*) fTable->Lookup((char*)(long)(port.num())); + if (sock == NULL) { // we need to create one: + sock = CreateNew(env, port); + if (sock == NULL || sock->socketNum() < 0) break; + + fTable->Add((char*)(long)(port.num()), (void*)sock); + isNew = True; + } + + return sock; + } while (0); + + delete sock; + return NULL; +} + +Boolean SocketLookupTable::Remove(Socket const* sock) { + return fTable->Remove( (char*)(long)(sock->port().num()) ); +} + +////////// NetInterfaceTrafficStats ////////// + +NetInterfaceTrafficStats::NetInterfaceTrafficStats() { + 
fTotNumPackets = fTotNumBytes = 0.0; +} + +void NetInterfaceTrafficStats::countPacket(unsigned packetSize) { + fTotNumPackets += 1.0; + fTotNumBytes += packetSize; +} + +Boolean NetInterfaceTrafficStats::haveSeenTraffic() const { + return fTotNumPackets != 0.0; +} diff --git a/AnyCore/lib_rtsp/groupsock/groupsock.mak b/AnyCore/lib_rtsp/groupsock/groupsock.mak new file mode 100644 index 0000000..6d0006d --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/groupsock.mak @@ -0,0 +1,96 @@ +INCLUDES = -Iinclude -I../UsageEnvironment/include +PREFIX = /usr/local +LIBDIR = $(PREFIX)/lib +##### Change the following for your environment: +# Comment out the following line to produce Makefiles that generate debuggable code: +NODEBUG=1 + +# The following definition ensures that we are properly matching +# the WinSock2 library file with the correct header files. +# (will link with "ws2_32.lib" and include "winsock2.h" & "Ws2tcpip.h") +TARGETOS = WINNT + +# If for some reason you wish to use WinSock1 instead, uncomment the +# following two definitions. +# (will link with "wsock32.lib" and include "winsock.h") +#TARGETOS = WIN95 +#APPVER = 4.0 + +!include + +UI_OPTS = $(guilflags) $(guilibsdll) +# Use the following to get a console (e.g., for debugging): +CONSOLE_UI_OPTS = $(conlflags) $(conlibsdll) +CPU=i386 + +TOOLS32 = c:\Program Files\DevStudio\Vc +COMPILE_OPTS = $(INCLUDES) $(cdebug) $(cflags) $(cvarsdll) -I. 
-I"$(TOOLS32)\include" +C = c +C_COMPILER = "$(TOOLS32)\bin\cl" +C_FLAGS = $(COMPILE_OPTS) +CPP = cpp +CPLUSPLUS_COMPILER = $(C_COMPILER) +CPLUSPLUS_FLAGS = $(COMPILE_OPTS) +OBJ = obj +LINK = $(link) -out: +LIBRARY_LINK = lib -out: +LINK_OPTS_0 = $(linkdebug) msvcirt.lib +LIBRARY_LINK_OPTS = +LINK_OPTS = $(LINK_OPTS_0) $(UI_OPTS) +CONSOLE_LINK_OPTS = $(LINK_OPTS_0) $(CONSOLE_UI_OPTS) +SERVICE_LINK_OPTS = kernel32.lib advapi32.lib shell32.lib -subsystem:console,$(APPVER) +LIB_SUFFIX = lib +LIBS_FOR_CONSOLE_APPLICATION = +LIBS_FOR_GUI_APPLICATION = +MULTIMEDIA_LIBS = winmm.lib +EXE = .exe +PLATFORM = Windows + +rc32 = "$(TOOLS32)\bin\rc" +.rc.res: + $(rc32) $< +##### End of variables to change + +NAME = libgroupsock +ALL = $(NAME).$(LIB_SUFFIX) +all: $(ALL) + +.$(C).$(OBJ): + $(C_COMPILER) -c $(C_FLAGS) $< +.$(CPP).$(OBJ): + $(CPLUSPLUS_COMPILER) -c $(CPLUSPLUS_FLAGS) $< + +GROUPSOCK_LIB_OBJS = GroupsockHelper.$(OBJ) GroupEId.$(OBJ) inet.$(OBJ) Groupsock.$(OBJ) NetInterface.$(OBJ) NetAddress.$(OBJ) IOHandlers.$(OBJ) + +GroupsockHelper.$(CPP): include/GroupsockHelper.hh +include/GroupsockHelper.hh: include/NetAddress.hh +include/NetAddress.hh: include/NetCommon.h +GroupEId.$(CPP): include/GroupEId.hh +include/GroupEId.hh: include/NetAddress.hh +inet.$(C): include/NetCommon.h +Groupsock.$(CPP): include/Groupsock.hh include/GroupsockHelper.hh include/TunnelEncaps.hh +include/Groupsock.hh: include/groupsock_version.hh include/NetInterface.hh include/GroupEId.hh +include/NetInterface.hh: include/NetAddress.hh +include/TunnelEncaps.hh: include/NetAddress.hh +NetInterface.$(CPP): include/NetInterface.hh include/GroupsockHelper.hh +NetAddress.$(CPP): include/NetAddress.hh include/GroupsockHelper.hh +IOHandlers.$(CPP): include/IOHandlers.hh include/TunnelEncaps.hh + +libgroupsock.$(LIB_SUFFIX): $(GROUPSOCK_LIB_OBJS) \ + $(PLATFORM_SPECIFIC_LIB_OBJS) + $(LIBRARY_LINK)$@ $(LIBRARY_LINK_OPTS) \ + $(GROUPSOCK_LIB_OBJS) + +clean: + -rm -rf *.$(OBJ) $(ALL) core *.core *~ include/*~ 
+ +install: install1 $(INSTALL2) +install1: libgroupsock.$(LIB_SUFFIX) + install -d $(DESTDIR)$(PREFIX)/include/groupsock $(DESTDIR)$(LIBDIR) + install -m 644 include/*.hh include/*.h $(DESTDIR)$(PREFIX)/include/groupsock + install -m 644 libgroupsock.$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR) +install_shared_libraries: libgroupsock.$(LIB_SUFFIX) + ln -s libgroupsock.$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/libgroupsock.$(SHORT_LIB_SUFFIX) + ln -s libgroupsock.$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/libgroupsock.so + +##### Any additional, platform-specific rules come here: diff --git a/AnyCore/lib_rtsp/groupsock/groupsock.vcproj b/AnyCore/lib_rtsp/groupsock/groupsock.vcproj new file mode 100644 index 0000000..c5d7709 --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/groupsock.vcproj @@ -0,0 +1,239 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/AnyCore/lib_rtsp/groupsock/groupsock.vcproj.Eric-PC.Eric.user b/AnyCore/lib_rtsp/groupsock/groupsock.vcproj.Eric-PC.Eric.user new file mode 100644 index 0000000..c3c730f --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/groupsock.vcproj.Eric-PC.Eric.user @@ -0,0 +1,65 @@ + + + + + + + + + + + diff --git a/AnyCore/lib_rtsp/groupsock/groupsock.vcxproj b/AnyCore/lib_rtsp/groupsock/groupsock.vcxproj new file mode 100644 index 0000000..20dd3a3 --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/groupsock.vcxproj @@ -0,0 +1,109 @@ +锘 + + + + Debug + Win32 + + + Release + Win32 + + + + {141CEBF6-A03B-469E-966E-231B416F4EAF} + groupsock + Win32Proj + + + + StaticLibrary + v140 + Unicode + true + + + StaticLibrary + v140 + Unicode + + + + + + + + + + + + + <_ProjectFileVersion>14.0.25431.1 + + + $(SolutionDir)$(Configuration)\ + $(Configuration)\ + + + $(SolutionDir)$(Configuration)\ + $(Configuration)\ + + + + Disabled + ./include;../UsageEnvironment/include;%(AdditionalIncludeDirectories) + 
WIN32;_DEBUG;_LIB;%(PreprocessorDefinitions) + true + EnableFastChecks + MultiThreadedDebugDLL + + Level3 + EditAndContinue + + + + + + + + MaxSpeed + true + ./include;../UsageEnvironment/include;%(AdditionalIncludeDirectories) + WIN32;NDEBUG;_LIB;%(PreprocessorDefinitions) + MultiThreadedDLL + true + + Level3 + ProgramDatabase + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/AnyCore/lib_rtsp/groupsock/groupsock.vcxproj.filters b/AnyCore/lib_rtsp/groupsock/groupsock.vcxproj.filters new file mode 100644 index 0000000..2aed5c9 --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/groupsock.vcxproj.filters @@ -0,0 +1,72 @@ +锘 + + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + {93995380-89BD-4b04-88EB-625FBE52EBFB} + h;hpp;hxx;hm;inl;inc;xsd + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav + + + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + + + + \ No newline at end of file diff --git a/AnyCore/lib_rtsp/groupsock/include/GroupEId.hh b/AnyCore/lib_rtsp/groupsock/include/GroupEId.hh new file mode 100644 index 0000000..c279b9e --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/include/GroupEId.hh @@ -0,0 +1,98 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "multikit" Multicast Application Shell +// Copyright (c) 1996-2014, Live Networks, Inc. All rights reserved +// "Group Endpoint Id" +// C++ header + +#ifndef _GROUPEID_HH +#define _GROUPEID_HH + +#ifndef _BOOLEAN_HH +#include "Boolean.hh" +#endif + +#ifndef _NET_ADDRESS_HH +#include "NetAddress.hh" +#endif + +const u_int8_t MAX_TTL = 255; + +class Scope { + public: + Scope(u_int8_t ttl = 0, const char* publicKey = NULL); + Scope(const Scope& orig); + Scope& operator=(const Scope& rightSide); + ~Scope(); + + u_int8_t ttl() const + { return fTTL; } + + const char* publicKey() const + { return fPublicKey; } + unsigned publicKeySize() const; + + private: + void assign(u_int8_t ttl, const char* publicKey); + void clean(); + + u_int8_t fTTL; + char* fPublicKey; +}; + +class GroupEId { +public: + GroupEId(struct in_addr const& groupAddr, + portNumBits portNum, Scope const& scope, + unsigned numSuccessiveGroupAddrs = 1); + // used for a 'source-independent multicast' group + GroupEId(struct in_addr const& groupAddr, + struct in_addr const& sourceFilterAddr, + portNumBits portNum, + unsigned numSuccessiveGroupAddrs = 1); + // used for a 'source-specific multicast' group + GroupEId(); // used only as a temp constructor prior to initialization + + struct in_addr const& groupAddress() const { return fGroupAddress; } + struct in_addr const& sourceFilterAddress() const { return fSourceFilterAddress; } + + Boolean isSSM() const; + + unsigned numSuccessiveGroupAddrs() const { + // could be >1 for hier encoding + return fNumSuccessiveGroupAddrs; + } + + portNumBits portNum() const { return fPortNum; } + + const Scope& scope() const { return fScope; } + +private: + void init(struct in_addr const& groupAddr, + struct in_addr const& sourceFilterAddr, + 
portNumBits portNum, + Scope const& scope, + unsigned numSuccessiveGroupAddrs); + +private: + struct in_addr fGroupAddress; + struct in_addr fSourceFilterAddress; + unsigned fNumSuccessiveGroupAddrs; + portNumBits fPortNum; + Scope fScope; +}; + +#endif diff --git a/AnyCore/lib_rtsp/groupsock/include/Groupsock.hh b/AnyCore/lib_rtsp/groupsock/include/Groupsock.hh new file mode 100644 index 0000000..01ea53c --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/include/Groupsock.hh @@ -0,0 +1,203 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "mTunnel" multicast access service +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// 'Group sockets' +// C++ header + +#ifndef _GROUPSOCK_HH +#define _GROUPSOCK_HH + +#ifndef _GROUPSOCK_VERSION_HH +#include "groupsock_version.hh" +#endif + +#ifndef _NET_INTERFACE_HH +#include "NetInterface.hh" +#endif + +#ifndef _GROUPEID_HH +#include "GroupEId.hh" +#endif + +// An "OutputSocket" is (by default) used only to send packets. 
+// No packets are received on it (unless a subclass arranges this) + +class OutputSocket: public Socket { +public: + OutputSocket(UsageEnvironment& env); + virtual ~OutputSocket(); + + Boolean write(netAddressBits address, Port port, u_int8_t ttl, + unsigned char* buffer, unsigned bufferSize); + +protected: + OutputSocket(UsageEnvironment& env, Port port); + + portNumBits sourcePortNum() const {return fSourcePort.num();} + +private: // redefined virtual function + virtual Boolean handleRead(unsigned char* buffer, unsigned bufferMaxSize, + unsigned& bytesRead, + struct sockaddr_in& fromAddress); + +private: + Port fSourcePort; + unsigned fLastSentTTL; +}; + +class destRecord { +public: + destRecord(struct in_addr const& addr, Port const& port, u_int8_t ttl, + destRecord* next); + virtual ~destRecord(); + +public: + destRecord* fNext; + GroupEId fGroupEId; + Port fPort; +}; + +// A "Groupsock" is used to both send and receive packets. +// As the name suggests, it was originally designed to send/receive +// multicast, but it can send/receive unicast as well. + +class Groupsock: public OutputSocket { +public: + Groupsock(UsageEnvironment& env, struct in_addr const& groupAddr, + Port port, u_int8_t ttl); + // used for a 'source-independent multicast' group + Groupsock(UsageEnvironment& env, struct in_addr const& groupAddr, + struct in_addr const& sourceFilterAddr, + Port port); + // used for a 'source-specific multicast' group + virtual ~Groupsock(); + + void changeDestinationParameters(struct in_addr const& newDestAddr, + Port newDestPort, int newDestTTL); + // By default, the destination address, port and ttl for + // outgoing packets are those that were specified in + // the constructor. This works OK for multicast sockets, + // but for unicast we usually want the destination port + // number, at least, to be different from the source port. + // (If a parameter is 0 (or ~0 for ttl), then no change made.) 
+ + // As a special case, we also allow multiple destinations (addresses & ports) + // (This can be used to implement multi-unicast.) + void addDestination(struct in_addr const& addr, Port const& port); + void removeDestination(struct in_addr const& addr, Port const& port); + void removeAllDestinations(); + + struct in_addr const& groupAddress() const { + return fIncomingGroupEId.groupAddress(); + } + struct in_addr const& sourceFilterAddress() const { + return fIncomingGroupEId.sourceFilterAddress(); + } + + Boolean isSSM() const { + return fIncomingGroupEId.isSSM(); + } + + u_int8_t ttl() const { return fTTL; } + + void multicastSendOnly(); // send, but don't receive any multicast packets + + Boolean output(UsageEnvironment& env, u_int8_t ttl, + unsigned char* buffer, unsigned bufferSize, + DirectedNetInterface* interfaceNotToFwdBackTo = NULL); + + DirectedNetInterfaceSet& members() { return fMembers; } + + Boolean deleteIfNoMembers; + Boolean isSlave; // for tunneling + + static NetInterfaceTrafficStats statsIncoming; + static NetInterfaceTrafficStats statsOutgoing; + static NetInterfaceTrafficStats statsRelayedIncoming; + static NetInterfaceTrafficStats statsRelayedOutgoing; + NetInterfaceTrafficStats statsGroupIncoming; // *not* static + NetInterfaceTrafficStats statsGroupOutgoing; // *not* static + NetInterfaceTrafficStats statsGroupRelayedIncoming; // *not* static + NetInterfaceTrafficStats statsGroupRelayedOutgoing; // *not* static + + Boolean wasLoopedBackFromUs(UsageEnvironment& env, + struct sockaddr_in& fromAddress); + +public: // redefined virtual functions + virtual Boolean handleRead(unsigned char* buffer, unsigned bufferMaxSize, + unsigned& bytesRead, + struct sockaddr_in& fromAddress); + +private: + int outputToAllMembersExcept(DirectedNetInterface* exceptInterface, + u_int8_t ttlToFwd, + unsigned char* data, unsigned size, + netAddressBits sourceAddr); + +private: + GroupEId fIncomingGroupEId; + destRecord* fDests; + u_int8_t fTTL; + 
DirectedNetInterfaceSet fMembers; +}; + +UsageEnvironment& operator<<(UsageEnvironment& s, const Groupsock& g); + +// A data structure for looking up a 'groupsock' +// by (multicast address, port), or by socket number +class GroupsockLookupTable { +public: + Groupsock* Fetch(UsageEnvironment& env, netAddressBits groupAddress, + Port port, u_int8_t ttl, Boolean& isNew); + // Creates a new Groupsock if none already exists + Groupsock* Fetch(UsageEnvironment& env, netAddressBits groupAddress, + netAddressBits sourceFilterAddr, + Port port, Boolean& isNew); + // Creates a new Groupsock if none already exists + Groupsock* Lookup(netAddressBits groupAddress, Port port); + // Returns NULL if none already exists + Groupsock* Lookup(netAddressBits groupAddress, + netAddressBits sourceFilterAddr, + Port port); + // Returns NULL if none already exists + Groupsock* Lookup(UsageEnvironment& env, int sock); + // Returns NULL if none already exists + Boolean Remove(Groupsock const* groupsock); + + // Used to iterate through the groupsocks in the table + class Iterator { + public: + Iterator(GroupsockLookupTable& groupsocks); + + Groupsock* next(); // NULL iff none + + private: + AddressPortLookupTable::Iterator fIter; + }; + +private: + Groupsock* AddNew(UsageEnvironment& env, + netAddressBits groupAddress, + netAddressBits sourceFilterAddress, + Port port, u_int8_t ttl); + +private: + friend class Iterator; + AddressPortLookupTable fTable; +}; + +#endif diff --git a/AnyCore/lib_rtsp/groupsock/include/GroupsockHelper.hh b/AnyCore/lib_rtsp/groupsock/include/GroupsockHelper.hh new file mode 100644 index 0000000..93b9a1a --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/include/GroupsockHelper.hh @@ -0,0 +1,142 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "mTunnel" multicast access service +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Helper routines to implement 'group sockets' +// C++ header + +#ifndef _GROUPSOCK_HELPER_HH +#define _GROUPSOCK_HELPER_HH + +#ifndef _NET_ADDRESS_HH +#include "NetAddress.hh" +#endif + +int setupDatagramSocket(UsageEnvironment& env, Port port); +int setupStreamSocket(UsageEnvironment& env, + Port port, Boolean makeNonBlocking = True); + +int readSocket(UsageEnvironment& env, + int socket, unsigned char* buffer, unsigned bufferSize, + struct sockaddr_in& fromAddress); + +Boolean writeSocket(UsageEnvironment& env, + int socket, struct in_addr address, Port port, + u_int8_t ttlArg, + unsigned char* buffer, unsigned bufferSize); + +Boolean writeSocket(UsageEnvironment& env, + int socket, struct in_addr address, Port port, + unsigned char* buffer, unsigned bufferSize); + // An optimized version of "writeSocket" that omits the "setsockopt()" call to set the TTL. 
+ +unsigned getSendBufferSize(UsageEnvironment& env, int socket); +unsigned getReceiveBufferSize(UsageEnvironment& env, int socket); +unsigned setSendBufferTo(UsageEnvironment& env, + int socket, unsigned requestedSize); +unsigned setReceiveBufferTo(UsageEnvironment& env, + int socket, unsigned requestedSize); +unsigned increaseSendBufferTo(UsageEnvironment& env, + int socket, unsigned requestedSize); +unsigned increaseReceiveBufferTo(UsageEnvironment& env, + int socket, unsigned requestedSize); + +Boolean makeSocketNonBlocking(int sock); +Boolean makeSocketBlocking(int sock, unsigned writeTimeoutInMilliseconds = 0); + // A "writeTimeoutInMilliseconds" value of 0 means: Don't timeout + +Boolean socketJoinGroup(UsageEnvironment& env, int socket, + netAddressBits groupAddress); +Boolean socketLeaveGroup(UsageEnvironment&, int socket, + netAddressBits groupAddress); + +// source-specific multicast join/leave +Boolean socketJoinGroupSSM(UsageEnvironment& env, int socket, + netAddressBits groupAddress, + netAddressBits sourceFilterAddr); +Boolean socketLeaveGroupSSM(UsageEnvironment&, int socket, + netAddressBits groupAddress, + netAddressBits sourceFilterAddr); + +Boolean getSourcePort(UsageEnvironment& env, int socket, Port& port); + +netAddressBits ourIPAddress(UsageEnvironment& env); // in network order + +// IP addresses of our sending and receiving interfaces. (By default, these +// are INADDR_ANY (i.e., 0), specifying the default interface.) +extern netAddressBits SendingInterfaceAddr; +extern netAddressBits ReceivingInterfaceAddr; + +// Allocates a randomly-chosen IPv4 SSM (multicast) address: +netAddressBits chooseRandomIPv4SSMAddress(UsageEnvironment& env); + +// Returns a simple "hh:mm:ss" string, for use in debugging output (e.g.) 
+char const* timestampString(); + + +#ifdef HAVE_SOCKADDR_LEN +#define SET_SOCKADDR_SIN_LEN(var) var.sin_len = sizeof var +#else +#define SET_SOCKADDR_SIN_LEN(var) +#endif + +#define MAKE_SOCKADDR_IN(var,adr,prt) /*adr,prt must be in network order*/\ + struct sockaddr_in var;\ + var.sin_family = AF_INET;\ + var.sin_addr.s_addr = (adr);\ + var.sin_port = (prt);\ + SET_SOCKADDR_SIN_LEN(var); + + +// By default, we create sockets with the SO_REUSE_* flag set. +// If, instead, you want to create sockets without the SO_REUSE_* flags, +// Then enclose the creation code with: +// { +// NoReuse dummy; +// ... +// } +class NoReuse { +public: + NoReuse(UsageEnvironment& env); + ~NoReuse(); + +private: + UsageEnvironment& fEnv; +}; + + +// Define the "UsageEnvironment"-specific "groupsockPriv" structure: + +struct _groupsockPriv { // There should be only one of these allocated + HashTable* socketTable; + int reuseFlag; +}; +_groupsockPriv* groupsockPriv(UsageEnvironment& env); // allocates it if necessary +void reclaimGroupsockPriv(UsageEnvironment& env); + + +#if defined(__WIN32__) || defined(_WIN32) +// For Windoze, we need to implement our own gettimeofday() +extern int gettimeofday(struct timeval*, int*); +#endif + +// The following are implemented in inet.c: +extern "C" netAddressBits our_inet_addr(char const*); +extern "C" void our_srandom(int x); +extern "C" long our_random(); +extern "C" u_int32_t our_random32(); // because "our_random()" returns a 31-bit number + +#endif diff --git a/AnyCore/lib_rtsp/groupsock/include/IOHandlers.hh b/AnyCore/lib_rtsp/groupsock/include/IOHandlers.hh new file mode 100644 index 0000000..dd00ea3 --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/include/IOHandlers.hh @@ -0,0 +1,31 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later 
version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "mTunnel" multicast access service +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// IO event handlers +// C++ header + +#ifndef _IO_HANDLERS_HH +#define _IO_HANDLERS_HH + +#ifndef _NET_INTERFACE_HH +#include "NetInterface.hh" +#endif + +// Handles incoming data on sockets: +void socketReadHandler(Socket* sock, int mask); + +#endif diff --git a/AnyCore/lib_rtsp/groupsock/include/NetAddress.hh b/AnyCore/lib_rtsp/groupsock/include/NetAddress.hh new file mode 100644 index 0000000..70380c9 --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/include/NetAddress.hh @@ -0,0 +1,162 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "mTunnel" multicast access service +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Network Addresses +// C++ header + +#ifndef _NET_ADDRESS_HH +#define _NET_ADDRESS_HH + +#ifndef _HASH_TABLE_HH +#include "HashTable.hh" +#endif + +#ifndef _NET_COMMON_H +#include "NetCommon.h" +#endif + +#ifndef _USAGE_ENVIRONMENT_HH +#include "UsageEnvironment.hh" +#endif + +// Definition of a type representing a low-level network address. +// At present, this is 32-bits, for IPv4. Later, generalize it, +// to allow for IPv6. +typedef u_int32_t netAddressBits; + +class NetAddress { +public: + NetAddress(u_int8_t const* data, + unsigned length = 4 /* default: 32 bits */); + NetAddress(unsigned length = 4); // sets address data to all-zeros + NetAddress(NetAddress const& orig); + NetAddress& operator=(NetAddress const& rightSide); + virtual ~NetAddress(); + + unsigned length() const { return fLength; } + u_int8_t const* data() const // always in network byte order + { return fData; } + +private: + void assign(u_int8_t const* data, unsigned length); + void clean(); + + unsigned fLength; + u_int8_t* fData; +}; + +class NetAddressList { +public: + NetAddressList(char const* hostname); + NetAddressList(NetAddressList const& orig); + NetAddressList& operator=(NetAddressList const& rightSide); + virtual ~NetAddressList(); + + unsigned numAddresses() const { return fNumAddresses; } + + NetAddress const* firstAddress() const; + + // Used to iterate through the addresses in a list: + class Iterator { + public: + Iterator(NetAddressList const& addressList); + NetAddress const* nextAddress(); // NULL iff none + private: + NetAddressList const& fAddressList; + unsigned fNextIndex; + }; + +private: + void assign(netAddressBits numAddresses, NetAddress** addressArray); + void clean(); + + friend class Iterator; + unsigned fNumAddresses; + NetAddress** fAddressArray; +}; + +typedef u_int16_t portNumBits; + +class Port { +public: + Port(portNumBits num /* in host byte order */); + + portNumBits num() const { return fPortNum; } // in network byte order + +private: + portNumBits 
fPortNum; // stored in network byte order +#ifdef IRIX + portNumBits filler; // hack to overcome a bug in IRIX C++ compiler +#endif +}; + +UsageEnvironment& operator<<(UsageEnvironment& s, const Port& p); + + +// A generic table for looking up objects by (address1, address2, port) +class AddressPortLookupTable { +public: + AddressPortLookupTable(); + virtual ~AddressPortLookupTable(); + + void* Add(netAddressBits address1, netAddressBits address2, Port port, void* value); + // Returns the old value if different, otherwise 0 + Boolean Remove(netAddressBits address1, netAddressBits address2, Port port); + void* Lookup(netAddressBits address1, netAddressBits address2, Port port); + // Returns 0 if not found + void* RemoveNext() { return fTable->RemoveNext(); } + + // Used to iterate through the entries in the table + class Iterator { + public: + Iterator(AddressPortLookupTable& table); + virtual ~Iterator(); + + void* next(); // NULL iff none + + private: + HashTable::Iterator* fIter; + }; + +private: + friend class Iterator; + HashTable* fTable; +}; + + +Boolean IsMulticastAddress(netAddressBits address); + + +// A mechanism for displaying an IPv4 address in ASCII. This is intended to replace "inet_ntoa()", which is not thread-safe. 
+class AddressString { +public: + AddressString(struct sockaddr_in const& addr); + AddressString(struct in_addr const& addr); + AddressString(netAddressBits addr); // "addr" is assumed to be in host byte order here + + virtual ~AddressString(); + + char const* val() const { return fVal; } + +private: + void init(netAddressBits addr); // used to implement each of the constructors + +private: + char* fVal; // The result ASCII string: allocated by the constructor; deleted by the destructor +}; + +#endif diff --git a/AnyCore/lib_rtsp/groupsock/include/NetCommon.h b/AnyCore/lib_rtsp/groupsock/include/NetCommon.h new file mode 100644 index 0000000..3e7cd82 --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/include/NetCommon.h @@ -0,0 +1,131 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +/* "groupsock" interface + * Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+ * Common include files, typically used for networking + */ + +#ifndef _NET_COMMON_H +#define _NET_COMMON_H + +#if defined(__WIN32__) || defined(_WIN32) || defined(_WIN32_WCE) +/* Windows */ +#if defined(WINNT) || defined(_WINNT) || defined(__BORLANDC__) || defined(__MINGW32__) || defined(_WIN32_WCE) || defined (_MSC_VER) +#define _MSWSOCK_ +#include +#include +#endif +#include +#include +#include + +#define closeSocket closesocket +#ifdef EWOULDBLOCK +#undef EWOULDBLOCK +#endif +#ifdef EINPROGRESS +#undef EINPROGRESS +#endif +#ifdef EAGAIN +#undef EAGAIN +#endif +#ifdef EINTR +#undef EINTR +#endif +#define EWOULDBLOCK WSAEWOULDBLOCK +#define EINPROGRESS WSAEWOULDBLOCK +#define EAGAIN WSAEWOULDBLOCK +#define EINTR WSAEINTR + +#if defined(_WIN32_WCE) +#define NO_STRSTREAM 1 +#endif + +/* Definitions of size-specific types: */ +typedef __int64 int64_t; +typedef unsigned __int64 u_int64_t; + +typedef int int32_t; +typedef unsigned u_int32_t; + +typedef short int16_t; +typedef unsigned short u_int16_t; + +typedef unsigned char u_int8_t; + +// For "uintptr_t" and "intptr_t", we assume that if they're not already defined, then this must be +// an old, 32-bit version of Windows: +#if !defined(_MSC_STDINT_H_) && !defined(_UINTPTR_T_DEFINED) && !defined(_UINTPTR_T_DECLARED) && !defined(_UINTPTR_T) +typedef unsigned uintptr_t; +#endif +#if !defined(_MSC_STDINT_H_) && !defined(_INTPTR_T_DEFINED) && !defined(_INTPTR_T_DECLARED) && !defined(_INTPTR_T) +typedef int intptr_t; +#endif + +#elif defined(VXWORKS) +/* VxWorks */ +#include +#include +#include +#include +#include +#include +#include + +typedef unsigned int u_int32_t; +typedef unsigned short u_int16_t; +typedef unsigned char u_int8_t; + +#else +/* Unix */ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#if defined(_QNX4) +#include +#include +#endif + +#define closeSocket close + +#ifdef SOLARIS +#define u_int64_t uint64_t +#define 
u_int32_t uint32_t +#define u_int16_t uint16_t +#define u_int8_t uint8_t +#else +#define u_int64_t uint64_t +#define u_int32_t uint32_t +#define u_int16_t uint16_t +#define u_int8_t uint8_t +#endif +#endif + +#ifndef SOCKLEN_T +#define SOCKLEN_T int +#endif + +#endif diff --git a/AnyCore/lib_rtsp/groupsock/include/NetInterface.hh b/AnyCore/lib_rtsp/groupsock/include/NetInterface.hh new file mode 100644 index 0000000..b0ecc22 --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/include/NetInterface.hh @@ -0,0 +1,149 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "mTunnel" multicast access service +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Network Interfaces +// C++ header + +#ifndef _NET_INTERFACE_HH +#define _NET_INTERFACE_HH + +#ifndef _NET_ADDRESS_HH +#include "NetAddress.hh" +#endif + +class NetInterface { +public: + virtual ~NetInterface(); + + static UsageEnvironment* DefaultUsageEnvironment; + // if non-NULL, used for each new interfaces + +protected: + NetInterface(); // virtual base class +}; + +class DirectedNetInterface: public NetInterface { +public: + virtual ~DirectedNetInterface(); + + virtual Boolean write(unsigned char* data, unsigned numBytes) = 0; + + virtual Boolean SourceAddrOKForRelaying(UsageEnvironment& env, + unsigned addr) = 0; + +protected: + DirectedNetInterface(); // virtual base class +}; + +class DirectedNetInterfaceSet { +public: + DirectedNetInterfaceSet(); + virtual ~DirectedNetInterfaceSet(); + + DirectedNetInterface* Add(DirectedNetInterface const* interf); + // Returns the old value if different, otherwise 0 + Boolean Remove(DirectedNetInterface const* interf); + + Boolean IsEmpty() { return fTable->IsEmpty(); } + + // Used to iterate through the interfaces in the set + class Iterator { + public: + Iterator(DirectedNetInterfaceSet& interfaces); + virtual ~Iterator(); + + DirectedNetInterface* next(); // NULL iff none + + private: + HashTable::Iterator* fIter; + }; + +private: + friend class Iterator; + HashTable* fTable; +}; + +class Socket: public NetInterface { +public: + virtual ~Socket(); + void reset(); // closes the socket, and sets "fSocketNum" to -1 + + virtual Boolean handleRead(unsigned char* buffer, unsigned bufferMaxSize, + unsigned& bytesRead, + struct sockaddr_in& fromAddress) = 0; + // Returns False on error; resultData == NULL if data ignored + + int socketNum() const { return fSocketNum; } + + Port port() const { + return fPort; + } + + UsageEnvironment& env() const { return fEnv; } + + static int DebugLevel; + +protected: + Socket(UsageEnvironment& env, Port port); // virtual base class + + Boolean changePort(Port newPort); // will also 
cause socketNum() to change + +private: + int fSocketNum; + UsageEnvironment& fEnv; + Port fPort; +}; + +UsageEnvironment& operator<<(UsageEnvironment& s, const Socket& sock); + +// A data structure for looking up a Socket by port: + +class SocketLookupTable { +public: + virtual ~SocketLookupTable(); + + Socket* Fetch(UsageEnvironment& env, Port port, Boolean& isNew); + // Creates a new Socket if none already exists + Boolean Remove(Socket const* sock); + +protected: + SocketLookupTable(); // abstract base class + virtual Socket* CreateNew(UsageEnvironment& env, Port port) = 0; + +private: + HashTable* fTable; +}; + +// A data structure for counting traffic: + +class NetInterfaceTrafficStats { +public: + NetInterfaceTrafficStats(); + + void countPacket(unsigned packetSize); + + float totNumPackets() const {return fTotNumPackets;} + float totNumBytes() const {return fTotNumBytes;} + + Boolean haveSeenTraffic() const; + +private: + float fTotNumPackets; + float fTotNumBytes; +}; + +#endif diff --git a/AnyCore/lib_rtsp/groupsock/include/TunnelEncaps.hh b/AnyCore/lib_rtsp/groupsock/include/TunnelEncaps.hh new file mode 100644 index 0000000..62efad5 --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/include/TunnelEncaps.hh @@ -0,0 +1,101 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "mTunnel" multicast access service +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Encapsulation trailer for tunnels +// C++ header + +#ifndef _TUNNEL_ENCAPS_HH +#define _TUNNEL_ENCAPS_HH + +#ifndef _NET_ADDRESS_HH +#include "NetAddress.hh" +#endif + +typedef u_int16_t Cookie; + +class TunnelEncapsulationTrailer { + // The trailer is layed out as follows: + // bytes 0-1: source 'cookie' + // bytes 2-3: destination 'cookie' + // bytes 4-7: address + // bytes 8-9: port + // byte 10: ttl + // byte 11: command + + // Optionally, there may also be a 4-byte 'auxilliary address' + // (e.g., for 'source-specific multicast' preceding this) + // bytes -4 through -1: auxilliary address + + public: + Cookie& srcCookie() + { return *(Cookie*)byteOffset(0); } + Cookie& dstCookie() + { return *(Cookie*)byteOffset(2); } + u_int32_t& address() + { return *(u_int32_t*)byteOffset(4); } + Port& port() + { return *(Port*)byteOffset(8); } + u_int8_t& ttl() + { return *(u_int8_t*)byteOffset(10); } + u_int8_t& command() + { return *(u_int8_t*)byteOffset(11); } + + u_int32_t& auxAddress() + { return *(u_int32_t*)byteOffset(-4); } + + private: + inline char* byteOffset(int charIndex) + { return ((char*)this) + charIndex; } +}; + +const unsigned TunnelEncapsulationTrailerSize = 12; // bytes +const unsigned TunnelEncapsulationTrailerAuxSize = 4; // bytes +const unsigned TunnelEncapsulationTrailerMaxSize + = TunnelEncapsulationTrailerSize + TunnelEncapsulationTrailerAuxSize; + +// Command codes: +// 0: unused +const u_int8_t TunnelDataCmd = 1; +const u_int8_t TunnelJoinGroupCmd = 2; +const u_int8_t TunnelLeaveGroupCmd = 3; +const u_int8_t TunnelTearDownCmd = 4; +const u_int8_t TunnelProbeCmd = 5; +const u_int8_t TunnelProbeAckCmd = 6; +const 
u_int8_t TunnelProbeNackCmd = 7; +const u_int8_t TunnelJoinRTPGroupCmd = 8; +const u_int8_t TunnelLeaveRTPGroupCmd = 9; +// 0x0A through 0x10: currently unused. +const u_int8_t TunnelExtensionFlag = 0x80; // a flag, not a cmd code +const u_int8_t TunnelDataAuxCmd + = (TunnelExtensionFlag|TunnelDataCmd); +const u_int8_t TunnelJoinGroupAuxCmd + = (TunnelExtensionFlag|TunnelJoinGroupCmd); +const u_int8_t TunnelLeaveGroupAuxCmd + = (TunnelExtensionFlag|TunnelLeaveGroupCmd); +// Note: the TearDown, Probe, ProbeAck, ProbeNack cmds have no Aux version +// 0x84 through 0x87: currently unused. +const u_int8_t TunnelJoinRTPGroupAuxCmd + = (TunnelExtensionFlag|TunnelJoinRTPGroupCmd); +const u_int8_t TunnelLeaveRTPGroupAuxCmd + = (TunnelExtensionFlag|TunnelLeaveRTPGroupCmd); +// 0x8A through 0xFF: currently unused + +inline Boolean TunnelIsAuxCmd(u_int8_t cmd) { + return (cmd&TunnelExtensionFlag) != 0; +} + +#endif diff --git a/AnyCore/lib_rtsp/groupsock/include/groupsock_version.hh b/AnyCore/lib_rtsp/groupsock/include/groupsock_version.hh new file mode 100644 index 0000000..b93e37f --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/include/groupsock_version.hh @@ -0,0 +1,10 @@ +// Version information for the "groupsock" library +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. + +#ifndef _GROUPSOCK_VERSION_HH +#define _GROUPSOCK_VERSION_HH + +#define GROUPSOCK_LIBRARY_VERSION_STRING "2014.09.22" +#define GROUPSOCK_LIBRARY_VERSION_INT 1411344000 + +#endif diff --git a/AnyCore/lib_rtsp/groupsock/inet.c b/AnyCore/lib_rtsp/groupsock/inet.c new file mode 100644 index 0000000..8d9e756 --- /dev/null +++ b/AnyCore/lib_rtsp/groupsock/inet.c @@ -0,0 +1,451 @@ +#ifndef _NET_COMMON_H +#include "NetCommon.h" +#endif + +#include + +#ifdef VXWORKS +#include +#endif + +/* Some systems (e.g., SunOS) have header files that erroneously declare inet_addr() as taking no arguments. + * This confuses C++. To overcome this, we use our own routine, implemented in C. 
+ */ + +unsigned our_inet_addr(cp) + char const* cp; +{ + return inet_addr(cp); +} + +#if defined(__WIN32__) || defined(_WIN32) +#ifndef IMN_PIM +#define WS_VERSION_CHOICE1 0x202/*MAKEWORD(2,2)*/ +#define WS_VERSION_CHOICE2 0x101/*MAKEWORD(1,1)*/ +int initializeWinsockIfNecessary(void) { + /* We need to call an initialization routine before + * we can do anything with winsock. (How fucking lame!): + */ + static int _haveInitializedWinsock = 0; + WSADATA wsadata; + + if (!_haveInitializedWinsock) { + if ((WSAStartup(WS_VERSION_CHOICE1, &wsadata) != 0) + && ((WSAStartup(WS_VERSION_CHOICE2, &wsadata)) != 0)) { + return 0; /* error in initialization */ + } + if ((wsadata.wVersion != WS_VERSION_CHOICE1) + && (wsadata.wVersion != WS_VERSION_CHOICE2)) { + WSACleanup(); + return 0; /* desired Winsock version was not available */ + } + _haveInitializedWinsock = 1; + } + + return 1; +} +#else +int initializeWinsockIfNecessary(void) { return 1; } +#endif +#else +#define initializeWinsockIfNecessary() 1 +#endif + +#ifndef NULL +#define NULL 0 +#endif + +#ifdef USE_SYSTEM_RANDOM +/* Use the system-supplied "random()" and "srandom()" functions */ +#include +long our_random() { +#if defined(__WIN32__) || defined(_WIN32) + return rand(); +#else + return random(); +#endif +} +void our_srandom(unsigned int x) { +#if defined(__WIN32__) || defined(_WIN32) + srand(x); +#else + srandom(x); +#endif +} + +#else + +/* Use our own implementation of the "random()" and "srandom()" functions */ +/* + * random.c: + * + * An improved random number generation package. In addition to the standard + * rand()/srand() like interface, this package also has a special state info + * interface. The our_initstate() routine is called with a seed, an array of + * bytes, and a count of how many bytes are being passed in; this array is + * then initialized to contain information for random number generation with + * that much state information. 
Good sizes for the amount of state + * information are 32, 64, 128, and 256 bytes. The state can be switched by + * calling the our_setstate() routine with the same array as was initiallized + * with our_initstate(). By default, the package runs with 128 bytes of state + * information and generates far better random numbers than a linear + * congruential generator. If the amount of state information is less than + * 32 bytes, a simple linear congruential R.N.G. is used. + * + * Internally, the state information is treated as an array of longs; the + * zeroeth element of the array is the type of R.N.G. being used (small + * integer); the remainder of the array is the state information for the + * R.N.G. Thus, 32 bytes of state information will give 7 longs worth of + * state information, which will allow a degree seven polynomial. (Note: + * the zeroeth word of state information also has some other information + * stored in it -- see our_setstate() for details). + * + * The random number generation technique is a linear feedback shift register + * approach, employing trinomials (since there are fewer terms to sum up that + * way). In this approach, the least significant bit of all the numbers in + * the state table will act as a linear feedback shift register, and will + * have period 2^deg - 1 (where deg is the degree of the polynomial being + * used, assuming that the polynomial is irreducible and primitive). The + * higher order bits will have longer periods, since their values are also + * influenced by pseudo-random carries out of the lower bits. The total + * period of the generator is approximately deg*(2**deg - 1); thus doubling + * the amount of state information has a vast influence on the period of the + * generator. Note: the deg*(2**deg - 1) is an approximation only good for + * large deg, when the period of the shift register is the dominant factor. 
+ * With deg equal to seven, the period is actually much longer than the + * 7*(2**7 - 1) predicted by this formula. + */ + +/* + * For each of the currently supported random number generators, we have a + * break value on the amount of state information (you need at least this + * many bytes of state info to support this random number generator), a degree + * for the polynomial (actually a trinomial) that the R.N.G. is based on, and + * the separation between the two lower order coefficients of the trinomial. + */ +#define TYPE_0 0 /* linear congruential */ +#define BREAK_0 8 +#define DEG_0 0 +#define SEP_0 0 + +#define TYPE_1 1 /* x**7 + x**3 + 1 */ +#define BREAK_1 32 +#define DEG_1 7 +#define SEP_1 3 + +#define TYPE_2 2 /* x**15 + x + 1 */ +#define BREAK_2 64 +#define DEG_2 15 +#define SEP_2 1 + +#define TYPE_3 3 /* x**31 + x**3 + 1 */ +#define BREAK_3 128 +#define DEG_3 31 +#define SEP_3 3 + +#define TYPE_4 4 /* x**63 + x + 1 */ +#define BREAK_4 256 +#define DEG_4 63 +#define SEP_4 1 + +/* + * Array versions of the above information to make code run faster -- + * relies on fact that TYPE_i == i. + */ +#define MAX_TYPES 5 /* max number of types above */ + +static int const degrees[MAX_TYPES] = { DEG_0, DEG_1, DEG_2, DEG_3, DEG_4 }; +static int const seps [MAX_TYPES] = { SEP_0, SEP_1, SEP_2, SEP_3, SEP_4 }; + +/* + * Initially, everything is set up as if from: + * + * our_initstate(1, &randtbl, 128); + * + * Note that this initialization takes advantage of the fact that srandom() + * advances the front and rear pointers 10*rand_deg times, and hence the + * rear pointer which starts at 0 will also end up at zero; thus the zeroeth + * element of the state information, which contains info about the current + * position of the rear pointer is just + * + * MAX_TYPES * (rptr - state) + TYPE_3 == TYPE_3. 
+ */ + +static long randtbl[DEG_3 + 1] = { + TYPE_3, + 0x9a319039, 0x32d9c024, 0x9b663182, 0x5da1f342, 0xde3b81e0, 0xdf0a6fb5, + 0xf103bc02, 0x48f340fb, 0x7449e56b, 0xbeb1dbb0, 0xab5c5918, 0x946554fd, + 0x8c2e680f, 0xeb3d799f, 0xb11ee0b7, 0x2d436b86, 0xda672e2a, 0x1588ca88, + 0xe369735d, 0x904f35f7, 0xd7158fd6, 0x6fa6f051, 0x616e6b96, 0xac94efdc, + 0x36413f93, 0xc622c298, 0xf5a42ab8, 0x8a88d77b, 0xf5ad9d0e, 0x8999220b, + 0x27fb47b9, +}; + +/* + * fptr and rptr are two pointers into the state info, a front and a rear + * pointer. These two pointers are always rand_sep places aparts, as they + * cycle cyclically through the state information. (Yes, this does mean we + * could get away with just one pointer, but the code for random() is more + * efficient this way). The pointers are left positioned as they would be + * from the call + * + * our_initstate(1, randtbl, 128); + * + * (The position of the rear pointer, rptr, is really 0 (as explained above + * in the initialization of randtbl) because the state table pointer is set + * to point to randtbl[1] (as explained below). + */ +static long* fptr = &randtbl[SEP_3 + 1]; +static long* rptr = &randtbl[1]; + +/* + * The following things are the pointer to the state information table, the + * type of the current generator, the degree of the current polynomial being + * used, and the separation between the two pointers. Note that for efficiency + * of random(), we remember the first location of the state information, not + * the zeroeth. Hence it is valid to access state[-1], which is used to + * store the type of the R.N.G. Also, we remember the last location, since + * this is more efficient than indexing every time to find the address of + * the last element to see if the front and rear pointers have wrapped. 
+ */ +static long *state = &randtbl[1]; +static int rand_type = TYPE_3; +static int rand_deg = DEG_3; +static int rand_sep = SEP_3; +static long* end_ptr = &randtbl[DEG_3 + 1]; + +/* + * srandom: + * + * Initialize the random number generator based on the given seed. If the + * type is the trivial no-state-information type, just remember the seed. + * Otherwise, initializes state[] based on the given "seed" via a linear + * congruential generator. Then, the pointers are set to known locations + * that are exactly rand_sep places apart. Lastly, it cycles the state + * information a given number of times to get rid of any initial dependencies + * introduced by the L.C.R.N.G. Note that the initialization of randtbl[] + * for default usage relies on values produced by this routine. + */ +long our_random(void); /*forward*/ +void +our_srandom(unsigned int x) +{ + register int i; + + if (rand_type == TYPE_0) + state[0] = x; + else { + state[0] = x; + for (i = 1; i < rand_deg; i++) + state[i] = 1103515245 * state[i - 1] + 12345; + fptr = &state[rand_sep]; + rptr = &state[0]; + for (i = 0; i < 10 * rand_deg; i++) + (void)our_random(); + } +} + +/* + * our_initstate: + * + * Initialize the state information in the given array of n bytes for future + * random number generation. Based on the number of bytes we are given, and + * the break values for the different R.N.G.'s, we choose the best (largest) + * one we can and set things up for it. srandom() is then called to + * initialize the state information. + * + * Note that on return from srandom(), we set state[-1] to be the type + * multiplexed with the current value of the rear pointer; this is so + * successive calls to our_initstate() won't lose this information and will be + * able to restart with our_setstate(). + * + * Note: the first thing we do is save the current state, if any, just like + * our_setstate() so that it doesn't matter when our_initstate is called. + * + * Returns a pointer to the old state. 
+ */ +char * +our_initstate(seed, arg_state, n) + unsigned int seed; /* seed for R.N.G. */ + char *arg_state; /* pointer to state array */ + int n; /* # bytes of state info */ +{ + register char *ostate = (char *)(&state[-1]); + + if (rand_type == TYPE_0) + state[-1] = rand_type; + else + state[-1] = MAX_TYPES * (rptr - state) + rand_type; + if (n < BREAK_0) { +#ifdef DEBUG + (void)fprintf(stderr, + "random: not enough state (%d bytes); ignored.\n", n); +#endif + return(0); + } + if (n < BREAK_1) { + rand_type = TYPE_0; + rand_deg = DEG_0; + rand_sep = SEP_0; + } else if (n < BREAK_2) { + rand_type = TYPE_1; + rand_deg = DEG_1; + rand_sep = SEP_1; + } else if (n < BREAK_3) { + rand_type = TYPE_2; + rand_deg = DEG_2; + rand_sep = SEP_2; + } else if (n < BREAK_4) { + rand_type = TYPE_3; + rand_deg = DEG_3; + rand_sep = SEP_3; + } else { + rand_type = TYPE_4; + rand_deg = DEG_4; + rand_sep = SEP_4; + } + state = &(((long *)arg_state)[1]); /* first location */ + end_ptr = &state[rand_deg]; /* must set end_ptr before srandom */ + our_srandom(seed); + if (rand_type == TYPE_0) + state[-1] = rand_type; + else + state[-1] = MAX_TYPES*(rptr - state) + rand_type; + return(ostate); +} + +/* + * our_setstate: + * + * Restore the state from the given state array. + * + * Note: it is important that we also remember the locations of the pointers + * in the current state information, and restore the locations of the pointers + * from the old state information. This is done by multiplexing the pointer + * location into the zeroeth word of the state information. + * + * Note that due to the order in which things are done, it is OK to call + * our_setstate() with the same state as the current state. + * + * Returns a pointer to the old state information. 
+ */ +char * +our_setstate(arg_state) + char *arg_state; +{ + register long *new_state = (long *)arg_state; + register int type = new_state[0] % MAX_TYPES; + register int rear = new_state[0] / MAX_TYPES; + char *ostate = (char *)(&state[-1]); + + if (rand_type == TYPE_0) + state[-1] = rand_type; + else + state[-1] = MAX_TYPES * (rptr - state) + rand_type; + switch(type) { + case TYPE_0: + case TYPE_1: + case TYPE_2: + case TYPE_3: + case TYPE_4: + rand_type = type; + rand_deg = degrees[type]; + rand_sep = seps[type]; + break; + default: +#ifdef DEBUG + (void)fprintf(stderr, + "random: state info corrupted; not changed.\n"); +#endif + break; + } + state = &new_state[1]; + if (rand_type != TYPE_0) { + rptr = &state[rear]; + fptr = &state[(rear + rand_sep) % rand_deg]; + } + end_ptr = &state[rand_deg]; /* set end_ptr too */ + return(ostate); +} + +/* + * random: + * + * If we are using the trivial TYPE_0 R.N.G., just do the old linear + * congruential bit. Otherwise, we do our fancy trinomial stuff, which is + * the same in all the other cases due to all the global variables that have + * been set up. The basic operation is to add the number at the rear pointer + * into the one at the front pointer. Then both pointers are advanced to + * the next location cyclically in the table. The value returned is the sum + * generated, reduced to 31 bits by throwing away the "least random" low bit. + * + * Note: the code takes advantage of the fact that both the front and + * rear pointers can't wrap on the same call by not testing the rear + * pointer if the front one has wrapped. + * + * Returns a 31-bit random number. 
+ */ +long our_random() { + long i; + + if (rand_type == TYPE_0) { + i = state[0] = (state[0] * 1103515245 + 12345) & 0x7fffffff; + } else { + /* Make copies of "rptr" and "fptr" before working with them, in case we're being called concurrently by multiple threads: */ + long* rp = rptr; + long* fp = fptr; + + /* Make sure "rp" and "fp" are separated by the correct distance (again, allowing for concurrent access): */ + if (!(fp == rp+SEP_3 || fp+DEG_3 == rp+SEP_3)) { + /* A rare case that should occur only if we're being called concurrently by multiple threads. */ + /* Restore the proper separation between the pointers: */ + if (rp <= fp) rp = fp-SEP_3; else rp = fp+DEG_3-SEP_3; + } + + *fp += *rp; + i = (*fp >> 1) & 0x7fffffff; /* chucking least random bit */ + if (++fp >= end_ptr) { + fp = state; + ++rp; + } else if (++rp >= end_ptr) { + rp = state; + } + + /* Restore "rptr" and "fptr" from our working copies: */ + rptr = rp; + fptr = fp; + } + + return i; +} +#endif + +u_int32_t our_random32() { + /* Return a 32-bit random number. + Because "our_random()" returns a 31-bit random number, we call it a second + time, to generate the high bit. 
+ (Actually, to increase the likelhood of randomness, we take the middle 16 bits of two successive calls to "our_random()") + */ + long random_1 = our_random(); + u_int32_t random16_1 = (u_int32_t)(random_1&0x00FFFF00); + + long random_2 = our_random(); + u_int32_t random16_2 = (u_int32_t)(random_2&0x00FFFF00); + + return (random16_1<<8) | (random16_2>>8); +} + +#ifdef USE_OUR_BZERO +#ifndef __bzero +void +__bzero (to, count) + char *to; + int count; +{ + while (count-- > 0) + { + *to++ = 0; + } +} +#endif +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/AC3AudioFileServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/AC3AudioFileServerMediaSubsession.cpp new file mode 100644 index 0000000..8eb6e6b --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/AC3AudioFileServerMediaSubsession.cpp @@ -0,0 +1,61 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from an AC3 audio file. 
+// Implementation + +#include "AC3AudioFileServerMediaSubsession.hh" +#include "ByteStreamFileSource.hh" +#include "AC3AudioStreamFramer.hh" +#include "AC3AudioRTPSink.hh" + +AC3AudioFileServerMediaSubsession* +AC3AudioFileServerMediaSubsession::createNew(UsageEnvironment& env, + char const* fileName, + Boolean reuseFirstSource) { + return new AC3AudioFileServerMediaSubsession(env, fileName, reuseFirstSource); +} + +AC3AudioFileServerMediaSubsession +::AC3AudioFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, Boolean reuseFirstSource) + : FileServerMediaSubsession(env, fileName, reuseFirstSource) { +} + +AC3AudioFileServerMediaSubsession::~AC3AudioFileServerMediaSubsession() { +} + +FramedSource* AC3AudioFileServerMediaSubsession +::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) { + estBitrate = 48; // kbps, estimate + + ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(envir(), fFileName); + if (fileSource == NULL) return NULL; + + return AC3AudioStreamFramer::createNew(envir(), fileSource); +} + +RTPSink* AC3AudioFileServerMediaSubsession +::createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource) { + AC3AudioStreamFramer* audioSource = (AC3AudioStreamFramer*)inputSource; + return AC3AudioRTPSink::createNew(envir(), rtpGroupsock, + rtpPayloadTypeIfDynamic, + audioSource->samplingRate()); +} diff --git a/AnyCore/lib_rtsp/liveMedia/AC3AudioRTPSink.cpp b/AnyCore/lib_rtsp/liveMedia/AC3AudioRTPSink.cpp new file mode 100644 index 0000000..871501c --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/AC3AudioRTPSink.cpp @@ -0,0 +1,97 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for AC3 audio +// Implementation + +#include "AC3AudioRTPSink.hh" + +AC3AudioRTPSink::AC3AudioRTPSink(UsageEnvironment& env, Groupsock* RTPgs, + u_int8_t rtpPayloadFormat, + u_int32_t rtpTimestampFrequency) + : AudioRTPSink(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency, "AC3"), + fTotNumFragmentsUsed(0) { +} + +AC3AudioRTPSink::~AC3AudioRTPSink() { +} + +AC3AudioRTPSink* +AC3AudioRTPSink::createNew(UsageEnvironment& env, Groupsock* RTPgs, + u_int8_t rtpPayloadFormat, + u_int32_t rtpTimestampFrequency) { + return new AC3AudioRTPSink(env, RTPgs, + rtpPayloadFormat, rtpTimestampFrequency); +} + +Boolean AC3AudioRTPSink +::frameCanAppearAfterPacketStart(unsigned char const* /*frameStart*/, + unsigned /*numBytesInFrame*/) const { + // (For now) allow at most 1 frame in a single packet: + return False; +} + +void AC3AudioRTPSink +::doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes) { + // Set the 2-byte "payload header", as defined in RFC 4184. 
+ unsigned char headers[2]; + + Boolean isFragment = numRemainingBytes > 0 || fragmentationOffset > 0; + if (!isFragment) { + headers[0] = 0; // One or more complete frames + headers[1] = 1; // because we (for now) allow at most 1 frame per packet + } else { + if (fragmentationOffset > 0) { + headers[0] = 3; // Fragment of frame other than initial fragment + } else { + // An initial fragment of the frame + unsigned const totalFrameSize = fragmentationOffset + numBytesInFrame + numRemainingBytes; + unsigned const fiveEighthsPoint = totalFrameSize/2 + totalFrameSize/8; + headers[0] = numBytesInFrame >= fiveEighthsPoint ? 1 : 2; + + // Because this outgoing packet will be full (because it's an initial fragment), we can compute how many total + // fragments (and thus packets) will make up the complete AC-3 frame: + fTotNumFragmentsUsed = (totalFrameSize + (numBytesInFrame-1))/numBytesInFrame; + } + + headers[1] = fTotNumFragmentsUsed; + } + + setSpecialHeaderBytes(headers, sizeof headers); + + if (numRemainingBytes == 0) { + // This packet contains the last (or only) fragment of the frame. + // Set the RTP 'M' ('marker') bit: + setMarkerBit(); + } + + // Important: Also call our base class's doSpecialFrameHandling(), + // to set the packet's timestamp: + MultiFramedRTPSink::doSpecialFrameHandling(fragmentationOffset, + frameStart, numBytesInFrame, + framePresentationTime, + numRemainingBytes); +} + +unsigned AC3AudioRTPSink::specialHeaderSize() const { + return 2; +} diff --git a/AnyCore/lib_rtsp/liveMedia/AC3AudioRTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/AC3AudioRTPSource.cpp new file mode 100644 index 0000000..cf27299 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/AC3AudioRTPSource.cpp @@ -0,0 +1,66 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. 
(See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// AC3 Audio RTP Sources +// Implementation + +#include "AC3AudioRTPSource.hh" + +AC3AudioRTPSource* +AC3AudioRTPSource::createNew(UsageEnvironment& env, + Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) { + return new AC3AudioRTPSource(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency); +} + +AC3AudioRTPSource::AC3AudioRTPSource(UsageEnvironment& env, + Groupsock* rtpGS, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) + : MultiFramedRTPSource(env, rtpGS, + rtpPayloadFormat, rtpTimestampFrequency) { +} + +AC3AudioRTPSource::~AC3AudioRTPSource() { +} + +Boolean AC3AudioRTPSource +::processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize) { + unsigned char* headerStart = packet->data(); + unsigned packetSize = packet->dataSize(); + + // There's a 2-byte payload header at the beginning: + if (packetSize < 2) return False; + resultSpecialHeaderSize = 2; + + unsigned char FT = headerStart[0]&0x03; + fCurrentPacketBeginsFrame = FT != 3; + + // The RTP "M" (marker) bit indicates the last fragment of a frame. 
+ // In case the sender did not set the "M" bit correctly, we also test for FT == 0: + fCurrentPacketCompletesFrame = packet->rtpMarkerBit() || FT == 0; + + return True; +} + +char const* AC3AudioRTPSource::MIMEtype() const { + return "audio/AC3"; +} + diff --git a/AnyCore/lib_rtsp/liveMedia/AC3AudioStreamFramer.cpp b/AnyCore/lib_rtsp/liveMedia/AC3AudioStreamFramer.cpp new file mode 100644 index 0000000..dbb72de --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/AC3AudioStreamFramer.cpp @@ -0,0 +1,340 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A filter that breaks up an AC3 audio elementary stream into frames +// Implementation + +#include "AC3AudioStreamFramer.hh" +#include "StreamParser.hh" +#include + +////////// AC3AudioStreamParser definition ////////// + +class AC3FrameParams { +public: + AC3FrameParams() : samplingFreq(0) {} + // 8-byte header at the start of each frame: + // u_int32_t hdr0, hdr1; + unsigned hdr0, hdr1; + + // parameters derived from the headers + unsigned kbps, samplingFreq, frameSize; + + void setParamsFromHeader(); +}; + +class AC3AudioStreamParser: public StreamParser { +public: + AC3AudioStreamParser(AC3AudioStreamFramer* usingSource, + FramedSource* inputSource); + virtual ~AC3AudioStreamParser(); + +public: + void testStreamCode(unsigned char ourStreamCode, + unsigned char* ptr, unsigned size); + unsigned parseFrame(unsigned& numTruncatedBytes); + // returns the size of the frame that was acquired, or 0 if none was + + void registerReadInterest(unsigned char* to, unsigned maxSize); + + AC3FrameParams const& currentFrame() const { return fCurrentFrame; } + + Boolean haveParsedAFrame() const { return fHaveParsedAFrame; } + void readAndSaveAFrame(); + +private: + static void afterGettingSavedFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingSavedFrame1(unsigned frameSize); + static void onSavedFrameClosure(void* clientData); + void onSavedFrameClosure1(); + +private: + AC3AudioStreamFramer* fUsingSource; + unsigned char* fTo; + unsigned fMaxSize; + + Boolean fHaveParsedAFrame; + unsigned char* fSavedFrame; + unsigned fSavedFrameSize; + char fSavedFrameFlag; + + // Parameters of the most recently read frame: + AC3FrameParams fCurrentFrame; +}; + + +////////// AC3AudioStreamFramer implementation ////////// + +AC3AudioStreamFramer::AC3AudioStreamFramer(UsageEnvironment& env, + FramedSource* inputSource, + unsigned char streamCode) + : FramedFilter(env, 
inputSource), fOurStreamCode(streamCode) { + // Use the current wallclock time as the initial 'presentation time': + gettimeofday(&fNextFramePresentationTime, NULL); + + fParser = new AC3AudioStreamParser(this, inputSource); +} + +AC3AudioStreamFramer::~AC3AudioStreamFramer() { + delete fParser; +} + +AC3AudioStreamFramer* +AC3AudioStreamFramer::createNew(UsageEnvironment& env, + FramedSource* inputSource, + unsigned char streamCode) { + // Need to add source type checking here??? ##### + return new AC3AudioStreamFramer(env, inputSource, streamCode); +} + +unsigned AC3AudioStreamFramer::samplingRate() { + if (!fParser->haveParsedAFrame()) { + // Because we haven't yet parsed a frame, we don't yet know the input + // stream's sampling rate. So, we first need to read a frame + // (into a special buffer that we keep around for later use). + fParser->readAndSaveAFrame(); + } + + return fParser->currentFrame().samplingFreq; +} + +void AC3AudioStreamFramer::flushInput() { + fParser->flushInput(); +} + +void AC3AudioStreamFramer::doGetNextFrame() { + fParser->registerReadInterest(fTo, fMaxSize); + parseNextFrame(); +} + +#define MILLION 1000000 + +struct timeval AC3AudioStreamFramer::currentFramePlayTime() const { + AC3FrameParams const& fr = fParser->currentFrame(); + unsigned const numSamples = 1536; + unsigned const freq = fr.samplingFreq; + + // result is numSamples/freq + unsigned const uSeconds = (freq == 0) ? 
0 + : ((numSamples*2*MILLION)/freq + 1)/2; // rounds to nearest integer + + struct timeval result; + result.tv_sec = uSeconds/MILLION; + result.tv_usec = uSeconds%MILLION; + return result; +} + +void AC3AudioStreamFramer +::handleNewData(void* clientData, unsigned char* ptr, unsigned size, + struct timeval /*presentationTime*/) { + AC3AudioStreamFramer* framer = (AC3AudioStreamFramer*)clientData; + framer->handleNewData(ptr, size); +} + +void AC3AudioStreamFramer +::handleNewData(unsigned char* ptr, unsigned size) { + fParser->testStreamCode(fOurStreamCode, ptr, size); + + parseNextFrame(); +} + +void AC3AudioStreamFramer::parseNextFrame() { + unsigned acquiredFrameSize = fParser->parseFrame(fNumTruncatedBytes); + if (acquiredFrameSize > 0) { + // We were able to acquire a frame from the input. + // It has already been copied to the reader's space. + fFrameSize = acquiredFrameSize; + + // Also set the presentation time, and increment it for next time, + // based on the length of this frame: + fPresentationTime = fNextFramePresentationTime; + + struct timeval framePlayTime = currentFramePlayTime(); + fDurationInMicroseconds = framePlayTime.tv_sec*MILLION + framePlayTime.tv_usec; + fNextFramePresentationTime.tv_usec += framePlayTime.tv_usec; + fNextFramePresentationTime.tv_sec + += framePlayTime.tv_sec + fNextFramePresentationTime.tv_usec/MILLION; + fNextFramePresentationTime.tv_usec %= MILLION; + + // Call our own 'after getting' function. Because we're not a 'leaf' + // source, we can call this directly, without risking infinite recursion. + afterGetting(this); + } else { + // We were unable to parse a complete frame from the input, because: + // - we had to read more data from the source stream, or + // - the source stream has ended. 
+ } +} + + +////////// AC3AudioStreamParser implementation ////////// + +static int const kbpsTable[] = {32, 40, 48, 56, 64, 80, 96, 112, + 128, 160, 192, 224, 256, 320, 384, 448, + 512, 576, 640}; + +void AC3FrameParams::setParamsFromHeader() { + unsigned char byte4 = hdr1 >> 24; + + unsigned char kbpsIndex = (byte4&0x3E) >> 1; + if (kbpsIndex > 18) kbpsIndex = 18; + kbps = kbpsTable[kbpsIndex]; + + unsigned char samplingFreqIndex = (byte4&0xC0) >> 6; + switch (samplingFreqIndex) { + case 0: + samplingFreq = 48000; + frameSize = 4*kbps; + break; + case 1: + samplingFreq = 44100; + frameSize = 2*(320*kbps/147 + (byte4&1)); + break; + case 2: + case 3: // not legal? + samplingFreq = 32000; + frameSize = 6*kbps; + } +} + +AC3AudioStreamParser +::AC3AudioStreamParser(AC3AudioStreamFramer* usingSource, + FramedSource* inputSource) + : StreamParser(inputSource, FramedSource::handleClosure, usingSource, + &AC3AudioStreamFramer::handleNewData, usingSource), + fUsingSource(usingSource), fHaveParsedAFrame(False), + fSavedFrame(NULL), fSavedFrameSize(0) { +} + +AC3AudioStreamParser::~AC3AudioStreamParser() { +} + +void AC3AudioStreamParser::registerReadInterest(unsigned char* to, + unsigned maxSize) { + fTo = to; + fMaxSize = maxSize; +} + +void AC3AudioStreamParser +::testStreamCode(unsigned char ourStreamCode, + unsigned char* ptr, unsigned size) { + if (ourStreamCode == 0) return; // we assume that there's no stream code at the beginning of the data + + if (size < 4) return; + unsigned char streamCode = *ptr; + + if (streamCode == ourStreamCode) { + // Remove the first 4 bytes from the stream: + memmove(ptr, ptr + 4, size - 4); + totNumValidBytes() = totNumValidBytes() - 4; + } else { + // Discard all of the data that was just read: + totNumValidBytes() = totNumValidBytes() - size; + } +} + +unsigned AC3AudioStreamParser::parseFrame(unsigned& numTruncatedBytes) { + if (fSavedFrameSize > 0) { + // We've already read and parsed a frame. 
Use it instead: + memmove(fTo, fSavedFrame, fSavedFrameSize); + delete[] fSavedFrame; fSavedFrame = NULL; + unsigned frameSize = fSavedFrameSize; + fSavedFrameSize = 0; + return frameSize; + } + + try { + saveParserState(); + + // We expect an AC3 audio header (first 2 bytes == 0x0B77) at the start: + while (1) { + unsigned next4Bytes = test4Bytes(); + if (next4Bytes>>16 == 0x0B77) break; + skipBytes(1); + saveParserState(); + } + fCurrentFrame.hdr0 = get4Bytes(); + fCurrentFrame.hdr1 = test4Bytes(); + + fCurrentFrame.setParamsFromHeader(); + fHaveParsedAFrame = True; + + // Copy the frame to the requested destination: + unsigned frameSize = fCurrentFrame.frameSize; + if (frameSize > fMaxSize) { + numTruncatedBytes = frameSize - fMaxSize; + frameSize = fMaxSize; + } else { + numTruncatedBytes = 0; + } + + fTo[0] = fCurrentFrame.hdr0 >> 24; + fTo[1] = fCurrentFrame.hdr0 >> 16; + fTo[2] = fCurrentFrame.hdr0 >> 8; + fTo[3] = fCurrentFrame.hdr0; + getBytes(&fTo[4], frameSize-4); + skipBytes(numTruncatedBytes); + + return frameSize; + } catch (int /*e*/) { +#ifdef DEBUG + fUsingSource->envir() << "AC3AudioStreamParser::parseFrame() EXCEPTION (This is normal behavior - *not* an error)\n"; +#endif + return 0; // the parsing got interrupted + } +} + +void AC3AudioStreamParser::readAndSaveAFrame() { + unsigned const maxAC3FrameSize = 4000; + fSavedFrame = new unsigned char[maxAC3FrameSize]; + fSavedFrameSize = 0; + + fSavedFrameFlag = 0; + fUsingSource->getNextFrame(fSavedFrame, maxAC3FrameSize, + afterGettingSavedFrame, this, + onSavedFrameClosure, this); + fUsingSource->envir().taskScheduler().doEventLoop(&fSavedFrameFlag); +} + +void AC3AudioStreamParser +::afterGettingSavedFrame(void* clientData, unsigned frameSize, + unsigned /*numTruncatedBytes*/, + struct timeval /*presentationTime*/, + unsigned /*durationInMicroseconds*/) { + AC3AudioStreamParser* parser = (AC3AudioStreamParser*)clientData; + parser->afterGettingSavedFrame1(frameSize); +} + +void 
AC3AudioStreamParser +::afterGettingSavedFrame1(unsigned frameSize) { + fSavedFrameSize = frameSize; + fSavedFrameFlag = ~0; +} + +void AC3AudioStreamParser::onSavedFrameClosure(void* clientData) { + AC3AudioStreamParser* parser = (AC3AudioStreamParser*)clientData; + parser->onSavedFrameClosure1(); +} + +void AC3AudioStreamParser::onSavedFrameClosure1() { + delete[] fSavedFrame; fSavedFrame = NULL; + fSavedFrameSize = 0; + fSavedFrameFlag = ~0; +} diff --git a/AnyCore/lib_rtsp/liveMedia/ADTSAudioFileServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/ADTSAudioFileServerMediaSubsession.cpp new file mode 100644 index 0000000..a3a1bd2 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/ADTSAudioFileServerMediaSubsession.cpp @@ -0,0 +1,60 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from an AAC audio file in ADTS format +// Implementation + +#include "ADTSAudioFileServerMediaSubsession.hh" +#include "ADTSAudioFileSource.hh" +#include "MPEG4GenericRTPSink.hh" + +ADTSAudioFileServerMediaSubsession* +ADTSAudioFileServerMediaSubsession::createNew(UsageEnvironment& env, + char const* fileName, + Boolean reuseFirstSource) { + return new ADTSAudioFileServerMediaSubsession(env, fileName, reuseFirstSource); +} + +ADTSAudioFileServerMediaSubsession +::ADTSAudioFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, Boolean reuseFirstSource) + : FileServerMediaSubsession(env, fileName, reuseFirstSource) { +} + +ADTSAudioFileServerMediaSubsession +::~ADTSAudioFileServerMediaSubsession() { +} + +FramedSource* ADTSAudioFileServerMediaSubsession +::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) { + estBitrate = 96; // kbps, estimate + + return ADTSAudioFileSource::createNew(envir(), fFileName); +} + +RTPSink* ADTSAudioFileServerMediaSubsession +::createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource) { + ADTSAudioFileSource* adtsSource = (ADTSAudioFileSource*)inputSource; + return MPEG4GenericRTPSink::createNew(envir(), rtpGroupsock, + rtpPayloadTypeIfDynamic, + adtsSource->samplingFrequency(), + "audio", "AAC-hbr", adtsSource->configStr(), + adtsSource->numChannels()); +} diff --git a/AnyCore/lib_rtsp/liveMedia/ADTSAudioFileSource.cpp b/AnyCore/lib_rtsp/liveMedia/ADTSAudioFileSource.cpp new file mode 100644 index 0000000..cf191a6 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/ADTSAudioFileSource.cpp @@ -0,0 +1,171 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later 
version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A source object for AAC audio files in ADTS format +// Implementation + +#include "ADTSAudioFileSource.hh" +#include "InputFile.hh" +#include + +////////// ADTSAudioFileSource ////////// + +static unsigned const samplingFrequencyTable[16] = { + 96000, 88200, 64000, 48000, + 44100, 32000, 24000, 22050, + 16000, 12000, 11025, 8000, + 7350, 0, 0, 0 +}; + +ADTSAudioFileSource* +ADTSAudioFileSource::createNew(UsageEnvironment& env, char const* fileName) { + FILE* fid = NULL; + do { + fid = OpenInputFile(env, fileName); + if (fid == NULL) break; + + // Now, having opened the input file, read the fixed header of the first frame, + // to get the audio stream's parameters: + unsigned char fixedHeader[4]; // it's actually 3.5 bytes long + if (fread(fixedHeader, 1, sizeof fixedHeader, fid) < sizeof fixedHeader) break; + + // Check the 'syncword': + if (!(fixedHeader[0] == 0xFF && (fixedHeader[1]&0xF0) == 0xF0)) { + env.setResultMsg("Bad 'syncword' at start of ADTS file"); + break; + } + + // Get and check the 'profile': + u_int8_t profile = (fixedHeader[2]&0xC0)>>6; // 2 bits + if (profile == 3) { + env.setResultMsg("Bad (reserved) 'profile': 3 in first frame of ADTS file"); + break; + } + + // Get and check the 'sampling_frequency_index': + u_int8_t sampling_frequency_index = (fixedHeader[2]&0x3C)>>2; // 4 bits + if (samplingFrequencyTable[sampling_frequency_index] == 0) { + env.setResultMsg("Bad 
'sampling_frequency_index' in first frame of ADTS file"); + break; + } + + // Get and check the 'channel_configuration': + u_int8_t channel_configuration + = ((fixedHeader[2]&0x01)<<2)|((fixedHeader[3]&0xC0)>>6); // 3 bits + + // If we get here, the frame header was OK. + // Reset the fid to the beginning of the file: +#ifndef _WIN32_WCE + rewind(fid); +#else + SeekFile64(fid, SEEK_SET,0); +#endif +#ifdef DEBUG + fprintf(stderr, "Read first frame: profile %d, " + "sampling_frequency_index %d => samplingFrequency %d, " + "channel_configuration %d\n", + profile, + sampling_frequency_index, samplingFrequencyTable[sampling_frequency_index], + channel_configuration); +#endif + return new ADTSAudioFileSource(env, fid, profile, + sampling_frequency_index, channel_configuration); + } while (0); + + // An error occurred: + CloseInputFile(fid); + return NULL; +} + +ADTSAudioFileSource +::ADTSAudioFileSource(UsageEnvironment& env, FILE* fid, u_int8_t profile, + u_int8_t samplingFrequencyIndex, u_int8_t channelConfiguration) + : FramedFileSource(env, fid) { + fSamplingFrequency = samplingFrequencyTable[samplingFrequencyIndex]; + fNumChannels = channelConfiguration == 0 ? 2 : channelConfiguration; + fuSecsPerFrame + = (1024/*samples-per-frame*/*1000000) / fSamplingFrequency/*samples-per-second*/; + + // Construct the 'AudioSpecificConfig', and from it, the corresponding ASCII string: + unsigned char audioSpecificConfig[2]; + u_int8_t const audioObjectType = profile + 1; + audioSpecificConfig[0] = (audioObjectType<<3) | (samplingFrequencyIndex>>1); + audioSpecificConfig[1] = (samplingFrequencyIndex<<7) | (channelConfiguration<<3); + sprintf(fConfigStr, "%02X%02x", audioSpecificConfig[0], audioSpecificConfig[1]); +} + +ADTSAudioFileSource::~ADTSAudioFileSource() { + CloseInputFile(fFid); +} + +// Note: We should change the following to use asynchronous file reading, ##### +// as we now do with ByteStreamFileSource. 
##### +void ADTSAudioFileSource::doGetNextFrame() { + // Begin by reading the 7-byte fixed_variable headers: + unsigned char headers[7]; + if (fread(headers, 1, sizeof headers, fFid) < sizeof headers + || feof(fFid) || ferror(fFid)) { + // The input source has ended: + handleClosure(); + return; + } + + // Extract important fields from the headers: + Boolean protection_absent = headers[1]&0x01; + u_int16_t frame_length + = ((headers[3]&0x03)<<11) | (headers[4]<<3) | ((headers[5]&0xE0)>>5); +#ifdef DEBUG + u_int16_t syncword = (headers[0]<<4) | (headers[1]>>4); + fprintf(stderr, "Read frame: syncword 0x%x, protection_absent %d, frame_length %d\n", syncword, protection_absent, frame_length); + if (syncword != 0xFFF) fprintf(stderr, "WARNING: Bad syncword!\n"); +#endif + unsigned numBytesToRead + = frame_length > sizeof headers ? frame_length - sizeof headers : 0; + + // If there's a 'crc_check' field, skip it: + if (!protection_absent) { + SeekFile64(fFid, 2, SEEK_CUR); + numBytesToRead = numBytesToRead > 2 ? 
numBytesToRead - 2 : 0; + } + + // Next, read the raw frame data into the buffer provided: + if (numBytesToRead > fMaxSize) { + fNumTruncatedBytes = numBytesToRead - fMaxSize; + numBytesToRead = fMaxSize; + } + int numBytesRead = fread(fTo, 1, numBytesToRead, fFid); + if (numBytesRead < 0) numBytesRead = 0; + fFrameSize = numBytesRead; + fNumTruncatedBytes += numBytesToRead - numBytesRead; + + // Set the 'presentation time': + if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) { + // This is the first frame, so use the current time: + gettimeofday(&fPresentationTime, NULL); + } else { + // Increment by the play time of the previous frame: + unsigned uSeconds = fPresentationTime.tv_usec + fuSecsPerFrame; + fPresentationTime.tv_sec += uSeconds/1000000; + fPresentationTime.tv_usec = uSeconds%1000000; + } + + fDurationInMicroseconds = fuSecsPerFrame; + + // Switch to another task, and inform the reader that he has data: + nextTask() = envir().taskScheduler().scheduleDelayedTask(0, + (TaskFunc*)FramedSource::afterGetting, this); +} diff --git a/AnyCore/lib_rtsp/liveMedia/AMRAudioFileServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/AMRAudioFileServerMediaSubsession.cpp new file mode 100644 index 0000000..4572b96 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/AMRAudioFileServerMediaSubsession.cpp @@ -0,0 +1,59 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from an AMR audio file. +// Implementation + +#include "AMRAudioFileServerMediaSubsession.hh" +#include "AMRAudioRTPSink.hh" +#include "AMRAudioFileSource.hh" + +AMRAudioFileServerMediaSubsession* +AMRAudioFileServerMediaSubsession::createNew(UsageEnvironment& env, + char const* fileName, + Boolean reuseFirstSource) { + return new AMRAudioFileServerMediaSubsession(env, fileName, reuseFirstSource); +} + +AMRAudioFileServerMediaSubsession +::AMRAudioFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, Boolean reuseFirstSource) + : FileServerMediaSubsession(env, fileName, reuseFirstSource) { +} + +AMRAudioFileServerMediaSubsession +::~AMRAudioFileServerMediaSubsession() { +} + +FramedSource* AMRAudioFileServerMediaSubsession +::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) { + estBitrate = 10; // kbps, estimate + + return AMRAudioFileSource::createNew(envir(), fFileName); +} + +RTPSink* AMRAudioFileServerMediaSubsession +::createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource) { + AMRAudioFileSource* amrSource = (AMRAudioFileSource*)inputSource; + return AMRAudioRTPSink::createNew(envir(), rtpGroupsock, + rtpPayloadTypeIfDynamic, + amrSource->isWideband(), + amrSource->numChannels()); +} diff --git a/AnyCore/lib_rtsp/liveMedia/AMRAudioFileSink.cpp b/AnyCore/lib_rtsp/liveMedia/AMRAudioFileSink.cpp new file mode 100644 index 0000000..4a68569 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/AMRAudioFileSink.cpp @@ -0,0 +1,101 @@ +/********** +This library is free software; 
you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// AMR Audio File sinks +// Implementation + +#include "AMRAudioFileSink.hh" +#include "AMRAudioSource.hh" +#include "OutputFile.hh" + +////////// AMRAudioFileSink ////////// + +AMRAudioFileSink +::AMRAudioFileSink(UsageEnvironment& env, FILE* fid, unsigned bufferSize, + char const* perFrameFileNamePrefix) + : FileSink(env, fid, bufferSize, perFrameFileNamePrefix), + fHaveWrittenHeader(False) { +} + +AMRAudioFileSink::~AMRAudioFileSink() { +} + +AMRAudioFileSink* +AMRAudioFileSink::createNew(UsageEnvironment& env, char const* fileName, + unsigned bufferSize, Boolean oneFilePerFrame) { + do { + FILE* fid; + char const* perFrameFileNamePrefix; + if (oneFilePerFrame) { + // Create the fid for each frame + fid = NULL; + perFrameFileNamePrefix = fileName; + } else { + // Normal case: create the fid once + fid = OpenOutputFile(env, fileName); + if (fid == NULL) break; + perFrameFileNamePrefix = NULL; + } + + return new AMRAudioFileSink(env, fid, bufferSize, perFrameFileNamePrefix); + } while (0); + + return NULL; +} + +Boolean AMRAudioFileSink::sourceIsCompatibleWithUs(MediaSource& source) { + // The input source must be a AMR Audio source: + return source.isAMRAudioSource(); +} + +void 
AMRAudioFileSink::afterGettingFrame(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime) { + AMRAudioSource* source = (AMRAudioSource*)fSource; + if (source == NULL) return; // sanity check + + if (!fHaveWrittenHeader && fPerFrameFileNameBuffer == NULL) { + // Output the appropriate AMR header to the start of the file. + // This header is defined in RFC 4867, section 5. + // (However, we don't do this if we're creating one file per frame.) + char headerBuffer[100]; + sprintf(headerBuffer, "#!AMR%s%s\n", + source->isWideband() ? "-WB" : "", + source->numChannels() > 1 ? "_MC1.0" : ""); + unsigned headerLength = strlen(headerBuffer); + if (source->numChannels() > 1) { + // Also add a 32-bit channel description field: + headerBuffer[headerLength++] = 0; + headerBuffer[headerLength++] = 0; + headerBuffer[headerLength++] = 0; + headerBuffer[headerLength++] = source->numChannels(); + } + + addData((unsigned char*)headerBuffer, headerLength, presentationTime); + } + fHaveWrittenHeader = True; + + // Add the 1-byte header, before writing the file data proper: + // (Again, we don't do this if we're creating one file per frame.) + if (fPerFrameFileNameBuffer == NULL) { + u_int8_t frameHeader = source->lastFrameHeader(); + addData(&frameHeader, 1, presentationTime); + } + + // Call the parent class to complete the normal file write with the input data: + FileSink::afterGettingFrame(frameSize, numTruncatedBytes, presentationTime); +} diff --git a/AnyCore/lib_rtsp/liveMedia/AMRAudioFileSource.cpp b/AnyCore/lib_rtsp/liveMedia/AMRAudioFileSource.cpp new file mode 100644 index 0000000..067138a --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/AMRAudioFileSource.cpp @@ -0,0 +1,174 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A source object for AMR audio files (as defined in RFC 4867, section 5) +// Implementation + +#include "AMRAudioFileSource.hh" +#include "InputFile.hh" +#include "GroupsockHelper.hh" + +////////// AMRAudioFileSource ////////// + +AMRAudioFileSource* +AMRAudioFileSource::createNew(UsageEnvironment& env, char const* fileName) { + FILE* fid = NULL; + Boolean magicNumberOK = True; + do { + + fid = OpenInputFile(env, fileName); + if (fid == NULL) break; + + // Now, having opened the input file, read the first few bytes, to + // check the required 'magic number': + magicNumberOK = False; // until we learn otherwise + Boolean isWideband = False; // by default + unsigned numChannels = 1; // by default + char buf[100]; + // Start with the first 6 bytes (the first 5 of which must be "#!AMR"): + if (fread(buf, 1, 6, fid) < 6) break; + if (strncmp(buf, "#!AMR", 5) != 0) break; // bad magic # + unsigned bytesRead = 6; + + // The next bytes must be "\n", "-WB\n", "_MC1.0\n", or "-WB_MC1.0\n" + if (buf[5] == '-') { + // The next bytes must be "WB\n" or "WB_MC1.0\n" + if (fread(&buf[bytesRead], 1, 3, fid) < 3) break; + if (strncmp(&buf[bytesRead], "WB", 2) != 0) break; // bad magic # + isWideband = True; + bytesRead += 3; + } + if (buf[bytesRead-1] == '_') { + // The next bytes must be "MC1.0\n" + if (fread(&buf[bytesRead], 1, 6, fid) < 6) break; + if (strncmp(&buf[bytesRead], "MC1.0\n", 6) != 0) break; // bad magic # + 
bytesRead += 6; + + // The next 4 bytes contain the number of channels: + char channelDesc[4]; + if (fread(channelDesc, 1, 4, fid) < 4) break; + numChannels = channelDesc[3]&0xF; + } else if (buf[bytesRead-1] != '\n') { + break; // bad magic # + } + + // If we get here, the magic number was OK: + magicNumberOK = True; + +#ifdef DEBUG + fprintf(stderr, "isWideband: %d, numChannels: %d\n", + isWideband, numChannels); +#endif + return new AMRAudioFileSource(env, fid, isWideband, numChannels); + } while (0); + + // An error occurred: + CloseInputFile(fid); + if (!magicNumberOK) { + env.setResultMsg("Bad (or nonexistent) AMR file header"); + } + return NULL; +} + +AMRAudioFileSource +::AMRAudioFileSource(UsageEnvironment& env, FILE* fid, + Boolean isWideband, unsigned numChannels) + : AMRAudioSource(env, isWideband, numChannels), + fFid(fid) { +} + +AMRAudioFileSource::~AMRAudioFileSource() { + CloseInputFile(fFid); +} + +// The mapping from the "FT" field to frame size. +// Values of 65535 are invalid. +#define FT_INVALID 65535 +static unsigned short const frameSize[16] = { + 12, 13, 15, 17, + 19, 20, 26, 31, + 5, FT_INVALID, FT_INVALID, FT_INVALID, + FT_INVALID, FT_INVALID, FT_INVALID, 0 +}; +static unsigned short const frameSizeWideband[16] = { + 17, 23, 32, 36, + 40, 46, 50, 58, + 60, 5, FT_INVALID, FT_INVALID, + FT_INVALID, FT_INVALID, 0, 0 +}; + +// Note: We should change the following to use asynchronous file reading, ##### +// as we now do with ByteStreamFileSource. 
##### +void AMRAudioFileSource::doGetNextFrame() { + if (feof(fFid) || ferror(fFid)) { + handleClosure(); + return; + } + + // Begin by reading the 1-byte frame header (and checking it for validity) + while (1) { + if (fread(&fLastFrameHeader, 1, 1, fFid) < 1) { + handleClosure(); + return; + } + if ((fLastFrameHeader&0x83) != 0) { +#ifdef DEBUG + fprintf(stderr, "Invalid frame header 0x%02x (padding bits (0x83) are not zero)\n", fLastFrameHeader); +#endif + } else { + unsigned char ft = (fLastFrameHeader&0x78)>>3; + fFrameSize = fIsWideband ? frameSizeWideband[ft] : frameSize[ft]; + if (fFrameSize == FT_INVALID) { +#ifdef DEBUG + fprintf(stderr, "Invalid FT field %d (from frame header 0x%02x)\n", + ft, fLastFrameHeader); +#endif + } else { + // The frame header is OK +#ifdef DEBUG + fprintf(stderr, "Valid frame header 0x%02x -> ft %d -> frame size %d\n", fLastFrameHeader, ft, fFrameSize); +#endif + break; + } + } + } + + // Next, read the frame-block into the buffer provided: + fFrameSize *= fNumChannels; // because multiple channels make up a frame-block + if (fFrameSize > fMaxSize) { + fNumTruncatedBytes = fFrameSize - fMaxSize; + fFrameSize = fMaxSize; + } + fFrameSize = fread(fTo, 1, fFrameSize, fFid); + + // Set the 'presentation time': + if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) { + // This is the first frame, so use the current time: + gettimeofday(&fPresentationTime, NULL); + } else { + // Increment by the play time of the previous frame (20 ms) + unsigned uSeconds = fPresentationTime.tv_usec + 20000; + fPresentationTime.tv_sec += uSeconds/1000000; + fPresentationTime.tv_usec = uSeconds%1000000; + } + + fDurationInMicroseconds = 20000; // each frame is 20 ms + + // Switch to another task, and inform the reader that he has data: + nextTask() = envir().taskScheduler().scheduleDelayedTask(0, + (TaskFunc*)FramedSource::afterGetting, this); + } diff --git a/AnyCore/lib_rtsp/liveMedia/AMRAudioRTPSink.cpp 
b/AnyCore/lib_rtsp/liveMedia/AMRAudioRTPSink.cpp new file mode 100644 index 0000000..acabe93 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/AMRAudioRTPSink.cpp @@ -0,0 +1,134 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for AMR audio (RFC 4867) +// Implementation + +// NOTE: At present, this is just a limited implementation, supporting: +// octet-alignment only; no interleaving; no frame CRC; no robust-sorting. + +#include "AMRAudioRTPSink.hh" +#include "AMRAudioSource.hh" + +AMRAudioRTPSink* +AMRAudioRTPSink::createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + Boolean sourceIsWideband, + unsigned numChannelsInSource) { + return new AMRAudioRTPSink(env, RTPgs, rtpPayloadFormat, + sourceIsWideband, numChannelsInSource); +} + +AMRAudioRTPSink +::AMRAudioRTPSink(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + Boolean sourceIsWideband, unsigned numChannelsInSource) + : AudioRTPSink(env, RTPgs, rtpPayloadFormat, + sourceIsWideband ? 16000 : 8000, + sourceIsWideband ? 
"AMR-WB": "AMR", + numChannelsInSource), + fSourceIsWideband(sourceIsWideband), fFmtpSDPLine(NULL) { +} + +AMRAudioRTPSink::~AMRAudioRTPSink() { + delete[] fFmtpSDPLine; +} + +Boolean AMRAudioRTPSink::sourceIsCompatibleWithUs(MediaSource& source) { + // Our source must be an AMR audio source: + if (!source.isAMRAudioSource()) return False; + + // Also, the source must be wideband iff we asked for this: + AMRAudioSource& amrSource = (AMRAudioSource&)source; + if ((amrSource.isWideband()^fSourceIsWideband) != 0) return False; + + // Also, the source must have the same number of channels that we + // specified. (It could, in principle, have more, but we don't + // support that.) + if (amrSource.numChannels() != numChannels()) return False; + + // Also, because in our current implementation we output only one + // frame in each RTP packet, this means that for multi-channel audio, + // each 'frame-block' will be split over multiple RTP packets, which + // may violate the spec. Warn about this: + if (amrSource.numChannels() > 1) { + envir() << "AMRAudioRTPSink: Warning: Input source has " << amrSource.numChannels() + << " audio channels. 
In the current implementation, the multi-frame frame-block will be split over multiple RTP packets\n"; + } + + return True; +} + +void AMRAudioRTPSink::doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes) { + // If this is the 1st frame in the 1st packet, set the RTP 'M' (marker) + // bit (because this is considered the start of a talk spurt): + if (isFirstPacket() && isFirstFrameInPacket()) { + setMarkerBit(); + } + + // If this is the first frame in the packet, set the 1-byte payload + // header (using CMR 15) + if (isFirstFrameInPacket()) { + u_int8_t payloadHeader = 0xF0; + setSpecialHeaderBytes(&payloadHeader, 1, 0); + } + + // Set the TOC field for the current frame, based on the "FT" and "Q" + // values from our source: + AMRAudioSource* amrSource = (AMRAudioSource*)fSource; + if (amrSource == NULL) return; // sanity check + + u_int8_t toc = amrSource->lastFrameHeader(); + // Clear the "F" bit, because we're the last frame in this packet: ##### + toc &=~ 0x80; + setSpecialHeaderBytes(&toc, 1, 1+numFramesUsedSoFar()); + + // Important: Also call our base class's doSpecialFrameHandling(), + // to set the packet's timestamp: + MultiFramedRTPSink::doSpecialFrameHandling(fragmentationOffset, + frameStart, numBytesInFrame, + framePresentationTime, + numRemainingBytes); +} + +Boolean AMRAudioRTPSink +::frameCanAppearAfterPacketStart(unsigned char const* /*frameStart*/, + unsigned /*numBytesInFrame*/) const { + // For now, pack only one AMR frame into each outgoing RTP packet: ##### + return False; +} + +unsigned AMRAudioRTPSink::specialHeaderSize() const { + // For now, because we're packing only one frame per packet, + // there's just a 1-byte payload header, plus a 1-byte TOC ##### + return 2; +} + +char const* AMRAudioRTPSink::auxSDPLine() { + if (fFmtpSDPLine == NULL) { + // Generate a "a=fmtp:" line with "octet-aligned=1" + // (That is 
the only non-default parameter.) + char buf[100]; + sprintf(buf, "a=fmtp:%d octet-align=1\r\n", rtpPayloadType()); + delete[] fFmtpSDPLine; fFmtpSDPLine = strDup(buf); + } + return fFmtpSDPLine; +} diff --git a/AnyCore/lib_rtsp/liveMedia/AMRAudioRTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/AMRAudioRTPSource.cpp new file mode 100644 index 0000000..8bbd0a3 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/AMRAudioRTPSource.cpp @@ -0,0 +1,747 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// AMR Audio RTP Sources (RFC 4867) +// Implementation + +#include "AMRAudioRTPSource.hh" +#include "MultiFramedRTPSource.hh" +#include "BitVector.hh" +#include +#include + +// This source is implemented internally by two separate sources: +// (i) a RTP source for the raw (and possibly interleaved) AMR frames, and +// (ii) a deinterleaving filter that reads from this. 
+// Define these two new classes here: + +class RawAMRRTPSource: public MultiFramedRTPSource { +public: + static RawAMRRTPSource* + createNew(UsageEnvironment& env, + Groupsock* RTPgs, unsigned char rtpPayloadFormat, + Boolean isWideband, Boolean isOctetAligned, + Boolean isInterleaved, Boolean CRCsArePresent); + + Boolean isWideband() const { return fIsWideband; } + unsigned char ILL() const { return fILL; } + unsigned char ILP() const { return fILP; } + unsigned TOCSize() const { return fTOCSize; } // total # of frames in the last pkt + unsigned char* TOC() const { return fTOC; } // FT+Q value for each TOC entry + unsigned& frameIndex() { return fFrameIndex; } // index of frame-block within pkt + Boolean& isSynchronized() { return fIsSynchronized; } + +private: + RawAMRRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + Boolean isWideband, Boolean isOctetAligned, + Boolean isInterleaved, Boolean CRCsArePresent); + // called only by createNew() + + virtual ~RawAMRRTPSource(); + +private: + // redefined virtual functions: + virtual Boolean hasBeenSynchronizedUsingRTCP(); + + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; + +private: + Boolean fIsWideband, fIsOctetAligned, fIsInterleaved, fCRCsArePresent; + unsigned char fILL, fILP; + unsigned fTOCSize; + unsigned char* fTOC; + unsigned fFrameIndex; + Boolean fIsSynchronized; +}; + +class AMRDeinterleaver: public AMRAudioSource { +public: + static AMRDeinterleaver* + createNew(UsageEnvironment& env, + Boolean isWideband, unsigned numChannels, unsigned maxInterleaveGroupSize, + RawAMRRTPSource* inputSource); + +private: + AMRDeinterleaver(UsageEnvironment& env, + Boolean isWideband, unsigned numChannels, + unsigned maxInterleaveGroupSize, RawAMRRTPSource* inputSource); + // called only by "createNew()" + + virtual ~AMRDeinterleaver(); + + static void afterGettingFrame(void* clientData, 
unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize, struct timeval presentationTime); + +private: + // Redefined virtual functions: + void doGetNextFrame(); + virtual void doStopGettingFrames(); + +private: + RawAMRRTPSource* fInputSource; + class AMRDeinterleavingBuffer* fDeinterleavingBuffer; + Boolean fNeedAFrame; +}; + + +////////// AMRAudioRTPSource implementation ////////// + +#define MAX_NUM_CHANNELS 20 // far larger than ever expected... +#define MAX_INTERLEAVING_GROUP_SIZE 1000 // far larger than ever expected... + +AMRAudioSource* +AMRAudioRTPSource::createNew(UsageEnvironment& env, + Groupsock* RTPgs, + RTPSource*& resultRTPSource, + unsigned char rtpPayloadFormat, + Boolean isWideband, + unsigned numChannels, + Boolean isOctetAligned, + unsigned interleaving, + Boolean robustSortingOrder, + Boolean CRCsArePresent) { + // Perform sanity checks on the input parameters: + if (robustSortingOrder) { + env << "AMRAudioRTPSource::createNew(): 'Robust sorting order' was specified, but we don't yet support this!\n"; + return NULL; + } else if (numChannels > MAX_NUM_CHANNELS) { + env << "AMRAudioRTPSource::createNew(): The \"number of channels\" parameter (" + << numChannels << ") is much too large!\n"; + return NULL; + } else if (interleaving > MAX_INTERLEAVING_GROUP_SIZE) { + env << "AMRAudioRTPSource::createNew(): The \"interleaving\" parameter (" + << interleaving << ") is much too large!\n"; + return NULL; + } + + // 'Bandwidth-efficient mode' precludes some other options: + if (!isOctetAligned) { + if (interleaving > 0 || robustSortingOrder || CRCsArePresent) { + env << "AMRAudioRTPSource::createNew(): 'Bandwidth-efficient mode' was specified, along with interleaving, 'robust sorting order', and/or CRCs, so we assume 'octet-aligned mode' instead.\n"; + isOctetAligned = True; + } + } + + Boolean isInterleaved; + unsigned maxInterleaveGroupSize; 
// in frames (not frame-blocks) + if (interleaving > 0) { + isInterleaved = True; + maxInterleaveGroupSize = interleaving*numChannels; + } else { + isInterleaved = False; + maxInterleaveGroupSize = numChannels; + } + + RawAMRRTPSource* rawRTPSource; + resultRTPSource = rawRTPSource + = RawAMRRTPSource::createNew(env, RTPgs, rtpPayloadFormat, + isWideband, isOctetAligned, + isInterleaved, CRCsArePresent); + if (resultRTPSource == NULL) return NULL; + + AMRDeinterleaver* deinterleaver + = AMRDeinterleaver::createNew(env, isWideband, numChannels, + maxInterleaveGroupSize, rawRTPSource); + if (deinterleaver == NULL) { + Medium::close(resultRTPSource); + resultRTPSource = NULL; + } + + return deinterleaver; +} + + +////////// AMRBufferedPacket and AMRBufferedPacketFactory ////////// + +// A subclass of BufferedPacket, used to separate out AMR frames. + +class AMRBufferedPacket: public BufferedPacket { +public: + AMRBufferedPacket(RawAMRRTPSource& ourSource); + virtual ~AMRBufferedPacket(); + +private: // redefined virtual functions + virtual unsigned nextEnclosedFrameSize(unsigned char*& framePtr, + unsigned dataSize); +private: + RawAMRRTPSource& fOurSource; +}; + +class AMRBufferedPacketFactory: public BufferedPacketFactory { +private: // redefined virtual functions + virtual BufferedPacket* createNewPacket(MultiFramedRTPSource* ourSource); +}; + + +///////// RawAMRRTPSource implementation //////// + +RawAMRRTPSource* +RawAMRRTPSource::createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + Boolean isWideband, Boolean isOctetAligned, + Boolean isInterleaved, Boolean CRCsArePresent) { + return new RawAMRRTPSource(env, RTPgs, rtpPayloadFormat, + isWideband, isOctetAligned, + isInterleaved, CRCsArePresent); +} + +RawAMRRTPSource +::RawAMRRTPSource(UsageEnvironment& env, + Groupsock* RTPgs, unsigned char rtpPayloadFormat, + Boolean isWideband, Boolean isOctetAligned, + Boolean isInterleaved, Boolean CRCsArePresent) + : 
MultiFramedRTPSource(env, RTPgs, rtpPayloadFormat, + isWideband ? 16000 : 8000, + new AMRBufferedPacketFactory), + fIsWideband(isWideband), fIsOctetAligned(isOctetAligned), + fIsInterleaved(isInterleaved), fCRCsArePresent(CRCsArePresent), + fILL(0), fILP(0), fTOCSize(0), fTOC(NULL), fFrameIndex(0), fIsSynchronized(False) { +} + +RawAMRRTPSource::~RawAMRRTPSource() { + delete[] fTOC; +} + +#define FT_SPEECH_LOST 14 +#define FT_NO_DATA 15 + +static void unpackBandwidthEfficientData(BufferedPacket* packet, + Boolean isWideband); // forward + +Boolean RawAMRRTPSource +::processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize) { + // If the data is 'bandwidth-efficient', first unpack it so that it's + // 'octet-aligned': + if (!fIsOctetAligned) unpackBandwidthEfficientData(packet, fIsWideband); + + unsigned char* headerStart = packet->data(); + unsigned packetSize = packet->dataSize(); + + // There's at least a 1-byte header, containing the CMR: + if (packetSize < 1) return False; + resultSpecialHeaderSize = 1; + + if (fIsInterleaved) { + // There's an extra byte, containing the interleave parameters: + if (packetSize < 2) return False; + + // Get the interleaving parameters, and check them for validity: + unsigned char const secondByte = headerStart[1]; + fILL = (secondByte&0xF0)>>4; + fILP = secondByte&0x0F; + if (fILP > fILL) return False; // invalid + ++resultSpecialHeaderSize; + } +#ifdef DEBUG + fprintf(stderr, "packetSize: %d, ILL: %d, ILP: %d\n", packetSize, fILL, fILP); +#endif + fFrameIndex = 0; // initially + + // Next, there's a "Payload Table of Contents" (one byte per entry): + unsigned numFramesPresent = 0, numNonEmptyFramesPresent = 0; + unsigned tocStartIndex = resultSpecialHeaderSize; + Boolean F; + do { + if (resultSpecialHeaderSize >= packetSize) return False; + unsigned char const tocByte = headerStart[resultSpecialHeaderSize++]; + F = (tocByte&0x80) != 0; + unsigned char const FT = (tocByte&0x78) >> 3; +#ifdef DEBUG + 
unsigned char Q = (tocByte&0x04)>>2; + fprintf(stderr, "\tTOC entry: F %d, FT %d, Q %d\n", F, FT, Q); +#endif + ++numFramesPresent; + if (FT != FT_SPEECH_LOST && FT != FT_NO_DATA) ++numNonEmptyFramesPresent; + } while (F); +#ifdef DEBUG + fprintf(stderr, "TOC contains %d entries (%d non-empty)\n", numFramesPresent, numNonEmptyFramesPresent); +#endif + + // Now that we know the size of the TOC, fill in our copy: + if (numFramesPresent > fTOCSize) { + delete[] fTOC; + fTOC = new unsigned char[numFramesPresent]; + } + fTOCSize = numFramesPresent; + for (unsigned i = 0; i < fTOCSize; ++i) { + unsigned char const tocByte = headerStart[tocStartIndex + i]; + fTOC[i] = tocByte&0x7C; // clear everything except the F and Q fields + } + + if (fCRCsArePresent) { + // 'numNonEmptyFramesPresent' CRC bytes will follow. + // Note: we currently don't check the CRCs for validity ##### + resultSpecialHeaderSize += numNonEmptyFramesPresent; +#ifdef DEBUG + fprintf(stderr, "Ignoring %d following CRC bytes\n", numNonEmptyFramesPresent); +#endif + if (resultSpecialHeaderSize > packetSize) return False; + } +#ifdef DEBUG + fprintf(stderr, "Total special header size: %d\n", resultSpecialHeaderSize); +#endif + + return True; +} + +char const* RawAMRRTPSource::MIMEtype() const { + return fIsWideband ? "audio/AMR-WB" : "audio/AMR"; +} + +Boolean RawAMRRTPSource::hasBeenSynchronizedUsingRTCP() { + return fIsSynchronized; +} + + +///// AMRBufferedPacket and AMRBufferedPacketFactory implementation + +AMRBufferedPacket::AMRBufferedPacket(RawAMRRTPSource& ourSource) + : fOurSource(ourSource) { +} + +AMRBufferedPacket::~AMRBufferedPacket() { +} + +// The mapping from the "FT" field to frame size. +// Values of 65535 are invalid. 
+#define FT_INVALID 65535 +static unsigned short const frameBytesFromFT[16] = { + 12, 13, 15, 17, + 19, 20, 26, 31, + 5, FT_INVALID, FT_INVALID, FT_INVALID, + FT_INVALID, FT_INVALID, FT_INVALID, 0 +}; +static unsigned short const frameBytesFromFTWideband[16] = { + 17, 23, 32, 36, + 40, 46, 50, 58, + 60, 5, FT_INVALID, FT_INVALID, + FT_INVALID, FT_INVALID, 0, 0 +}; + +unsigned AMRBufferedPacket:: + nextEnclosedFrameSize(unsigned char*& framePtr, unsigned dataSize) { + if (dataSize == 0) return 0; // sanity check + + // The size of the AMR frame is determined by the corresponding 'FT' value + // in the packet's Table of Contents. + unsigned const tocIndex = fOurSource.frameIndex(); + if (tocIndex >= fOurSource.TOCSize()) return 0; // sanity check + + unsigned char const tocByte = fOurSource.TOC()[tocIndex]; + unsigned char const FT = (tocByte&0x78) >> 3; + // ASSERT: FT < 16 + unsigned short frameSize + = fOurSource.isWideband() ? frameBytesFromFTWideband[FT] : frameBytesFromFT[FT]; + if (frameSize == FT_INVALID) { + // Strange TOC entry! + fOurSource.envir() << "AMRBufferedPacket::nextEnclosedFrameSize(): invalid FT: " << FT << "\n"; + frameSize = 0; // This probably messes up the rest of this packet, but... 
+ } +#ifdef DEBUG + fprintf(stderr, "AMRBufferedPacket::nextEnclosedFrameSize(): frame #: %d, FT: %d, isWideband: %d => frameSize: %d (dataSize: %d)\n", tocIndex, FT, fOurSource.isWideband(), frameSize, dataSize); +#endif + ++fOurSource.frameIndex(); + + if (dataSize < frameSize) return 0; + return frameSize; +} + +BufferedPacket* AMRBufferedPacketFactory +::createNewPacket(MultiFramedRTPSource* ourSource) { + return new AMRBufferedPacket((RawAMRRTPSource&)(*ourSource)); +} + +///////// AMRDeinterleavingBuffer ///////// +// (used to implement AMRDeinterleaver) + +#define AMR_MAX_FRAME_SIZE 60 + +class AMRDeinterleavingBuffer { +public: + AMRDeinterleavingBuffer(unsigned numChannels, unsigned maxInterleaveGroupSize); + virtual ~AMRDeinterleavingBuffer(); + + void deliverIncomingFrame(unsigned frameSize, RawAMRRTPSource* source, + struct timeval presentationTime); + Boolean retrieveFrame(unsigned char* to, unsigned maxSize, + unsigned& resultFrameSize, unsigned& resultNumTruncatedBytes, + u_int8_t& resultFrameHeader, + struct timeval& resultPresentationTime, + Boolean& resultIsSynchronized); + + unsigned char* inputBuffer() { return fInputBuffer; } + unsigned inputBufferSize() const { return AMR_MAX_FRAME_SIZE; } + +private: + unsigned char* createNewBuffer(); + + class FrameDescriptor { + public: + FrameDescriptor(); + virtual ~FrameDescriptor(); + + unsigned frameSize; + unsigned char* frameData; + u_int8_t frameHeader; + struct timeval presentationTime; + Boolean fIsSynchronized; + }; + + unsigned fNumChannels, fMaxInterleaveGroupSize; + FrameDescriptor* fFrames[2]; + unsigned char fIncomingBankId; // toggles between 0 and 1 + unsigned char fIncomingBinMax; // in the incoming bank + unsigned char fOutgoingBinMax; // in the outgoing bank + unsigned char fNextOutgoingBin; + Boolean fHaveSeenPackets; + u_int16_t fLastPacketSeqNumForGroup; + unsigned char* fInputBuffer; + struct timeval fLastRetrievedPresentationTime; + unsigned fNumSuccessiveSyncedFrames; + unsigned 
char fILL; +}; + + +////////// AMRDeinterleaver implementation ///////// + +AMRDeinterleaver* AMRDeinterleaver +::createNew(UsageEnvironment& env, + Boolean isWideband, unsigned numChannels, unsigned maxInterleaveGroupSize, + RawAMRRTPSource* inputSource) { + return new AMRDeinterleaver(env, isWideband, numChannels, maxInterleaveGroupSize, inputSource); +} + +AMRDeinterleaver::AMRDeinterleaver(UsageEnvironment& env, + Boolean isWideband, unsigned numChannels, + unsigned maxInterleaveGroupSize, + RawAMRRTPSource* inputSource) + : AMRAudioSource(env, isWideband, numChannels), + fInputSource(inputSource), fNeedAFrame(False) { + fDeinterleavingBuffer + = new AMRDeinterleavingBuffer(numChannels, maxInterleaveGroupSize); +} + +AMRDeinterleaver::~AMRDeinterleaver() { + delete fDeinterleavingBuffer; + Medium::close(fInputSource); +} + +static unsigned const uSecsPerFrame = 20000; // 20 ms + +void AMRDeinterleaver::doGetNextFrame() { + // First, try getting a frame from the deinterleaving buffer: + if (fDeinterleavingBuffer->retrieveFrame(fTo, fMaxSize, + fFrameSize, fNumTruncatedBytes, + fLastFrameHeader, fPresentationTime, + fInputSource->isSynchronized())) { + + // Success! + fNeedAFrame = False; + + fDurationInMicroseconds = uSecsPerFrame; + + // Call our own 'after getting' function. 
Because we're not a 'leaf' + // source, we can call this directly, without risking + // infinite recursion + afterGetting(this); + return; + } + + // No luck, so ask our source for help: + fNeedAFrame = True; + if (!fInputSource->isCurrentlyAwaitingData()) { + fInputSource->getNextFrame(fDeinterleavingBuffer->inputBuffer(), + fDeinterleavingBuffer->inputBufferSize(), + afterGettingFrame, this, + FramedSource::handleClosure, this); + } +} + +void AMRDeinterleaver::doStopGettingFrames() { + fNeedAFrame = False; + fInputSource->stopGettingFrames(); +} + +void AMRDeinterleaver +::afterGettingFrame(void* clientData, unsigned frameSize, + unsigned /*numTruncatedBytes*/, + struct timeval presentationTime, + unsigned /*durationInMicroseconds*/) { + AMRDeinterleaver* deinterleaver = (AMRDeinterleaver*)clientData; + deinterleaver->afterGettingFrame1(frameSize, presentationTime); +} + +void AMRDeinterleaver +::afterGettingFrame1(unsigned frameSize, struct timeval presentationTime) { + RawAMRRTPSource* source = (RawAMRRTPSource*)fInputSource; + + // First, put the frame into our deinterleaving buffer: + fDeinterleavingBuffer->deliverIncomingFrame(frameSize, source, presentationTime); + + // Then, try delivering a frame to the client (if he wants one): + if (fNeedAFrame) doGetNextFrame(); +} + + +////////// AMRDeinterleavingBuffer implementation ///////// + +AMRDeinterleavingBuffer +::AMRDeinterleavingBuffer(unsigned numChannels, unsigned maxInterleaveGroupSize) + : fNumChannels(numChannels), fMaxInterleaveGroupSize(maxInterleaveGroupSize), + fIncomingBankId(0), fIncomingBinMax(0), + fOutgoingBinMax(0), fNextOutgoingBin(0), + fHaveSeenPackets(False), fNumSuccessiveSyncedFrames(0), fILL(0) { + // Use two banks of descriptors - one for incoming, one for outgoing + fFrames[0] = new FrameDescriptor[fMaxInterleaveGroupSize]; + fFrames[1] = new FrameDescriptor[fMaxInterleaveGroupSize]; + fInputBuffer = createNewBuffer(); +} + +AMRDeinterleavingBuffer::~AMRDeinterleavingBuffer() { + 
delete[] fInputBuffer; + delete[] fFrames[0]; delete[] fFrames[1]; +} + +void AMRDeinterleavingBuffer +::deliverIncomingFrame(unsigned frameSize, RawAMRRTPSource* source, + struct timeval presentationTime) { + fILL = source->ILL(); + unsigned char const ILP = source->ILP(); + unsigned frameIndex = source->frameIndex(); + unsigned short packetSeqNum = source->curPacketRTPSeqNum(); + + // First perform a sanity check on the parameters: + // (This is overkill, as the source should have already done this.) + if (ILP > fILL || frameIndex == 0) { +#ifdef DEBUG + fprintf(stderr, "AMRDeinterleavingBuffer::deliverIncomingFrame() param sanity check failed (%d,%d,%d,%d)\n", frameSize, fILL, ILP, frameIndex); +#endif + source->envir().internalError(); + } + + --frameIndex; // because it was incremented by the source when this frame was read + u_int8_t frameHeader; + if (frameIndex >= source->TOCSize()) { // sanity check + frameHeader = FT_NO_DATA<<3; + } else { + frameHeader = source->TOC()[frameIndex]; + } + + unsigned frameBlockIndex = frameIndex/fNumChannels; + unsigned frameWithinFrameBlock = frameIndex%fNumChannels; + + // The input "presentationTime" was that of the first frame-block in this + // packet. 
Update it for the current frame: + unsigned uSecIncrement = frameBlockIndex*(fILL+1)*uSecsPerFrame; + presentationTime.tv_usec += uSecIncrement; + presentationTime.tv_sec += presentationTime.tv_usec/1000000; + presentationTime.tv_usec = presentationTime.tv_usec%1000000; + + // Next, check whether this packet is part of a new interleave group + if (!fHaveSeenPackets + || seqNumLT(fLastPacketSeqNumForGroup, packetSeqNum + frameBlockIndex)) { + // We've moved to a new interleave group +#ifdef DEBUG + fprintf(stderr, "AMRDeinterleavingBuffer::deliverIncomingFrame(): new interleave group\n"); +#endif + fHaveSeenPackets = True; + fLastPacketSeqNumForGroup = packetSeqNum + fILL - ILP; + + // Switch the incoming and outgoing banks: + fIncomingBankId ^= 1; + unsigned char tmp = fIncomingBinMax; + fIncomingBinMax = fOutgoingBinMax; + fOutgoingBinMax = tmp; + fNextOutgoingBin = 0; + } + + // Now move the incoming frame into the appropriate bin: + unsigned const binNumber + = ((ILP + frameBlockIndex*(fILL+1))*fNumChannels + frameWithinFrameBlock) + % fMaxInterleaveGroupSize; // the % is for sanity +#ifdef DEBUG + fprintf(stderr, "AMRDeinterleavingBuffer::deliverIncomingFrame(): frameIndex %d (%d,%d) put in bank %d, bin %d (%d): size %d, header 0x%02x, presentationTime %lu.%06ld\n", frameIndex, frameBlockIndex, frameWithinFrameBlock, fIncomingBankId, binNumber, fMaxInterleaveGroupSize, frameSize, frameHeader, presentationTime.tv_sec, presentationTime.tv_usec); +#endif + FrameDescriptor& inBin = fFrames[fIncomingBankId][binNumber]; + unsigned char* curBuffer = inBin.frameData; + inBin.frameData = fInputBuffer; + inBin.frameSize = frameSize; + inBin.frameHeader = frameHeader; + inBin.presentationTime = presentationTime; + inBin.fIsSynchronized = ((RTPSource*)source)->RTPSource::hasBeenSynchronizedUsingRTCP(); + + if (curBuffer == NULL) curBuffer = createNewBuffer(); + fInputBuffer = curBuffer; + + if (binNumber >= fIncomingBinMax) { + fIncomingBinMax = binNumber + 1; + } +} + 
+Boolean AMRDeinterleavingBuffer +::retrieveFrame(unsigned char* to, unsigned maxSize, + unsigned& resultFrameSize, unsigned& resultNumTruncatedBytes, + u_int8_t& resultFrameHeader, + struct timeval& resultPresentationTime, + Boolean& resultIsSynchronized) { + + if (fNextOutgoingBin >= fOutgoingBinMax) return False; // none left + + FrameDescriptor& outBin = fFrames[fIncomingBankId^1][fNextOutgoingBin]; + unsigned char* fromPtr = outBin.frameData; + unsigned char fromSize = outBin.frameSize; + outBin.frameSize = 0; // for the next time this bin is used + resultIsSynchronized = False; // by default; can be changed by: + if (outBin.fIsSynchronized) { + // Don't consider the outgoing frame to be synchronized until we've received at least a complete interleave cycle of + // synchronized frames. This ensures that the receiver will be getting all synchronized frames from now on. + if (++fNumSuccessiveSyncedFrames > fILL) { + resultIsSynchronized = True; + fNumSuccessiveSyncedFrames = fILL+1; // prevents overflow + } + } else { + fNumSuccessiveSyncedFrames = 0; + } + + // Check whether this frame is missing; if so, return a FT_NO_DATA frame: + if (fromSize == 0) { + resultFrameHeader = FT_NO_DATA<<3; + + // Compute this erasure frame's presentation time via extrapolation: + resultPresentationTime = fLastRetrievedPresentationTime; + resultPresentationTime.tv_usec += uSecsPerFrame; + if (resultPresentationTime.tv_usec >= 1000000) { + ++resultPresentationTime.tv_sec; + resultPresentationTime.tv_usec -= 1000000; + } + } else { + // Normal case - a frame exists: + resultFrameHeader = outBin.frameHeader; + resultPresentationTime = outBin.presentationTime; + } + + fLastRetrievedPresentationTime = resultPresentationTime; + + if (fromSize > maxSize) { + resultNumTruncatedBytes = fromSize - maxSize; + resultFrameSize = maxSize; + } else { + resultNumTruncatedBytes = 0; + resultFrameSize = fromSize; + } + memmove(to, fromPtr, resultFrameSize); +#ifdef DEBUG + fprintf(stderr, 
"AMRDeinterleavingBuffer::retrieveFrame(): from bank %d, bin %d: size %d, header 0x%02x, presentationTime %lu.%06ld\n", fIncomingBankId^1, fNextOutgoingBin, resultFrameSize, resultFrameHeader, resultPresentationTime.tv_sec, resultPresentationTime.tv_usec); +#endif + + ++fNextOutgoingBin; + return True; +} + +unsigned char* AMRDeinterleavingBuffer::createNewBuffer() { + return new unsigned char[inputBufferSize()]; +} + +AMRDeinterleavingBuffer::FrameDescriptor::FrameDescriptor() + : frameSize(0), frameData(NULL) { +} + +AMRDeinterleavingBuffer::FrameDescriptor::~FrameDescriptor() { + delete[] frameData; +} + +// Unpack bandwidth-aligned data to octet-aligned: +static unsigned short const frameBitsFromFT[16] = { + 95, 103, 118, 134, + 148, 159, 204, 244, + 39, 0, 0, 0, + 0, 0, 0, 0 +}; +static unsigned short const frameBitsFromFTWideband[16] = { + 132, 177, 253, 285, + 317, 365, 397, 461, + 477, 40, 0, 0, + 0, 0, 0, 0 +}; + +static void unpackBandwidthEfficientData(BufferedPacket* packet, + Boolean isWideband) { +#ifdef DEBUG + fprintf(stderr, "Unpacking 'bandwidth-efficient' payload (%d bytes):\n", packet->dataSize()); + for (unsigned j = 0; j < packet->dataSize(); ++j) { + fprintf(stderr, "%02x:", (packet->data())[j]); + } + fprintf(stderr, "\n"); +#endif + BitVector fromBV(packet->data(), 0, 8*packet->dataSize()); + + unsigned const toBufferSize = 2*packet->dataSize(); // conservatively large + unsigned char* toBuffer = new unsigned char[toBufferSize]; + unsigned toCount = 0; + + // Begin with the payload header: + unsigned CMR = fromBV.getBits(4); + toBuffer[toCount++] = CMR << 4; + + // Then, run through and unpack the TOC entries: + while (1) { + unsigned toc = fromBV.getBits(6); + toBuffer[toCount++] = toc << 2; + + if ((toc&0x20) == 0) break; // the F bit is 0 + } + + // Then, using the TOC data, unpack each frame payload: + unsigned const tocSize = toCount - 1; + for (unsigned i = 1; i <= tocSize; ++i) { + unsigned char tocByte = toBuffer[i]; + unsigned char 
const FT = (tocByte&0x78) >> 3; + unsigned short frameSizeBits + = isWideband ? frameBitsFromFTWideband[FT] : frameBitsFromFT[FT]; + unsigned short frameSizeBytes = (frameSizeBits+7)/8; + + shiftBits(&toBuffer[toCount], 0, // to + packet->data(), fromBV.curBitIndex(), // from + frameSizeBits // num bits + ); +#ifdef DEBUG + if (frameSizeBits > fromBV.numBitsRemaining()) { + fprintf(stderr, "\tWarning: Unpacking frame %d of %d: want %d bits, but only %d are available!\n", i, tocSize, frameSizeBits, fromBV.numBitsRemaining()); + } +#endif + fromBV.skipBits(frameSizeBits); + toCount += frameSizeBytes; + } + +#ifdef DEBUG + if (fromBV.numBitsRemaining() > 7) { + fprintf(stderr, "\tWarning: %d bits remain unused!\n", fromBV.numBitsRemaining()); + } +#endif + + // Finally, replace the current packet data with the unpacked data: + packet->removePadding(packet->dataSize()); // throws away current packet data + packet->appendData(toBuffer, toCount); + delete[] toBuffer; +} diff --git a/AnyCore/lib_rtsp/liveMedia/AMRAudioSource.cpp b/AnyCore/lib_rtsp/liveMedia/AMRAudioSource.cpp new file mode 100644 index 0000000..8f4d978 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/AMRAudioSource.cpp @@ -0,0 +1,38 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A source object for AMR audio sources +// Implementation + +#include "AMRAudioSource.hh" + +AMRAudioSource::AMRAudioSource(UsageEnvironment& env, + Boolean isWideband, unsigned numChannels) + : FramedSource(env), + fIsWideband(isWideband), fNumChannels(numChannels), fLastFrameHeader(0) { +} + +AMRAudioSource::~AMRAudioSource() { +} + +char const* AMRAudioSource::MIMEtype() const { + return "audio/AMR"; +} + +Boolean AMRAudioSource::isAMRAudioSource() const { + return True; +} diff --git a/AnyCore/lib_rtsp/liveMedia/AVIFileSink.cpp b/AnyCore/lib_rtsp/liveMedia/AVIFileSink.cpp new file mode 100644 index 0000000..34d061b --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/AVIFileSink.cpp @@ -0,0 +1,784 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A sink that generates an AVI file from a composite media session +// Implementation + +#include "AVIFileSink.hh" +#include "InputFile.hh" +#include "OutputFile.hh" +#include "GroupsockHelper.hh" + +#define fourChar(x,y,z,w) ( ((w)<<24)|((z)<<16)|((y)<<8)|(x) )/*little-endian*/ + +////////// AVISubsessionIOState /////////// +// A structure used to represent the I/O state of each input 'subsession': + +class SubsessionBuffer { +public: + SubsessionBuffer(unsigned bufferSize) + : fBufferSize(bufferSize) { + reset(); + fData = new unsigned char[bufferSize]; + } + virtual ~SubsessionBuffer() { delete[] fData; } + void reset() { fBytesInUse = 0; } + void addBytes(unsigned numBytes) { fBytesInUse += numBytes; } + + unsigned char* dataStart() { return &fData[0]; } + unsigned char* dataEnd() { return &fData[fBytesInUse]; } + unsigned bytesInUse() const { return fBytesInUse; } + unsigned bytesAvailable() const { return fBufferSize - fBytesInUse; } + + void setPresentationTime(struct timeval const& presentationTime) { + fPresentationTime = presentationTime; + } + struct timeval const& presentationTime() const {return fPresentationTime;} + +private: + unsigned fBufferSize; + struct timeval fPresentationTime; + unsigned char* fData; + unsigned fBytesInUse; +}; + +class AVISubsessionIOState { +public: + AVISubsessionIOState(AVIFileSink& sink, MediaSubsession& subsession); + virtual ~AVISubsessionIOState(); + + void setAVIstate(unsigned subsessionIndex); + void setFinalAVIstate(); + + void afterGettingFrame(unsigned packetDataSize, + struct timeval presentationTime); + void onSourceClosure(); + + UsageEnvironment& envir() const { return fOurSink.envir(); } + +public: + SubsessionBuffer *fBuffer, *fPrevBuffer; + AVIFileSink& fOurSink; + MediaSubsession& fOurSubsession; + + unsigned short fLastPacketRTPSeqNum; + Boolean fOurSourceIsActive; + struct timeval fPrevPresentationTime; + unsigned fMaxBytesPerSecond; + Boolean fIsVideo, fIsAudio, fIsByteSwappedAudio; + unsigned 
fAVISubsessionTag; + unsigned fAVICodecHandlerType; + unsigned fAVISamplingFrequency; // for audio + u_int16_t fWAVCodecTag; // for audio + unsigned fAVIScale; + unsigned fAVIRate; + unsigned fAVISize; + unsigned fNumFrames; + unsigned fSTRHFrameCountPosition; + +private: + void useFrame(SubsessionBuffer& buffer); +}; + + +///////// AVIIndexRecord definition & implementation ////////// + +class AVIIndexRecord { +public: + AVIIndexRecord(unsigned chunkId, unsigned flags, unsigned offset, unsigned size) + : fNext(NULL), fChunkId(chunkId), fFlags(flags), fOffset(offset), fSize(size) { + } + + AVIIndexRecord*& next() { return fNext; } + unsigned chunkId() const { return fChunkId; } + unsigned flags() const { return fFlags; } + unsigned offset() const { return fOffset; } + unsigned size() const { return fSize; } + +private: + AVIIndexRecord* fNext; + unsigned fChunkId; + unsigned fFlags; + unsigned fOffset; + unsigned fSize; +}; + + +////////// AVIFileSink implementation ////////// + +AVIFileSink::AVIFileSink(UsageEnvironment& env, + MediaSession& inputSession, + char const* outputFileName, + unsigned bufferSize, + unsigned short movieWidth, unsigned short movieHeight, + unsigned movieFPS, Boolean packetLossCompensate) + : Medium(env), fInputSession(inputSession), + fIndexRecordsHead(NULL), fIndexRecordsTail(NULL), fNumIndexRecords(0), + fBufferSize(bufferSize), fPacketLossCompensate(packetLossCompensate), + fAreCurrentlyBeingPlayed(False), fNumSubsessions(0), fNumBytesWritten(0), + fHaveCompletedOutputFile(False), + fMovieWidth(movieWidth), fMovieHeight(movieHeight), fMovieFPS(movieFPS) { + fOutFid = OpenOutputFile(env, outputFileName); + if (fOutFid == NULL) return; + + // Set up I/O state for each input subsession: + MediaSubsessionIterator iter(fInputSession); + MediaSubsession* subsession; + while ((subsession = iter.next()) != NULL) { + // Ignore subsessions without a data source: + FramedSource* subsessionSource = subsession->readSource(); + if (subsessionSource 
== NULL) continue; + + // If "subsession's" SDP description specified screen dimension + // or frame rate parameters, then use these. + if (subsession->videoWidth() != 0) { + fMovieWidth = subsession->videoWidth(); + } + if (subsession->videoHeight() != 0) { + fMovieHeight = subsession->videoHeight(); + } + if (subsession->videoFPS() != 0) { + fMovieFPS = subsession->videoFPS(); + } + + AVISubsessionIOState* ioState + = new AVISubsessionIOState(*this, *subsession); + subsession->miscPtr = (void*)ioState; + + // Also set a 'BYE' handler for this subsession's RTCP instance: + if (subsession->rtcpInstance() != NULL) { + subsession->rtcpInstance()->setByeHandler(onRTCPBye, ioState); + } + + ++fNumSubsessions; + } + + // Begin by writing an AVI header: + addFileHeader_AVI(); +} + +AVIFileSink::~AVIFileSink() { + completeOutputFile(); + + // Then, stop streaming and delete each active "AVISubsessionIOState": + MediaSubsessionIterator iter(fInputSession); + MediaSubsession* subsession; + while ((subsession = iter.next()) != NULL) { + if (subsession->readSource() != NULL) subsession->readSource()->stopGettingFrames(); + + AVISubsessionIOState* ioState + = (AVISubsessionIOState*)(subsession->miscPtr); + if (ioState == NULL) continue; + + delete ioState; + } + + // Then, delete the index records: + AVIIndexRecord* cur = fIndexRecordsHead; + while (cur != NULL) { + AVIIndexRecord* next = cur->next(); + delete cur; + cur = next; + } + + // Finally, close our output file: + CloseOutputFile(fOutFid); +} + +AVIFileSink* AVIFileSink +::createNew(UsageEnvironment& env, MediaSession& inputSession, + char const* outputFileName, + unsigned bufferSize, + unsigned short movieWidth, unsigned short movieHeight, + unsigned movieFPS, Boolean packetLossCompensate) { + AVIFileSink* newSink = + new AVIFileSink(env, inputSession, outputFileName, bufferSize, + movieWidth, movieHeight, movieFPS, packetLossCompensate); + if (newSink == NULL || newSink->fOutFid == NULL) { + Medium::close(newSink); 
+ return NULL; + } + + return newSink; +} + +Boolean AVIFileSink::startPlaying(afterPlayingFunc* afterFunc, + void* afterClientData) { + // Make sure we're not already being played: + if (fAreCurrentlyBeingPlayed) { + envir().setResultMsg("This sink has already been played"); + return False; + } + + fAreCurrentlyBeingPlayed = True; + fAfterFunc = afterFunc; + fAfterClientData = afterClientData; + + return continuePlaying(); +} + +Boolean AVIFileSink::continuePlaying() { + // Run through each of our input session's 'subsessions', + // asking for a frame from each one: + Boolean haveActiveSubsessions = False; + MediaSubsessionIterator iter(fInputSession); + MediaSubsession* subsession; + while ((subsession = iter.next()) != NULL) { + FramedSource* subsessionSource = subsession->readSource(); + if (subsessionSource == NULL) continue; + + if (subsessionSource->isCurrentlyAwaitingData()) continue; + + AVISubsessionIOState* ioState + = (AVISubsessionIOState*)(subsession->miscPtr); + if (ioState == NULL) continue; + + haveActiveSubsessions = True; + unsigned char* toPtr = ioState->fBuffer->dataEnd(); + unsigned toSize = ioState->fBuffer->bytesAvailable(); + subsessionSource->getNextFrame(toPtr, toSize, + afterGettingFrame, ioState, + onSourceClosure, ioState); + } + if (!haveActiveSubsessions) { + envir().setResultMsg("No subsessions are currently active"); + return False; + } + + return True; +} + +void AVIFileSink +::afterGettingFrame(void* clientData, unsigned packetDataSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned /*durationInMicroseconds*/) { + AVISubsessionIOState* ioState = (AVISubsessionIOState*)clientData; + if (numTruncatedBytes > 0) { + ioState->envir() << "AVIFileSink::afterGettingFrame(): The input frame data was too large for our buffer. " + << numTruncatedBytes + << " bytes of trailing data was dropped! 
Correct this by increasing the \"bufferSize\" parameter in the \"createNew()\" call.\n"; + } + ioState->afterGettingFrame(packetDataSize, presentationTime); +} + +void AVIFileSink::onSourceClosure(void* clientData) { + AVISubsessionIOState* ioState = (AVISubsessionIOState*)clientData; + ioState->onSourceClosure(); +} + +void AVIFileSink::onSourceClosure1() { + // Check whether *all* of the subsession sources have closed. + // If not, do nothing for now: + MediaSubsessionIterator iter(fInputSession); + MediaSubsession* subsession; + while ((subsession = iter.next()) != NULL) { + AVISubsessionIOState* ioState + = (AVISubsessionIOState*)(subsession->miscPtr); + if (ioState == NULL) continue; + + if (ioState->fOurSourceIsActive) return; // this source hasn't closed + } + + completeOutputFile(); + + // Call our specified 'after' function: + if (fAfterFunc != NULL) { + (*fAfterFunc)(fAfterClientData); + } +} + +void AVIFileSink::onRTCPBye(void* clientData) { + AVISubsessionIOState* ioState = (AVISubsessionIOState*)clientData; + + struct timeval timeNow; + gettimeofday(&timeNow, NULL); + unsigned secsDiff + = timeNow.tv_sec - ioState->fOurSink.fStartTime.tv_sec; + + MediaSubsession& subsession = ioState->fOurSubsession; + ioState->envir() << "Received RTCP \"BYE\" on \"" + << subsession.mediumName() + << "/" << subsession.codecName() + << "\" subsession (after " + << secsDiff << " seconds)\n"; + + // Handle the reception of a RTCP "BYE" as if the source had closed: + ioState->onSourceClosure(); +} + +void AVIFileSink::addIndexRecord(AVIIndexRecord* newIndexRecord) { + if (fIndexRecordsHead == NULL) { + fIndexRecordsHead = newIndexRecord; + } else { + fIndexRecordsTail->next() = newIndexRecord; + } + fIndexRecordsTail = newIndexRecord; + ++fNumIndexRecords; +} + +void AVIFileSink::completeOutputFile() { + if (fHaveCompletedOutputFile || fOutFid == NULL) return; + + // Update various AVI 'size' fields to take account of the codec data that + // we've now written to the 
file: + unsigned maxBytesPerSecond = 0; + unsigned numVideoFrames = 0; + unsigned numAudioFrames = 0; + + //// Subsession-specific fields: + MediaSubsessionIterator iter(fInputSession); + MediaSubsession* subsession; + while ((subsession = iter.next()) != NULL) { + AVISubsessionIOState* ioState + = (AVISubsessionIOState*)(subsession->miscPtr); + if (ioState == NULL) continue; + + maxBytesPerSecond += ioState->fMaxBytesPerSecond; + + setWord(ioState->fSTRHFrameCountPosition, ioState->fNumFrames); + if (ioState->fIsVideo) numVideoFrames = ioState->fNumFrames; + else if (ioState->fIsAudio) numAudioFrames = ioState->fNumFrames; + } + + //// Global fields: + add4ByteString("idx1"); + addWord(fNumIndexRecords*4*4); // the size of all of the index records, which come next: + for (AVIIndexRecord* indexRecord = fIndexRecordsHead; indexRecord != NULL; indexRecord = indexRecord->next()) { + addWord(indexRecord->chunkId()); + addWord(indexRecord->flags()); + addWord(indexRecord->offset()); + addWord(indexRecord->size()); + } + + fRIFFSizeValue += fNumBytesWritten; + setWord(fRIFFSizePosition, fRIFFSizeValue); + + setWord(fAVIHMaxBytesPerSecondPosition, maxBytesPerSecond); + setWord(fAVIHFrameCountPosition, + numVideoFrames > 0 ? numVideoFrames : numAudioFrames); + + fMoviSizeValue += fNumBytesWritten; + setWord(fMoviSizePosition, fMoviSizeValue); + + // We're done: + fHaveCompletedOutputFile = True; +} + + +////////// AVISubsessionIOState implementation /////////// + +AVISubsessionIOState::AVISubsessionIOState(AVIFileSink& sink, + MediaSubsession& subsession) + : fOurSink(sink), fOurSubsession(subsession), + fMaxBytesPerSecond(0), fIsVideo(False), fIsAudio(False), fIsByteSwappedAudio(False), fNumFrames(0) { + fBuffer = new SubsessionBuffer(fOurSink.fBufferSize); + fPrevBuffer = sink.fPacketLossCompensate + ? 
new SubsessionBuffer(fOurSink.fBufferSize) : NULL; + + FramedSource* subsessionSource = subsession.readSource(); + fOurSourceIsActive = subsessionSource != NULL; + + fPrevPresentationTime.tv_sec = 0; + fPrevPresentationTime.tv_usec = 0; +} + +AVISubsessionIOState::~AVISubsessionIOState() { + delete fBuffer; delete fPrevBuffer; +} + +void AVISubsessionIOState::setAVIstate(unsigned subsessionIndex) { + fIsVideo = strcmp(fOurSubsession.mediumName(), "video") == 0; + fIsAudio = strcmp(fOurSubsession.mediumName(), "audio") == 0; + + if (fIsVideo) { + fAVISubsessionTag + = fourChar('0'+subsessionIndex/10,'0'+subsessionIndex%10,'d','c'); + if (strcmp(fOurSubsession.codecName(), "JPEG") == 0) { + fAVICodecHandlerType = fourChar('m','j','p','g'); + } else if (strcmp(fOurSubsession.codecName(), "MP4V-ES") == 0) { + fAVICodecHandlerType = fourChar('D','I','V','X'); + } else if (strcmp(fOurSubsession.codecName(), "MPV") == 0) { + fAVICodecHandlerType = fourChar('m','p','g','1'); // what about MPEG-2? + } else if (strcmp(fOurSubsession.codecName(), "H263-1998") == 0 || + strcmp(fOurSubsession.codecName(), "H263-2000") == 0) { + fAVICodecHandlerType = fourChar('H','2','6','3'); + } else if (strcmp(fOurSubsession.codecName(), "H264") == 0) { + fAVICodecHandlerType = fourChar('H','2','6','4'); + } else { + fAVICodecHandlerType = fourChar('?','?','?','?'); + } + fAVIScale = 1; // ??? ##### + fAVIRate = fOurSink.fMovieFPS; // ??? ##### + fAVISize = fOurSink.fMovieWidth*fOurSink.fMovieHeight*3; // ??? ##### + } else if (fIsAudio) { + fIsByteSwappedAudio = False; // by default + fAVISubsessionTag + = fourChar('0'+subsessionIndex/10,'0'+subsessionIndex%10,'w','b'); + fAVICodecHandlerType = 1; // ??? 
#### + unsigned numChannels = fOurSubsession.numChannels(); + fAVISamplingFrequency = fOurSubsession.rtpTimestampFrequency(); // default + if (strcmp(fOurSubsession.codecName(), "L16") == 0) { + fIsByteSwappedAudio = True; // need to byte-swap data before writing it + fWAVCodecTag = 0x0001; + fAVIScale = fAVISize = 2*numChannels; // 2 bytes/sample + fAVIRate = fAVISize*fAVISamplingFrequency; + } else if (strcmp(fOurSubsession.codecName(), "L8") == 0) { + fWAVCodecTag = 0x0001; + fAVIScale = fAVISize = numChannels; // 1 byte/sample + fAVIRate = fAVISize*fAVISamplingFrequency; + } else if (strcmp(fOurSubsession.codecName(), "PCMA") == 0) { + fWAVCodecTag = 0x0006; + fAVIScale = fAVISize = numChannels; // 1 byte/sample + fAVIRate = fAVISize*fAVISamplingFrequency; + } else if (strcmp(fOurSubsession.codecName(), "PCMU") == 0) { + fWAVCodecTag = 0x0007; + fAVIScale = fAVISize = numChannels; // 1 byte/sample + fAVIRate = fAVISize*fAVISamplingFrequency; + } else if (strcmp(fOurSubsession.codecName(), "MPA") == 0) { + fWAVCodecTag = 0x0050; + fAVIScale = fAVISize = 1; + fAVIRate = 0; // ??? ##### + } else { + fWAVCodecTag = 0x0001; // ??? ##### + fAVIScale = fAVISize = 1; + fAVIRate = 0; // ??? ##### + } + } else { // unknown medium + fAVISubsessionTag + = fourChar('0'+subsessionIndex/10,'0'+subsessionIndex%10,'?','?'); + fAVICodecHandlerType = 0; + fAVIScale = fAVISize = 1; + fAVIRate = 0; // ??? ##### + } +} + +void AVISubsessionIOState::afterGettingFrame(unsigned packetDataSize, + struct timeval presentationTime) { + // Begin by checking whether there was a gap in the RTP stream. 
+ // If so, try to compensate for this (if desired): + unsigned short rtpSeqNum + = fOurSubsession.rtpSource()->curPacketRTPSeqNum(); + if (fOurSink.fPacketLossCompensate && fPrevBuffer->bytesInUse() > 0) { + short seqNumGap = rtpSeqNum - fLastPacketRTPSeqNum; + for (short i = 1; i < seqNumGap; ++i) { + // Insert a copy of the previous frame, to compensate for the loss: + useFrame(*fPrevBuffer); + } + } + fLastPacketRTPSeqNum = rtpSeqNum; + + // Now, continue working with the frame that we just got + if (fBuffer->bytesInUse() == 0) { + fBuffer->setPresentationTime(presentationTime); + } + fBuffer->addBytes(packetDataSize); + + useFrame(*fBuffer); + if (fOurSink.fPacketLossCompensate) { + // Save this frame, in case we need it for recovery: + SubsessionBuffer* tmp = fPrevBuffer; // assert: != NULL + fPrevBuffer = fBuffer; + fBuffer = tmp; + } + fBuffer->reset(); // for the next input + + // Now, try getting more frames: + fOurSink.continuePlaying(); +} + +void AVISubsessionIOState::useFrame(SubsessionBuffer& buffer) { + unsigned char* const frameSource = buffer.dataStart(); + unsigned const frameSize = buffer.bytesInUse(); + struct timeval const& presentationTime = buffer.presentationTime(); + if (fPrevPresentationTime.tv_usec != 0||fPrevPresentationTime.tv_sec != 0) { + int uSecondsDiff + = (presentationTime.tv_sec - fPrevPresentationTime.tv_sec)*1000000 + + (presentationTime.tv_usec - fPrevPresentationTime.tv_usec); + if (uSecondsDiff > 0) { + unsigned bytesPerSecond = (unsigned)((frameSize*1000000.0)/uSecondsDiff); + if (bytesPerSecond > fMaxBytesPerSecond) { + fMaxBytesPerSecond = bytesPerSecond; + } + } + } + fPrevPresentationTime = presentationTime; + + if (fIsByteSwappedAudio) { + // We need to swap the 16-bit audio samples from big-endian + // to little-endian order, before writing them to a file: + for (unsigned i = 0; i < frameSize; i += 2) { + unsigned char tmp = frameSource[i]; + frameSource[i] = frameSource[i+1]; + frameSource[i+1] = tmp; + } + } + + // 
Add an index record for this frame: + AVIIndexRecord* newIndexRecord + = new AVIIndexRecord(fAVISubsessionTag, // chunk id + frameSource[0] == 0x67 ? 0x10 : 0, // flags + fOurSink.fMoviSizePosition + 8 + fOurSink.fNumBytesWritten, // offset (note: 8 == size + 'movi') + frameSize + 4); // size + fOurSink.addIndexRecord(newIndexRecord); + + // Write the data into the file: + fOurSink.fNumBytesWritten += fOurSink.addWord(fAVISubsessionTag); + if (strcmp(fOurSubsession.codecName(), "H264") == 0) { + // Insert a 'start code' (0x00 0x00 0x00 0x01) in front of the frame: + fOurSink.fNumBytesWritten += fOurSink.addWord(4+frameSize); + fOurSink.fNumBytesWritten += fOurSink.addWord(fourChar(0x00, 0x00, 0x00, 0x01));//add start code + } else { + fOurSink.fNumBytesWritten += fOurSink.addWord(frameSize); + } + fwrite(frameSource, 1, frameSize, fOurSink.fOutFid); + fOurSink.fNumBytesWritten += frameSize; + // Pad to an even length: + if (frameSize%2 != 0) fOurSink.fNumBytesWritten += fOurSink.addByte(0); + + ++fNumFrames; +} + +void AVISubsessionIOState::onSourceClosure() { + fOurSourceIsActive = False; + fOurSink.onSourceClosure1(); +} + + +////////// AVI-specific implementation ////////// + +unsigned AVIFileSink::addWord(unsigned word) { + // Add "word" to the file in little-endian order: + addByte(word); addByte(word>>8); + addByte(word>>16); addByte(word>>24); + + return 4; +} + +unsigned AVIFileSink::addHalfWord(unsigned short halfWord) { + // Add "halfWord" to the file in little-endian order: + addByte((unsigned char)halfWord); addByte((unsigned char)(halfWord>>8)); + + return 2; +} + +unsigned AVIFileSink::addZeroWords(unsigned numWords) { + for (unsigned i = 0; i < numWords; ++i) { + addWord(0); + } + + return numWords*4; +} + +unsigned AVIFileSink::add4ByteString(char const* str) { + addByte(str[0]); addByte(str[1]); addByte(str[2]); + addByte(str[3] == '\0' ? 
' ' : str[3]); // e.g., for "AVI " + + return 4; +} + +void AVIFileSink::setWord(unsigned filePosn, unsigned size) { + do { + if (SeekFile64(fOutFid, filePosn, SEEK_SET) < 0) break; + addWord(size); + if (SeekFile64(fOutFid, 0, SEEK_END) < 0) break; // go back to where we were + + return; + } while (0); + + // One of the SeekFile64()s failed, probable because we're not a seekable file + envir() << "AVIFileSink::setWord(): SeekFile64 failed (err " + << envir().getErrno() << ")\n"; +} + +// Methods for writing particular file headers. Note the following macros: + +#define addFileHeader(tag,name) \ + unsigned AVIFileSink::addFileHeader_##name() { \ + add4ByteString("" #tag ""); \ + unsigned headerSizePosn = (unsigned)TellFile64(fOutFid); addWord(0); \ + add4ByteString("" #name ""); \ + unsigned ignoredSize = 8;/*don't include size of tag or size fields*/ \ + unsigned size = 12 + +#define addFileHeader1(name) \ + unsigned AVIFileSink::addFileHeader_##name() { \ + add4ByteString("" #name ""); \ + unsigned headerSizePosn = (unsigned)TellFile64(fOutFid); addWord(0); \ + unsigned ignoredSize = 8;/*don't include size of name or size fields*/ \ + unsigned size = 8 + +#define addFileHeaderEnd \ + setWord(headerSizePosn, size-ignoredSize); \ + return size; \ +} + +addFileHeader(RIFF,AVI); + size += addFileHeader_hdrl(); + size += addFileHeader_movi(); + fRIFFSizePosition = headerSizePosn; + fRIFFSizeValue = size-ignoredSize; +addFileHeaderEnd; + +addFileHeader(LIST,hdrl); + size += addFileHeader_avih(); + + // Then, add a "strl" header for each subsession (stream): + // (Make the video subsession (if any) come before the audio subsession.) 
+ unsigned subsessionCount = 0; + MediaSubsessionIterator iter(fInputSession); + MediaSubsession* subsession; + while ((subsession = iter.next()) != NULL) { + fCurrentIOState = (AVISubsessionIOState*)(subsession->miscPtr); + if (fCurrentIOState == NULL) continue; + if (strcmp(subsession->mediumName(), "video") != 0) continue; + + fCurrentIOState->setAVIstate(subsessionCount++); + size += addFileHeader_strl(); + } + iter.reset(); + while ((subsession = iter.next()) != NULL) { + fCurrentIOState = (AVISubsessionIOState*)(subsession->miscPtr); + if (fCurrentIOState == NULL) continue; + if (strcmp(subsession->mediumName(), "video") == 0) continue; + + fCurrentIOState->setAVIstate(subsessionCount++); + size += addFileHeader_strl(); + } + + // Then add another JUNK entry + ++fJunkNumber; + size += addFileHeader_JUNK(); +addFileHeaderEnd; + +#define AVIF_HASINDEX 0x00000010 // Index at end of file? +#define AVIF_MUSTUSEINDEX 0x00000020 +#define AVIF_ISINTERLEAVED 0x00000100 +#define AVIF_TRUSTCKTYPE 0x00000800 // Use CKType to find key frames? +#define AVIF_WASCAPTUREFILE 0x00010000 +#define AVIF_COPYRIGHTED 0x00020000 + +addFileHeader1(avih); + unsigned usecPerFrame = fMovieFPS == 0 ? 
0 : 1000000/fMovieFPS; + size += addWord(usecPerFrame); // dwMicroSecPerFrame + fAVIHMaxBytesPerSecondPosition = (unsigned)TellFile64(fOutFid); + size += addWord(0); // dwMaxBytesPerSec (fill in later) + size += addWord(0); // dwPaddingGranularity + size += addWord(AVIF_TRUSTCKTYPE|AVIF_HASINDEX|AVIF_ISINTERLEAVED); // dwFlags + fAVIHFrameCountPosition = (unsigned)TellFile64(fOutFid); + size += addWord(0); // dwTotalFrames (fill in later) + size += addWord(0); // dwInitialFrame + size += addWord(fNumSubsessions); // dwStreams + size += addWord(fBufferSize); // dwSuggestedBufferSize + size += addWord(fMovieWidth); // dwWidth + size += addWord(fMovieHeight); // dwHeight + size += addZeroWords(4); // dwReserved +addFileHeaderEnd; + +addFileHeader(LIST,strl); + size += addFileHeader_strh(); + size += addFileHeader_strf(); + fJunkNumber = 0; + size += addFileHeader_JUNK(); +addFileHeaderEnd; + +addFileHeader1(strh); + size += add4ByteString(fCurrentIOState->fIsVideo ? "vids" : + fCurrentIOState->fIsAudio ? 
"auds" : + "????"); // fccType + size += addWord(fCurrentIOState->fAVICodecHandlerType); // fccHandler + size += addWord(0); // dwFlags + size += addWord(0); // wPriority + wLanguage + size += addWord(0); // dwInitialFrames + size += addWord(fCurrentIOState->fAVIScale); // dwScale + size += addWord(fCurrentIOState->fAVIRate); // dwRate + size += addWord(0); // dwStart + fCurrentIOState->fSTRHFrameCountPosition = (unsigned)TellFile64(fOutFid); + size += addWord(0); // dwLength (fill in later) + size += addWord(fBufferSize); // dwSuggestedBufferSize + size += addWord((unsigned)-1); // dwQuality + size += addWord(fCurrentIOState->fAVISize); // dwSampleSize + size += addWord(0); // rcFrame (start) + if (fCurrentIOState->fIsVideo) { + size += addHalfWord(fMovieWidth); + size += addHalfWord(fMovieHeight); + } else { + size += addWord(0); + } +addFileHeaderEnd; + +addFileHeader1(strf); + if (fCurrentIOState->fIsVideo) { + // Add a BITMAPINFO header: + unsigned extraDataSize = 0; + size += addWord(10*4 + extraDataSize); // size + size += addWord(fMovieWidth); + size += addWord(fMovieHeight); + size += addHalfWord(1); // planes + size += addHalfWord(24); // bits-per-sample ##### + size += addWord(fCurrentIOState->fAVICodecHandlerType); // compr. type + size += addWord(fCurrentIOState->fAVISize); + size += addZeroWords(4); // ??? 
##### + // Later, add extra data here (if any) ##### + } else if (fCurrentIOState->fIsAudio) { + // Add a WAVFORMATEX header: + size += addHalfWord(fCurrentIOState->fWAVCodecTag); + unsigned numChannels = fCurrentIOState->fOurSubsession.numChannels(); + size += addHalfWord(numChannels); + size += addWord(fCurrentIOState->fAVISamplingFrequency); + size += addWord(fCurrentIOState->fAVIRate); // bytes per second + size += addHalfWord(fCurrentIOState->fAVISize); // block alignment + unsigned bitsPerSample = (fCurrentIOState->fAVISize*8)/numChannels; + size += addHalfWord(bitsPerSample); + if (strcmp(fCurrentIOState->fOurSubsession.codecName(), "MPA") == 0) { + // Assume MPEG layer II audio (not MP3): ##### + size += addHalfWord(22); // wav_extra_size + size += addHalfWord(2); // fwHeadLayer + size += addWord(8*fCurrentIOState->fAVIRate); // dwHeadBitrate ##### + size += addHalfWord(numChannels == 2 ? 1: 8); // fwHeadMode + size += addHalfWord(0); // fwHeadModeExt + size += addHalfWord(1); // wHeadEmphasis + size += addHalfWord(16); // fwHeadFlags + size += addWord(0); // dwPTSLow + size += addWord(0); // dwPTSHigh + } + } +addFileHeaderEnd; + +#define AVI_MASTER_INDEX_SIZE 256 + +addFileHeader1(JUNK); + if (fJunkNumber == 0) { + size += addHalfWord(4); // wLongsPerEntry + size += addHalfWord(0); // bIndexSubType + bIndexType + size += addWord(0); // nEntriesInUse ##### + size += addWord(fCurrentIOState->fAVISubsessionTag); // dwChunkId + size += addZeroWords(2); // dwReserved + size += addZeroWords(AVI_MASTER_INDEX_SIZE*4); + } else { + size += add4ByteString("odml"); + size += add4ByteString("dmlh"); + unsigned wtfCount = 248; + size += addWord(wtfCount); // ??? 
##### + size += addZeroWords(wtfCount/4); + } +addFileHeaderEnd; + +addFileHeader(LIST,movi); + fMoviSizePosition = headerSizePosn; + fMoviSizeValue = size-ignoredSize; +addFileHeaderEnd; diff --git a/AnyCore/lib_rtsp/liveMedia/AudioInputDevice.cpp b/AnyCore/lib_rtsp/liveMedia/AudioInputDevice.cpp new file mode 100644 index 0000000..2dfdea2 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/AudioInputDevice.cpp @@ -0,0 +1,45 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// Copyright (c) 2001-2003 Live Networks, Inc. All rights reserved. 
+// Generic audio input device (such as a microphone, or an input sound card) +// Implementation + +#include + +AudioInputDevice +::AudioInputDevice(UsageEnvironment& env, unsigned char bitsPerSample, + unsigned char numChannels, + unsigned samplingFrequency, unsigned granularityInMS) + : FramedSource(env), fBitsPerSample(bitsPerSample), + fNumChannels(numChannels), fSamplingFrequency(samplingFrequency), + fGranularityInMS(granularityInMS) { +} + +AudioInputDevice::~AudioInputDevice() { +} + +char** AudioInputDevice::allowedDeviceNames = NULL; + +////////// AudioPortNames implementation ////////// + +AudioPortNames::AudioPortNames() +: numPorts(0), portName(NULL) { +} + +AudioPortNames::~AudioPortNames() { + for (unsigned i = 0; i < numPorts; ++i) delete portName[i]; + delete portName; +} diff --git a/AnyCore/lib_rtsp/liveMedia/AudioRTPSink.cpp b/AnyCore/lib_rtsp/liveMedia/AudioRTPSink.cpp new file mode 100644 index 0000000..d053dd1 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/AudioRTPSink.cpp @@ -0,0 +1,37 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A generic RTP sink for audio codecs (abstract base class) +// Implementation + +#include "AudioRTPSink.hh" + +AudioRTPSink::AudioRTPSink(UsageEnvironment& env, + Groupsock* rtpgs, unsigned char rtpPayloadType, + unsigned rtpTimestampFrequency, + char const* rtpPayloadFormatName, + unsigned numChannels) + : MultiFramedRTPSink(env, rtpgs, rtpPayloadType, rtpTimestampFrequency, + rtpPayloadFormatName, numChannels) { +} + +AudioRTPSink::~AudioRTPSink() { +} + +char const* AudioRTPSink::sdpMediaType() const { + return "audio"; +} diff --git a/AnyCore/lib_rtsp/liveMedia/Base64.cpp b/AnyCore/lib_rtsp/liveMedia/Base64.cpp new file mode 100644 index 0000000..866f9d9 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/Base64.cpp @@ -0,0 +1,122 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Base64 encoding and decoding +// implementation + +#include "Base64.hh" +#include +#include + +static char base64DecodeTable[256]; + +static void initBase64DecodeTable() { + int i; + for (i = 0; i < 256; ++i) base64DecodeTable[i] = (char)0x80; + // default value: invalid + + for (i = 'A'; i <= 'Z'; ++i) base64DecodeTable[i] = 0 + (i - 'A'); + for (i = 'a'; i <= 'z'; ++i) base64DecodeTable[i] = 26 + (i - 'a'); + for (i = '0'; i <= '9'; ++i) base64DecodeTable[i] = 52 + (i - '0'); + base64DecodeTable[(unsigned char)'+'] = 62; + base64DecodeTable[(unsigned char)'/'] = 63; + base64DecodeTable[(unsigned char)'='] = 0; +} + +unsigned char* base64Decode(char const* in, unsigned& resultSize, + Boolean trimTrailingZeros) { + if (in == NULL) return NULL; // sanity check + return base64Decode(in, strlen(in), resultSize, trimTrailingZeros); +} + +unsigned char* base64Decode(char const* in, unsigned inSize, + unsigned& resultSize, + Boolean trimTrailingZeros) { + static Boolean haveInitializedBase64DecodeTable = False; + if (!haveInitializedBase64DecodeTable) { + initBase64DecodeTable(); + haveInitializedBase64DecodeTable = True; + } + + unsigned char* out = (unsigned char*)strDupSize(in); // ensures we have enough space + int k = 0; + int paddingCount = 0; + int const jMax = inSize - 3; + // in case "inSize" is not a multiple of 4 (although it should be) + for (int j = 0; j < jMax; j += 4) { + char inTmp[4], outTmp[4]; + for (int i = 0; i < 4; ++i) { + inTmp[i] = in[i+j]; + if (inTmp[i] == '=') ++paddingCount; + outTmp[i] = base64DecodeTable[(unsigned char)inTmp[i]]; + if ((outTmp[i]&0x80) != 0) outTmp[i] = 0; // this happens only if there was an invalid character; pretend that it was 'A' + } + + out[k++] = (outTmp[0]<<2) | (outTmp[1]>>4); + out[k++] = (outTmp[1]<<4) | (outTmp[2]>>2); + out[k++] = (outTmp[2]<<6) | outTmp[3]; + } + + if (trimTrailingZeros) { + while (paddingCount > 0 && k > 0 && out[k-1] == '\0') { --k; --paddingCount; } + } + resultSize = k; + unsigned 
char* result = new unsigned char[resultSize]; + memmove(result, out, resultSize); + delete[] out; + + return result; +} + +static const char base64Char[] = +"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; + +char* base64Encode(char const* origSigned, unsigned origLength) { + unsigned char const* orig = (unsigned char const*)origSigned; // in case any input bytes have the MSB set + if (orig == NULL) return NULL; + + unsigned const numOrig24BitValues = origLength/3; + Boolean havePadding = origLength > numOrig24BitValues*3; + Boolean havePadding2 = origLength == numOrig24BitValues*3 + 2; + unsigned const numResultBytes = 4*(numOrig24BitValues + havePadding); + char* result = new char[numResultBytes+1]; // allow for trailing '\0' + + // Map each full group of 3 input bytes into 4 output base-64 characters: + unsigned i; + for (i = 0; i < numOrig24BitValues; ++i) { + result[4*i+0] = base64Char[(orig[3*i]>>2)&0x3F]; + result[4*i+1] = base64Char[(((orig[3*i]&0x3)<<4) | (orig[3*i+1]>>4))&0x3F]; + result[4*i+2] = base64Char[((orig[3*i+1]<<2) | (orig[3*i+2]>>6))&0x3F]; + result[4*i+3] = base64Char[orig[3*i+2]&0x3F]; + } + + // Now, take padding into account. 
(Note: i == numOrig24BitValues) + if (havePadding) { + result[4*i+0] = base64Char[(orig[3*i]>>2)&0x3F]; + if (havePadding2) { + result[4*i+1] = base64Char[(((orig[3*i]&0x3)<<4) | (orig[3*i+1]>>4))&0x3F]; + result[4*i+2] = base64Char[(orig[3*i+1]<<2)&0x3F]; + } else { + result[4*i+1] = base64Char[((orig[3*i]&0x3)<<4)&0x3F]; + result[4*i+2] = '='; + } + result[4*i+3] = '='; + } + + result[numResultBytes] = '\0'; + return result; +} diff --git a/AnyCore/lib_rtsp/liveMedia/BasicUDPSink.cpp b/AnyCore/lib_rtsp/liveMedia/BasicUDPSink.cpp new file mode 100644 index 0000000..473801d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/BasicUDPSink.cpp @@ -0,0 +1,100 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A simple UDP sink (i.e., without RTP or other headers added); one frame per packet +// Implementation + +#include "BasicUDPSink.hh" +#include + +BasicUDPSink* BasicUDPSink::createNew(UsageEnvironment& env, Groupsock* gs, + unsigned maxPayloadSize) { + return new BasicUDPSink(env, gs, maxPayloadSize); +} + +BasicUDPSink::BasicUDPSink(UsageEnvironment& env, Groupsock* gs, + unsigned maxPayloadSize) + : MediaSink(env), + fGS(gs), fMaxPayloadSize(maxPayloadSize) { + fOutputBuffer = new unsigned char[fMaxPayloadSize]; +} + +BasicUDPSink::~BasicUDPSink() { + delete[] fOutputBuffer; +} + +Boolean BasicUDPSink::continuePlaying() { + // Record the fact that we're starting to play now: + gettimeofday(&fNextSendTime, NULL); + + // Arrange to get and send the first payload. + // (This will also schedule any future sends.) + continuePlaying1(); + return True; +} + +void BasicUDPSink::continuePlaying1() { + if (fSource != NULL) { + fSource->getNextFrame(fOutputBuffer, fMaxPayloadSize, + afterGettingFrame, this, + onSourceClosure, this); + } +} + +void BasicUDPSink::afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval /*presentationTime*/, + unsigned durationInMicroseconds) { + BasicUDPSink* sink = (BasicUDPSink*)clientData; + sink->afterGettingFrame1(frameSize, numTruncatedBytes, durationInMicroseconds); +} + +void BasicUDPSink::afterGettingFrame1(unsigned frameSize, unsigned numTruncatedBytes, + unsigned durationInMicroseconds) { + if (numTruncatedBytes > 0) { + envir() << "BasicUDPSink::afterGettingFrame1(): The input frame data was too large for our spcified maximum payload size (" + << fMaxPayloadSize << "). 
" + << numTruncatedBytes << " bytes of trailing data was dropped!\n"; + } + + // Send the packet: + fGS->output(envir(), fGS->ttl(), fOutputBuffer, frameSize); + + // Figure out the time at which the next packet should be sent, based + // on the duration of the payload that we just read: + fNextSendTime.tv_usec += durationInMicroseconds; + fNextSendTime.tv_sec += fNextSendTime.tv_usec/1000000; + fNextSendTime.tv_usec %= 1000000; + + struct timeval timeNow; + gettimeofday(&timeNow, NULL); + int secsDiff = fNextSendTime.tv_sec - timeNow.tv_sec; + int64_t uSecondsToGo = secsDiff*1000000 + (fNextSendTime.tv_usec - timeNow.tv_usec); + if (uSecondsToGo < 0 || secsDiff < 0) { // sanity check: Make sure that the time-to-delay is non-negative: + uSecondsToGo = 0; + } + + // Delay this amount of time: + nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecondsToGo, + (TaskFunc*)sendNext, this); +} + +// The following is called after each delay between packet sends: +void BasicUDPSink::sendNext(void* firstArg) { + BasicUDPSink* sink = (BasicUDPSink*)firstArg; + sink->continuePlaying1(); +} diff --git a/AnyCore/lib_rtsp/liveMedia/BasicUDPSource.cpp b/AnyCore/lib_rtsp/liveMedia/BasicUDPSource.cpp new file mode 100644 index 0000000..85d9a8c --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/BasicUDPSource.cpp @@ -0,0 +1,73 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A simple UDP source, where every UDP payload is a complete frame +// Implementation + +#include "BasicUDPSource.hh" +#include + +BasicUDPSource* BasicUDPSource::createNew(UsageEnvironment& env, + Groupsock* inputGS) { + return new BasicUDPSource(env, inputGS); +} + +BasicUDPSource::BasicUDPSource(UsageEnvironment& env, Groupsock* inputGS) + : FramedSource(env), fInputGS(inputGS), fHaveStartedReading(False) { + // Try to use a large receive buffer (in the OS): + increaseReceiveBufferTo(env, inputGS->socketNum(), 50*1024); + + // Make the socket non-blocking, even though it will be read from only asynchronously, when packets arrive. + // The reason for this is that, in some OSs, reads on a blocking socket can (allegedly) sometimes block, + // even if the socket was previously reported (e.g., by "select()") as having data available. + // (This can supposedly happen if the UDP checksum fails, for example.) 
+ makeSocketNonBlocking(fInputGS->socketNum()); +} + +BasicUDPSource::~BasicUDPSource(){ + envir().taskScheduler().turnOffBackgroundReadHandling(fInputGS->socketNum()); +} + +void BasicUDPSource::doGetNextFrame() { + if (!fHaveStartedReading) { + // Await incoming packets: + envir().taskScheduler().turnOnBackgroundReadHandling(fInputGS->socketNum(), + (TaskScheduler::BackgroundHandlerProc*)&incomingPacketHandler, this); + fHaveStartedReading = True; + } +} + +void BasicUDPSource::doStopGettingFrames() { + envir().taskScheduler().turnOffBackgroundReadHandling(fInputGS->socketNum()); + fHaveStartedReading = False; +} + + +void BasicUDPSource::incomingPacketHandler(BasicUDPSource* source, int /*mask*/){ + source->incomingPacketHandler1(); +} + +void BasicUDPSource::incomingPacketHandler1() { + if (!isCurrentlyAwaitingData()) return; // we're not ready for the data yet + + // Read the packet into our desired destination: + struct sockaddr_in fromAddress; + if (!fInputGS->handleRead(fTo, fMaxSize, fFrameSize, fromAddress)) return; + + // Tell our client that we have new data: + afterGetting(this); // we're preceded by a net read; no infinite recursion +} diff --git a/AnyCore/lib_rtsp/liveMedia/BitVector.cpp b/AnyCore/lib_rtsp/liveMedia/BitVector.cpp new file mode 100644 index 0000000..691c4f1 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/BitVector.cpp @@ -0,0 +1,174 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Bit Vector data structure +// Implementation + +#include "BitVector.hh" + +BitVector::BitVector(unsigned char* baseBytePtr, + unsigned baseBitOffset, + unsigned totNumBits) { + setup(baseBytePtr, baseBitOffset, totNumBits); +} + +void BitVector::setup(unsigned char* baseBytePtr, + unsigned baseBitOffset, + unsigned totNumBits) { + fBaseBytePtr = baseBytePtr; + fBaseBitOffset = baseBitOffset; + fTotNumBits = totNumBits; + fCurBitIndex = 0; +} + +static unsigned char const singleBitMask[8] + = {0x80, 0x40, 0x20, 0x10, 0x08, 0x04, 0x02, 0x01}; + +#define MAX_LENGTH 32 + +void BitVector::putBits(unsigned from, unsigned numBits) { + if (numBits == 0) return; + + unsigned char tmpBuf[4]; + unsigned overflowingBits = 0; + + if (numBits > MAX_LENGTH) { + numBits = MAX_LENGTH; + } + + if (numBits > fTotNumBits - fCurBitIndex) { + overflowingBits = numBits - (fTotNumBits - fCurBitIndex); + } + + tmpBuf[0] = (unsigned char)(from>>24); + tmpBuf[1] = (unsigned char)(from>>16); + tmpBuf[2] = (unsigned char)(from>>8); + tmpBuf[3] = (unsigned char)from; + + shiftBits(fBaseBytePtr, fBaseBitOffset + fCurBitIndex, /* to */ + tmpBuf, MAX_LENGTH - numBits, /* from */ + numBits - overflowingBits /* num bits */); + fCurBitIndex += numBits - overflowingBits; +} + +void BitVector::put1Bit(unsigned bit) { + // The following is equivalent to "putBits(..., 1)", except faster: + if (fCurBitIndex >= fTotNumBits) { /* overflow */ + return; + } else { + unsigned totBitOffset = fBaseBitOffset + fCurBitIndex++; + unsigned char mask = singleBitMask[totBitOffset%8]; + if (bit) { + fBaseBytePtr[totBitOffset/8] |= mask; + } else { + fBaseBytePtr[totBitOffset/8] &=~ mask; + } + } +} + 
+unsigned BitVector::getBits(unsigned numBits) { + if (numBits == 0) return 0; + + unsigned char tmpBuf[4]; + unsigned overflowingBits = 0; + + if (numBits > MAX_LENGTH) { + numBits = MAX_LENGTH; + } + + if (numBits > fTotNumBits - fCurBitIndex) { + overflowingBits = numBits - (fTotNumBits - fCurBitIndex); + } + + shiftBits(tmpBuf, 0, /* to */ + fBaseBytePtr, fBaseBitOffset + fCurBitIndex, /* from */ + numBits - overflowingBits /* num bits */); + fCurBitIndex += numBits - overflowingBits; + + unsigned result + = (tmpBuf[0]<<24) | (tmpBuf[1]<<16) | (tmpBuf[2]<<8) | tmpBuf[3]; + result >>= (MAX_LENGTH - numBits); // move into low-order part of word + result &= (0xFFFFFFFF << overflowingBits); // so any overflow bits are 0 + return result; +} + +unsigned BitVector::get1Bit() { + // The following is equivalent to "getBits(1)", except faster: + + if (fCurBitIndex >= fTotNumBits) { /* overflow */ + return 0; + } else { + unsigned totBitOffset = fBaseBitOffset + fCurBitIndex++; + unsigned char curFromByte = fBaseBytePtr[totBitOffset/8]; + unsigned result = (curFromByte >> (7-(totBitOffset%8))) & 0x01; + return result; + } +} + +void BitVector::skipBits(unsigned numBits) { + if (numBits > fTotNumBits - fCurBitIndex) { /* overflow */ + fCurBitIndex = fTotNumBits; + } else { + fCurBitIndex += numBits; + } +} + +unsigned BitVector::get_expGolomb() { + unsigned numLeadingZeroBits = 0; + unsigned codeStart = 1; + + while (get1Bit() == 0 && fCurBitIndex < fTotNumBits) { + ++numLeadingZeroBits; + codeStart *= 2; + } + + return codeStart - 1 + getBits(numLeadingZeroBits); +} + + +void shiftBits(unsigned char* toBasePtr, unsigned toBitOffset, + unsigned char const* fromBasePtr, unsigned fromBitOffset, + unsigned numBits) { + if (numBits == 0) return; + + /* Note that from and to may overlap, if from>to */ + unsigned char const* fromBytePtr = fromBasePtr + fromBitOffset/8; + unsigned fromBitRem = fromBitOffset%8; + unsigned char* toBytePtr = toBasePtr + toBitOffset/8; + unsigned 
toBitRem = toBitOffset%8; + + while (numBits-- > 0) { + unsigned char fromBitMask = singleBitMask[fromBitRem]; + unsigned char fromBit = (*fromBytePtr)&fromBitMask; + unsigned char toBitMask = singleBitMask[toBitRem]; + + if (fromBit != 0) { + *toBytePtr |= toBitMask; + } else { + *toBytePtr &=~ toBitMask; + } + + if (++fromBitRem == 8) { + ++fromBytePtr; + fromBitRem = 0; + } + if (++toBitRem == 8) { + ++toBytePtr; + toBitRem = 0; + } + } +} diff --git a/AnyCore/lib_rtsp/liveMedia/ByteStreamFileSource.cpp b/AnyCore/lib_rtsp/liveMedia/ByteStreamFileSource.cpp new file mode 100644 index 0000000..78cf60b --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/ByteStreamFileSource.cpp @@ -0,0 +1,184 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A file source that is a plain byte stream (rather than frames) +// Implementation + +#include "ByteStreamFileSource.hh" +#include "InputFile.hh" +#include "GroupsockHelper.hh" + +////////// ByteStreamFileSource ////////// + +ByteStreamFileSource* +ByteStreamFileSource::createNew(UsageEnvironment& env, char const* fileName, + unsigned preferredFrameSize, + unsigned playTimePerFrame) { + FILE* fid = OpenInputFile(env, fileName); + if (fid == NULL) return NULL; + + ByteStreamFileSource* newSource + = new ByteStreamFileSource(env, fid, preferredFrameSize, playTimePerFrame); + newSource->fFileSize = GetFileSize(fileName, fid); + + return newSource; +} + +ByteStreamFileSource* +ByteStreamFileSource::createNew(UsageEnvironment& env, FILE* fid, + unsigned preferredFrameSize, + unsigned playTimePerFrame) { + if (fid == NULL) return NULL; + + ByteStreamFileSource* newSource = new ByteStreamFileSource(env, fid, preferredFrameSize, playTimePerFrame); + newSource->fFileSize = GetFileSize(NULL, fid); + + return newSource; +} + +void ByteStreamFileSource::seekToByteAbsolute(u_int64_t byteNumber, u_int64_t numBytesToStream) { + SeekFile64(fFid, (int64_t)byteNumber, SEEK_SET); + + fNumBytesToStream = numBytesToStream; + fLimitNumBytesToStream = fNumBytesToStream > 0; +} + +void ByteStreamFileSource::seekToByteRelative(int64_t offset, u_int64_t numBytesToStream) { + SeekFile64(fFid, offset, SEEK_CUR); + + fNumBytesToStream = numBytesToStream; + fLimitNumBytesToStream = fNumBytesToStream > 0; +} + +void ByteStreamFileSource::seekToEnd() { + SeekFile64(fFid, 0, SEEK_END); +} + +ByteStreamFileSource::ByteStreamFileSource(UsageEnvironment& env, FILE* fid, + unsigned preferredFrameSize, + unsigned playTimePerFrame) + : FramedFileSource(env, fid), fFileSize(0), fPreferredFrameSize(preferredFrameSize), + fPlayTimePerFrame(playTimePerFrame), fLastPlayTime(0), + fHaveStartedReading(False), fLimitNumBytesToStream(False), fNumBytesToStream(0) { +#ifndef READ_FROM_FILES_SYNCHRONOUSLY + 
makeSocketNonBlocking(fileno(fFid)); +#endif + + // Test whether the file is seekable + fFidIsSeekable = FileIsSeekable(fFid); +} + +ByteStreamFileSource::~ByteStreamFileSource() { + if (fFid == NULL) return; + +#ifndef READ_FROM_FILES_SYNCHRONOUSLY + envir().taskScheduler().turnOffBackgroundReadHandling(fileno(fFid)); +#endif + + CloseInputFile(fFid); +} + +void ByteStreamFileSource::doGetNextFrame() { + if (feof(fFid) || ferror(fFid) || (fLimitNumBytesToStream && fNumBytesToStream == 0)) { + handleClosure(); + return; + } + +#ifdef READ_FROM_FILES_SYNCHRONOUSLY + doReadFromFile(); +#else + if (!fHaveStartedReading) { + // Await readable data from the file: + envir().taskScheduler().turnOnBackgroundReadHandling(fileno(fFid), + (TaskScheduler::BackgroundHandlerProc*)&fileReadableHandler, this); + fHaveStartedReading = True; + } +#endif +} + +void ByteStreamFileSource::doStopGettingFrames() { + envir().taskScheduler().unscheduleDelayedTask(nextTask()); +#ifndef READ_FROM_FILES_SYNCHRONOUSLY + envir().taskScheduler().turnOffBackgroundReadHandling(fileno(fFid)); + fHaveStartedReading = False; +#endif +} + +void ByteStreamFileSource::fileReadableHandler(ByteStreamFileSource* source, int /*mask*/) { + if (!source->isCurrentlyAwaitingData()) { + source->doStopGettingFrames(); // we're not ready for the data yet + return; + } + source->doReadFromFile(); +} + +void ByteStreamFileSource::doReadFromFile() { + // Try to read as many bytes as will fit in the buffer provided (or "fPreferredFrameSize" if less) + if (fLimitNumBytesToStream && fNumBytesToStream < (u_int64_t)fMaxSize) { + fMaxSize = (unsigned)fNumBytesToStream; + } + if (fPreferredFrameSize > 0 && fPreferredFrameSize < fMaxSize) { + fMaxSize = fPreferredFrameSize; + } +#ifdef READ_FROM_FILES_SYNCHRONOUSLY + fFrameSize = fread(fTo, 1, fMaxSize, fFid); +#else + if (fFidIsSeekable) { + fFrameSize = fread(fTo, 1, fMaxSize, fFid); + } else { + // For non-seekable files (e.g., pipes), call "read()" rather than "fread()", 
to ensure that the read doesn't block: + fFrameSize = read(fileno(fFid), fTo, fMaxSize); + } +#endif + if (fFrameSize == 0) { + handleClosure(); + return; + } + fNumBytesToStream -= fFrameSize; + + // Set the 'presentation time': + if (fPlayTimePerFrame > 0 && fPreferredFrameSize > 0) { + if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) { + // This is the first frame, so use the current time: + gettimeofday(&fPresentationTime, NULL); + } else { + // Increment by the play time of the previous data: + unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime; + fPresentationTime.tv_sec += uSeconds/1000000; + fPresentationTime.tv_usec = uSeconds%1000000; + } + + // Remember the play time of this data: + fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize; + fDurationInMicroseconds = fLastPlayTime; + } else { + // We don't know a specific play time duration for this data, + // so just record the current time as being the 'presentation time': + gettimeofday(&fPresentationTime, NULL); + } + + // Inform the reader that he has data: +#ifdef READ_FROM_FILES_SYNCHRONOUSLY + // To avoid possible infinite recursion, we need to return to the event loop to do this: + nextTask() = envir().taskScheduler().scheduleDelayedTask(0, + (TaskFunc*)FramedSource::afterGetting, this); +#else + // Because the file read was done from the event loop, we can call the + // 'after getting' function directly, without risk of infinite recursion: + FramedSource::afterGetting(this); +#endif +} diff --git a/AnyCore/lib_rtsp/liveMedia/ByteStreamMemoryBufferSource.cpp b/AnyCore/lib_rtsp/liveMedia/ByteStreamMemoryBufferSource.cpp new file mode 100644 index 0000000..8b1631b --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/ByteStreamMemoryBufferSource.cpp @@ -0,0 +1,118 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either 
version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A class for streaming data from a (static) memory buffer, as if it were a file. +// Implementation + +#include "ByteStreamMemoryBufferSource.hh" +#include "GroupsockHelper.hh" + +////////// ByteStreamMemoryBufferSource ////////// + +ByteStreamMemoryBufferSource* +ByteStreamMemoryBufferSource::createNew(UsageEnvironment& env, + u_int8_t* buffer, u_int64_t bufferSize, + Boolean deleteBufferOnClose, + unsigned preferredFrameSize, + unsigned playTimePerFrame) { + if (buffer == NULL) return NULL; + + return new ByteStreamMemoryBufferSource(env, buffer, bufferSize, deleteBufferOnClose, preferredFrameSize, playTimePerFrame); +} + +ByteStreamMemoryBufferSource::ByteStreamMemoryBufferSource(UsageEnvironment& env, + u_int8_t* buffer, u_int64_t bufferSize, + Boolean deleteBufferOnClose, + unsigned preferredFrameSize, + unsigned playTimePerFrame) + : FramedSource(env), fBuffer(buffer), fBufferSize(bufferSize), fCurIndex(0), fDeleteBufferOnClose(deleteBufferOnClose), + fPreferredFrameSize(preferredFrameSize), fPlayTimePerFrame(playTimePerFrame), fLastPlayTime(0), + fLimitNumBytesToStream(False), fNumBytesToStream(0) { +} + +ByteStreamMemoryBufferSource::~ByteStreamMemoryBufferSource() { + if (fDeleteBufferOnClose) delete[] fBuffer; +} + +void ByteStreamMemoryBufferSource::seekToByteAbsolute(u_int64_t byteNumber, u_int64_t numBytesToStream) { + 
fCurIndex = byteNumber; + if (fCurIndex > fBufferSize) fCurIndex = fBufferSize; + + fNumBytesToStream = numBytesToStream; + fLimitNumBytesToStream = fNumBytesToStream > 0; +} + +void ByteStreamMemoryBufferSource::seekToByteRelative(int64_t offset, u_int64_t numBytesToStream) { + int64_t newIndex = fCurIndex + offset; + if (newIndex < 0) { + fCurIndex = 0; + } else { + fCurIndex = (u_int64_t)offset; + if (fCurIndex > fBufferSize) fCurIndex = fBufferSize; + } + + fNumBytesToStream = numBytesToStream; + fLimitNumBytesToStream = fNumBytesToStream > 0; +} + +void ByteStreamMemoryBufferSource::doGetNextFrame() { + if (fCurIndex >= fBufferSize || (fLimitNumBytesToStream && fNumBytesToStream == 0)) { + handleClosure(); + return; + } + + // Try to read as many bytes as will fit in the buffer provided (or "fPreferredFrameSize" if less) + fFrameSize = fMaxSize; + if (fLimitNumBytesToStream && fNumBytesToStream < (u_int64_t)fFrameSize) { + fFrameSize = (unsigned)fNumBytesToStream; + } + if (fPreferredFrameSize > 0 && fPreferredFrameSize < fFrameSize) { + fFrameSize = fPreferredFrameSize; + } + + if (fCurIndex + fFrameSize > fBufferSize) { + fFrameSize = (unsigned)(fBufferSize - fCurIndex); + } + + memmove(fTo, &fBuffer[fCurIndex], fFrameSize); + fCurIndex += fFrameSize; + fNumBytesToStream -= fFrameSize; + + // Set the 'presentation time': + if (fPlayTimePerFrame > 0 && fPreferredFrameSize > 0) { + if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) { + // This is the first frame, so use the current time: + gettimeofday(&fPresentationTime, NULL); + } else { + // Increment by the play time of the previous data: + unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime; + fPresentationTime.tv_sec += uSeconds/1000000; + fPresentationTime.tv_usec = uSeconds%1000000; + } + + // Remember the play time of this data: + fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize; + fDurationInMicroseconds = fLastPlayTime; + } else { + // We don't know a 
specific play time duration for this data, + // so just record the current time as being the 'presentation time': + gettimeofday(&fPresentationTime, NULL); + } + + // Inform the downstream object that it has data: + FramedSource::afterGetting(this); +} diff --git a/AnyCore/lib_rtsp/liveMedia/ByteStreamMultiFileSource.cpp b/AnyCore/lib_rtsp/liveMedia/ByteStreamMultiFileSource.cpp new file mode 100644 index 0000000..59bc1e5 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/ByteStreamMultiFileSource.cpp @@ -0,0 +1,133 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A source that consists of multiple byte-stream files, read sequentially +// Implementation + +#include "ByteStreamMultiFileSource.hh" + +ByteStreamMultiFileSource +::ByteStreamMultiFileSource(UsageEnvironment& env, char const** fileNameArray, + unsigned preferredFrameSize, unsigned playTimePerFrame) + : FramedSource(env), + fPreferredFrameSize(preferredFrameSize), fPlayTimePerFrame(playTimePerFrame), + fCurrentlyReadSourceNumber(0), fHaveStartedNewFile(False) { + // Begin by counting the number of sources: + for (fNumSources = 0; ; ++fNumSources) { + if (fileNameArray[fNumSources] == NULL) break; + } + + // Next, copy the source file names into our own array: + fFileNameArray = new char const*[fNumSources]; + if (fFileNameArray == NULL) return; + unsigned i; + for (i = 0; i < fNumSources; ++i) { + fFileNameArray[i] = strDup(fileNameArray[i]); + } + + // Next, set up our array of component ByteStreamFileSources + // Don't actually create these yet; instead, do this on demand + fSourceArray = new ByteStreamFileSource*[fNumSources]; + if (fSourceArray == NULL) return; + for (i = 0; i < fNumSources; ++i) { + fSourceArray[i] = NULL; + } +} + +ByteStreamMultiFileSource::~ByteStreamMultiFileSource() { + unsigned i; + for (i = 0; i < fNumSources; ++i) { + Medium::close(fSourceArray[i]); + } + delete[] fSourceArray; + + for (i = 0; i < fNumSources; ++i) { + delete[] (char*)(fFileNameArray[i]); + } + delete[] fFileNameArray; +} + +ByteStreamMultiFileSource* ByteStreamMultiFileSource +::createNew(UsageEnvironment& env, char const** fileNameArray, + unsigned preferredFrameSize, unsigned playTimePerFrame) { + ByteStreamMultiFileSource* newSource + = new ByteStreamMultiFileSource(env, fileNameArray, + preferredFrameSize, playTimePerFrame); + + return newSource; +} + +void ByteStreamMultiFileSource::doGetNextFrame() { + do { + // First, check whether we've run out of sources: + if (fCurrentlyReadSourceNumber >= fNumSources) break; + + fHaveStartedNewFile = False; + 
ByteStreamFileSource*& source + = fSourceArray[fCurrentlyReadSourceNumber]; + if (source == NULL) { + // The current source hasn't been created yet. Do this now: + source = ByteStreamFileSource::createNew(envir(), + fFileNameArray[fCurrentlyReadSourceNumber], + fPreferredFrameSize, fPlayTimePerFrame); + if (source == NULL) break; + fHaveStartedNewFile = True; + } + + // (Attempt to) read from the current source. + source->getNextFrame(fTo, fMaxSize, + afterGettingFrame, this, + onSourceClosure, this); + return; + } while (0); + + // An error occurred; consider ourselves closed: + handleClosure(); +} + +void ByteStreamMultiFileSource + ::afterGettingFrame(void* clientData, + unsigned frameSize, unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + ByteStreamMultiFileSource* source + = (ByteStreamMultiFileSource*)clientData; + source->fFrameSize = frameSize; + source->fNumTruncatedBytes = numTruncatedBytes; + source->fPresentationTime = presentationTime; + source->fDurationInMicroseconds = durationInMicroseconds; + FramedSource::afterGetting(source); +} + +void ByteStreamMultiFileSource::onSourceClosure(void* clientData) { + ByteStreamMultiFileSource* source + = (ByteStreamMultiFileSource*)clientData; + source->onSourceClosure1(); +} + +void ByteStreamMultiFileSource::onSourceClosure1() { + // This routine was called because the currently-read source was closed + // (probably due to EOF). 
Close this source down, and move to the + // next one: + ByteStreamFileSource*& source + = fSourceArray[fCurrentlyReadSourceNumber++]; + Medium::close(source); + source = NULL; + + // Try reading again: + doGetNextFrame(); +} diff --git a/AnyCore/lib_rtsp/liveMedia/COPYING b/AnyCore/lib_rtsp/liveMedia/COPYING new file mode 100644 index 0000000..012065c --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/COPYING @@ -0,0 +1 @@ +../COPYING \ No newline at end of file diff --git a/AnyCore/lib_rtsp/liveMedia/DVVideoFileServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/DVVideoFileServerMediaSubsession.cpp new file mode 100644 index 0000000..91e8133 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/DVVideoFileServerMediaSubsession.cpp @@ -0,0 +1,103 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a DV video file. 
+// Implementation + +#include "DVVideoFileServerMediaSubsession.hh" +#include "DVVideoRTPSink.hh" +#include "ByteStreamFileSource.hh" +#include "DVVideoStreamFramer.hh" + +DVVideoFileServerMediaSubsession* +DVVideoFileServerMediaSubsession::createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource) { + return new DVVideoFileServerMediaSubsession(env, fileName, reuseFirstSource); +} + +DVVideoFileServerMediaSubsession +::DVVideoFileServerMediaSubsession(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource) + : FileServerMediaSubsession(env, fileName, reuseFirstSource), + fFileDuration(0.0) { +} + +DVVideoFileServerMediaSubsession::~DVVideoFileServerMediaSubsession() { +} + +FramedSource* DVVideoFileServerMediaSubsession +::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) { + // Create the video source: + ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(envir(), fFileName); + if (fileSource == NULL) return NULL; + fFileSize = fileSource->fileSize(); + + // Create a framer for the Video Elementary Stream: + DVVideoStreamFramer* framer = DVVideoStreamFramer::createNew(envir(), fileSource, True/*the file source is seekable*/); + + // Use the framer to figure out the file's duration: + unsigned frameSize; + double frameDuration; + if (framer->getFrameParameters(frameSize, frameDuration)) { + fFileDuration = (float)(((int64_t)fFileSize*frameDuration)/(frameSize*1000000.0)); + estBitrate = (unsigned)((8000.0*frameSize)/frameDuration); // in kbps + } else { + estBitrate = 50000; // kbps, estimate + } + + return framer; +} + +RTPSink* DVVideoFileServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* /*inputSource*/) { + return DVVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic); +} + +char const* DVVideoFileServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) { + return 
((DVVideoRTPSink*)rtpSink)->auxSDPLineFromFramer((DVVideoStreamFramer*)inputSource); +} + +float DVVideoFileServerMediaSubsession::duration() const { + return fFileDuration; +} + +void DVVideoFileServerMediaSubsession +::seekStreamSource(FramedSource* inputSource, double& seekNPT, double streamDuration, u_int64_t& numBytes) { + // First, get the file source from "inputSource" (a framer): + DVVideoStreamFramer* framer = (DVVideoStreamFramer*)inputSource; + ByteStreamFileSource* fileSource = (ByteStreamFileSource*)(framer->inputSource()); + + // Then figure out where to seek to within the file: + if (fFileDuration > 0.0) { + u_int64_t seekByteNumber = (u_int64_t)(((int64_t)fFileSize*seekNPT)/fFileDuration); + numBytes = (u_int64_t)(((int64_t)fFileSize*streamDuration)/fFileDuration); + fileSource->seekToByteAbsolute(seekByteNumber, numBytes); + } +} + +void DVVideoFileServerMediaSubsession +::setStreamSourceDuration(FramedSource* inputSource, double streamDuration, u_int64_t& numBytes) { + // First, get the file source from "inputSource" (a framer): + DVVideoStreamFramer* framer = (DVVideoStreamFramer*)inputSource; + ByteStreamFileSource* fileSource = (ByteStreamFileSource*)(framer->inputSource()); + + // Then figure out how many bytes to limit the streaming to: + if (fFileDuration > 0.0) { + numBytes = (u_int64_t)(((int64_t)fFileSize*streamDuration)/fFileDuration); + fileSource->seekToByteRelative(0, numBytes); + } +} diff --git a/AnyCore/lib_rtsp/liveMedia/DVVideoRTPSink.cpp b/AnyCore/lib_rtsp/liveMedia/DVVideoRTPSink.cpp new file mode 100644 index 0000000..12bd64d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/DVVideoRTPSink.cpp @@ -0,0 +1,95 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for DV video (RFC 3189) +// (Thanks to Ben Hutchings for prototyping this.) +// Implementation + +#include "DVVideoRTPSink.hh" + +////////// DVVideoRTPSink implementation ////////// + +DVVideoRTPSink +::DVVideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat) + : VideoRTPSink(env, RTPgs, rtpPayloadFormat, 90000, "DV"), + fFmtpSDPLine(NULL) { +} + +DVVideoRTPSink::~DVVideoRTPSink() { + delete[] fFmtpSDPLine; +} + +DVVideoRTPSink* +DVVideoRTPSink::createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat) { + return new DVVideoRTPSink(env, RTPgs, rtpPayloadFormat); +} + +Boolean DVVideoRTPSink::sourceIsCompatibleWithUs(MediaSource& source) { + // Our source must be an appropriate framer: + return source.isDVVideoStreamFramer(); +} + +void DVVideoRTPSink::doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* /*frameStart*/, + unsigned /*numBytesInFrame*/, + struct timeval framePresentationTime, + unsigned numRemainingBytes) { + if (numRemainingBytes == 0) { + // This packet contains the last (or only) fragment of the frame. 
+ // Set the RTP 'M' ('marker') bit: + setMarkerBit(); + } + + // Also set the RTP timestamp: + setTimestamp(framePresentationTime); +} + +unsigned DVVideoRTPSink::computeOverflowForNewFrame(unsigned newFrameSize) const { + unsigned initialOverflow = MultiFramedRTPSink::computeOverflowForNewFrame(newFrameSize); + + // Adjust (increase) this overflow, if necessary, so that the amount of frame data that we use is an integral number + // of DIF blocks: + unsigned numFrameBytesUsed = newFrameSize - initialOverflow; + initialOverflow += numFrameBytesUsed%DV_DIF_BLOCK_SIZE; + + return initialOverflow; +} + +char const* DVVideoRTPSink::auxSDPLine() { + // Generate a new "a=fmtp:" line each time, using parameters from + // our framer source (in case they've changed since the last time that + // we were called): + DVVideoStreamFramer* framerSource = (DVVideoStreamFramer*)fSource; + if (framerSource == NULL) return NULL; // we don't yet have a source + + return auxSDPLineFromFramer(framerSource); +} + +char const* DVVideoRTPSink::auxSDPLineFromFramer(DVVideoStreamFramer* framerSource) { + char const* const profileName = framerSource->profileName(); + if (profileName == NULL) return NULL; + + char const* const fmtpSDPFmt = "a=fmtp:%d encode=%s;audio=bundled\r\n"; + unsigned fmtpSDPFmtSize = strlen(fmtpSDPFmt) + + 3 // max payload format code length + + strlen(profileName); + delete[] fFmtpSDPLine; // if it already exists + fFmtpSDPLine = new char[fmtpSDPFmtSize]; + sprintf(fFmtpSDPLine, fmtpSDPFmt, rtpPayloadType(), profileName); + + return fFmtpSDPLine; +} diff --git a/AnyCore/lib_rtsp/liveMedia/DVVideoRTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/DVVideoRTPSource.cpp new file mode 100644 index 0000000..ebfc2f3 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/DVVideoRTPSource.cpp @@ -0,0 +1,65 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software 
Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// DV Video RTP Sources +// Implementation + +#include "DVVideoRTPSource.hh" + +DVVideoRTPSource* +DVVideoRTPSource::createNew(UsageEnvironment& env, + Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) { + return new DVVideoRTPSource(env, RTPgs, rtpPayloadFormat, rtpTimestampFrequency); +} + +DVVideoRTPSource::DVVideoRTPSource(UsageEnvironment& env, + Groupsock* rtpGS, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) + : MultiFramedRTPSource(env, rtpGS, + rtpPayloadFormat, rtpTimestampFrequency) { +} + +DVVideoRTPSource::~DVVideoRTPSource() { +} + +#define DV_DIF_BLOCK_SIZE 80 +#define DV_SECTION_HEADER 0x1F + +Boolean DVVideoRTPSource +::processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize) { + unsigned const packetSize = packet->dataSize(); + if (packetSize < DV_DIF_BLOCK_SIZE) return False; // TARFU! 
+ + u_int8_t const* data = packet->data(); + fCurrentPacketBeginsFrame = data[0] == DV_SECTION_HEADER && (data[1]&0xf8) == 0 && data[2] == 0; // thanks to Ben Hutchings + + // The RTP "M" (marker) bit indicates the last fragment of a frame: + fCurrentPacketCompletesFrame = packet->rtpMarkerBit(); + + // There is no special header + resultSpecialHeaderSize = 0; + return True; +} + +char const* DVVideoRTPSource::MIMEtype() const { + return "video/DV"; +} + diff --git a/AnyCore/lib_rtsp/liveMedia/DVVideoStreamFramer.cpp b/AnyCore/lib_rtsp/liveMedia/DVVideoStreamFramer.cpp new file mode 100644 index 0000000..0a3d719 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/DVVideoStreamFramer.cpp @@ -0,0 +1,220 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that parses a DV input stream into DV frames to deliver to the downstream object +// Implementation +// (Thanks to Ben Hutchings for his help, including a prototype implementation.) 
+ +#include "DVVideoStreamFramer.hh" +#include "GroupsockHelper.hh" + +////////// DVVideoStreamFramer implementation ////////// + +DVVideoStreamFramer::DVVideoStreamFramer(UsageEnvironment& env, FramedSource* inputSource, + Boolean sourceIsSeekable, Boolean leavePresentationTimesUnmodified) + : FramedFilter(env, inputSource), + fLeavePresentationTimesUnmodified(leavePresentationTimesUnmodified), + fOurProfile(NULL), fInitialBlocksPresent(False), fSourceIsSeekable(sourceIsSeekable) { + fTo = NULL; // hack used when reading "fSavedInitialBlocks" + // Use the current wallclock time as the initial 'presentation time': + gettimeofday(&fNextFramePresentationTime, NULL); +} + +DVVideoStreamFramer::~DVVideoStreamFramer() { +} + +DVVideoStreamFramer* +DVVideoStreamFramer::createNew(UsageEnvironment& env, FramedSource* inputSource, + Boolean sourceIsSeekable, Boolean leavePresentationTimesUnmodified) { + return new DVVideoStreamFramer(env, inputSource, sourceIsSeekable, leavePresentationTimesUnmodified); +} + +// Define the parameters for the profiles that we understand: +struct DVVideoProfile { + char const* name; + unsigned apt; + unsigned sType; + unsigned sequenceCount; + unsigned channelCount; + unsigned dvFrameSize; // in bytes (== sequenceCount*channelCount*(DV_NUM_BLOCKS_PER_SEQUENCE*DV_DIF_BLOCK_SIZE i.e. 12000)) + double frameDuration; // duration of the above, in microseconds. 
(1000000/this == frame rate) +}; + +static DVVideoProfile const profiles[] = { + { "SD-VCR/525-60", 0, 0x00, 10, 1, 120000, (1000000*1001)/30000.0 }, + { "SD-VCR/625-50", 0, 0x00, 12, 1, 144000, 1000000/25.0 }, + { "314M-25/525-60", 1, 0x00, 10, 1, 120000, (1000000*1001)/30000.0 }, + { "314M-25/625-50", 1, 0x00, 12, 1, 144000, 1000000/25.0 }, + { "314M-50/525-60", 1, 0x04, 10, 2, 240000, (1000000*1001)/30000.0 }, + { "314M-50/625-50", 1, 0x04, 12, 2, 288000, 1000000/25.0 }, + { "370M/1080-60i", 1, 0x14, 10, 4, 480000, (1000000*1001)/30000.0 }, + { "370M/1080-50i", 1, 0x14, 12, 4, 576000, 1000000/25.0 }, + { "370M/720-60p", 1, 0x18, 10, 2, 240000, (1000000*1001)/60000.0 }, + { "370M/720-50p", 1, 0x18, 12, 2, 288000, 1000000/50.0 }, + { NULL, 0, 0, 0, 0, 0, 0.0 } + }; + + +char const* DVVideoStreamFramer::profileName() { + if (fOurProfile == NULL) getProfile(); + + return fOurProfile != NULL ? ((DVVideoProfile const*)fOurProfile)->name : NULL; +} + +Boolean DVVideoStreamFramer::getFrameParameters(unsigned& frameSize, double& frameDuration) { + if (fOurProfile == NULL) getProfile(); + if (fOurProfile == NULL) return False; + + frameSize = ((DVVideoProfile const*)fOurProfile)->dvFrameSize; + frameDuration = ((DVVideoProfile const*)fOurProfile)->frameDuration; + return True; +} + +void DVVideoStreamFramer::getProfile() { + // To determine the stream's profile, we need to first read a chunk of data that we can parse: + fInputSource->getNextFrame(fSavedInitialBlocks, DV_SAVED_INITIAL_BLOCKS_SIZE, + afterGettingFrame, this, FramedSource::handleClosure, this); + + // Handle events until the requested data arrives: + envir().taskScheduler().doEventLoop(&fInitialBlocksPresent); +} + +Boolean DVVideoStreamFramer::isDVVideoStreamFramer() const { + return True; +} + +void DVVideoStreamFramer::doGetNextFrame() { + fFrameSize = 0; // initially, until we deliver data + + // If we have saved initial blocks (and won't be seeking back to re-read this data), so use this data first. 
+ if (fInitialBlocksPresent && !fSourceIsSeekable) { + // For simplicity, we require the downstream object's buffer to be >= this data's size: + if (fMaxSize < DV_SAVED_INITIAL_BLOCKS_SIZE) { + fNumTruncatedBytes = fMaxSize; + afterGetting(this); + return; + } + + memmove(fTo, fSavedInitialBlocks, DV_SAVED_INITIAL_BLOCKS_SIZE); + fFrameSize = DV_SAVED_INITIAL_BLOCKS_SIZE; + fTo += DV_SAVED_INITIAL_BLOCKS_SIZE; + fInitialBlocksPresent = False; // for the future + } + + // Arrange to read the (rest of the) requested data. + // (But first, make sure that we read an integral multiple of the DV block size.) + fMaxSize -= fMaxSize%DV_DIF_BLOCK_SIZE; + getAndDeliverData(); +} + +#define DV_SMALLEST_POSSIBLE_FRAME_SIZE 120000 + +void DVVideoStreamFramer::getAndDeliverData() { + unsigned const totFrameSize + = fOurProfile != NULL ? ((DVVideoProfile const*)fOurProfile)->dvFrameSize : DV_SMALLEST_POSSIBLE_FRAME_SIZE; + unsigned totBytesToDeliver = totFrameSize < fMaxSize ? totFrameSize : fMaxSize; + unsigned numBytesToRead = totBytesToDeliver - fFrameSize; + + fInputSource->getNextFrame(fTo, numBytesToRead, afterGettingFrame, this, FramedSource::handleClosure, this); +} + +void DVVideoStreamFramer::afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, unsigned /*durationInMicroseconds*/) { + DVVideoStreamFramer* source = (DVVideoStreamFramer*)clientData; + source->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime); +} + +#define DVSectionId(n) ptr[(n)*DV_DIF_BLOCK_SIZE + 0] +#define DVData(n,i) ptr[(n)*DV_DIF_BLOCK_SIZE + 3+(i)] + +#define DV_SECTION_HEADER 0x1F +#define DV_PACK_HEADER_10 0x3F +#define DV_PACK_HEADER_12 0xBF +#define DV_SECTION_VAUX_MIN 0x50 +#define DV_SECTION_VAUX_MAX 0x5F +#define DV_PACK_VIDEO_SOURCE 60 +#ifndef MILLION +#define MILLION 1000000 +#endif + +void DVVideoStreamFramer::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, struct timeval 
presentationTime) { + if (fOurProfile == NULL && frameSize >= DV_SAVED_INITIAL_BLOCKS_SIZE) { + // (Try to) parse this data enough to figure out its profile. + // We assume that the data begins on a (80-byte) block boundary, but not necessarily on a (150-block) sequence boundary. + // We therefore scan each 80-byte block, until we find the 6-block header that begins a sequence: + u_int8_t const* data = (fTo == NULL) ? fSavedInitialBlocks : fTo; + for (u_int8_t const* ptr = data; ptr + 6*DV_DIF_BLOCK_SIZE <= &data[DV_SAVED_INITIAL_BLOCKS_SIZE]; ptr += DV_DIF_BLOCK_SIZE) { + // Check whether "ptr" points to an appropriate header: + u_int8_t const sectionHeader = DVSectionId(0); + u_int8_t const sectionVAUX = DVSectionId(5); + u_int8_t const packHeaderNum = DVData(0,0); + + if (sectionHeader == DV_SECTION_HEADER + && (packHeaderNum == DV_PACK_HEADER_10 || packHeaderNum == DV_PACK_HEADER_12) + && (sectionVAUX >= DV_SECTION_VAUX_MIN && sectionVAUX <= DV_SECTION_VAUX_MAX)) { + // This data begins a sequence; look up the DV profile from this: + u_int8_t const apt = DVData(0,1)&0x07; + u_int8_t const sType = DVData(5,48)&0x1F; + u_int8_t const sequenceCount = (packHeaderNum == DV_PACK_HEADER_10) ? 10 : 12; + + // Use these three parameters (apt, sType, sequenceCount) to look up the DV profile: + for (DVVideoProfile const* profile = profiles; profile->name != NULL; ++profile) { + if (profile->apt == apt && profile->sType == sType && profile->sequenceCount == sequenceCount) { + fOurProfile = profile; + break; + } + } + break; // because we found a correct sequence header (even if we don't happen to define a profile for it) + } + } + } + + if (fTo != NULL) { // There is a downstream object; complete delivery to it (or read more data, if necessary) + unsigned const totFrameSize + = fOurProfile != NULL ? 
((DVVideoProfile const*)fOurProfile)->dvFrameSize : DV_SMALLEST_POSSIBLE_FRAME_SIZE; + fFrameSize += frameSize; + fTo += frameSize; + fPresentationTime = presentationTime; // by default; may get changed below + + if (fFrameSize < totFrameSize && fFrameSize < fMaxSize && numTruncatedBytes == 0) { + // We have more data to deliver; get it now: + getAndDeliverData(); + } else { + // We're done delivering this DV frame (but check for truncation): + fNumTruncatedBytes = totFrameSize - fFrameSize; + + if (fOurProfile != NULL) { + // Also set the presentation time, and increment it for next time, + // based on the length of this frame: + if (!fLeavePresentationTimesUnmodified) fPresentationTime = fNextFramePresentationTime; + + DVVideoProfile const* ourProfile =(DVVideoProfile const*)fOurProfile; + double durationInMicroseconds = (fFrameSize*ourProfile->frameDuration)/ourProfile->dvFrameSize; + fDurationInMicroseconds = (unsigned)durationInMicroseconds; + fNextFramePresentationTime.tv_usec += fDurationInMicroseconds; + fNextFramePresentationTime.tv_sec += fNextFramePresentationTime.tv_usec/MILLION; + fNextFramePresentationTime.tv_usec %= MILLION; + } + + afterGetting(this); + } + } else { + // We read data into our special buffer; signal that it has arrived: + fInitialBlocksPresent = True; + } +} diff --git a/AnyCore/lib_rtsp/liveMedia/DarwinInjector.cpp b/AnyCore/lib_rtsp/liveMedia/DarwinInjector.cpp new file mode 100644 index 0000000..758a188 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/DarwinInjector.cpp @@ -0,0 +1,349 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// An object that redirects one or more RTP/RTCP streams - forming a single +// multimedia session - into a 'Darwin Streaming Server' (for subsequent +// reflection to potentially arbitrarily many remote RTSP clients). +// Implementation + +#include "DarwinInjector.hh" +#include + +////////// SubstreamDescriptor definition ////////// + +class SubstreamDescriptor { +public: + SubstreamDescriptor(RTPSink* rtpSink, RTCPInstance* rtcpInstance, unsigned trackId); + ~SubstreamDescriptor(); + + SubstreamDescriptor*& next() { return fNext; } + RTPSink* rtpSink() const { return fRTPSink; } + RTCPInstance* rtcpInstance() const { return fRTCPInstance; } + char const* sdpLines() const { return fSDPLines; } + +private: + SubstreamDescriptor* fNext; + RTPSink* fRTPSink; + RTCPInstance* fRTCPInstance; + char* fSDPLines; +}; + + +////////// DarwinInjector implementation ////////// + +DarwinInjector* DarwinInjector::createNew(UsageEnvironment& env, + char const* applicationName, + int verbosityLevel) { + return new DarwinInjector(env, applicationName, verbosityLevel); +} + +Boolean DarwinInjector::lookupByName(UsageEnvironment& env, char const* name, + DarwinInjector*& result) { + result = NULL; // unless we succeed + + Medium* medium; + if (!Medium::lookupByName(env, name, medium)) return False; + + if (!medium->isDarwinInjector()) { + env.setResultMsg(name, " is not a 'Darwin injector'"); + return False; + } + + result = 
(DarwinInjector*)medium; + return True; +} + +DarwinInjector::DarwinInjector(UsageEnvironment& env, + char const* applicationName, int verbosityLevel) + : Medium(env), + fApplicationName(strDup(applicationName)), fVerbosityLevel(verbosityLevel), + fRTSPClient(NULL), fSubstreamSDPSizes(0), + fHeadSubstream(NULL), fTailSubstream(NULL), fSession(NULL), fLastTrackId(0), fResultString(NULL) { +} + +DarwinInjector::~DarwinInjector() { + if (fSession != NULL) { // close down and delete the session + fRTSPClient->sendTeardownCommand(*fSession, NULL); + Medium::close(fSession); + } + + delete fHeadSubstream; + delete[] (char*)fApplicationName; + Medium::close(fRTSPClient); +} + +void DarwinInjector::addStream(RTPSink* rtpSink, RTCPInstance* rtcpInstance) { + if (rtpSink == NULL) return; // "rtpSink" should be non-NULL + + SubstreamDescriptor* newDescriptor = new SubstreamDescriptor(rtpSink, rtcpInstance, ++fLastTrackId); + if (fHeadSubstream == NULL) { + fHeadSubstream = fTailSubstream = newDescriptor; + } else { + fTailSubstream->next() = newDescriptor; + fTailSubstream = newDescriptor; + } + + fSubstreamSDPSizes += strlen(newDescriptor->sdpLines()); +} + +// Define a special subclass of "RTSPClient" that has a pointer field to a "DarwinInjector". 
We'll use this to implement RTSP ops: +class RTSPClientForDarwinInjector: public RTSPClient { +public: + RTSPClientForDarwinInjector(UsageEnvironment& env, char const* rtspURL, int verbosityLevel, char const* applicationName, + DarwinInjector* ourDarwinInjector) + : RTSPClient(env, rtspURL, verbosityLevel, applicationName, 0, -1), + fOurDarwinInjector(ourDarwinInjector) {} + virtual ~RTSPClientForDarwinInjector() {} + DarwinInjector* fOurDarwinInjector; +}; + +Boolean DarwinInjector +::setDestination(char const* remoteRTSPServerNameOrAddress, + char const* remoteFileName, + char const* sessionName, + char const* sessionInfo, + portNumBits remoteRTSPServerPortNumber, + char const* remoteUserName, + char const* remotePassword, + char const* sessionAuthor, + char const* sessionCopyright, + int timeout) { + char* sdp = NULL; + char* url = NULL; + Boolean success = False; // until we learn otherwise + + do { + // Construct a RTSP URL for the remote stream: + char const* const urlFmt = "rtsp://%s:%u/%s"; + unsigned urlLen + = strlen(urlFmt) + strlen(remoteRTSPServerNameOrAddress) + 5 /* max short len */ + strlen(remoteFileName); + url = new char[urlLen]; + sprintf(url, urlFmt, remoteRTSPServerNameOrAddress, remoteRTSPServerPortNumber, remoteFileName); + + // Begin by creating our RTSP client object: + fRTSPClient = new RTSPClientForDarwinInjector(envir(), url, fVerbosityLevel, fApplicationName, this); + if (fRTSPClient == NULL) break; + + // Get the remote RTSP server's IP address: + struct in_addr addr; + { + NetAddressList addresses(remoteRTSPServerNameOrAddress); + if (addresses.numAddresses() == 0) break; + NetAddress const* address = addresses.firstAddress(); + addr.s_addr = *(unsigned*)(address->data()); + } + AddressString remoteRTSPServerAddressStr(addr); + + // Construct a SDP description for the session that we'll be streaming: + char const* const sdpFmt = + "v=0\r\n" + "o=- %u %u IN IP4 127.0.0.1\r\n" + "s=%s\r\n" + "i=%s\r\n" + "c=IN IP4 %s\r\n" + "t=0 0\r\n" 
+ "a=x-qt-text-nam:%s\r\n" + "a=x-qt-text-inf:%s\r\n" + "a=x-qt-text-cmt:source application:%s\r\n" + "a=x-qt-text-aut:%s\r\n" + "a=x-qt-text-cpy:%s\r\n"; + // plus, %s for each substream SDP + unsigned sdpLen = strlen(sdpFmt) + + 20 /* max int len */ + 20 /* max int len */ + + strlen(sessionName) + + strlen(sessionInfo) + + strlen(remoteRTSPServerAddressStr.val()) + + strlen(sessionName) + + strlen(sessionInfo) + + strlen(fApplicationName) + + strlen(sessionAuthor) + + strlen(sessionCopyright) + + fSubstreamSDPSizes; + unsigned const sdpSessionId = our_random32(); + unsigned const sdpVersion = sdpSessionId; + sdp = new char[sdpLen]; + sprintf(sdp, sdpFmt, + sdpSessionId, sdpVersion, // o= line + sessionName, // s= line + sessionInfo, // i= line + remoteRTSPServerAddressStr.val(), // c= line + sessionName, // a=x-qt-text-nam: line + sessionInfo, // a=x-qt-text-inf: line + fApplicationName, // a=x-qt-text-cmt: line + sessionAuthor, // a=x-qt-text-aut: line + sessionCopyright // a=x-qt-text-cpy: line + ); + char* p = &sdp[strlen(sdp)]; + SubstreamDescriptor* ss; + for (ss = fHeadSubstream; ss != NULL; ss = ss->next()) { + sprintf(p, "%s", ss->sdpLines()); + p += strlen(p); + } + + // Do a RTSP "ANNOUNCE" with this SDP description: + Authenticator auth; + Authenticator* authToUse = NULL; + if (remoteUserName[0] != '\0' || remotePassword[0] != '\0') { + auth.setUsernameAndPassword(remoteUserName, remotePassword); + authToUse = &auth; + } + fWatchVariable = 0; + (void)fRTSPClient->sendAnnounceCommand(sdp, genericResponseHandler, authToUse); + + // Now block (but handling events) until we get a response: + envir().taskScheduler().doEventLoop(&fWatchVariable); + + delete[] fResultString; + if (fResultCode != 0) break; // an error occurred with the RTSP "ANNOUNCE" command + + // Next, tell the remote server to start receiving the stream from us. + // (To do this, we first create a "MediaSession" object from the SDP description.) 
+ fSession = MediaSession::createNew(envir(), sdp); + if (fSession == NULL) break; + + ss = fHeadSubstream; + MediaSubsessionIterator iter(*fSession); + MediaSubsession* subsession; + ss = fHeadSubstream; + unsigned streamChannelId = 0; + while ((subsession = iter.next()) != NULL) { + if (!subsession->initiate()) break; + + fWatchVariable = 0; + (void)fRTSPClient->sendSetupCommand(*subsession, genericResponseHandler, + True /*streamOutgoing*/, + True /*streamUsingTCP*/); + // Now block (but handling events) until we get a response: + envir().taskScheduler().doEventLoop(&fWatchVariable); + + delete[] fResultString; + if (fResultCode != 0) break; // an error occurred with the RTSP "SETUP" command + + // Tell this subsession's RTPSink and RTCPInstance to use + // the RTSP TCP connection: + ss->rtpSink()->setStreamSocket(fRTSPClient->socketNum(), streamChannelId++); + if (ss->rtcpInstance() != NULL) { + ss->rtcpInstance()->setStreamSocket(fRTSPClient->socketNum(), + streamChannelId++); + } + ss = ss->next(); + } + if (subsession != NULL) break; // an error occurred above + + // Tell the RTSP server to start: + fWatchVariable = 0; + (void)fRTSPClient->sendPlayCommand(*fSession, genericResponseHandler); + + // Now block (but handling events) until we get a response: + envir().taskScheduler().doEventLoop(&fWatchVariable); + + delete[] fResultString; + if (fResultCode != 0) break; // an error occurred with the RTSP "PLAY" command + + // Finally, make sure that the output TCP buffer is a reasonable size: + increaseSendBufferTo(envir(), fRTSPClient->socketNum(), 100*1024); + + success = True; + } while (0); + + delete[] sdp; + delete[] url; + return success; +} + +Boolean DarwinInjector::isDarwinInjector() const { + return True; +} + +void DarwinInjector::genericResponseHandler(RTSPClient* rtspClient, int responseCode, char* responseString) { + DarwinInjector* di = ((RTSPClientForDarwinInjector*)rtspClient)-> fOurDarwinInjector; + di->genericResponseHandler1(responseCode, 
responseString); +} + +void DarwinInjector::genericResponseHandler1(int responseCode, char* responseString) { + // Set result values: + fResultCode = responseCode; + fResultString = responseString; + + // Signal a break from the event loop (thereby returning from the blocking command): + fWatchVariable = ~0; +} + +////////// SubstreamDescriptor implementation ////////// + +SubstreamDescriptor::SubstreamDescriptor(RTPSink* rtpSink, + RTCPInstance* rtcpInstance, unsigned trackId) + : fNext(NULL), fRTPSink(rtpSink), fRTCPInstance(rtcpInstance) { + // Create the SDP description for this substream + char const* mediaType = fRTPSink->sdpMediaType(); + unsigned char rtpPayloadType = fRTPSink->rtpPayloadType(); + char const* rtpPayloadFormatName = fRTPSink->rtpPayloadFormatName(); + unsigned rtpTimestampFrequency = fRTPSink->rtpTimestampFrequency(); + unsigned numChannels = fRTPSink->numChannels(); + char* rtpmapLine; + if (rtpPayloadType >= 96) { + char* encodingParamsPart; + if (numChannels != 1) { + encodingParamsPart = new char[1 + 20 /* max int len */]; + sprintf(encodingParamsPart, "/%d", numChannels); + } else { + encodingParamsPart = strDup(""); + } + char const* const rtpmapFmt = "a=rtpmap:%d %s/%d%s\r\n"; + unsigned rtpmapFmtSize = strlen(rtpmapFmt) + + 3 /* max char len */ + strlen(rtpPayloadFormatName) + + 20 /* max int len */ + strlen(encodingParamsPart); + rtpmapLine = new char[rtpmapFmtSize]; + sprintf(rtpmapLine, rtpmapFmt, + rtpPayloadType, rtpPayloadFormatName, + rtpTimestampFrequency, encodingParamsPart); + delete[] encodingParamsPart; + } else { + // Static payload type => no "a=rtpmap:" line + rtpmapLine = strDup(""); + } + unsigned rtpmapLineSize = strlen(rtpmapLine); + char const* auxSDPLine = fRTPSink->auxSDPLine(); + if (auxSDPLine == NULL) auxSDPLine = ""; + unsigned auxSDPLineSize = strlen(auxSDPLine); + + char const* const sdpFmt = + "m=%s 0 RTP/AVP %u\r\n" + "%s" // "a=rtpmap:" line (if present) + "%s" // auxilliary (e.g., "a=fmtp:") line (if 
present) + "a=control:trackID=%u\r\n"; + unsigned sdpFmtSize = strlen(sdpFmt) + + strlen(mediaType) + 3 /* max char len */ + + rtpmapLineSize + + auxSDPLineSize + + 20 /* max int len */; + char* sdpLines = new char[sdpFmtSize]; + sprintf(sdpLines, sdpFmt, + mediaType, // m= + rtpPayloadType, // m= + rtpmapLine, // a=rtpmap:... (if present) + auxSDPLine, // optional extra SDP line + trackId); // a=control: + fSDPLines = strDup(sdpLines); + delete[] sdpLines; + delete[] rtpmapLine; +} + +SubstreamDescriptor::~SubstreamDescriptor() { + delete fSDPLines; + delete fNext; +} diff --git a/AnyCore/lib_rtsp/liveMedia/DeviceSource.cpp b/AnyCore/lib_rtsp/liveMedia/DeviceSource.cpp new file mode 100644 index 0000000..cc884b3 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/DeviceSource.cpp @@ -0,0 +1,156 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A template for a MediaSource encapsulating an audio/video input device +// +// NOTE: Sections of this code labeled "%%% TO BE WRITTEN %%%" are incomplete, and need to be written by the programmer +// (depending on the features of the particular device). 
+// Implementation + +#include "DeviceSource.hh" +#include // for "gettimeofday()" + +DeviceSource* +DeviceSource::createNew(UsageEnvironment& env, + DeviceParameters params) { + return new DeviceSource(env, params); +} + +EventTriggerId DeviceSource::eventTriggerId = 0; + +unsigned DeviceSource::referenceCount = 0; + +DeviceSource::DeviceSource(UsageEnvironment& env, + DeviceParameters params) + : FramedSource(env), fParams(params) { + if (referenceCount == 0) { + // Any global initialization of the device would be done here: + //%%% TO BE WRITTEN %%% + } + ++referenceCount; + + // Any instance-specific initialization of the device would be done here: + //%%% TO BE WRITTEN %%% + + // We arrange here for our "deliverFrame" member function to be called + // whenever the next frame of data becomes available from the device. + // + // If the device can be accessed as a readable socket, then one easy way to do this is using a call to + // envir().taskScheduler().turnOnBackgroundReadHandling( ... ) + // (See examples of this call in the "liveMedia" directory.) + // + // If, however, the device *cannot* be accessed as a readable socket, then instead we can implement it using 'event triggers': + // Create an 'event trigger' for this device (if it hasn't already been done): + if (eventTriggerId == 0) { + eventTriggerId = envir().taskScheduler().createEventTrigger(deliverFrame0); + } +} + +DeviceSource::~DeviceSource() { + // Any instance-specific 'destruction' (i.e., resetting) of the device would be done here: + //%%% TO BE WRITTEN %%% + + --referenceCount; + if (referenceCount == 0) { + // Any global 'destruction' (i.e., resetting) of the device would be done here: + //%%% TO BE WRITTEN %%% + + // Reclaim our 'event trigger' + envir().taskScheduler().deleteEventTrigger(eventTriggerId); + eventTriggerId = 0; + } +} + +void DeviceSource::doGetNextFrame() { + // This function is called (by our 'downstream' object) when it asks for new data. 
+ + // Note: If, for some reason, the source device stops being readable (e.g., it gets closed), then you do the following: + if (0 /* the source stops being readable */ /*%%% TO BE WRITTEN %%%*/) { + handleClosure(); + return; + } + + // If a new frame of data is immediately available to be delivered, then do this now: + if (0 /* a new frame of data is immediately available to be delivered*/ /*%%% TO BE WRITTEN %%%*/) { + deliverFrame(); + } + + // No new data is immediately available to be delivered. We don't do anything more here. + // Instead, our event trigger must be called (e.g., from a separate thread) when new data becomes available. +} + +void DeviceSource::deliverFrame0(void* clientData) { + ((DeviceSource*)clientData)->deliverFrame(); +} + +void DeviceSource::deliverFrame() { + // This function is called when new frame data is available from the device. + // We deliver this data by copying it to the 'downstream' object, using the following parameters (class members): + // 'in' parameters (these should *not* be modified by this function): + // fTo: The frame data is copied to this address. + // (Note that the variable "fTo" is *not* modified. Instead, + // the frame data is copied to the address pointed to by "fTo".) + // fMaxSize: This is the maximum number of bytes that can be copied + // (If the actual frame is larger than this, then it should + // be truncated, and "fNumTruncatedBytes" set accordingly.) + // 'out' parameters (these are modified by this function): + // fFrameSize: Should be set to the delivered frame size (<= fMaxSize). + // fNumTruncatedBytes: Should be set iff the delivered frame would have been + // bigger than "fMaxSize", in which case it's set to the number of bytes + // that have been omitted. + // fPresentationTime: Should be set to the frame's presentation time + // (seconds, microseconds). This time must be aligned with 'wall-clock time' - i.e., the time that you would get + // by calling "gettimeofday()". 
+ // fDurationInMicroseconds: Should be set to the frame's duration, if known. + // If, however, the device is a 'live source' (e.g., encoded from a camera or microphone), then we probably don't need + // to set this variable, because - in this case - data will never arrive 'early'. + // Note the code below. + + if (!isCurrentlyAwaitingData()) return; // we're not ready for the data yet + + u_int8_t* newFrameDataStart = (u_int8_t*)0xDEADBEEF; //%%% TO BE WRITTEN %%% + unsigned newFrameSize = 0; //%%% TO BE WRITTEN %%% + + // Deliver the data here: + if (newFrameSize > fMaxSize) { + fFrameSize = fMaxSize; + fNumTruncatedBytes = newFrameSize - fMaxSize; + } else { + fFrameSize = newFrameSize; + } + gettimeofday(&fPresentationTime, NULL); // If you have a more accurate time - e.g., from an encoder - then use that instead. + // If the device is *not* a 'live source' (e.g., it comes instead from a file or buffer), then set "fDurationInMicroseconds" here. + memmove(fTo, newFrameDataStart, fFrameSize); + + // After delivering the data, inform the reader that it is now available: + FramedSource::afterGetting(this); +} + + +// The following code would be called to signal that a new frame of data has become available. +// This (unlike other "XRtsp Media" library code) may be called from a separate thread. +// (Note, however, that "triggerEvent()" cannot be called with the same 'event trigger id' from different threads. +// Also, if you want to have multiple device threads, each one using a different 'event trigger id', then you will need +// to make "eventTriggerId" a non-static member variable of "DeviceSource".) 
+void signalNewFrameData() { + TaskScheduler* ourScheduler = NULL; //%%% TO BE WRITTEN %%% + DeviceSource* ourDevice = NULL; //%%% TO BE WRITTEN %%% + + if (ourScheduler != NULL) { // sanity check + ourScheduler->triggerEvent(DeviceSource::eventTriggerId, ourDevice); + } +} diff --git a/AnyCore/lib_rtsp/liveMedia/DigestAuthentication.cpp b/AnyCore/lib_rtsp/liveMedia/DigestAuthentication.cpp new file mode 100644 index 0000000..06a09bb --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/DigestAuthentication.cpp @@ -0,0 +1,157 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A class used for digest authentication. 
+// Implementation + +#include "DigestAuthentication.hh" +#include "ourMD5.hh" +#include +#include // for gettimeofday() +#include +#include +#include + +Authenticator::Authenticator() { + assign(NULL, NULL, NULL, NULL, False); +} + +Authenticator::Authenticator(char const* username, char const* password, Boolean passwordIsMD5) { + assign(NULL, NULL, username, password, passwordIsMD5); +} + +Authenticator::Authenticator(const Authenticator& orig) { + assign(orig.realm(), orig.nonce(), orig.username(), orig.password(), orig.fPasswordIsMD5); +} + +Authenticator& Authenticator::operator=(const Authenticator& rightSide) { + if (&rightSide != this) { + reset(); + assign(rightSide.realm(), rightSide.nonce(), + rightSide.username(), rightSide.password(), rightSide.fPasswordIsMD5); + } + + return *this; +} + +Authenticator::~Authenticator() { + reset(); +} + +void Authenticator::reset() { + resetRealmAndNonce(); + resetUsernameAndPassword(); +} + +void Authenticator::setRealmAndNonce(char const* realm, char const* nonce) { + resetRealmAndNonce(); + assignRealmAndNonce(realm, nonce); +} + +void Authenticator::setRealmAndRandomNonce(char const* realm) { + resetRealmAndNonce(); + + // Construct data to seed the random nonce: + struct { + struct timeval timestamp; + unsigned counter; + } seedData; + gettimeofday(&seedData.timestamp, NULL); + static unsigned counter = 0; + seedData.counter = ++counter; + + // Use MD5 to compute a 'random' nonce from this seed data: + char nonceBuf[33]; + our_MD5Data((unsigned char*)(&seedData), sizeof seedData, nonceBuf); + + assignRealmAndNonce(realm, nonceBuf); +} + +void Authenticator::setUsernameAndPassword(char const* username, + char const* password, + Boolean passwordIsMD5) { + resetUsernameAndPassword(); + assignUsernameAndPassword(username, password, passwordIsMD5); +} + +char const* Authenticator::computeDigestResponse(char const* cmd, + char const* url) const { + // The "response" field is computed as: + // md5(md5(::)::md5(:)) + // 
or, if "fPasswordIsMD5" is True: + // md5(::md5(:)) + char ha1Buf[33]; + if (fPasswordIsMD5) { + strncpy(ha1Buf, password(), 32); + ha1Buf[32] = '\0'; // just in case + } else { + unsigned const ha1DataLen = strlen(username()) + 1 + + strlen(realm()) + 1 + strlen(password()); + unsigned char* ha1Data = new unsigned char[ha1DataLen+1]; + sprintf((char*)ha1Data, "%s:%s:%s", username(), realm(), password()); + our_MD5Data(ha1Data, ha1DataLen, ha1Buf); + delete[] ha1Data; + } + + unsigned const ha2DataLen = strlen(cmd) + 1 + strlen(url); + unsigned char* ha2Data = new unsigned char[ha2DataLen+1]; + sprintf((char*)ha2Data, "%s:%s", cmd, url); + char ha2Buf[33]; + our_MD5Data(ha2Data, ha2DataLen, ha2Buf); + delete[] ha2Data; + + unsigned const digestDataLen + = 32 + 1 + strlen(nonce()) + 1 + 32; + unsigned char* digestData = new unsigned char[digestDataLen+1]; + sprintf((char*)digestData, "%s:%s:%s", + ha1Buf, nonce(), ha2Buf); + char const* result = our_MD5Data(digestData, digestDataLen, NULL); + delete[] digestData; + return result; +} + +void Authenticator::reclaimDigestResponse(char const* responseStr) const { + delete[](char*)responseStr; +} + +void Authenticator::resetRealmAndNonce() { + delete[] fRealm; fRealm = NULL; + delete[] fNonce; fNonce = NULL; +} + +void Authenticator::resetUsernameAndPassword() { + delete[] fUsername; fUsername = NULL; + delete[] fPassword; fPassword = NULL; + fPasswordIsMD5 = False; +} + +void Authenticator::assignRealmAndNonce(char const* realm, char const* nonce) { + fRealm = strDup(realm); + fNonce = strDup(nonce); +} + +void Authenticator::assignUsernameAndPassword(char const* username, char const* password, Boolean passwordIsMD5) { + fUsername = strDup(username); + fPassword = strDup(password); + fPasswordIsMD5 = passwordIsMD5; +} + +void Authenticator::assign(char const* realm, char const* nonce, + char const* username, char const* password, Boolean passwordIsMD5) { + assignRealmAndNonce(realm, nonce); + 
assignUsernameAndPassword(username, password, passwordIsMD5); +} diff --git a/AnyCore/lib_rtsp/liveMedia/EBMLNumber.cpp b/AnyCore/lib_rtsp/liveMedia/EBMLNumber.cpp new file mode 100644 index 0000000..454b3b0 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/EBMLNumber.cpp @@ -0,0 +1,150 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// EBML numbers (ids and sizes) +// Implementation + +#include "EBMLNumber.hh" + +EBMLNumber::EBMLNumber(Boolean stripLeading1) + : stripLeading1(stripLeading1), len(0) { +} + +EBMLNumber::~EBMLNumber() { +} + +char* EBMLNumber::hexString() const { + static char printBuf[2*EBML_NUMBER_MAX_LEN + 1]; + + char* to = printBuf; + for (unsigned i = 0; i < len; ++i) { + sprintf(to, "%02X", data[i]); + to += 2; + } + + return printBuf; +} + +u_int64_t EBMLNumber::val() const { + u_int64_t result = 0; + + for (unsigned i = 0; i < len; ++i) { + result = result*256 + data[i]; + } + + return result; +} + +EBMLId::EBMLId() + : EBMLNumber(False) { +} + +EBMLId::~EBMLId() { +} + +char const* EBMLId::stringName() const { + switch (val()) { + case MATROSKA_ID_EBML: { return "EBML"; } + case MATROSKA_ID_VOID: { return "Void"; } + case MATROSKA_ID_CRC_32: { return "CRC-32"; } + case MATROSKA_ID_SEGMENT: { return "Segment"; } + case MATROSKA_ID_SEEK_HEAD: { return "Seek Head"; } + case MATROSKA_ID_SEEK: { return "Seek"; } + case MATROSKA_ID_SEEK_ID: { return "Seek ID"; } + case MATROSKA_ID_SEEK_POSITION: { return "Seek Position"; } + case MATROSKA_ID_INFO: { return "Segment Info"; } + case MATROSKA_ID_SEGMENT_UID: { return "Segment UID"; } + case MATROSKA_ID_DURATION: { return "Segment Duration"; } + case MATROSKA_ID_TIMECODE_SCALE: { return "Timecode Scale"; } + case MATROSKA_ID_DATE_UTC: { return "Date (UTC)"; } + case MATROSKA_ID_TITLE: { return "Title"; } + case MATROSKA_ID_MUXING_APP: { return "Muxing App"; } + case MATROSKA_ID_WRITING_APP: { return "Writing App"; } + case MATROSKA_ID_CLUSTER: { return "Cluster"; } + case MATROSKA_ID_TIMECODE: { return "TimeCode"; } + case MATROSKA_ID_POSITION: { return "Position"; } + case MATROSKA_ID_PREV_SIZE: { return "Prev. 
Size"; } + case MATROSKA_ID_SIMPLEBLOCK: { return "SimpleBlock"; } + case MATROSKA_ID_BLOCK_GROUP: { return "Block Group"; } + case MATROSKA_ID_BLOCK: { return "Block"; } + case MATROSKA_ID_BLOCK_DURATION: { return "Block Duration"; } + case MATROSKA_ID_REFERENCE_BLOCK: { return "Reference Block"; } + case MATROSKA_ID_TRACKS: { return "Tracks"; } + case MATROSKA_ID_TRACK_ENTRY: { return "Track Entry"; } + case MATROSKA_ID_TRACK_NUMBER: { return "Track Number"; } + case MATROSKA_ID_TRACK_UID: { return "Track UID"; } + case MATROSKA_ID_TRACK_TYPE: { return "Track Type"; } + case MATROSKA_ID_FLAG_ENABLED: { return "Flag Enabled"; } + case MATROSKA_ID_FLAG_DEFAULT: { return "Flag Default"; } + case MATROSKA_ID_FLAG_FORCED: { return "Flag Forced"; } + case MATROSKA_ID_FLAG_LACING: { return "Flag Lacing"; } + case MATROSKA_ID_MIN_CACHE: { return "Min Cache"; } + case MATROSKA_ID_DEFAULT_DURATION: { return "Default Duration"; } + case MATROSKA_ID_TRACK_TIMECODE_SCALE: { return "Track Timecode Scale"; } + case MATROSKA_ID_MAX_BLOCK_ADDITION_ID: { return "Max Block Addition ID"; } + case MATROSKA_ID_NAME: { return "Name"; } + case MATROSKA_ID_LANGUAGE: { return "Language"; } + case MATROSKA_ID_CODEC: { return "Codec ID"; } + case MATROSKA_ID_CODEC_PRIVATE: { return "Codec Private"; } + case MATROSKA_ID_CODEC_NAME: { return "Codec Name"; } + case MATROSKA_ID_CODEC_DECODE_ALL: { return "Codec Decode All"; } + case MATROSKA_ID_VIDEO: { return "Video Settings"; } + case MATROSKA_ID_FLAG_INTERLACED: { return "Flag Interlaced"; } + case MATROSKA_ID_PIXEL_WIDTH: { return "Pixel Width"; } + case MATROSKA_ID_PIXEL_HEIGHT: { return "Pixel Height"; } + case MATROSKA_ID_DISPLAY_WIDTH: { return "Display Width"; } + case MATROSKA_ID_DISPLAY_HEIGHT: { return "Display Height"; } + case MATROSKA_ID_DISPLAY_UNIT: { return "Display Unit"; } + case MATROSKA_ID_AUDIO: { return "Audio Settings"; } + case MATROSKA_ID_SAMPLING_FREQUENCY: { return "Sampling Frequency"; } + case 
MATROSKA_ID_OUTPUT_SAMPLING_FREQUENCY: { return "Output Sampling Frequency"; } + case MATROSKA_ID_CHANNELS: { return "Channels"; } + case MATROSKA_ID_BIT_DEPTH: { return "Bit Depth"; } + case MATROSKA_ID_CONTENT_ENCODINGS: { return "Content Encodings"; } + case MATROSKA_ID_CONTENT_ENCODING: { return "Content Encoding"; } + case MATROSKA_ID_CONTENT_COMPRESSION: { return "Content Compression"; } + case MATROSKA_ID_CONTENT_COMP_ALGO: { return "Content Compression Algorithm"; } + case MATROSKA_ID_CONTENT_COMP_SETTINGS: { return "Content Compression Settings"; } + case MATROSKA_ID_CONTENT_ENCRYPTION: { return "Content Encryption"; } + case MATROSKA_ID_ATTACHMENTS: { return "Attachments"; } + case MATROSKA_ID_ATTACHED_FILE: { return "Attached File"; } + case MATROSKA_ID_FILE_DESCRIPTION: { return "File Description"; } + case MATROSKA_ID_FILE_NAME: { return "File Name"; } + case MATROSKA_ID_FILE_MIME_TYPE: { return "File MIME Type"; } + case MATROSKA_ID_FILE_DATA: { return "File Data"; } + case MATROSKA_ID_FILE_UID: { return "File UID"; } + case MATROSKA_ID_CUES: { return "Cues"; } + case MATROSKA_ID_CUE_POINT: { return "Cue Point"; } + case MATROSKA_ID_CUE_TIME: { return "Cue Time"; } + case MATROSKA_ID_CUE_TRACK_POSITIONS: { return "Cue Track Positions"; } + case MATROSKA_ID_CUE_TRACK: { return "Cue Track"; } + case MATROSKA_ID_CUE_CLUSTER_POSITION: { return "Cue Cluster Position"; } + case MATROSKA_ID_CUE_BLOCK_NUMBER: { return "Cue Block Number"; } + case MATROSKA_ID_TAGS: { return "Tags"; } + case MATROSKA_ID_SEEK_PRE_ROLL: { return "SeekPreRoll"; } + case MATROSKA_ID_CODEC_DELAY: { return "CodecDelay"; } + case MATROSKA_ID_DISCARD_PADDING: { return "DiscardPadding"; } + default: { return "*****unknown*****"; } + } +} + +EBMLDataSize::EBMLDataSize() + : EBMLNumber(True) { +} + +EBMLDataSize::~EBMLDataSize() { +} diff --git a/AnyCore/lib_rtsp/liveMedia/EBMLNumber.hh b/AnyCore/lib_rtsp/liveMedia/EBMLNumber.hh new file mode 100644 index 0000000..195db27 --- /dev/null 
+++ b/AnyCore/lib_rtsp/liveMedia/EBMLNumber.hh @@ -0,0 +1,142 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// EBML numbers (ids and sizes) +// C++ header + +#ifndef _EBML_NUMBER_HH +#define _EBML_NUMBER_HH + +#include "NetCommon.h" +#include "Boolean.hh" +#include + +#define EBML_NUMBER_MAX_LEN 8 + +class EBMLNumber { +public: + EBMLNumber(Boolean stripLeading1 = True); + virtual ~EBMLNumber(); + + u_int64_t val() const; + char* hexString() const; // used for debugging + Boolean operator==(u_int64_t arg2) const { return val() == arg2; } + Boolean operator!=(u_int64_t arg2) const { return !(*this == arg2); } + +public: + Boolean stripLeading1; + unsigned len; + u_int8_t data[EBML_NUMBER_MAX_LEN]; +}; + +// Definitions of some Matroska/EBML IDs (including the ones that we check for): +#define MATROSKA_ID_EBML 0x1A45DFA3 +#define MATROSKA_ID_VOID 0xEC +#define MATROSKA_ID_CRC_32 0xBF +#define MATROSKA_ID_SEGMENT 0x18538067 +#define MATROSKA_ID_SEEK_HEAD 0x114D9B74 +#define MATROSKA_ID_SEEK 0x4DBB +#define MATROSKA_ID_SEEK_ID 0x53AB +#define MATROSKA_ID_SEEK_POSITION 0x53AC +#define MATROSKA_ID_INFO 0x1549A966 +#define MATROSKA_ID_SEGMENT_UID 0x73A4 +#define MATROSKA_ID_TIMECODE_SCALE 
0x2AD7B1 +#define MATROSKA_ID_DURATION 0x4489 +#define MATROSKA_ID_DATE_UTC 0x4461 +#define MATROSKA_ID_TITLE 0x7BA9 +#define MATROSKA_ID_MUXING_APP 0x4D80 +#define MATROSKA_ID_WRITING_APP 0x5741 +#define MATROSKA_ID_CLUSTER 0x1F43B675 +#define MATROSKA_ID_TIMECODE 0xE7 +#define MATROSKA_ID_POSITION 0xA7 +#define MATROSKA_ID_PREV_SIZE 0xAB +#define MATROSKA_ID_SIMPLEBLOCK 0xA3 +#define MATROSKA_ID_BLOCK_GROUP 0xA0 +#define MATROSKA_ID_BLOCK 0xA1 +#define MATROSKA_ID_BLOCK_DURATION 0x9B +#define MATROSKA_ID_REFERENCE_BLOCK 0xFB +#define MATROSKA_ID_TRACKS 0x1654AE6B +#define MATROSKA_ID_TRACK_ENTRY 0xAE +#define MATROSKA_ID_TRACK_NUMBER 0xD7 +#define MATROSKA_ID_TRACK_UID 0x73C5 +#define MATROSKA_ID_TRACK_TYPE 0x83 +#define MATROSKA_ID_FLAG_ENABLED 0xB9 +#define MATROSKA_ID_FLAG_DEFAULT 0x88 +#define MATROSKA_ID_FLAG_FORCED 0x55AA +#define MATROSKA_ID_FLAG_LACING 0x9C +#define MATROSKA_ID_MIN_CACHE 0x6DE7 +#define MATROSKA_ID_DEFAULT_DURATION 0x23E383 +#define MATROSKA_ID_TRACK_TIMECODE_SCALE 0x23314F +#define MATROSKA_ID_MAX_BLOCK_ADDITION_ID 0x55EE +#define MATROSKA_ID_NAME 0x536E +#define MATROSKA_ID_LANGUAGE 0x22B59C +#define MATROSKA_ID_CODEC 0x86 +#define MATROSKA_ID_CODEC_PRIVATE 0x63A2 +#define MATROSKA_ID_CODEC_NAME 0x258688 +#define MATROSKA_ID_CODEC_DECODE_ALL 0xAA +#define MATROSKA_ID_VIDEO 0xE0 +#define MATROSKA_ID_FLAG_INTERLACED 0x9A +#define MATROSKA_ID_PIXEL_WIDTH 0xB0 +#define MATROSKA_ID_PIXEL_HEIGHT 0xBA +#define MATROSKA_ID_DISPLAY_WIDTH 0x54B0 +#define MATROSKA_ID_DISPLAY_HEIGHT 0x54BA +#define MATROSKA_ID_DISPLAY_UNIT 0x54B2 +#define MATROSKA_ID_AUDIO 0xE1 +#define MATROSKA_ID_SAMPLING_FREQUENCY 0xB5 +#define MATROSKA_ID_OUTPUT_SAMPLING_FREQUENCY 0x78B5 +#define MATROSKA_ID_CHANNELS 0x9F +#define MATROSKA_ID_BIT_DEPTH 0x6264 +#define MATROSKA_ID_CONTENT_ENCODINGS 0x6D80 +#define MATROSKA_ID_CONTENT_ENCODING 0x6240 +#define MATROSKA_ID_CONTENT_COMPRESSION 0x5034 +#define MATROSKA_ID_CONTENT_COMP_ALGO 0x4254 +#define 
MATROSKA_ID_CONTENT_COMP_SETTINGS 0x4255 +#define MATROSKA_ID_CONTENT_ENCRYPTION 0x5035 +#define MATROSKA_ID_ATTACHMENTS 0x1941A469 +#define MATROSKA_ID_ATTACHED_FILE 0x61A7 +#define MATROSKA_ID_FILE_DESCRIPTION 0x467E +#define MATROSKA_ID_FILE_NAME 0x466E +#define MATROSKA_ID_FILE_MIME_TYPE 0x4660 +#define MATROSKA_ID_FILE_DATA 0x465C +#define MATROSKA_ID_FILE_UID 0x46AE +#define MATROSKA_ID_CUES 0x1C53BB6B +#define MATROSKA_ID_CUE_POINT 0xBB +#define MATROSKA_ID_CUE_TIME 0xB3 +#define MATROSKA_ID_CUE_TRACK_POSITIONS 0xB7 +#define MATROSKA_ID_CUE_TRACK 0xF7 +#define MATROSKA_ID_CUE_CLUSTER_POSITION 0xF1 +#define MATROSKA_ID_CUE_BLOCK_NUMBER 0x5378 +#define MATROSKA_ID_TAGS 0x1254C367 +#define MATROSKA_ID_SEEK_PRE_ROLL 0x56BB +#define MATROSKA_ID_CODEC_DELAY 0x56AA +#define MATROSKA_ID_DISCARD_PADDING 0x75A2 + +class EBMLId: public EBMLNumber { +public: + EBMLId(); + virtual ~EBMLId(); + + char const* stringName() const; // used for debugging +}; + +class EBMLDataSize: public EBMLNumber { +public: + EBMLDataSize(); + virtual ~EBMLDataSize(); +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/FileServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/FileServerMediaSubsession.cpp new file mode 100644 index 0000000..5404919 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/FileServerMediaSubsession.cpp @@ -0,0 +1,34 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a file. +// Implementation + +#include "FileServerMediaSubsession.hh" + +FileServerMediaSubsession +::FileServerMediaSubsession(UsageEnvironment& env, char const* fileName, + Boolean reuseFirstSource) + : OnDemandServerMediaSubsession(env, reuseFirstSource), + fFileSize(0) { + fFileName = strDup(fileName); +} + +FileServerMediaSubsession::~FileServerMediaSubsession() { + delete[] (char*)fFileName; +} diff --git a/AnyCore/lib_rtsp/liveMedia/FileSink.cpp b/AnyCore/lib_rtsp/liveMedia/FileSink.cpp new file mode 100644 index 0000000..04ae66f --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/FileSink.cpp @@ -0,0 +1,151 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// File sinks +// Implementation + +#if (defined(__WIN32__) || defined(_WIN32)) && !defined(_WIN32_WCE) +#include +#include +#endif +#include "FileSink.hh" +#include "GroupsockHelper.hh" +#include "OutputFile.hh" + +////////// FileSink ////////// + +FileSink::FileSink(UsageEnvironment& env, FILE* fid, unsigned bufferSize, + char const* perFrameFileNamePrefix) + : MediaSink(env), fOutFid(fid), fBufferSize(bufferSize), fSamePresentationTimeCounter(0) { + fBuffer = new unsigned char[bufferSize]; + if (perFrameFileNamePrefix != NULL) { + fPerFrameFileNamePrefix = strDup(perFrameFileNamePrefix); + fPerFrameFileNameBuffer = new char[strlen(perFrameFileNamePrefix) + 100]; + } else { + fPerFrameFileNamePrefix = NULL; + fPerFrameFileNameBuffer = NULL; + } + fPrevPresentationTime.tv_sec = ~0; fPrevPresentationTime.tv_usec = 0; +} + +FileSink::~FileSink() { + delete[] fPerFrameFileNameBuffer; + delete[] fPerFrameFileNamePrefix; + delete[] fBuffer; + if (fOutFid != NULL) fclose(fOutFid); +} + +FileSink* FileSink::createNew(UsageEnvironment& env, char const* fileName, + unsigned bufferSize, Boolean oneFilePerFrame) { + do { + FILE* fid; + char const* perFrameFileNamePrefix; + if (oneFilePerFrame) { + // Create the fid for each frame + fid = NULL; + perFrameFileNamePrefix = fileName; + } else { + // Normal case: create the fid once + fid = OpenOutputFile(env, fileName); + if (fid == NULL) break; + perFrameFileNamePrefix = NULL; + } + + return new FileSink(env, fid, bufferSize, perFrameFileNamePrefix); + } while (0); + + return NULL; +} + +Boolean FileSink::continuePlaying() { + if (fSource == NULL) return False; + + fSource->getNextFrame(fBuffer, fBufferSize, + afterGettingFrame, this, + onSourceClosure, this); + + return True; +} + +void FileSink::afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned /*durationInMicroseconds*/) { + FileSink* sink = (FileSink*)clientData; + 
sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime); +} + +void FileSink::addData(unsigned char const* data, unsigned dataSize, + struct timeval presentationTime) { + if (fPerFrameFileNameBuffer != NULL && fOutFid == NULL) { + // Special case: Open a new file on-the-fly for this frame + if (presentationTime.tv_usec == fPrevPresentationTime.tv_usec && + presentationTime.tv_sec == fPrevPresentationTime.tv_sec) { + // The presentation time is unchanged from the previous frame, so we add a 'counter' + // suffix to the file name, to distinguish them: + sprintf(fPerFrameFileNameBuffer, "%s-%lu.%06lu-%u", fPerFrameFileNamePrefix, + presentationTime.tv_sec, presentationTime.tv_usec, ++fSamePresentationTimeCounter); + } else { + sprintf(fPerFrameFileNameBuffer, "%s-%lu.%06lu", fPerFrameFileNamePrefix, + presentationTime.tv_sec, presentationTime.tv_usec); + fPrevPresentationTime = presentationTime; // for next time + fSamePresentationTimeCounter = 0; // for next time + } + fOutFid = OpenOutputFile(envir(), fPerFrameFileNameBuffer); + } + + // Write to our file: +#ifdef TEST_LOSS + static unsigned const framesPerPacket = 10; + static unsigned const frameCount = 0; + static Boolean const packetIsLost; + if ((frameCount++)%framesPerPacket == 0) { + packetIsLost = (our_random()%10 == 0); // simulate 10% packet loss ##### + } + + if (!packetIsLost) +#endif + if (fOutFid != NULL && data != NULL) { + fwrite(data, 1, dataSize, fOutFid); + } +} + +void FileSink::afterGettingFrame(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime) { + if (numTruncatedBytes > 0) { + envir() << "FileSink::afterGettingFrame(): The input frame data was too large for our buffer size (" + << fBufferSize << "). " + << numTruncatedBytes << " bytes of trailing data was dropped! 
Correct this by increasing the \"bufferSize\" parameter in the \"createNew()\" call to at least " + << fBufferSize + numTruncatedBytes << "\n"; + } + addData(fBuffer, frameSize, presentationTime); + + if (fOutFid == NULL || fflush(fOutFid) == EOF) { + // The output file has closed. Handle this the same way as if the input source had closed: + if (fSource != NULL) fSource->stopGettingFrames(); + onSourceClosure(); + return; + } + + if (fPerFrameFileNameBuffer != NULL) { + if (fOutFid != NULL) { fclose(fOutFid); fOutFid = NULL; } + } + + // Then try getting the next frame: + continuePlaying(); +} diff --git a/AnyCore/lib_rtsp/liveMedia/FramedFileSource.cpp b/AnyCore/lib_rtsp/liveMedia/FramedFileSource.cpp new file mode 100644 index 0000000..1f21646 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/FramedFileSource.cpp @@ -0,0 +1,30 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Framed File Sources +// Implementation + +#include "FramedFileSource.hh" + +////////// FramedFileSource ////////// + +FramedFileSource::FramedFileSource(UsageEnvironment& env, FILE* fid) + : FramedSource(env), fFid(fid) { +} + +FramedFileSource::~FramedFileSource() { +} diff --git a/AnyCore/lib_rtsp/liveMedia/FramedFilter.cpp b/AnyCore/lib_rtsp/liveMedia/FramedFilter.cpp new file mode 100644 index 0000000..fa1ae66 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/FramedFilter.cpp @@ -0,0 +1,59 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Framed Filters +// Implementation + +#include "FramedFilter.hh" + +////////// FramedFilter ////////// +#include + +void FramedFilter::detachInputSource() { + if (fInputSource != NULL) { + fInputSource->stopGettingFrames(); + reassignInputSource(NULL); + } +} + +FramedFilter::FramedFilter(UsageEnvironment& env, + FramedSource* inputSource) + : FramedSource(env), + fInputSource(inputSource) { +} + +FramedFilter::~FramedFilter() { + Medium::close(fInputSource); +} + +// Default implementations of needed virtual functions. 
These merely +// call the same function in the input source - i.e., act like a 'null filter + +char const* FramedFilter::MIMEtype() const { + if (fInputSource == NULL) return ""; + + return fInputSource->MIMEtype(); +} + +void FramedFilter::getAttributes() const { + if (fInputSource != NULL) fInputSource->getAttributes(); +} + +void FramedFilter::doStopGettingFrames() { + FramedSource::doStopGettingFrames(); + if (fInputSource != NULL) fInputSource->stopGettingFrames(); +} diff --git a/AnyCore/lib_rtsp/liveMedia/FramedSource.cpp b/AnyCore/lib_rtsp/liveMedia/FramedSource.cpp new file mode 100644 index 0000000..4938aa7 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/FramedSource.cpp @@ -0,0 +1,125 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Framed Sources +// Implementation + +#include "FramedSource.hh" +#include + +////////// FramedSource ////////// + +FramedSource::FramedSource(UsageEnvironment& env) + : MediaSource(env), + fAfterGettingFunc(NULL), fAfterGettingClientData(NULL), + fOnCloseFunc(NULL), fOnCloseClientData(NULL), + fIsCurrentlyAwaitingData(False) { + fPresentationTime.tv_sec = fPresentationTime.tv_usec = 0; // initially +} + +FramedSource::~FramedSource() { +} + +Boolean FramedSource::isFramedSource() const { + return True; +} + +Boolean FramedSource::lookupByName(UsageEnvironment& env, char const* sourceName, + FramedSource*& resultSource) { + resultSource = NULL; // unless we succeed + + MediaSource* source; + if (!MediaSource::lookupByName(env, sourceName, source)) return False; + + if (!source->isFramedSource()) { + env.setResultMsg(sourceName, " is not a framed source"); + return False; + } + + resultSource = (FramedSource*)source; + return True; +} + +void FramedSource::getNextFrame(unsigned char* to, unsigned maxSize, + afterGettingFunc* afterGettingFunc, + void* afterGettingClientData, + onCloseFunc* onCloseFunc, + void* onCloseClientData) { + // Make sure we're not already being read: + if (fIsCurrentlyAwaitingData) { + envir() << "FramedSource[" << this << "]::getNextFrame(): attempting to read more than once at the same time!\n"; + envir().internalError(); + } + + fTo = to; + fMaxSize = maxSize; + fNumTruncatedBytes = 0; // by default; could be changed by doGetNextFrame() + fDurationInMicroseconds = 0; // by default; could be changed by doGetNextFrame() + fAfterGettingFunc = afterGettingFunc; + fAfterGettingClientData = afterGettingClientData; + fOnCloseFunc = onCloseFunc; + fOnCloseClientData = onCloseClientData; + fIsCurrentlyAwaitingData = True; + + doGetNextFrame(); +} + +void FramedSource::afterGetting(FramedSource* source) { + source->fIsCurrentlyAwaitingData = False; + // indicates that we can be read again + // Note that this needs to be done here, in case the 
"fAfterFunc" + // called below tries to read another frame (which it usually will) + + if (source->fAfterGettingFunc != NULL) { + (*(source->fAfterGettingFunc))(source->fAfterGettingClientData, + source->fFrameSize, source->fNumTruncatedBytes, + source->fPresentationTime, + source->fDurationInMicroseconds); + } +} + +void FramedSource::handleClosure(void* clientData) { + FramedSource* source = (FramedSource*)clientData; + source->handleClosure(); +} + +void FramedSource::handleClosure() { + fIsCurrentlyAwaitingData = False; // because we got a close instead + if (fOnCloseFunc != NULL) { + (*fOnCloseFunc)(fOnCloseClientData); + } +} + +void FramedSource::stopGettingFrames() { + fIsCurrentlyAwaitingData = False; // indicates that we can be read again + fAfterGettingFunc = NULL; + fOnCloseFunc = NULL; + + // Perform any specialized action now: + doStopGettingFrames(); +} + +void FramedSource::doStopGettingFrames() { + // Default implementation: Do nothing except cancel any pending 'delivery' task: + envir().taskScheduler().unscheduleDelayedTask(nextTask()); + // Subclasses may wish to redefine this function. +} + +unsigned FramedSource::maxFrameSize() const { + // By default, this source has no maximum frame size. + return 0; +} diff --git a/AnyCore/lib_rtsp/liveMedia/GSMAudioRTPSink.cpp b/AnyCore/lib_rtsp/liveMedia/GSMAudioRTPSink.cpp new file mode 100644 index 0000000..aa81693 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/GSMAudioRTPSink.cpp @@ -0,0 +1,40 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for GSM audio +// Implementation + +#include "GSMAudioRTPSink.hh" + +GSMAudioRTPSink::GSMAudioRTPSink(UsageEnvironment& env, Groupsock* RTPgs) + : AudioRTPSink(env, RTPgs, 3, 8000, "GSM") { +} + +GSMAudioRTPSink::~GSMAudioRTPSink() { +} + +GSMAudioRTPSink* +GSMAudioRTPSink::createNew(UsageEnvironment& env, Groupsock* RTPgs) { + return new GSMAudioRTPSink(env, RTPgs); +} + +Boolean GSMAudioRTPSink +::frameCanAppearAfterPacketStart(unsigned char const* /*frameStart*/, + unsigned /*numBytesInFrame*/) const { + // Allow at most 5 frames in a single packet: + return numFramesUsedSoFar() < 5; +} diff --git a/AnyCore/lib_rtsp/liveMedia/H261VideoRTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/H261VideoRTPSource.cpp new file mode 100644 index 0000000..48c1462 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H261VideoRTPSource.cpp @@ -0,0 +1,67 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// H.261 Video RTP Sources +// Implementation + +#include "H261VideoRTPSource.hh" + +H261VideoRTPSource* +H261VideoRTPSource::createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) { + return new H261VideoRTPSource(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency); +} + +H261VideoRTPSource +::H261VideoRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) + : MultiFramedRTPSource(env, RTPgs, + rtpPayloadFormat, rtpTimestampFrequency), + fLastSpecialHeader(0) { +} + +H261VideoRTPSource::~H261VideoRTPSource() { +} + +Boolean H261VideoRTPSource +::processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize) { + // There's a 4-byte video-specific header + if (packet->dataSize() < 4) return False; + + unsigned char* headerStart = packet->data(); + fLastSpecialHeader + = (headerStart[0]<<24)|(headerStart[1]<<16)|(headerStart[2]<<8)|headerStart[3]; + +#ifdef DELIVER_COMPLETE_FRAMES + fCurrentPacketBeginsFrame = fCurrentPacketCompletesFrame; + // whether the *previous* packet ended a frame + + // The RTP "M" (marker) bit indicates the last fragment of a frame: + fCurrentPacketCompletesFrame = packet->rtpMarkerBit(); +#endif + + resultSpecialHeaderSize = 4; + return True; +} + +char const* H261VideoRTPSource::MIMEtype() const { + return "video/H261"; +} diff --git a/AnyCore/lib_rtsp/liveMedia/H263plusVideoFileServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/H263plusVideoFileServerMediaSubsession.cpp new file mode 100644 index 0000000..121e157 --- /dev/null +++ 
b/AnyCore/lib_rtsp/liveMedia/H263plusVideoFileServerMediaSubsession.cpp @@ -0,0 +1,64 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a H263 video file. +// Implementation + +// Author: Bernhard Feiten. 
// Based on MPEG4VideoFileServerMediaSubsession +// Updated by Ross FInlayson (December 2007) + +#include "H263plusVideoFileServerMediaSubsession.hh" +#include "H263plusVideoRTPSink.hh" +#include "ByteStreamFileSource.hh" +#include "H263plusVideoStreamFramer.hh" + +H263plusVideoFileServerMediaSubsession* +H263plusVideoFileServerMediaSubsession::createNew(UsageEnvironment& env, + char const* fileName, + Boolean reuseFirstSource) { + return new H263plusVideoFileServerMediaSubsession(env, fileName, reuseFirstSource); +} + +H263plusVideoFileServerMediaSubsession +::H263plusVideoFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, + Boolean reuseFirstSource) + : FileServerMediaSubsession(env, fileName, reuseFirstSource) { +} + +H263plusVideoFileServerMediaSubsession::~H263plusVideoFileServerMediaSubsession() { +} + +FramedSource* H263plusVideoFileServerMediaSubsession +::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) { + estBitrate = 500; // kbps, estimate ?? 
+ + // Create the video source: + ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(envir(), fFileName); + if (fileSource == NULL) return NULL; + fFileSize = fileSource->fileSize(); + + // Create a framer for the Video Elementary Stream: + return H263plusVideoStreamFramer::createNew(envir(), fileSource); +} + +RTPSink* H263plusVideoFileServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* /*inputSource*/) { + return H263plusVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic); +} diff --git a/AnyCore/lib_rtsp/liveMedia/H263plusVideoRTPSink.cpp b/AnyCore/lib_rtsp/liveMedia/H263plusVideoRTPSink.cpp new file mode 100644 index 0000000..ea44dbf --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H263plusVideoRTPSink.cpp @@ -0,0 +1,91 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP sink for H.263+ video (RFC 4629) +// Implementation + +#include "H263plusVideoRTPSink.hh" + +H263plusVideoRTPSink +::H263plusVideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + u_int32_t rtpTimestampFrequency) + : VideoRTPSink(env, RTPgs, rtpPayloadFormat, rtpTimestampFrequency, "H263-1998") { +} + +H263plusVideoRTPSink::~H263plusVideoRTPSink() { +} + +H263plusVideoRTPSink* +H263plusVideoRTPSink::createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + u_int32_t rtpTimestampFrequency) { + return new H263plusVideoRTPSink(env, RTPgs, rtpPayloadFormat, rtpTimestampFrequency); +} + +Boolean H263plusVideoRTPSink +::frameCanAppearAfterPacketStart(unsigned char const* /*frameStart*/, + unsigned /*numBytesInFrame*/) const { + // A packet can contain only one frame + return False; +} + +void H263plusVideoRTPSink +::doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes) { + if (fragmentationOffset == 0) { + // This packet contains the first (or only) fragment of the frame. + // Set the 'P' bit in the special header: + unsigned short specialHeader = 0x0400; + + // Also, reuse the first two bytes of the payload for this special + // header. (They should both have been zero.) + if (numBytesInFrame < 2) { + envir() << "H263plusVideoRTPSink::doSpecialFrameHandling(): bad frame size " + << numBytesInFrame << "\n"; + return; + } + if (frameStart[0] != 0 || frameStart[1] != 0) { + envir() << "H263plusVideoRTPSink::doSpecialFrameHandling(): unexpected non-zero first two bytes!\n"; + } + frameStart[0] = specialHeader>>8; + frameStart[1] = (unsigned char)specialHeader; + } else { + unsigned short specialHeader = 0; + setSpecialHeaderBytes((unsigned char*)&specialHeader, 2); + } + + if (numRemainingBytes == 0) { + // This packet contains the last (or only) fragment of the frame. 
+ // Set the RTP 'M' ('marker') bit: + setMarkerBit(); + } + + // Also set the RTP timestamp: + setTimestamp(framePresentationTime); +} + + +unsigned H263plusVideoRTPSink::specialHeaderSize() const { + // There's a 2-byte special video header. However, if we're the first + // (or only) fragment of a frame, then we reuse the first 2 bytes of + // the payload instead. + return (curFragmentationOffset() == 0) ? 0 : 2; +} diff --git a/AnyCore/lib_rtsp/liveMedia/H263plusVideoRTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/H263plusVideoRTPSource.cpp new file mode 100644 index 0000000..67f00df --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H263plusVideoRTPSource.cpp @@ -0,0 +1,106 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// H.263+ Video RTP Sources +// Implementation + +#include "H263plusVideoRTPSource.hh" + +H263plusVideoRTPSource* +H263plusVideoRTPSource::createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) { + return new H263plusVideoRTPSource(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency); +} + +H263plusVideoRTPSource +::H263plusVideoRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) + : MultiFramedRTPSource(env, RTPgs, + rtpPayloadFormat, rtpTimestampFrequency), + fNumSpecialHeaders(0), fSpecialHeaderBytesLength(0) { +} + +H263plusVideoRTPSource::~H263plusVideoRTPSource() { +} + +Boolean H263plusVideoRTPSource +::processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize) { + unsigned char* headerStart = packet->data(); + unsigned packetSize = packet->dataSize(); + + // The H.263+ payload header is at least 2 bytes in size. + // Extract the known fields from the first 2 bytes: + unsigned expectedHeaderSize = 2; + if (packetSize < expectedHeaderSize) return False; + + //unsigned char RR = headerStart[0]>>3; + Boolean P = (headerStart[0]&0x4) != 0; + Boolean V = (headerStart[0]&0x2) != 0; + unsigned char PLEN = ((headerStart[0]&0x1)<<5)|(headerStart[1]>>3); + //unsigned char PEBIT = headerStart[1]&0x7; + + if (V) { + // There's an extra VRC byte at the end of the header: + ++expectedHeaderSize; + if (packetSize < expectedHeaderSize) return False; + } + + if (PLEN > 0) { + // There's an extra picture header at the end: + expectedHeaderSize += PLEN; + if (packetSize < expectedHeaderSize) return False; + } + + fCurrentPacketBeginsFrame = P; + if (fCurrentPacketBeginsFrame) { + fNumSpecialHeaders = fSpecialHeaderBytesLength = 0; + } + + // Make a copy of the special header bytes, in case a reader + // can use them: + unsigned bytesAvailable + = SPECIAL_HEADER_BUFFER_SIZE - fSpecialHeaderBytesLength - 1; + if 
(expectedHeaderSize <= bytesAvailable) { + fSpecialHeaderBytes[fSpecialHeaderBytesLength++] = expectedHeaderSize; + for (unsigned i = 0; i < expectedHeaderSize; ++i) { + fSpecialHeaderBytes[fSpecialHeaderBytesLength++] = headerStart[i]; + } + fPacketSizes[fNumSpecialHeaders++] = packetSize; + } + + if (P) { + // Prepend two zero bytes to the start of the payload proper. + // Hack: Do this by shrinking this special header by 2 bytes: + expectedHeaderSize -= 2; + headerStart[expectedHeaderSize] = 0; + headerStart[expectedHeaderSize+1] = 0; + } + + // The RTP "M" (marker) bit indicates the last fragment of a frame: + fCurrentPacketCompletesFrame = packet->rtpMarkerBit(); + + resultSpecialHeaderSize = expectedHeaderSize; + return True; +} + +char const* H263plusVideoRTPSource::MIMEtype() const { + return "video/H263-1998"; +} diff --git a/AnyCore/lib_rtsp/liveMedia/H263plusVideoStreamFramer.cpp b/AnyCore/lib_rtsp/liveMedia/H263plusVideoStreamFramer.cpp new file mode 100644 index 0000000..17ce365 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H263plusVideoStreamFramer.cpp @@ -0,0 +1,129 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Author Bernhard Feiten +// A filter that breaks up an H.263plus video stream into frames. +// + +#include "H263plusVideoStreamFramer.hh" +#include "H263plusVideoStreamParser.hh" + +#include +#include + + +/////////////////////////////////////////////////////////////////////////////// +////////// H263plusVideoStreamFramer implementation ////////// +//public/////////////////////////////////////////////////////////////////////// +H263plusVideoStreamFramer* H263plusVideoStreamFramer::createNew( + UsageEnvironment& env, + FramedSource* inputSource) +{ + // Need to add source type checking here??? ##### + H263plusVideoStreamFramer* fr; + fr = new H263plusVideoStreamFramer(env, inputSource); + return fr; +} + + +/////////////////////////////////////////////////////////////////////////////// +H263plusVideoStreamFramer::H263plusVideoStreamFramer( + UsageEnvironment& env, + FramedSource* inputSource, + Boolean createParser) + : FramedFilter(env, inputSource), + fFrameRate(0.0), // until we learn otherwise + fPictureEndMarker(False) +{ + // Use the current wallclock time as the base 'presentation time': + gettimeofday(&fPresentationTimeBase, NULL); + fParser = createParser ? 
new H263plusVideoStreamParser(this, inputSource) : NULL; +} + +/////////////////////////////////////////////////////////////////////////////// +H263plusVideoStreamFramer::~H263plusVideoStreamFramer() +{ + delete fParser; +} + + +/////////////////////////////////////////////////////////////////////////////// +void H263plusVideoStreamFramer::doGetNextFrame() +{ + fParser->registerReadInterest(fTo, fMaxSize); + continueReadProcessing(); +} + + +/////////////////////////////////////////////////////////////////////////////// +Boolean H263plusVideoStreamFramer::isH263plusVideoStreamFramer() const +{ + return True; +} + +/////////////////////////////////////////////////////////////////////////////// +void H263plusVideoStreamFramer::continueReadProcessing( + void* clientData, + unsigned char* /*ptr*/, unsigned /*size*/, + struct timeval /*presentationTime*/) +{ + H263plusVideoStreamFramer* framer = (H263plusVideoStreamFramer*)clientData; + framer->continueReadProcessing(); +} + +/////////////////////////////////////////////////////////////////////////////// +void H263plusVideoStreamFramer::continueReadProcessing() +{ + unsigned acquiredFrameSize; + + u_int64_t frameDuration; // in ms + + acquiredFrameSize = fParser->parse(frameDuration); +// Calculate some average bitrate information (to be adapted) +// avgBitrate = (totalBytes * 8 * H263_TIMESCALE) / totalDuration; + + if (acquiredFrameSize > 0) { + // We were able to acquire a frame from the input. + // It has already been copied to the reader's space. + fFrameSize = acquiredFrameSize; +// fNumTruncatedBytes = fParser->numTruncatedBytes(); // not needed so far + + fFrameRate = frameDuration == 0 ? 
0.0 : 1000./(long)frameDuration; + + // Compute "fPresentationTime" + if (acquiredFrameSize == 5) // first frame + fPresentationTime = fPresentationTimeBase; + else + fPresentationTime.tv_usec += (long) frameDuration*1000; + + while (fPresentationTime.tv_usec >= 1000000) { + fPresentationTime.tv_usec -= 1000000; + ++fPresentationTime.tv_sec; + } + + // Compute "fDurationInMicroseconds" + fDurationInMicroseconds = (unsigned int) frameDuration*1000;; + + // Call our own 'after getting' function. Because we're not a 'leaf' + // source, we can call this directly, without risking infinite recursion. + afterGetting(this); + } else { + // We were unable to parse a complete frame from the input, because: + // - we had to read more data from the source stream, or + // - the source stream has ended. + } +} diff --git a/AnyCore/lib_rtsp/liveMedia/H263plusVideoStreamParser.cpp b/AnyCore/lib_rtsp/liveMedia/H263plusVideoStreamParser.cpp new file mode 100644 index 0000000..44ad210 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H263plusVideoStreamParser.cpp @@ -0,0 +1,859 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Author Bernhard Feiten +// A filter that breaks up an H.263plus video stream into frames. +// Based on MPEG4IP/mp4creator/h263.c + +#include "H263plusVideoStreamParser.hh" +#include "H263plusVideoStreamFramer.hh" +//#include +//#include "GroupsockHelper.hh" + + +H263plusVideoStreamParser::H263plusVideoStreamParser( + H263plusVideoStreamFramer* usingSource, + FramedSource* inputSource) + : StreamParser(inputSource, + FramedSource::handleClosure, + usingSource, + &H263plusVideoStreamFramer::continueReadProcessing, + usingSource), + fUsingSource(usingSource), + fnextTR(0), + fcurrentPT(0) +{ + memset(fStates, 0, sizeof(fStates)); + memset(&fNextInfo, 0, sizeof(fNextInfo)); + memset(&fCurrentInfo, 0, sizeof(fCurrentInfo)); + memset(&fMaxBitrateCtx, 0, sizeof(fMaxBitrateCtx)); + memset(fNextHeader,0, H263_REQUIRE_HEADER_SIZE_BYTES); +} + +/////////////////////////////////////////////////////////////////////////////// +H263plusVideoStreamParser::~H263plusVideoStreamParser() +{ +} + +/////////////////////////////////////////////////////////////////////////////// +void H263plusVideoStreamParser::restoreSavedParserState() +{ + StreamParser::restoreSavedParserState(); + fTo = fSavedTo; + fNumTruncatedBytes = fSavedNumTruncatedBytes; +} + +/////////////////////////////////////////////////////////////////////////////// +void H263plusVideoStreamParser::setParseState() +{ + fSavedTo = fTo; + fSavedNumTruncatedBytes = fNumTruncatedBytes; + saveParserState(); // Needed for the parsing process in StreamParser +} + + +/////////////////////////////////////////////////////////////////////////////// +void H263plusVideoStreamParser::registerReadInterest( + unsigned char* to, + unsigned maxSize) +{ + fStartOfFrame = fTo = fSavedTo = to; + fLimit = to + maxSize; + fMaxSize = maxSize; + fNumTruncatedBytes = fSavedNumTruncatedBytes = 0; +} + +/////////////////////////////////////////////////////////////////////////////// +// parse() , derived from H263Creator of MPEG4IP, h263.c 
+unsigned H263plusVideoStreamParser::parse(u_int64_t & currentDuration) +{ + +// u_int8_t frameBuffer[H263_BUFFER_SIZE]; // The input buffer + // Pointer which tells LoadNextH263Object where to read data to +// u_int8_t* pFrameBuffer = fTo + H263_REQUIRE_HEADER_SIZE_BYTES; + u_int32_t frameSize; // The current frame size + // Pointer to receive address of the header data +// u_int8_t* pCurrentHeader;// = pFrameBuffer; +// u_int64_t currentDuration; // The current frame's duration + u_int8_t trDifference; // The current TR difference + // The previous TR difference +// u_int8_t prevTrDifference = H263_BASIC_FRAME_RATE; +// u_int64_t totalDuration = 0;// Duration accumulator +// u_int64_t avgBitrate; // Average bitrate +// u_int64_t totalBytes = 0; // Size accumulator + + + try // The get data routines of the class FramedFilter returns an error when + { // the buffer is empty. This occurs at the beginning and at the end of the file. + fCurrentInfo = fNextInfo; + + // Parse 1 frame + // For the first time, only the first frame's header is returned. + // The second time the full first frame is returned + frameSize = parseH263Frame(); + + currentDuration = 0; + if ((frameSize > 0)){ + // We were able to acquire a frame from the input. 
+ + // Parse the returned frame header (if any) + if (!ParseShortHeader(fTo, &fNextInfo)) { +#ifdef DEBUG + fprintf(stderr,"H263plusVideoStreamParser: Fatal error\n"); +#endif + } + + trDifference = GetTRDifference(fNextInfo.tr, fCurrentInfo.tr); + + // calculate the current frame duration + currentDuration = CalculateDuration(trDifference); + + // Accumulate the frame's size and duration for avgBitrate calculation + //totalDuration += currentDuration; + //totalBytes += frameSize; + // If needed, recalculate bitrate information + // if (h263Bitrates) + //GetMaxBitrate(&fMaxBitrateCtx, frameSize, prevTrDifference); + //prevTrDifference = trDifference; + + setParseState(); // Needed for the parsing process in StreamParser + } + } catch (int /*e*/) { +#ifdef DEBUG + fprintf(stderr, "H263plusVideoStreamParser::parse() EXCEPTION (This is normal behavior - *not* an error)\n"); +#endif + frameSize=0; + } + + return frameSize; +} + + +/////////////////////////////////////////////////////////////////////////////// +// parseH263Frame derived from LoadNextH263Object of MPEG4IP +// - service routine that reads a single frame from the input file. +// It shall fill the input buffer with data up until - and including - the +// next start code and shall report back both the number of bytes read and a +// pointer to the next start code. The first call to this function shall only +// yield a pointer with 0 data bytes and the last call to this function shall +// only yield data bytes with a NULL pointer as the next header. +// +// TODO: This function only supports valid bit streams. Upon error, it fails +// without the possibility to recover. A Better idea would be to skip frames +// until a parsable frame is read from the file. +// +// Parameters: +// ppNextHeader - output parameter that upon return points to the location +// of the next frame's head in the buffer. +// This pointer shall be NULL for the last frame read. +// Returns the total number of bytes read. 
+// Uses FrameFileSource intantiated by constructor. +/////////////////////////////////////////////////////////////////////////////// +int H263plusVideoStreamParser::parseH263Frame( ) +{ + char row = 0; + u_int8_t * bufferIndex = fTo; + // The buffer end which will allow the loop to leave place for + // the additionalBytesNeeded + u_int8_t * bufferEnd = fTo + fMaxSize - ADDITIONAL_BYTES_NEEDED - 1; + + memcpy(fTo, fNextHeader, H263_REQUIRE_HEADER_SIZE_BYTES); + bufferIndex += H263_REQUIRE_HEADER_SIZE_BYTES; + + + // The state table and the following loop implements a state machine enabling + // us to read bytes from the file until (and inclusing) the requested + // start code (00 00 8X) is found + + // Initialize the states array, if it hasn't been initialized yet... + if (!fStates[0][0]) { + // One 00 was read + fStates[0][0] = 1; + // Two sequential 0x00 ware read + fStates[1][0] = fStates[2][0] = 2; + // A full start code was read + fStates[2][128] = fStates[2][129] = fStates[2][130] = fStates[2][131] = -1; + } + + // Read data from file into the output buffer until either a start code + // is found, or the end of file has been reached. + do { + *bufferIndex = get1Byte(); + } while ((bufferIndex < bufferEnd) && // We have place in the buffer + ((row = fStates[(unsigned char)row][*(bufferIndex++)]) != -1)); // Start code was not found + + if (row != -1) { + fprintf(stderr, "%s: Buffer too small (%lu)\n", + "h263reader:", bufferEnd - fTo + ADDITIONAL_BYTES_NEEDED); + return 0; + } + + // Cool ... 
now we have a start code + // Now we just have to read the additionalBytesNeeded + getBytes(bufferIndex, ADDITIONAL_BYTES_NEEDED); + memcpy(fNextHeader, bufferIndex - H263_STARTCODE_SIZE_BYTES, H263_REQUIRE_HEADER_SIZE_BYTES); + + int sz = bufferIndex - fTo - H263_STARTCODE_SIZE_BYTES; + + if (sz == 5) // first frame + memcpy(fTo, fTo+H263_REQUIRE_HEADER_SIZE_BYTES, H263_REQUIRE_HEADER_SIZE_BYTES); + + return sz; +} + + +//////////////////////////////////////////////////////////////////////////////// +// ParseShortHeader - service routine that accepts a buffer containing a frame +// header and extracts relevant codec information from it. +// +// NOTE: the first bit in the following commnets is 0 (zero). +// +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | PSC (Picture Start Code=22 bits) | (TR=8 bits) | > +// |0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0| |1 0> +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// < (PTYPE=13 bits) | +// <. . .|(FMT)|Z|. . . .| +// +-+-+-+-+-+-+-+-+-+-+-+ +// -> PTYPE.FMT contains a width/height identification +// -> PTYPE.Z is 1 for P-Frames, 0 for I-Frames +// Note: When FMT is 111, there is an extended PTYPE... +// +// Inputs: +// headerBuffer - pointer to the current header buffer +// outputInfoStruct - pointer to the structure receiving the data +// Outputs: +// This function returns a structure of important codec-specific +// information (The Temporal Reference bits, width & height of the current +// frame and the sync - or "frame type" - bit. It reports success or +// failure to the calling function. 
+//////////////////////////////////////////////////////////////////////////////// +bool H263plusVideoStreamParser::ParseShortHeader( + u_int8_t *headerBuffer, + H263INFO *outputInfoStruct) +{ + u_int8_t fmt = 0; + // Extract temporal reference (TR) from the buffer (bits 22-29 inclusive) + outputInfoStruct->tr = (headerBuffer[2] << 6) & 0xC0; // 2 LS bits out of the 3rd byte + outputInfoStruct->tr |= (headerBuffer[3] >> 2) & 0x3F; // 6 MS bits out of the 4th byte + // Extract the FMT part of PTYPE from the buffer (bits 35-37 inclusive) + fmt = (headerBuffer[4] >> 2) & 0x07; // bits 3-5 ouf of the 5th byte + // If PTYPE is not supported, return a failure notice to the calling function + // FIXME: PLUSPTYPE is not supported + if (fmt == 0x07) { + return false; + } + // If PTYPE is supported, calculate the current width and height according to + // a predefined table + if (!GetWidthAndHeight(fmt, &(outputInfoStruct->width), + &(outputInfoStruct->height))) { + return false; + } + // Extract the frame-type bit, which is the 9th bit of PTYPE (bit 38) + outputInfoStruct->isSyncFrame = !(headerBuffer[4] & 0x02); + + return true; +} + +//////////////////////////////////////////////////////////////////////////////// +// GetMaxBitrate- service routine that accepts frame information and +// derives bitrate information from it. This function uses a sliding window +// technique to calculate the maximum bitrates in any window of 1 second +// inside the file. +// The sliding window is implemented with a table of bitrates for the last +// second (30 entries - one entry per TR unit). 
+// +// Inputs: +// ctx - context for this function +// frameSize - the size of the current frame in bytes +// frameTRDiff - the "duration" of the frame in TR units +// Outputs: +// This function returns the up-to-date maximum bitrate +//////////////////////////////////////////////////////////////////////////////// +void H263plusVideoStreamParser::GetMaxBitrate( MaxBitrate_CTX *ctx, + u_int32_t frameSize, + u_int8_t frameTRDiff) +{ + if (frameTRDiff == 0) + return; + + // Calculate the current frame's bitrate as bits per TR unit (round the result + // upwards) + u_int32_t frameBitrate = frameSize * 8 / frameTRDiff + 1; + + // for each TRdiff received, + while (frameTRDiff--) { + // Subtract the oldest bitrate entry from the current bitrate + ctx->windowBitrate -= ctx->bitrateTable[ctx->tableIndex]; + // Update the oldest bitrate entry with the current frame's bitrate + ctx->bitrateTable[ctx->tableIndex] = frameBitrate; + // Add the current frame's bitrate to the current bitrate + ctx->windowBitrate += frameBitrate; + // Check if we have a new maximum bitrate + if (ctx->windowBitrate > ctx->maxBitrate) { + ctx->maxBitrate = ctx->windowBitrate; + } + // Advance the table index + // Wrapping around the bitrateTable size + ctx->tableIndex = (ctx->tableIndex + 1) % + ( sizeof(ctx->bitrateTable) / sizeof(ctx->bitrateTable[0]) ); + } +} + +//////////////////////////////////////////////////////////////////////////////// +// CalculateDuration - service routine that calculates the current frame's +// duration in milli-seconds using it's duration in TR units. +// - In order not to accumulate the calculation error, we are using the TR +// duration to calculate the current and the next frame's presentation time in +// milli-seconds. 
+// +// Inputs: trDiff - The current frame's duration in TR units +// Return: The current frame's duration in milli-seconds +//////////////////////////////////////////////////////////////////////////////// +u_int64_t H263plusVideoStreamParser::CalculateDuration(u_int8_t trDiff) +{ + u_int64_t nextPT; // The next frame's presentation time in milli-seconds + u_int64_t duration; // The current frame's duration in milli-seconds + + fnextTR += trDiff; + // Calculate the next frame's presentation time, in milli-seconds + nextPT = (fnextTR * 1001) / H263_BASIC_FRAME_RATE; + // The frame's duration is the difference between the next presentation + // time and the current presentation time. + duration = nextPT - fcurrentPT; + // "Remember" the next presentation time for the next time this function is called + fcurrentPT = nextPT; + + return duration; +} + +//////////////////////////////////////////////////////////////////////////////// +bool H263plusVideoStreamParser::GetWidthAndHeight( u_int8_t fmt, + u_int16_t *width, + u_int16_t *height) +{ + // The 'fmt' corresponds to bits 5-7 of the PTYPE + static struct { + u_int16_t width; + u_int16_t height; + } const dimensionsTable[8] = { + { 0, 0 }, // 000 - 0 - forbidden, generates an error + { 128, 96 }, // 001 - 1 - Sub QCIF + { 176, 144 }, // 010 - 2 - QCIF + { 352, 288 }, // 011 - 3 - CIF + { 704, 576 }, // 100 - 4 - 4CIF + { 1409, 1152 }, // 101 - 5 - 16CIF + { 0, 0 }, // 110 - 6 - reserved, generates an error + { 0, 0 } // 111 - 7 - extended, not supported by profile 0 + }; + + if (fmt > 7) + return false; + + *width = dimensionsTable[fmt].width; + *height = dimensionsTable[fmt].height; + + if (*width == 0) + return false; + + return true; +} + +//////////////////////////////////////////////////////////////////////////////// +u_int8_t H263plusVideoStreamParser::GetTRDifference( + u_int8_t nextTR, + u_int8_t currentTR) +{ + if (currentTR > nextTR) { + // Wrap around 255... 
+ return nextTR + (256 - currentTR); + } else { + return nextTR - currentTR; + } +} + + + + + + + +//////////////////////////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////// +// this is the h263.c file of MPEG4IP mp4creator +/* +#include "mp4creator.h" + +// Default timescale for H.263 (1000ms) +#define H263_TIMESCALE 1000 +// Default H263 frame rate (30fps) +#define H263_BASIC_FRAME_RATE 30 + +// Minimum number of bytes needed to parse an H263 header +#define H263_REQUIRE_HEADER_SIZE_BYTES 5 +// Number of bytes the start code requries +#define H263_STARTCODE_SIZE_BYTES 3 +// This is the input buffer's size. It should contain +// 1 frame with the following start code +#define H263_BUFFER_SIZE 256 * 1024 +// The default max different (in %) betwqeen max and average bitrates +#define H263_DEFAULT_CBR_TOLERANCE 10 + +// The following structure holds information extracted from each frame's header: +typedef struct _H263INFO { + u_int8_t tr; // Temporal Reference, used in duration calculation + u_int16_t width; // Width of the picture + u_int16_t height; // Height of the picture + bool isSyncFrame; // Frame type (true = I frame = "sync" frame) +} H263INFO; + +// Context for the GetMaxBitrate function +typedef struct _MaxBitrate_CTX { + u_int32_t bitrateTable[H263_BASIC_FRAME_RATE];// Window of 1 second + u_int32_t windowBitrate; // The bitrate of the current window + u_int32_t maxBitrate; // The up-to-date maximum bitrate + u_int32_t tableIndex; // The next TR unit to update +} MaxBitrate_CTX; + +// Forward declarations: +static int LoadNextH263Object( FILE *inputFileHandle, + u_int8_t *frameBuffer, + u_int32_t *frameBufferSize, + u_int32_t additionalBytesNeeded, + u_int8_t **ppNextHeader); + +static bool ParseShortHeader( u_int8_t *headerBuffer, + H263INFO *outputInfoStruct); + +static u_int8_t 
GetTRDifference(u_int8_t nextTR, + u_int8_t currentTR); + +static void GetMaxBitrate( MaxBitrate_CTX *ctx, + u_int32_t frameSize, + u_int8_t frameTRDiff); + +static MP4Duration CalculateDuration(u_int8_t trDiff); + +static bool GetWidthAndHeight( u_int8_t fmt, + u_int16_t *width, + u_int16_t *height); + +static char states[3][256]; +/ * + * H263Creator - Main function + * Inputs: + * outputFileHandle - The handle of the output file + * inputFileHandle - The handle of the input file + * Codec-specific parameters: + * H263Level - H.263 Level used for this track + * H263Profile - H.263 Profile used for this track + * H263Bitrates - A Parameter indicating whether the function + * should calculate H263 bitrates or not. + * cbrTolerance - CBR tolerance indicates when to set the + * average bitrate. + * Outputs: + * This function returns either the track ID of the newly added track upon + * success or a predefined value representing an erroneous state. + * / +MP4TrackId H263Creator(MP4FileHandle outputFileHandle, + FILE* inputFileHandle, + u_int8_t h263Profile, + u_int8_t h263Level, + bool h263Bitrates, + u_int8_t cbrTolerance) +{ + H263INFO nextInfo; // Holds information about the next frame + H263INFO currentInfo;// Holds information about the current frame + MaxBitrate_CTX maxBitrateCtx;// Context for the GetMaxBitrate function + memset(&nextInfo, 0, sizeof(nextInfo)); + memset(¤tInfo, 0, sizeof(currentInfo)); + memset(&maxBitrateCtx, 0, sizeof(maxBitrateCtx)); + memset(states, 0, sizeof(states)); + u_int8_t frameBuffer[H263_BUFFER_SIZE]; // The input buffer + // Pointer which tells LoadNextH263Object where to read data to + u_int8_t* pFrameBuffer = frameBuffer + H263_REQUIRE_HEADER_SIZE_BYTES; + u_int32_t frameSize; // The current frame size + // Pointer to receive address of the header data + u_int8_t* pCurrentHeader = pFrameBuffer; + MP4Duration currentDuration; // The current frame's duration + u_int8_t trDifference; // The current TR difference + // The previous 
TR difference + u_int8_t prevTrDifference = H263_BASIC_FRAME_RATE; + MP4Duration totalDuration = 0;// Duration accumulator + MP4Duration avgBitrate; // Average bitrate + u_int64_t totalBytes = 0; // Size accumulator + MP4TrackId trackId = MP4_INVALID_TRACK_ID; // Our MP4 track + bool stay = true; // loop flag + + while (stay) { + currentInfo = nextInfo; + memmove(frameBuffer, pCurrentHeader, H263_REQUIRE_HEADER_SIZE_BYTES); + frameSize = H263_BUFFER_SIZE - H263_REQUIRE_HEADER_SIZE_BYTES; + // Read 1 frame and the next frame's header from the file. + // For the first frame, only the first frame's header is returned. + // For the last frame, only the last frame's data is returned. + if (! LoadNextH263Object(inputFileHandle, pFrameBuffer, &frameSize, + H263_REQUIRE_HEADER_SIZE_BYTES - H263_STARTCODE_SIZE_BYTES, + &pCurrentHeader)) + break; // Fatal error ... + + if (pCurrentHeader) { + // Parse the returned frame header (if any) + if (!ParseShortHeader(pCurrentHeader, &nextInfo)) + break; // Fatal error + trDifference = GetTRDifference(nextInfo.tr, currentInfo.tr); + } else { + // This is the last frame ... we have to fake the trDifference ... + trDifference = 1; + // No header data has been read at this iteration, so we have to manually + // add the frame's header we read at the previous iteration. + // Note that LoadNextH263Object returns the number of bytes read, which + // are the current frame's data and the next frame's header + frameSize += H263_REQUIRE_HEADER_SIZE_BYTES; + // There is no need for the next iteration ... + stay = false; + } + + // If this is the first iteration ... + if (currentInfo.width == 0) { + // If we have more data than just the header + if ((frameSize > H263_REQUIRE_HEADER_SIZE_BYTES) || + !pCurrentHeader) // Or no header at all + break; // Fatal error + else + continue; // We have only the first frame's header ... + } + + if (trackId == MP4_INVALID_TRACK_ID) { + // If a track has not been added yet, add the track to the file. 
+ trackId = MP4AddH263VideoTrack(outputFileHandle, H263_TIMESCALE, + 0, currentInfo.width, currentInfo.height, + h263Level, h263Profile, 0, 0); + if (trackId == MP4_INVALID_TRACK_ID) + break; // Fatal error + } + + // calculate the current frame duration + currentDuration = CalculateDuration(trDifference); + // Write the current frame to the file. + if (!MP4WriteSample(outputFileHandle, trackId, frameBuffer, frameSize, + currentDuration, 0, currentInfo.isSyncFrame)) + break; // Fatal error + + // Accumulate the frame's size and duration for avgBitrate calculation + totalDuration += currentDuration; + totalBytes += frameSize; + // If needed, recalculate bitrate information + if (h263Bitrates) + GetMaxBitrate(&maxBitrateCtx, frameSize, prevTrDifference); + prevTrDifference = trDifference; + } // while (stay) + + // If this is the last frame, + if (!stay) { + // If needed and possible, update bitrate information in the file + if (h263Bitrates && totalDuration) { + avgBitrate = (totalBytes * 8 * H263_TIMESCALE) / totalDuration; + if (cbrTolerance == 0) + cbrTolerance = H263_DEFAULT_CBR_TOLERANCE; + // Same as: if (maxBitrate / avgBitrate > (cbrTolerance + 100) / 100.0) + if (maxBitrateCtx.maxBitrate * 100 > (cbrTolerance + 100) * avgBitrate) + avgBitrate = 0; + MP4SetH263Bitrates(outputFileHandle, trackId, + avgBitrate, maxBitrateCtx.maxBitrate); + } + // Return the newly added track ID + return trackId; + } + + // If we got to here... something went wrong ... + fprintf(stderr, + "%s: Could not parse input file, invalid video stream?\n", ProgName); + // Upon failure, delete the newly added track if it has been added + if (trackId != MP4_INVALID_TRACK_ID) { + MP4DeleteTrack(outputFileHandle, trackId); + } + return MP4_INVALID_TRACK_ID; +} + +/ * + * LoadNextH263Object - service routine that reads a single frame from the input + * file. 
It shall fill the input buffer with data up until - and including - the + * next start code and shall report back both the number of bytes read and a + * pointer to the next start code. The first call to this function shall only + * yield a pointer with 0 data bytes and the last call to this function shall + * only yield data bytes with a NULL pointer as the next header. + * + * TODO: This function only supports valid bit streams. Upon error, it fails + * without the possibility to recover. A Better idea would be to skip frames + * until a parsable frame is read from the file. + * + * Parameters: + * inputFileHandle - The handle of the input file + * frameBuffer - buffer where to place read data + * frameBufferSize - in/out parameter indicating the size of the buffer on + * entry and the number of bytes copied to the buffer upon + * return + * additionalBytesNeeded - indicates how many additional bytes are to be read + * from the next frame's header (over the 3 bytes that + * are already read). + * NOTE: This number MUST be > 0 + * ppNextHeader - output parameter that upon return points to the location + * of the next frame's head in the buffer + * Outputs: + * This function returns two pieces of information: + * 1. The total number of bytes read. + * 2. A Pointer to the header of the next frame. This pointer shall be NULL + * for the last frame read. 
+ * / +static int LoadNextH263Object( FILE *inputFileHandle, + u_int8_t *frameBuffer, + u_int32_t *frameBufferSize, + u_int32_t additionalBytesNeeded, + u_int8_t **ppNextHeader) +{ + // This table and the following loop implements a state machine enabling + // us to read bytes from the file untill (and inclusing) the requested + // start code (00 00 8X) is found + char row = 0; + u_int8_t *bufferStart = frameBuffer; + // The buffer end which will allow the loop to leave place for + // the additionalBytesNeeded + u_int8_t *bufferEnd = frameBuffer + *frameBufferSize - + additionalBytesNeeded - 1; + + // Initialize the states array, if it hasn't been initialized yet... + if (!states[0][0]) { + // One 00 was read + states[0][0] = 1; + // Two sequential 0x00 ware read + states[1][0] = states[2][0] = 2; + // A full start code was read + states[2][128] = states[2][129] = states[2][130] = states[2][131] = -1; + } + + // Read data from file into the output buffer until either a start code + // is found, or the end of file has been reached. + do { + if (fread(frameBuffer, 1, 1, inputFileHandle) != 1){ + // EOF or other error before we got a start code + *ppNextHeader = NULL; + *frameBufferSize = frameBuffer - bufferStart; + return 1; + } + } while ((frameBuffer < bufferEnd) && // We have place in the buffer + ((row = states[row][*(frameBuffer++)]) != -1)); // Start code was not found + if (row != -1) { + fprintf(stderr, "%s: Buffer too small (%u)\n", + ProgName, bufferEnd - bufferStart + additionalBytesNeeded); + return 0; + } + + // Cool ... now we have a start code + *ppNextHeader = frameBuffer - H263_STARTCODE_SIZE_BYTES; + *frameBufferSize = frameBuffer - bufferStart + additionalBytesNeeded; + + // Now we just have to read the additionalBytesNeeded + if(fread(frameBuffer, additionalBytesNeeded, 1, inputFileHandle) != 1) { + /// We got a start code but can't read additionalBytesNeeded ... 
that's a fatal error + fprintf(stderr, "%s: Invalid H263 bitstream\n", ProgName); + return 0; + } + + return 1; +} + + +/ * + * ParseShortHeader - service routine that accepts a buffer containing a frame + * header and extracts relevant codec information from it. + * + * NOTE: the first bit in the following commnets is 0 (zero). + * + * + * 0 1 2 3 + * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + * | PSC (Picture Start Code=22 bits) | (TR=8 bits) | > + * |0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0| |1 0> + * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + * < (PTYPE=13 bits) | + * <. . .|(FMT)|Z|. . . .| + * +-+-+-+-+-+-+-+-+-+-+-+ + * -> PTYPE.FMT contains a width/height identification + * -> PTYPE.Z is 1 for P-Frames, 0 for I-Frames + * Note: When FMT is 111, there is an extended PTYPE... + * + * Inputs: + * headerBuffer - pointer to the current header buffer + * outputInfoStruct - pointer to the structure receiving the data + * Outputs: + * This function returns a structure of important codec-specific + * information (The Temporal Reference bits, width & height of the current + * frame and the sync - or "frame type" - bit. It reports success or + * failure to the calling function. 
+ * / +static bool ParseShortHeader( u_int8_t *headerBuffer, + H263INFO *outputInfoStruct) +{ + u_int8_t fmt = 0; + // Extract temporal reference (TR) from the buffer (bits 22-29 inclusive) + outputInfoStruct->tr = (headerBuffer[2] << 6) & 0xC0; // 2 LS bits out of the 3rd byte + outputInfoStruct->tr |= (headerBuffer[3] >> 2) & 0x3F; // 6 MS bits out of the 4th byte + // Extract the FMT part of PTYPE from the buffer (bits 35-37 inclusive) + fmt = (headerBuffer[4] >> 2) & 0x07; // bits 3-5 ouf of the 5th byte + // If PTYPE is not supported, return a failure notice to the calling function + // FIXME: PLUSPTYPE is not supported + if (fmt == 0x07) { + return false; + } + // If PTYPE is supported, calculate the current width and height according to + // a predefined table + if (!GetWidthAndHeight(fmt, &(outputInfoStruct->width), + &(outputInfoStruct->height))) { + return false; + } + // Extract the frame-type bit, which is the 9th bit of PTYPE (bit 38) + outputInfoStruct->isSyncFrame = !(headerBuffer[4] & 0x02); + + return true; +} + +/ * + * GetMaxBitrate- service routine that accepts frame information and + * derives bitrate information from it. This function uses a sliding window + * technique to calculate the maximum bitrates in any window of 1 second + * inside the file. + * The sliding window is implemented with a table of bitrates for the last + * second (30 entries - one entry per TR unit). 
+ * + * Inputs: + * ctx - context for this function + * frameSize - the size of the current frame in bytes + * frameTRDiff - the "duration" of the frame in TR units + * Outputs: + * This function returns the up-to-date maximum bitrate + * / +static void GetMaxBitrate( MaxBitrate_CTX *ctx, + u_int32_t frameSize, + u_int8_t frameTRDiff) +{ + if (frameTRDiff == 0) + return; + + // Calculate the current frame's bitrate as bits per TR unit (round the result + // upwards) + u_int32_t frameBitrate = frameSize * 8 / frameTRDiff + 1; + + // for each TRdiff received, + while (frameTRDiff--) { + // Subtract the oldest bitrate entry from the current bitrate + ctx->windowBitrate -= ctx->bitrateTable[ctx->tableIndex]; + // Update the oldest bitrate entry with the current frame's bitrate + ctx->bitrateTable[ctx->tableIndex] = frameBitrate; + // Add the current frame's bitrate to the current bitrate + ctx->windowBitrate += frameBitrate; + // Check if we have a new maximum bitrate + if (ctx->windowBitrate > ctx->maxBitrate) { + ctx->maxBitrate = ctx->windowBitrate; + } + // Advance the table index + ctx->tableIndex = (ctx->tableIndex + 1) % + // Wrapping around the bitrateTable size + ( sizeof(ctx->bitrateTable) / sizeof(ctx->bitrateTable[0]) ); + } +} + +/ * + * CalculateDuration - service routine that calculates the current frame's + * duration in milli-seconds using it's duration in TR units. + * - In order not to accumulate the calculation error, we are using the TR + * duration to calculate the current and the next frame's presentation time in + * milli-seconds. 
+ * + * Inputs: + * trDiff - The current frame's duration in TR units + * Outputs: + * The current frame's duration in milli-seconds + * / +static MP4Duration CalculateDuration(u_int8_t trDiff) +{ + static u_int32_t const nextTR = 0; // The next frame's presentation time in TR units + static MP4Duration const currentPT = 0; // The current frame's presentation time in milli-seconds + MP4Duration nextPT; // The next frame's presentation time in milli-seconds + MP4Duration duration; // The current frame's duration in milli-seconds + + nextTR += trDiff; + // Calculate the next frame's presentation time, in milli-seconds + nextPT = (nextTR * 1001) / H263_BASIC_FRAME_RATE; + // The frame's duration is the difference between the next presentation + // time and the current presentation time. + duration = nextPT - currentPT; + // "Remember" the next presentation time for the next time this function is + // called + currentPT = nextPT; + + return duration; +} + +static bool GetWidthAndHeight( u_int8_t fmt, + u_int16_t *width, + u_int16_t *height) +{ + // The 'fmt' corresponds to bits 5-7 of the PTYPE + static struct { + u_int16_t width; + u_int16_t height; + } const dimensionsTable[8] = { + { 0, 0 }, // 000 - 0 - forbidden, generates an error + { 128, 96 }, // 001 - 1 - Sub QCIF + { 176, 144 }, // 010 - 2 - QCIF + { 352, 288 }, // 011 - 3 - CIF + { 704, 576 }, // 100 - 4 - 4CIF + { 1409, 1152 }, // 101 - 5 - 16CIF + { 0, 0 }, // 110 - 6 - reserved, generates an error + { 0, 0 } // 111 - 7 - extended, not supported by profile 0 + }; + + if (fmt > 7) + return false; + + *width = dimensionsTable[fmt].width; + *height = dimensionsTable[fmt].height; + + if (*width == 0) + return false; + + return true; +} + +static u_int8_t GetTRDifference(u_int8_t nextTR, + u_int8_t currentTR) +{ + if (currentTR > nextTR) { + // Wrap around 255... 
+ return nextTR + (256 - currentTR); + } else { + return nextTR - currentTR; + } +} + +*/ + diff --git a/AnyCore/lib_rtsp/liveMedia/H263plusVideoStreamParser.hh b/AnyCore/lib_rtsp/liveMedia/H263plusVideoStreamParser.hh new file mode 100644 index 0000000..6e8b400 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H263plusVideoStreamParser.hh @@ -0,0 +1,127 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that breaks up an H263 video stream into frames. +// derived from MPEG4IP h263.c +// Author Benhard Feiten + +#ifndef _H263PLUS_VIDEO_STREAM_PARSER_HH +#define _H263PLUS_VIDEO_STREAM_PARSER_HH + +#ifndef _STREAM_PARSER_HH +#include "StreamParser.hh" +#endif + + +// Default timescale for H.263 (1000ms) +#define H263_TIMESCALE 1000 + +// Default H263 frame rate (30fps) +#define H263_BASIC_FRAME_RATE 30 + +// Minimum number of bytes needed to parse an H263 header +#define H263_REQUIRE_HEADER_SIZE_BYTES 5 + +// Number of bytes the start code requries +#define H263_STARTCODE_SIZE_BYTES 3 + +// This is the input buffer's size. 
It should contain +// 1 frame with the following start code +#define H263_BUFFER_SIZE 256 * 1024 + +// additionalBytesNeeded - indicates how many additional bytes are to be read +// from the next frame's header (over the 3 bytes that are already read). +#define ADDITIONAL_BYTES_NEEDED H263_REQUIRE_HEADER_SIZE_BYTES - H263_STARTCODE_SIZE_BYTES + +// The default max different (in %) betwqeen max and average bitrates +#define H263_DEFAULT_CBR_TOLERANCE 10 + + + +// The following structure holds information extracted from each frame's header: +typedef struct _H263INFO { + u_int8_t tr; // Temporal Reference, used in duration calculation + u_int16_t width; // Width of the picture + u_int16_t height; // Height of the picture + bool isSyncFrame; // Frame type (true = I frame = "sync" frame) +} H263INFO; + +typedef struct _MaxBitrate_CTX { + u_int32_t bitrateTable[H263_BASIC_FRAME_RATE];// Window of 1 second + u_int32_t windowBitrate; // The bitrate of the current window + u_int32_t maxBitrate; // The up-to-date maximum bitrate + u_int32_t tableIndex; // The next TR unit to update +} MaxBitrate_CTX; + + +class H263plusVideoStreamParser : public StreamParser { + +public: + H263plusVideoStreamParser( class H263plusVideoStreamFramer* usingSource, + FramedSource* inputSource); + + virtual ~H263plusVideoStreamParser(); + + void registerReadInterest(unsigned char* to, unsigned maxSize); + + unsigned parse(u_int64_t & currentDuration); // returns the size of the frame that was acquired, or 0 if none + unsigned numTruncatedBytes() const { return fNumTruncatedBytes; } // The number of truncated bytes (if any) + + +protected: +// H263plusVideoStreamFramer* usingSource() { +// return (H263plusVideoStreamFramer*)fUsingSource; +// } + void setParseState(); + +// void setParseState(H263plusParseState parseState); + + +private: + int parseH263Frame( ); + bool ParseShortHeader(u_int8_t *headerBuffer, H263INFO *outputInfoStruct); + void GetMaxBitrate( MaxBitrate_CTX *ctx, u_int32_t 
frameSize, u_int8_t frameTRDiff); + u_int64_t CalculateDuration(u_int8_t trDiff); + bool GetWidthAndHeight( u_int8_t fmt, u_int16_t *width, u_int16_t *height); + u_int8_t GetTRDifference( u_int8_t nextTR, u_int8_t currentTR); + + virtual void restoreSavedParserState(); + +protected: + class H263plusVideoStreamFramer* fUsingSource; + + unsigned char* fTo; + unsigned fMaxSize; + unsigned char* fStartOfFrame; + unsigned char* fSavedTo; + unsigned char* fLimit; + unsigned fNumTruncatedBytes; + unsigned fSavedNumTruncatedBytes; + +private: + H263INFO fNextInfo; // Holds information about the next frame + H263INFO fCurrentInfo; // Holds information about the current frame + MaxBitrate_CTX fMaxBitrateCtx; // Context for the GetMaxBitrate function + char fStates[3][256]; + u_int8_t fNextHeader[H263_REQUIRE_HEADER_SIZE_BYTES]; + + u_int32_t fnextTR; // The next frame's presentation time in TR units + u_int64_t fcurrentPT; // The current frame's presentation time in milli-seconds + +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/H264VideoFileServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/H264VideoFileServerMediaSubsession.cpp new file mode 100644 index 0000000..a12d64f --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H264VideoFileServerMediaSubsession.cpp @@ -0,0 +1,119 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a H264 video file. +// Implementation + +#include "H264VideoFileServerMediaSubsession.hh" +#include "H264VideoRTPSink.hh" +#include "ByteStreamFileSource.hh" +#include "H264VideoStreamFramer.hh" + +H264VideoFileServerMediaSubsession* +H264VideoFileServerMediaSubsession::createNew(UsageEnvironment& env, + char const* fileName, + Boolean reuseFirstSource) { + return new H264VideoFileServerMediaSubsession(env, fileName, reuseFirstSource); +} + +H264VideoFileServerMediaSubsession::H264VideoFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, Boolean reuseFirstSource) + : FileServerMediaSubsession(env, fileName, reuseFirstSource), + fAuxSDPLine(NULL), fDoneFlag(0), fDummyRTPSink(NULL) { +} + +H264VideoFileServerMediaSubsession::~H264VideoFileServerMediaSubsession() { + delete[] fAuxSDPLine; +} + +static void afterPlayingDummy(void* clientData) { + H264VideoFileServerMediaSubsession* subsess = (H264VideoFileServerMediaSubsession*)clientData; + subsess->afterPlayingDummy1(); +} + +void H264VideoFileServerMediaSubsession::afterPlayingDummy1() { + // Unschedule any pending 'checking' task: + envir().taskScheduler().unscheduleDelayedTask(nextTask()); + // Signal the event loop that we're done: + setDoneFlag(); +} + +static void checkForAuxSDPLine(void* clientData) { + H264VideoFileServerMediaSubsession* subsess = (H264VideoFileServerMediaSubsession*)clientData; + subsess->checkForAuxSDPLine1(); +} + +void H264VideoFileServerMediaSubsession::checkForAuxSDPLine1() { + char const* dasl; + + if (fAuxSDPLine != NULL) { + // Signal the event loop that we're 
done: + setDoneFlag(); + } else if (fDummyRTPSink != NULL && (dasl = fDummyRTPSink->auxSDPLine()) != NULL) { + fAuxSDPLine = strDup(dasl); + fDummyRTPSink = NULL; + + // Signal the event loop that we're done: + setDoneFlag(); + } else if (!fDoneFlag) { + // try again after a brief delay: + int uSecsToDelay = 100000; // 100 ms + nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay, + (TaskFunc*)checkForAuxSDPLine, this); + } +} + +char const* H264VideoFileServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) { + if (fAuxSDPLine != NULL) return fAuxSDPLine; // it's already been set up (for a previous client) + + if (fDummyRTPSink == NULL) { // we're not already setting it up for another, concurrent stream + // Note: For H264 video files, the 'config' information ("profile-level-id" and "sprop-parameter-sets") isn't known + // until we start reading the file. This means that "rtpSink"s "auxSDPLine()" will be NULL initially, + // and we need to start reading data from our file until this changes. 
+ fDummyRTPSink = rtpSink; + + // Start reading the file: + fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this); + + // Check whether the sink's 'auxSDPLine()' is ready: + checkForAuxSDPLine(this); + } + + envir().taskScheduler().doEventLoop(&fDoneFlag); + + return fAuxSDPLine; +} + +FramedSource* H264VideoFileServerMediaSubsession::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) { + estBitrate = 500; // kbps, estimate + + // Create the video source: + ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(envir(), fFileName); + if (fileSource == NULL) return NULL; + fFileSize = fileSource->fileSize(); + + // Create a framer for the Video Elementary Stream: + return H264VideoStreamFramer::createNew(envir(), fileSource); +} + +RTPSink* H264VideoFileServerMediaSubsession +::createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* /*inputSource*/) { + return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic); +} diff --git a/AnyCore/lib_rtsp/liveMedia/H264VideoFileSink.cpp b/AnyCore/lib_rtsp/liveMedia/H264VideoFileSink.cpp new file mode 100644 index 0000000..a2a5b33 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H264VideoFileSink.cpp @@ -0,0 +1,59 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// H.264 Video File sinks +// Implementation + +#include "H264VideoFileSink.hh" +#include "OutputFile.hh" + +////////// H264VideoFileSink ////////// + +H264VideoFileSink +::H264VideoFileSink(UsageEnvironment& env, FILE* fid, + char const* sPropParameterSetsStr, + unsigned bufferSize, char const* perFrameFileNamePrefix) + : H264or5VideoFileSink(env, fid, bufferSize, perFrameFileNamePrefix, + sPropParameterSetsStr, NULL, NULL) { +} + +H264VideoFileSink::~H264VideoFileSink() { +} + +H264VideoFileSink* +H264VideoFileSink::createNew(UsageEnvironment& env, char const* fileName, + char const* sPropParameterSetsStr, + unsigned bufferSize, Boolean oneFilePerFrame) { + do { + FILE* fid; + char const* perFrameFileNamePrefix; + if (oneFilePerFrame) { + // Create the fid for each frame + fid = NULL; + perFrameFileNamePrefix = fileName; + } else { + // Normal case: create the fid once + fid = OpenOutputFile(env, fileName); + if (fid == NULL) break; + perFrameFileNamePrefix = NULL; + } + + return new H264VideoFileSink(env, fid, sPropParameterSetsStr, bufferSize, perFrameFileNamePrefix); + } while (0); + + return NULL; +} diff --git a/AnyCore/lib_rtsp/liveMedia/H264VideoRTPSink.cpp b/AnyCore/lib_rtsp/liveMedia/H264VideoRTPSink.cpp new file mode 100644 index 0000000..051affc --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H264VideoRTPSink.cpp @@ -0,0 +1,131 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for H.264 video (RFC 3984) +// Implementation + +#include "H264VideoRTPSink.hh" +#include "H264VideoStreamFramer.hh" +#include "Base64.hh" +#include "H264VideoRTPSource.hh" // for "parseSPropParameterSets()" + +////////// H264VideoRTPSink implementation ////////// + +H264VideoRTPSink +::H264VideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, + u_int8_t const* sps, unsigned spsSize, u_int8_t const* pps, unsigned ppsSize) + : H264or5VideoRTPSink(264, env, RTPgs, rtpPayloadFormat, + NULL, 0, sps, spsSize, pps, ppsSize) { +} + +H264VideoRTPSink::~H264VideoRTPSink() { +} + +H264VideoRTPSink* H264VideoRTPSink +::createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat) { + return new H264VideoRTPSink(env, RTPgs, rtpPayloadFormat); +} + +H264VideoRTPSink* H264VideoRTPSink +::createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, + u_int8_t const* sps, unsigned spsSize, u_int8_t const* pps, unsigned ppsSize) { + return new H264VideoRTPSink(env, RTPgs, rtpPayloadFormat, sps, spsSize, pps, ppsSize); +} + +H264VideoRTPSink* H264VideoRTPSink +::createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, + char const* sPropParameterSetsStr) { + u_int8_t* sps = NULL; unsigned spsSize = 0; + u_int8_t* pps = NULL; unsigned ppsSize = 0; + + unsigned numSPropRecords; + SPropRecord* sPropRecords = 
parseSPropParameterSets(sPropParameterSetsStr, numSPropRecords); + for (unsigned i = 0; i < numSPropRecords; ++i) { + if (sPropRecords[i].sPropLength == 0) continue; // bad data + u_int8_t nal_unit_type = (sPropRecords[i].sPropBytes[0])&0x1F; + if (nal_unit_type == 7/*SPS*/) { + sps = sPropRecords[i].sPropBytes; + spsSize = sPropRecords[i].sPropLength; + } else if (nal_unit_type == 8/*PPS*/) { + pps = sPropRecords[i].sPropBytes; + ppsSize = sPropRecords[i].sPropLength; + } + } + + H264VideoRTPSink* result + = new H264VideoRTPSink(env, RTPgs, rtpPayloadFormat, sps, spsSize, pps, ppsSize); + delete[] sPropRecords; + + return result; +} + +Boolean H264VideoRTPSink::sourceIsCompatibleWithUs(MediaSource& source) { + // Our source must be an appropriate framer: + return source.isH264VideoStreamFramer(); +} + +char const* H264VideoRTPSink::auxSDPLine() { + // Generate a new "a=fmtp:" line each time, using our SPS and PPS (if we have them), + // otherwise parameters from our framer source (in case they've changed since the last time that + // we were called): + H264or5VideoStreamFramer* framerSource = NULL; + u_int8_t* vpsDummy = NULL; unsigned vpsDummySize = 0; + u_int8_t* sps = fSPS; unsigned spsSize = fSPSSize; + u_int8_t* pps = fPPS; unsigned ppsSize = fPPSSize; + if (sps == NULL || pps == NULL) { + // We need to get SPS and PPS from our framer source: + if (fOurFragmenter == NULL) return NULL; // we don't yet have a fragmenter (and therefore not a source) + framerSource = (H264or5VideoStreamFramer*)(fOurFragmenter->inputSource()); + if (framerSource == NULL) return NULL; // we don't yet have a source + + framerSource->getVPSandSPSandPPS(vpsDummy, vpsDummySize, sps, spsSize, pps, ppsSize); + if (sps == NULL || pps == NULL) return NULL; // our source isn't ready + } + + // Set up the "a=fmtp:" SDP line for this stream: + u_int8_t* spsWEB = new u_int8_t[spsSize]; // "WEB" means "Without Emulation Bytes" + unsigned spsWEBSize = removeH264or5EmulationBytes(spsWEB, spsSize, 
sps, spsSize); + if (spsWEBSize < 4) { // Bad SPS size => assume our source isn't ready + delete[] spsWEB; + return NULL; + } + u_int32_t profileLevelId = (spsWEB[1]<<16) | (spsWEB[2]<<8) | spsWEB[3]; + delete[] spsWEB; + + char* sps_base64 = base64Encode((char*)sps, spsSize); + char* pps_base64 = base64Encode((char*)pps, ppsSize); + + char const* fmtpFmt = + "a=fmtp:%d packetization-mode=1" + ";profile-level-id=%06X" + ";sprop-parameter-sets=%s,%s\r\n"; + unsigned fmtpFmtSize = strlen(fmtpFmt) + + 3 /* max char len */ + + 6 /* 3 bytes in hex */ + + strlen(sps_base64) + strlen(pps_base64); + char* fmtp = new char[fmtpFmtSize]; + sprintf(fmtp, fmtpFmt, + rtpPayloadType(), + profileLevelId, + sps_base64, pps_base64); + + delete[] sps_base64; + delete[] pps_base64; + + delete[] fFmtpSDPLine; fFmtpSDPLine = fmtp; + return fFmtpSDPLine; +} diff --git a/AnyCore/lib_rtsp/liveMedia/H264VideoRTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/H264VideoRTPSource.cpp new file mode 100644 index 0000000..10356c2 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H264VideoRTPSource.cpp @@ -0,0 +1,199 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// H.264 Video RTP Sources +// Implementation + +#include "H264VideoRTPSource.hh" +#include "Base64.hh" + +////////// H264BufferedPacket and H264BufferedPacketFactory ////////// + +class H264BufferedPacket: public BufferedPacket { +public: + H264BufferedPacket(H264VideoRTPSource& ourSource); + virtual ~H264BufferedPacket(); + +private: // redefined virtual functions + virtual unsigned nextEnclosedFrameSize(unsigned char*& framePtr, + unsigned dataSize); +private: + H264VideoRTPSource& fOurSource; +}; + +class H264BufferedPacketFactory: public BufferedPacketFactory { +private: // redefined virtual functions + virtual BufferedPacket* createNewPacket(MultiFramedRTPSource* ourSource); +}; + + +///////// H264VideoRTPSource implementation //////// + +H264VideoRTPSource* +H264VideoRTPSource::createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) { + return new H264VideoRTPSource(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency); +} + +H264VideoRTPSource +::H264VideoRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) + : MultiFramedRTPSource(env, RTPgs, rtpPayloadFormat, rtpTimestampFrequency, + new H264BufferedPacketFactory) { +} + +H264VideoRTPSource::~H264VideoRTPSource() { +} + +Boolean H264VideoRTPSource +::processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize) { + unsigned char* headerStart = packet->data(); + unsigned packetSize = packet->dataSize(); + unsigned numBytesToSkip; + + // Check the 'nal_unit_type' for special 'aggregation' or 'fragmentation' packets: + if (packetSize < 1) return False; + fCurPacketNALUnitType = (headerStart[0]&0x1F); + switch (fCurPacketNALUnitType) { + case 24: { // STAP-A + numBytesToSkip = 1; // discard the type byte + break; + } + case 25: case 26: case 27: { // STAP-B, MTAP16, or MTAP24 + numBytesToSkip = 3; // discard the type byte, and the initial DON + break; + } + 
case 28: case 29: { // // FU-A or FU-B + // For these NALUs, the first two bytes are the FU indicator and the FU header. + // If the start bit is set, we reconstruct the original NAL header into byte 1: + if (packetSize < 2) return False; + unsigned char startBit = headerStart[1]&0x80; + unsigned char endBit = headerStart[1]&0x40; + if (startBit) { + fCurrentPacketBeginsFrame = True; + + headerStart[1] = (headerStart[0]&0xE0)|(headerStart[1]&0x1F); + numBytesToSkip = 1; + } else { + // The start bit is not set, so we skip both the FU indicator and header: + fCurrentPacketBeginsFrame = False; + numBytesToSkip = 2; + } + fCurrentPacketCompletesFrame = (endBit != 0); + break; + } + default: { + // This packet contains one complete NAL unit: + fCurrentPacketBeginsFrame = fCurrentPacketCompletesFrame = True; + numBytesToSkip = 0; + break; + } + } + + resultSpecialHeaderSize = numBytesToSkip; + return True; +} + +char const* H264VideoRTPSource::MIMEtype() const { + return "video/H264"; +} + +SPropRecord* parseSPropParameterSets(char const* sPropParameterSetsStr, + // result parameter: + unsigned& numSPropRecords) { + // Make a copy of the input string, so we can replace the commas with '\0's: + char* inStr = strDup(sPropParameterSetsStr); + if (inStr == NULL) { + numSPropRecords = 0; + return NULL; + } + + // Count the number of commas (and thus the number of parameter sets): + numSPropRecords = 1; + char* s; + for (s = inStr; *s != '\0'; ++s) { + if (*s == ',') { + ++numSPropRecords; + *s = '\0'; + } + } + + // Allocate and fill in the result array: + SPropRecord* resultArray = new SPropRecord[numSPropRecords]; + s = inStr; + for (unsigned i = 0; i < numSPropRecords; ++i) { + resultArray[i].sPropBytes = base64Decode(s, resultArray[i].sPropLength); + s += strlen(s) + 1; + } + + delete[] inStr; + return resultArray; +} + + +////////// H264BufferedPacket and H264BufferedPacketFactory implementation ////////// + +H264BufferedPacket::H264BufferedPacket(H264VideoRTPSource& 
ourSource) + : fOurSource(ourSource) { +} + +H264BufferedPacket::~H264BufferedPacket() { +} + +unsigned H264BufferedPacket +::nextEnclosedFrameSize(unsigned char*& framePtr, unsigned dataSize) { + unsigned resultNALUSize = 0; // if an error occurs + + switch (fOurSource.fCurPacketNALUnitType) { + case 24: case 25: { // STAP-A or STAP-B + // The first two bytes are NALU size: + if (dataSize < 2) break; + resultNALUSize = (framePtr[0]<<8)|framePtr[1]; + framePtr += 2; + break; + } + case 26: { // MTAP16 + // The first two bytes are NALU size. The next three are the DOND and TS offset: + if (dataSize < 5) break; + resultNALUSize = (framePtr[0]<<8)|framePtr[1]; + framePtr += 5; + break; + } + case 27: { // MTAP24 + // The first two bytes are NALU size. The next four are the DOND and TS offset: + if (dataSize < 6) break; + resultNALUSize = (framePtr[0]<<8)|framePtr[1]; + framePtr += 6; + break; + } + default: { + // Common case: We use the entire packet data: + return dataSize; + } + } + + return (resultNALUSize <= dataSize) ? resultNALUSize : dataSize; +} + +BufferedPacket* H264BufferedPacketFactory +::createNewPacket(MultiFramedRTPSource* ourSource) { + return new H264BufferedPacket((H264VideoRTPSource&)(*ourSource)); +} diff --git a/AnyCore/lib_rtsp/liveMedia/H264VideoStreamDiscreteFramer.cpp b/AnyCore/lib_rtsp/liveMedia/H264VideoStreamDiscreteFramer.cpp new file mode 100644 index 0000000..ad15947 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H264VideoStreamDiscreteFramer.cpp @@ -0,0 +1,41 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A simplified version of "H264VideoStreamFramer" that takes only complete, +// discrete frames (rather than an arbitrary byte stream) as input. +// This avoids the parsing and data copying overhead of the full +// "H264VideoStreamFramer". +// Implementation + +#include "H264VideoStreamDiscreteFramer.hh" + +H264VideoStreamDiscreteFramer* +H264VideoStreamDiscreteFramer::createNew(UsageEnvironment& env, FramedSource* inputSource) { + return new H264VideoStreamDiscreteFramer(env, inputSource); +} + +H264VideoStreamDiscreteFramer +::H264VideoStreamDiscreteFramer(UsageEnvironment& env, FramedSource* inputSource) + : H264or5VideoStreamDiscreteFramer(264, env, inputSource) { +} + +H264VideoStreamDiscreteFramer::~H264VideoStreamDiscreteFramer() { +} + +Boolean H264VideoStreamDiscreteFramer::isH264VideoStreamFramer() const { + return True; +} diff --git a/AnyCore/lib_rtsp/liveMedia/H264VideoStreamFramer.cpp b/AnyCore/lib_rtsp/liveMedia/H264VideoStreamFramer.cpp new file mode 100644 index 0000000..2be9d47 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H264VideoStreamFramer.cpp @@ -0,0 +1,38 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that breaks up a H.264 Video Elementary Stream into NAL units. +// Implementation + +#include "H264VideoStreamFramer.hh" + +H264VideoStreamFramer* H264VideoStreamFramer +::createNew(UsageEnvironment& env, FramedSource* inputSource, Boolean includeStartCodeInOutput) { + return new H264VideoStreamFramer(env, inputSource, True, includeStartCodeInOutput); +} + +H264VideoStreamFramer +::H264VideoStreamFramer(UsageEnvironment& env, FramedSource* inputSource, Boolean createParser, Boolean includeStartCodeInOutput) + : H264or5VideoStreamFramer(264, env, inputSource, createParser, includeStartCodeInOutput) { +} + +H264VideoStreamFramer::~H264VideoStreamFramer() { +} + +Boolean H264VideoStreamFramer::isH264VideoStreamFramer() const { + return True; +} diff --git a/AnyCore/lib_rtsp/liveMedia/H264or5VideoFileSink.cpp b/AnyCore/lib_rtsp/liveMedia/H264or5VideoFileSink.cpp new file mode 100644 index 0000000..c7cc3e4 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H264or5VideoFileSink.cpp @@ -0,0 +1,65 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// H.264 or H.265 Video File sinks +// Implementation + +#include "H264or5VideoFileSink.hh" +#include "H264VideoRTPSource.hh" // for "parseSPropParameterSets()" + +////////// H264or5VideoFileSink ////////// + +H264or5VideoFileSink +::H264or5VideoFileSink(UsageEnvironment& env, FILE* fid, + unsigned bufferSize, char const* perFrameFileNamePrefix, + char const* sPropParameterSetsStr1, + char const* sPropParameterSetsStr2, + char const* sPropParameterSetsStr3) + : FileSink(env, fid, bufferSize, perFrameFileNamePrefix), + fHaveWrittenFirstFrame(False) { + fSPropParameterSetsStr[0] = sPropParameterSetsStr1; + fSPropParameterSetsStr[1] = sPropParameterSetsStr2; + fSPropParameterSetsStr[2] = sPropParameterSetsStr3; +} + +H264or5VideoFileSink::~H264or5VideoFileSink() { +} + +void H264or5VideoFileSink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, struct timeval presentationTime) { + unsigned char const start_code[4] = {0x00, 0x00, 0x00, 0x01}; + + if (!fHaveWrittenFirstFrame) { + // If we have NAL units encoded in "sprop parameter strings", prepend these to the file: + for (unsigned j = 0; j < 3; ++j) { + unsigned numSPropRecords; + SPropRecord* sPropRecords + = parseSPropParameterSets(fSPropParameterSetsStr[j], numSPropRecords); + for (unsigned i = 0; i < numSPropRecords; ++i) { + addData(start_code, 4, presentationTime); + addData(sPropRecords[i].sPropBytes, sPropRecords[i].sPropLength, presentationTime); + } + delete[] sPropRecords; + } + fHaveWrittenFirstFrame = True; // for next time + } + + // Write the input data to the file, with the start code in front: + addData(start_code, 4, presentationTime); + + // Call the parent class to complete 
the normal file write with the input data: + FileSink::afterGettingFrame(frameSize, numTruncatedBytes, presentationTime); +} diff --git a/AnyCore/lib_rtsp/liveMedia/H264or5VideoRTPSink.cpp b/AnyCore/lib_rtsp/liveMedia/H264or5VideoRTPSink.cpp new file mode 100644 index 0000000..e985635 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H264or5VideoRTPSink.cpp @@ -0,0 +1,286 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for H.264 or H.265 video +// Implementation + +#include "H264or5VideoRTPSink.hh" +#include "H264or5VideoStreamFramer.hh" + +////////// H264or5Fragmenter definition ////////// + +// Because of the ideosyncracies of the H.264 RTP payload format, we implement +// "H264or5VideoRTPSink" using a separate "H264or5Fragmenter" class that delivers, +// to the "H264or5VideoRTPSink", only fragments that will fit within an outgoing +// RTP packet. I.e., we implement fragmentation in this separate "H264or5Fragmenter" +// class, rather than in "H264or5VideoRTPSink". +// (Note: This class should be used only by "H264or5VideoRTPSink", or a subclass.) 
+ +class H264or5Fragmenter: public FramedFilter { +public: + H264or5Fragmenter(int hNumber, UsageEnvironment& env, FramedSource* inputSource, + unsigned inputBufferMax, unsigned maxOutputPacketSize); + virtual ~H264or5Fragmenter(); + + Boolean lastFragmentCompletedNALUnit() const { return fLastFragmentCompletedNALUnit; } + +private: // redefined virtual functions: + virtual void doGetNextFrame(); + +private: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + +private: + int fHNumber; + unsigned fInputBufferSize; + unsigned fMaxOutputPacketSize; + unsigned char* fInputBuffer; + unsigned fNumValidDataBytes; + unsigned fCurDataOffset; + unsigned fSaveNumTruncatedBytes; + Boolean fLastFragmentCompletedNALUnit; +}; + + +////////// H264or5VideoRTPSink implementation ////////// + +H264or5VideoRTPSink +::H264or5VideoRTPSink(int hNumber, + UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, + u_int8_t const* vps, unsigned vpsSize, + u_int8_t const* sps, unsigned spsSize, + u_int8_t const* pps, unsigned ppsSize) + : VideoRTPSink(env, RTPgs, rtpPayloadFormat, 90000, hNumber == 264 ? 
"H264" : "H265"), + fHNumber(hNumber), fOurFragmenter(NULL), fFmtpSDPLine(NULL) { + if (vps != NULL) { + fVPSSize = vpsSize; + fVPS = new u_int8_t[fVPSSize]; + memmove(fVPS, vps, fVPSSize); + } else { + fVPSSize = 0; + fVPS = NULL; + } + if (sps != NULL) { + fSPSSize = spsSize; + fSPS = new u_int8_t[fSPSSize]; + memmove(fSPS, sps, fSPSSize); + } else { + fSPSSize = 0; + fSPS = NULL; + } + if (pps != NULL) { + fPPSSize = ppsSize; + fPPS = new u_int8_t[fPPSSize]; + memmove(fPPS, pps, fPPSSize); + } else { + fPPSSize = 0; + fPPS = NULL; + } +} + +H264or5VideoRTPSink::~H264or5VideoRTPSink() { + fSource = fOurFragmenter; // hack: in case "fSource" had gotten set to NULL before we were called + delete[] fFmtpSDPLine; + delete[] fVPS; delete[] fSPS; delete[] fPPS; + stopPlaying(); // call this now, because we won't have our 'fragmenter' when the base class destructor calls it later. + + // Close our 'fragmenter' as well: + Medium::close(fOurFragmenter); + fSource = NULL; // for the base class destructor, which gets called next +} + +Boolean H264or5VideoRTPSink::continuePlaying() { + // First, check whether we have a 'fragmenter' class set up yet. + // If not, create it now: + if (fOurFragmenter == NULL) { + fOurFragmenter = new H264or5Fragmenter(fHNumber, envir(), fSource, OutPacketBuffer::maxSize, + ourMaxPacketSize() - 12/*RTP hdr size*/); + } else { + fOurFragmenter->reassignInputSource(fSource); + } + fSource = fOurFragmenter; + + // Then call the parent class's implementation: + return MultiFramedRTPSink::continuePlaying(); +} + +void H264or5VideoRTPSink::doSpecialFrameHandling(unsigned /*fragmentationOffset*/, + unsigned char* /*frameStart*/, + unsigned /*numBytesInFrame*/, +struct timeval framePresentationTime, + unsigned /*numRemainingBytes*/) { + // Set the RTP 'M' (marker) bit iff + // 1/ The most recently delivered fragment was the end of (or the only fragment of) an NAL unit, and + // 2/ This NAL unit was the last NAL unit of an 'access unit' (i.e. 
video frame). + if (fOurFragmenter != NULL) { + H264or5VideoStreamFramer* framerSource + = (H264or5VideoStreamFramer*)(fOurFragmenter->inputSource()); + // This relies on our fragmenter's source being a "H264or5VideoStreamFramer". + if (((H264or5Fragmenter*)fOurFragmenter)->lastFragmentCompletedNALUnit() + && framerSource != NULL && framerSource->pictureEndMarker()) { + setMarkerBit(); + framerSource->pictureEndMarker() = False; + } + } + + setTimestamp(framePresentationTime); +} + +Boolean H264or5VideoRTPSink +::frameCanAppearAfterPacketStart(unsigned char const* /*frameStart*/, + unsigned /*numBytesInFrame*/) const { + return False; +} + + +////////// H264or5Fragmenter implementation ////////// + +H264or5Fragmenter::H264or5Fragmenter(int hNumber, + UsageEnvironment& env, FramedSource* inputSource, + unsigned inputBufferMax, unsigned maxOutputPacketSize) + : FramedFilter(env, inputSource), + fHNumber(hNumber), + fInputBufferSize(inputBufferMax+1), fMaxOutputPacketSize(maxOutputPacketSize), + fNumValidDataBytes(1), fCurDataOffset(1), fSaveNumTruncatedBytes(0), + fLastFragmentCompletedNALUnit(True) { + fInputBuffer = new unsigned char[fInputBufferSize]; +} + +H264or5Fragmenter::~H264or5Fragmenter() { + delete[] fInputBuffer; + detachInputSource(); // so that the subsequent ~FramedFilter() doesn't delete it +} + +void H264or5Fragmenter::doGetNextFrame() { + if (fNumValidDataBytes == 1) { + // We have no NAL unit data currently in the buffer. Read a new one: + fInputSource->getNextFrame(&fInputBuffer[1], fInputBufferSize - 1, + afterGettingFrame, this, + FramedSource::handleClosure, this); + } else { + // We have NAL unit data in the buffer. There are three cases to consider: + // 1. There is a new NAL unit in the buffer, and it's small enough to deliver + // to the RTP sink (as is). + // 2. There is a new NAL unit in the buffer, but it's too large to deliver to + // the RTP sink in its entirety. 
Deliver the first fragment of this data, + // as a FU packet, with one extra preceding header byte (for the "FU header"). + // 3. There is a NAL unit in the buffer, and we've already delivered some + // fragment(s) of this. Deliver the next fragment of this data, + // as a FU packet, with two (H.264) or three (H.265) extra preceding header bytes + // (for the "NAL header" and the "FU header"). + + if (fMaxSize < fMaxOutputPacketSize) { // shouldn't happen + envir() << "H264or5Fragmenter::doGetNextFrame(): fMaxSize (" + << fMaxSize << ") is smaller than expected\n"; + } else { + fMaxSize = fMaxOutputPacketSize; + } + + fLastFragmentCompletedNALUnit = True; // by default + if (fCurDataOffset == 1) { // case 1 or 2 + if (fNumValidDataBytes - 1 <= fMaxSize) { // case 1 + memmove(fTo, &fInputBuffer[1], fNumValidDataBytes - 1); + fFrameSize = fNumValidDataBytes - 1; + fCurDataOffset = fNumValidDataBytes; + } else { // case 2 + // We need to send the NAL unit data as FU packets. Deliver the first + // packet now. Note that we add "NAL header" and "FU header" bytes to the front + // of the packet (overwriting the existing "NAL header"). + if (fHNumber == 264) { + fInputBuffer[0] = (fInputBuffer[1] & 0xE0) | 28; // FU indicator + fInputBuffer[1] = 0x80 | (fInputBuffer[1] & 0x1F); // FU header (with S bit) + } else { // 265 + u_int8_t nal_unit_type = (fInputBuffer[1]&0x7E)>>1; + fInputBuffer[0] = (fInputBuffer[1] & 0x81) | (49<<1); // Payload header (1st byte) + fInputBuffer[1] = fInputBuffer[2]; // Payload header (2nd byte) + fInputBuffer[2] = 0x80 | nal_unit_type; // FU header (with S bit) + } + memmove(fTo, fInputBuffer, fMaxSize); + fFrameSize = fMaxSize; + fCurDataOffset += fMaxSize - 1; + fLastFragmentCompletedNALUnit = False; + } + } else { // case 3 + // We are sending this NAL unit data as FU packets. We've already sent the + // first packet (fragment). Now, send the next fragment. Note that we add + // "NAL header" and "FU header" bytes to the front. 
(We reuse these bytes that + // we already sent for the first fragment, but clear the S bit, and add the E + // bit if this is the last fragment.) + unsigned numExtraHeaderBytes; + if (fHNumber == 264) { + fInputBuffer[fCurDataOffset-2] = fInputBuffer[0]; // FU indicator + fInputBuffer[fCurDataOffset-1] = fInputBuffer[1]&~0x80; // FU header (no S bit) + numExtraHeaderBytes = 2; + } else { // 265 + fInputBuffer[fCurDataOffset-3] = fInputBuffer[0]; // Payload header (1st byte) + fInputBuffer[fCurDataOffset-2] = fInputBuffer[1]; // Payload header (2nd byte) + fInputBuffer[fCurDataOffset-1] = fInputBuffer[2]&~0x80; // FU header (no S bit) + numExtraHeaderBytes = 3; + } + unsigned numBytesToSend = numExtraHeaderBytes + (fNumValidDataBytes - fCurDataOffset); + if (numBytesToSend > fMaxSize) { + // We can't send all of the remaining data this time: + numBytesToSend = fMaxSize; + fLastFragmentCompletedNALUnit = False; + } else { + // This is the last fragment: + fInputBuffer[fCurDataOffset-1] |= 0x40; // set the E bit in the FU header + fNumTruncatedBytes = fSaveNumTruncatedBytes; + } + memmove(fTo, &fInputBuffer[fCurDataOffset-numExtraHeaderBytes], numBytesToSend); + fFrameSize = numBytesToSend; + fCurDataOffset += numBytesToSend - numExtraHeaderBytes; + } + + if (fCurDataOffset >= fNumValidDataBytes) { + // We're done with this data. 
Reset the pointers for receiving new data: + fNumValidDataBytes = fCurDataOffset = 1; + } + + // Complete delivery to the client: + FramedSource::afterGetting(this); + } +} + +void H264or5Fragmenter::afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, +struct timeval presentationTime, + unsigned durationInMicroseconds) { + H264or5Fragmenter* fragmenter = (H264or5Fragmenter*)clientData; + fragmenter->afterGettingFrame1(frameSize, numTruncatedBytes, presentationTime, + durationInMicroseconds); +} + +void H264or5Fragmenter::afterGettingFrame1(unsigned frameSize, + unsigned numTruncatedBytes, +struct timeval presentationTime, + unsigned durationInMicroseconds) { + fNumValidDataBytes += frameSize; + fSaveNumTruncatedBytes = numTruncatedBytes; + fPresentationTime = presentationTime; + fDurationInMicroseconds = durationInMicroseconds; + + // Deliver data to the client: + doGetNextFrame(); +} diff --git a/AnyCore/lib_rtsp/liveMedia/H264or5VideoStreamDiscreteFramer.cpp b/AnyCore/lib_rtsp/liveMedia/H264or5VideoStreamDiscreteFramer.cpp new file mode 100644 index 0000000..0f5c1b7 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H264or5VideoStreamDiscreteFramer.cpp @@ -0,0 +1,94 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A simplified version of "H264or5VideoStreamFramer" that takes only complete, +// discrete frames (rather than an arbitrary byte stream) as input. +// This avoids the parsing and data copying overhead of the full +// "H264or5VideoStreamFramer". +// Implementation + +#include "H264or5VideoStreamDiscreteFramer.hh" + +H264or5VideoStreamDiscreteFramer +::H264or5VideoStreamDiscreteFramer(int hNumber, UsageEnvironment& env, FramedSource* inputSource) + : H264or5VideoStreamFramer(hNumber, env, inputSource, False/*don't create a parser*/, False) { +} + +H264or5VideoStreamDiscreteFramer::~H264or5VideoStreamDiscreteFramer() { +} + +void H264or5VideoStreamDiscreteFramer::doGetNextFrame() { + // Arrange to read data (which should be a complete H.264 or H.265 NAL unit) + // from our data source, directly into the client's input buffer. + // After reading this, we'll do some parsing on the frame. 
+ fInputSource->getNextFrame(fTo, fMaxSize, + afterGettingFrame, this, + FramedSource::handleClosure, this); +} + +void H264or5VideoStreamDiscreteFramer +::afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + H264or5VideoStreamDiscreteFramer* source = (H264or5VideoStreamDiscreteFramer*)clientData; + source->afterGettingFrame1(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds); +} + +void H264or5VideoStreamDiscreteFramer +::afterGettingFrame1(unsigned frameSize, unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + // Get the "nal_unit_type", to see if this NAL unit is one that we want to save a copy of: + u_int8_t nal_unit_type; + if (fHNumber == 264 && frameSize >= 1) { + nal_unit_type = fTo[0]&0x1F; + } else if (fHNumber == 265 && frameSize >= 2) { + nal_unit_type = (fTo[0]&0x7E)>>1; + } else { + // This is too short to be a valid NAL unit, so just assume a bogus nal_unit_type + nal_unit_type = 0xFF; + } + + // Begin by checking for a (likely) common error: NAL units that (erroneously) begin with a + // 0x00000001 or 0x000001 'start code'. (Those start codes should only be in byte-stream data; + // *not* data that consists of discrete NAL units.) + // Once again, to be clear: The NAL units that you feed to a "H264or5VideoStreamDiscreteFramer" + // MUST NOT include start codes. 
+ if (frameSize >= 4 && fTo[0] == 0 && fTo[1] == 0 && ((fTo[2] == 0 && fTo[3] == 1) || fTo[2] == 1)) { + envir() << "H264or5VideoStreamDiscreteFramer error: MPEG 'start code' seen in the input\n"; + } else if (isVPS(nal_unit_type)) { // Video parameter set (VPS) + saveCopyOfVPS(fTo, frameSize); + } else if (isSPS(nal_unit_type)) { // Sequence parameter set (SPS) + saveCopyOfSPS(fTo, frameSize); + } else if (isPPS(nal_unit_type)) { // Picture parameter set (PPS) + saveCopyOfPPS(fTo, frameSize); + } + + // Next, check whether this NAL unit ends the current 'access unit' (basically, a video frame). + // Unfortunately, we can't do this reliably, because we don't yet know anything about the + // *next* NAL unit that we'll see. So, we guess this as best as we can, by assuming that + // if this NAL unit is a VCL NAL unit, then it ends the current 'access unit'. + if (isVCL(nal_unit_type)) fPictureEndMarker = True; + + // Finally, complete delivery to the client: + fFrameSize = frameSize; + fNumTruncatedBytes = numTruncatedBytes; + fPresentationTime = presentationTime; + fDurationInMicroseconds = durationInMicroseconds; + afterGetting(this); +} diff --git a/AnyCore/lib_rtsp/liveMedia/H264or5VideoStreamFramer.cpp b/AnyCore/lib_rtsp/liveMedia/H264or5VideoStreamFramer.cpp new file mode 100644 index 0000000..9dce315 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H264or5VideoStreamFramer.cpp @@ -0,0 +1,1071 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that breaks up a H.264 or H.265 Video Elementary Stream into NAL units. +// Implementation + +#include "H264or5VideoStreamFramer.hh" +#include "MPEGVideoStreamParser.hh" +#include "BitVector.hh" + +////////// H264or5VideoStreamParser definition ////////// + +class H264or5VideoStreamParser: public MPEGVideoStreamParser { +public: + H264or5VideoStreamParser(int hNumber, H264or5VideoStreamFramer* usingSource, + FramedSource* inputSource, Boolean includeStartCodeInOutput); + virtual ~H264or5VideoStreamParser(); + +private: // redefined virtual functions: + virtual void flushInput(); + virtual unsigned parse(); + +private: + H264or5VideoStreamFramer* usingSource() { + return (H264or5VideoStreamFramer*)fUsingSource; + } + + Boolean isVPS(u_int8_t nal_unit_type) { return usingSource()->isVPS(nal_unit_type); } + Boolean isSPS(u_int8_t nal_unit_type) { return usingSource()->isSPS(nal_unit_type); } + Boolean isPPS(u_int8_t nal_unit_type) { return usingSource()->isPPS(nal_unit_type); } + Boolean isVCL(u_int8_t nal_unit_type) { return usingSource()->isVCL(nal_unit_type); } + Boolean isSEI(u_int8_t nal_unit_type); + Boolean isEOF(u_int8_t nal_unit_type); + Boolean usuallyBeginsAccessUnit(u_int8_t nal_unit_type); + + void removeEmulationBytes(u_int8_t* nalUnitCopy, unsigned maxSize, unsigned& nalUnitCopySize); + + void analyze_video_parameter_set_data(unsigned& num_units_in_tick, unsigned& time_scale); + void analyze_seq_parameter_set_data(unsigned& num_units_in_tick, unsigned& time_scale); + void profile_tier_level(BitVector& bv, unsigned max_sub_layers_minus1); + void analyze_vui_parameters(BitVector& bv, unsigned& num_units_in_tick, unsigned& time_scale); 
+ void analyze_sei_data(u_int8_t nal_unit_type); + +private: + int fHNumber; // 264 or 265 + unsigned fOutputStartCodeSize; + Boolean fHaveSeenFirstStartCode, fHaveSeenFirstByteOfNALUnit; + u_int8_t fFirstByteOfNALUnit; + double fParsedFrameRate; +}; + + +////////// H264or5VideoStreamFramer implementation ////////// + +H264or5VideoStreamFramer +::H264or5VideoStreamFramer(int hNumber, UsageEnvironment& env, FramedSource* inputSource, + Boolean createParser, Boolean includeStartCodeInOutput) + : MPEGVideoStreamFramer(env, inputSource), + fHNumber(hNumber), + fLastSeenVPS(NULL), fLastSeenVPSSize(0), + fLastSeenSPS(NULL), fLastSeenSPSSize(0), + fLastSeenPPS(NULL), fLastSeenPPSSize(0) { + fParser = createParser + ? new H264or5VideoStreamParser(hNumber, this, inputSource, includeStartCodeInOutput) + : NULL; + fNextPresentationTime = fPresentationTimeBase; + fFrameRate = 25.0; // We assume a frame rate of 25 fps, unless we learn otherwise (from parsing a VPS or SPS NAL unit) +} + +H264or5VideoStreamFramer::~H264or5VideoStreamFramer() { + delete[] fLastSeenPPS; + delete[] fLastSeenSPS; + delete[] fLastSeenVPS; +} + +#define VPS_MAX_SIZE 1000 // larger than the largest possible VPS (Video Parameter Set) NAL unit + +void H264or5VideoStreamFramer::saveCopyOfVPS(u_int8_t* from, unsigned size) { + if (from == NULL) return; + delete[] fLastSeenVPS; + fLastSeenVPS = new u_int8_t[size]; + memmove(fLastSeenVPS, from, size); + + fLastSeenVPSSize = size; +} + +#define SPS_MAX_SIZE 1000 // larger than the largest possible SPS (Sequence Parameter Set) NAL unit + +void H264or5VideoStreamFramer::saveCopyOfSPS(u_int8_t* from, unsigned size) { + if (from == NULL) return; + delete[] fLastSeenSPS; + fLastSeenSPS = new u_int8_t[size]; + memmove(fLastSeenSPS, from, size); + + fLastSeenSPSSize = size; +} + +void H264or5VideoStreamFramer::saveCopyOfPPS(u_int8_t* from, unsigned size) { + if (from == NULL) return; + delete[] fLastSeenPPS; + fLastSeenPPS = new u_int8_t[size]; + 
memmove(fLastSeenPPS, from, size); + + fLastSeenPPSSize = size; +} + +Boolean H264or5VideoStreamFramer::isVPS(u_int8_t nal_unit_type) { + // VPS NAL units occur in H.265 only: + return fHNumber == 265 && nal_unit_type == 32; +} + +Boolean H264or5VideoStreamFramer::isSPS(u_int8_t nal_unit_type) { + return fHNumber == 264 ? nal_unit_type == 7 : nal_unit_type == 33; +} + +Boolean H264or5VideoStreamFramer::isPPS(u_int8_t nal_unit_type) { + return fHNumber == 264 ? nal_unit_type == 8 : nal_unit_type == 34; +} + +Boolean H264or5VideoStreamFramer::isVCL(u_int8_t nal_unit_type) { + return fHNumber == 264 + ? (nal_unit_type <= 5 && nal_unit_type > 0) + : (nal_unit_type <= 31); +} + + +////////// H264or5VideoStreamParser implementation ////////// + +H264or5VideoStreamParser +::H264or5VideoStreamParser(int hNumber, H264or5VideoStreamFramer* usingSource, + FramedSource* inputSource, Boolean includeStartCodeInOutput) + : MPEGVideoStreamParser(usingSource, inputSource), + fHNumber(hNumber), fOutputStartCodeSize(includeStartCodeInOutput ? 4 : 0), fHaveSeenFirstStartCode(False), fHaveSeenFirstByteOfNALUnit(False), fParsedFrameRate(0.0) { +} + +H264or5VideoStreamParser::~H264or5VideoStreamParser() { +} + +#define PREFIX_SEI_NUT 39 // for H.265 +#define SUFFIX_SEI_NUT 40 // for H.265 +Boolean H264or5VideoStreamParser::isSEI(u_int8_t nal_unit_type) { + return fHNumber == 264 + ? nal_unit_type == 6 + : (nal_unit_type == PREFIX_SEI_NUT || nal_unit_type == SUFFIX_SEI_NUT); +} + +Boolean H264or5VideoStreamParser::isEOF(u_int8_t nal_unit_type) { + // "end of sequence" or "end of (bit)stream" + return fHNumber == 264 + ? (nal_unit_type == 10 || nal_unit_type == 11) + : (nal_unit_type == 36 || nal_unit_type == 37); +} + +Boolean H264or5VideoStreamParser::usuallyBeginsAccessUnit(u_int8_t nal_unit_type) { + return fHNumber == 264 + ? 
(nal_unit_type >= 6 && nal_unit_type <= 9) || (nal_unit_type >= 14 && nal_unit_type <= 18) + : (nal_unit_type >= 32 && nal_unit_type <= 35) || (nal_unit_type == 39) + || (nal_unit_type >= 41 && nal_unit_type <= 44) + || (nal_unit_type >= 48 && nal_unit_type <= 55); +} + +void H264or5VideoStreamParser +::removeEmulationBytes(u_int8_t* nalUnitCopy, unsigned maxSize, unsigned& nalUnitCopySize) { + u_int8_t* nalUnitOrig = fStartOfFrame + fOutputStartCodeSize; + unsigned const numBytesInNALunit = fTo - nalUnitOrig; + nalUnitCopySize + = removeH264or5EmulationBytes(nalUnitCopy, maxSize, nalUnitOrig, numBytesInNALunit); +} + +#ifdef DEBUG +char const* nal_unit_type_description_h264[32] = { + "Unspecified", //0 + "Coded slice of a non-IDR picture", //1 + "Coded slice data partition A", //2 + "Coded slice data partition B", //3 + "Coded slice data partition C", //4 + "Coded slice of an IDR picture", //5 + "Supplemental enhancement information (SEI)", //6 + "Sequence parameter set", //7 + "Picture parameter set", //8 + "Access unit delimiter", //9 + "End of sequence", //10 + "End of stream", //11 + "Filler data", //12 + "Sequence parameter set extension", //13 + "Prefix NAL unit", //14 + "Subset sequence parameter set", //15 + "Reserved", //16 + "Reserved", //17 + "Reserved", //18 + "Coded slice of an auxiliary coded picture without partitioning", //19 + "Coded slice extension", //20 + "Reserved", //21 + "Reserved", //22 + "Reserved", //23 + "Unspecified", //24 + "Unspecified", //25 + "Unspecified", //26 + "Unspecified", //27 + "Unspecified", //28 + "Unspecified", //29 + "Unspecified", //30 + "Unspecified" //31 +}; +char const* nal_unit_type_description_h265[64] = { + "Coded slice segment of a non-TSA, non-STSA trailing picture", //0 + "Coded slice segment of a non-TSA, non-STSA trailing picture", //1 + "Coded slice segment of a TSA picture", //2 + "Coded slice segment of a TSA picture", //3 + "Coded slice segment of a STSA picture", //4 + "Coded slice segment of a STSA 
picture", //5 + "Coded slice segment of a RADL picture", //6 + "Coded slice segment of a RADL picture", //7 + "Coded slice segment of a RASL picture", //8 + "Coded slice segment of a RASL picture", //9 + "Reserved", //10 + "Reserved", //11 + "Reserved", //12 + "Reserved", //13 + "Reserved", //14 + "Reserved", //15 + "Coded slice segment of a BLA picture", //16 + "Coded slice segment of a BLA picture", //17 + "Coded slice segment of a BLA picture", //18 + "Coded slice segment of an IDR picture", //19 + "Coded slice segment of an IDR picture", //20 + "Coded slice segment of a CRA picture", //21 + "Reserved", //22 + "Reserved", //23 + "Reserved", //24 + "Reserved", //25 + "Reserved", //26 + "Reserved", //27 + "Reserved", //28 + "Reserved", //29 + "Reserved", //30 + "Reserved", //31 + "Video parameter set", //32 + "Sequence parameter set", //33 + "Picture parameter set", //34 + "Access unit delimiter", //35 + "End of sequence", //36 + "End of bitstream", //37 + "Filler data", //38 + "Supplemental enhancement information (SEI)", //39 + "Supplemental enhancement information (SEI)", //40 + "Reserved", //41 + "Reserved", //42 + "Reserved", //43 + "Reserved", //44 + "Reserved", //45 + "Reserved", //46 + "Reserved", //47 + "Unspecified", //48 + "Unspecified", //49 + "Unspecified", //50 + "Unspecified", //51 + "Unspecified", //52 + "Unspecified", //53 + "Unspecified", //54 + "Unspecified", //55 + "Unspecified", //56 + "Unspecified", //57 + "Unspecified", //58 + "Unspecified", //59 + "Unspecified", //60 + "Unspecified", //61 + "Unspecified", //62 + "Unspecified", //63 +}; +#endif + +#ifdef DEBUG +static unsigned numDebugTabs = 1; +#define DEBUG_PRINT_TABS for (unsigned _i = 0; _i < numDebugTabs; ++_i) fprintf(stderr, "\t") +#define DEBUG_PRINT(x) do { DEBUG_PRINT_TABS; fprintf(stderr, "%s: %d\n", #x, x); } while (0) +#define DEBUG_STR(x) do { DEBUG_PRINT_TABS; fprintf(stderr, "%s\n", x); } while (0) +class DebugTab { +public: + DebugTab() {++numDebugTabs;} + ~DebugTab() 
{--numDebugTabs;} +}; +#define DEBUG_TAB DebugTab dummy +#else +#define DEBUG_PRINT(x) do {x = x;} while (0) + // Note: the "x=x;" statement is intended to eliminate "unused variable" compiler warning messages +#define DEBUG_STR(x) do {} while (0) +#define DEBUG_TAB do {} while (0) +#endif + +void H264or5VideoStreamParser::profile_tier_level(BitVector& bv, unsigned max_sub_layers_minus1) { + bv.skipBits(96); + + unsigned i; + Boolean sub_layer_profile_present_flag[7], sub_layer_level_present_flag[7]; + for (i = 0; i < max_sub_layers_minus1; ++i) { + sub_layer_profile_present_flag[i] = bv.get1BitBoolean(); + sub_layer_level_present_flag[i] = bv.get1BitBoolean(); + } + if (max_sub_layers_minus1 > 0) { + bv.skipBits(2*(8-max_sub_layers_minus1)); // reserved_zero_2bits + } + for (i = 0; i < max_sub_layers_minus1; ++i) { + if (sub_layer_profile_present_flag[i]) { + bv.skipBits(88); + } + if (sub_layer_level_present_flag[i]) { + bv.skipBits(8); // sub_layer_level_idc[i] + } + } +} + +void H264or5VideoStreamParser +::analyze_vui_parameters(BitVector& bv, + unsigned& num_units_in_tick, unsigned& time_scale) { + Boolean aspect_ratio_info_present_flag = bv.get1BitBoolean(); + DEBUG_PRINT(aspect_ratio_info_present_flag); + if (aspect_ratio_info_present_flag) { + DEBUG_TAB; + unsigned aspect_ratio_idc = bv.getBits(8); + DEBUG_PRINT(aspect_ratio_idc); + if (aspect_ratio_idc == 255/*Extended_SAR*/) { + bv.skipBits(32); // sar_width; sar_height + } + } + Boolean overscan_info_present_flag = bv.get1BitBoolean(); + DEBUG_PRINT(overscan_info_present_flag); + if (overscan_info_present_flag) { + bv.skipBits(1); // overscan_appropriate_flag + } + Boolean video_signal_type_present_flag = bv.get1BitBoolean(); + DEBUG_PRINT(video_signal_type_present_flag); + if (video_signal_type_present_flag) { + DEBUG_TAB; + bv.skipBits(4); // video_format; video_full_range_flag + Boolean colour_description_present_flag = bv.get1BitBoolean(); + DEBUG_PRINT(colour_description_present_flag); + if 
(colour_description_present_flag) { + bv.skipBits(24); // colour_primaries; transfer_characteristics; matrix_coefficients + } + } + Boolean chroma_loc_info_present_flag = bv.get1BitBoolean(); + DEBUG_PRINT(chroma_loc_info_present_flag); + if (chroma_loc_info_present_flag) { + (void)bv.get_expGolomb(); // chroma_sample_loc_type_top_field + (void)bv.get_expGolomb(); // chroma_sample_loc_type_bottom_field + } + if (fHNumber == 265) { + bv.skipBits(3); // neutral_chroma_indication_flag, field_seq_flag, frame_field_info_present_flag + Boolean default_display_window_flag = bv.get1BitBoolean(); + DEBUG_PRINT(default_display_window_flag); + if (default_display_window_flag) { + (void)bv.get_expGolomb(); // def_disp_win_left_offset + (void)bv.get_expGolomb(); // def_disp_win_right_offset + (void)bv.get_expGolomb(); // def_disp_win_top_offset + (void)bv.get_expGolomb(); // def_disp_win_bottom_offset + } + } + Boolean timing_info_present_flag = bv.get1BitBoolean(); + DEBUG_PRINT(timing_info_present_flag); + if (timing_info_present_flag) { + DEBUG_TAB; + num_units_in_tick = bv.getBits(32); + DEBUG_PRINT(num_units_in_tick); + time_scale = bv.getBits(32); + DEBUG_PRINT(time_scale); + if (fHNumber == 264) { + Boolean fixed_frame_rate_flag = bv.get1BitBoolean(); + DEBUG_PRINT(fixed_frame_rate_flag); + } else { // 265 + Boolean vui_poc_proportional_to_timing_flag = bv.get1BitBoolean(); + DEBUG_PRINT(vui_poc_proportional_to_timing_flag); + if (vui_poc_proportional_to_timing_flag) { + unsigned vui_num_ticks_poc_diff_one_minus1 = bv.get_expGolomb(); + DEBUG_PRINT(vui_num_ticks_poc_diff_one_minus1); + } + } + } +} + +void H264or5VideoStreamParser +::analyze_video_parameter_set_data(unsigned& num_units_in_tick, unsigned& time_scale) { + num_units_in_tick = time_scale = 0; // default values + + // Begin by making a copy of the NAL unit data, removing any 'emulation prevention' bytes: + u_int8_t vps[VPS_MAX_SIZE]; + unsigned vpsSize; + removeEmulationBytes(vps, sizeof vps, vpsSize); + + 
BitVector bv(vps, 0, 8*vpsSize); + + // Assert: fHNumber == 265 (because this function is called only when parsing H.265) + unsigned i; + + bv.skipBits(28); // nal_unit_header, vps_video_parameter_set_id, vps_reserved_three_2bits, vps_max_layers_minus1 + unsigned vps_max_sub_layers_minus1 = bv.getBits(3); + DEBUG_PRINT(vps_max_sub_layers_minus1); + bv.skipBits(17); // vps_temporal_id_nesting_flag, vps_reserved_0xffff_16bits + profile_tier_level(bv, vps_max_sub_layers_minus1); + Boolean vps_sub_layer_ordering_info_present_flag = bv.get1BitBoolean(); + DEBUG_PRINT(vps_sub_layer_ordering_info_present_flag); + for (i = vps_sub_layer_ordering_info_present_flag ? 0 : vps_max_sub_layers_minus1; + i <= vps_max_sub_layers_minus1; ++i) { + (void)bv.get_expGolomb(); // vps_max_dec_pic_buffering_minus1[i] + (void)bv.get_expGolomb(); // vps_max_num_reorder_pics[i] + (void)bv.get_expGolomb(); // vps_max_latency_increase_plus1[i] + } + unsigned vps_max_layer_id = bv.getBits(6); + DEBUG_PRINT(vps_max_layer_id); + unsigned vps_num_layer_sets_minus1 = bv.get_expGolomb(); + DEBUG_PRINT(vps_num_layer_sets_minus1); + for (i = 1; i <= vps_num_layer_sets_minus1; ++i) { + bv.skipBits(vps_max_layer_id+1); // layer_id_included_flag[i][0..vps_max_layer_id] + } + Boolean vps_timing_info_present_flag = bv.get1BitBoolean(); + DEBUG_PRINT(vps_timing_info_present_flag); + if (vps_timing_info_present_flag) { + DEBUG_TAB; + num_units_in_tick = bv.getBits(32); + DEBUG_PRINT(num_units_in_tick); + time_scale = bv.getBits(32); + DEBUG_PRINT(time_scale); + Boolean vps_poc_proportional_to_timing_flag = bv.get1BitBoolean(); + DEBUG_PRINT(vps_poc_proportional_to_timing_flag); + if (vps_poc_proportional_to_timing_flag) { + unsigned vps_num_ticks_poc_diff_one_minus1 = bv.get_expGolomb(); + DEBUG_PRINT(vps_num_ticks_poc_diff_one_minus1); + } + } + Boolean vps_extension_flag = bv.get1BitBoolean(); + DEBUG_PRINT(vps_extension_flag); +} + +void H264or5VideoStreamParser +::analyze_seq_parameter_set_data(unsigned& 
num_units_in_tick, unsigned& time_scale) { + num_units_in_tick = time_scale = 0; // default values + + // Begin by making a copy of the NAL unit data, removing any 'emulation prevention' bytes: + u_int8_t sps[SPS_MAX_SIZE]; + unsigned spsSize; + removeEmulationBytes(sps, sizeof sps, spsSize); + + BitVector bv(sps, 0, 8*spsSize); + + if (fHNumber == 264) { + bv.skipBits(8); // forbidden_zero_bit; nal_ref_idc; nal_unit_type + unsigned profile_idc = bv.getBits(8); + DEBUG_PRINT(profile_idc); + unsigned constraint_setN_flag = bv.getBits(8); // also "reserved_zero_2bits" at end + DEBUG_PRINT(constraint_setN_flag); + unsigned level_idc = bv.getBits(8); + DEBUG_PRINT(level_idc); + unsigned seq_parameter_set_id = bv.get_expGolomb(); + DEBUG_PRINT(seq_parameter_set_id); + if (profile_idc == 100 || profile_idc == 110 || profile_idc == 122 || profile_idc == 244 || profile_idc == 44 || profile_idc == 83 || profile_idc == 86 || profile_idc == 118 || profile_idc == 128 ) { + DEBUG_TAB; + unsigned chroma_format_idc = bv.get_expGolomb(); + DEBUG_PRINT(chroma_format_idc); + if (chroma_format_idc == 3) { + DEBUG_TAB; + Boolean separate_colour_plane_flag = bv.get1BitBoolean(); + DEBUG_PRINT(separate_colour_plane_flag); + } + (void)bv.get_expGolomb(); // bit_depth_luma_minus8 + (void)bv.get_expGolomb(); // bit_depth_chroma_minus8 + bv.skipBits(1); // qpprime_y_zero_transform_bypass_flag + Boolean seq_scaling_matrix_present_flag = bv.get1BitBoolean(); + DEBUG_PRINT(seq_scaling_matrix_present_flag); + if (seq_scaling_matrix_present_flag) { + for (int i = 0; i < ((chroma_format_idc != 3) ? 8 : 12); ++i) { + DEBUG_TAB; + DEBUG_PRINT(i); + Boolean seq_scaling_list_present_flag = bv.get1BitBoolean(); + DEBUG_PRINT(seq_scaling_list_present_flag); + if (seq_scaling_list_present_flag) { + DEBUG_TAB; + unsigned sizeOfScalingList = i < 6 ? 
16 : 64; + unsigned lastScale = 8; + unsigned nextScale = 8; + for (unsigned j = 0; j < sizeOfScalingList; ++j) { + DEBUG_TAB; + DEBUG_PRINT(j); + DEBUG_PRINT(nextScale); + if (nextScale != 0) { + DEBUG_TAB; + unsigned delta_scale = bv.get_expGolomb(); + DEBUG_PRINT(delta_scale); + nextScale = (lastScale + delta_scale + 256) % 256; + } + lastScale = (nextScale == 0) ? lastScale : nextScale; + DEBUG_PRINT(lastScale); + } + } + } + } + } + unsigned log2_max_frame_num_minus4 = bv.get_expGolomb(); + DEBUG_PRINT(log2_max_frame_num_minus4); + unsigned pic_order_cnt_type = bv.get_expGolomb(); + DEBUG_PRINT(pic_order_cnt_type); + if (pic_order_cnt_type == 0) { + DEBUG_TAB; + unsigned log2_max_pic_order_cnt_lsb_minus4 = bv.get_expGolomb(); + DEBUG_PRINT(log2_max_pic_order_cnt_lsb_minus4); + } else if (pic_order_cnt_type == 1) { + DEBUG_TAB; + bv.skipBits(1); // delta_pic_order_always_zero_flag + (void)bv.get_expGolomb(); // offset_for_non_ref_pic + (void)bv.get_expGolomb(); // offset_for_top_to_bottom_field + unsigned num_ref_frames_in_pic_order_cnt_cycle = bv.get_expGolomb(); + DEBUG_PRINT(num_ref_frames_in_pic_order_cnt_cycle); + for (unsigned i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; ++i) { + (void)bv.get_expGolomb(); // offset_for_ref_frame[i] + } + } + unsigned max_num_ref_frames = bv.get_expGolomb(); + DEBUG_PRINT(max_num_ref_frames); + Boolean gaps_in_frame_num_value_allowed_flag = bv.get1BitBoolean(); + DEBUG_PRINT(gaps_in_frame_num_value_allowed_flag); + unsigned pic_width_in_mbs_minus1 = bv.get_expGolomb(); + DEBUG_PRINT(pic_width_in_mbs_minus1); + unsigned pic_height_in_map_units_minus1 = bv.get_expGolomb(); + DEBUG_PRINT(pic_height_in_map_units_minus1); + Boolean frame_mbs_only_flag = bv.get1BitBoolean(); + DEBUG_PRINT(frame_mbs_only_flag); + if (!frame_mbs_only_flag) { + bv.skipBits(1); // mb_adaptive_frame_field_flag + } + bv.skipBits(1); // direct_8x8_inference_flag + Boolean frame_cropping_flag = bv.get1BitBoolean(); + 
DEBUG_PRINT(frame_cropping_flag); + if (frame_cropping_flag) { + (void)bv.get_expGolomb(); // frame_crop_left_offset + (void)bv.get_expGolomb(); // frame_crop_right_offset + (void)bv.get_expGolomb(); // frame_crop_top_offset + (void)bv.get_expGolomb(); // frame_crop_bottom_offset + } + Boolean vui_parameters_present_flag = bv.get1BitBoolean(); + DEBUG_PRINT(vui_parameters_present_flag); + if (vui_parameters_present_flag) { + DEBUG_TAB; + analyze_vui_parameters(bv, num_units_in_tick, time_scale); + } + } else { // 265 + unsigned i; + + bv.skipBits(16); // nal_unit_header + bv.skipBits(4); // sps_video_parameter_set_id + unsigned sps_max_sub_layers_minus1 = bv.getBits(3); + DEBUG_PRINT(sps_max_sub_layers_minus1); + bv.skipBits(1); // sps_temporal_id_nesting_flag + profile_tier_level(bv, sps_max_sub_layers_minus1); + (void)bv.get_expGolomb(); // sps_seq_parameter_set_id + unsigned chroma_format_idc = bv.get_expGolomb(); + DEBUG_PRINT(chroma_format_idc); + if (chroma_format_idc == 3) bv.skipBits(1); // separate_colour_plane_flag + unsigned pic_width_in_luma_samples = bv.get_expGolomb(); + DEBUG_PRINT(pic_width_in_luma_samples); + unsigned pic_height_in_luma_samples = bv.get_expGolomb(); + DEBUG_PRINT(pic_height_in_luma_samples); + Boolean conformance_window_flag = bv.get1BitBoolean(); + DEBUG_PRINT(conformance_window_flag); + if (conformance_window_flag) { + DEBUG_TAB; + unsigned conf_win_left_offset = bv.get_expGolomb(); + DEBUG_PRINT(conf_win_left_offset); + unsigned conf_win_right_offset = bv.get_expGolomb(); + DEBUG_PRINT(conf_win_right_offset); + unsigned conf_win_top_offset = bv.get_expGolomb(); + DEBUG_PRINT(conf_win_top_offset); + unsigned conf_win_bottom_offset = bv.get_expGolomb(); + DEBUG_PRINT(conf_win_bottom_offset); + } + (void)bv.get_expGolomb(); // bit_depth_luma_minus8 + (void)bv.get_expGolomb(); // bit_depth_chroma_minus8 + unsigned log2_max_pic_order_cnt_lsb_minus4 = bv.get_expGolomb(); + Boolean sps_sub_layer_ordering_info_present_flag = 
bv.get1BitBoolean(); + DEBUG_PRINT(sps_sub_layer_ordering_info_present_flag); + for (i = (sps_sub_layer_ordering_info_present_flag ? 0 : sps_max_sub_layers_minus1); + i <= sps_max_sub_layers_minus1; ++i) { + (void)bv.get_expGolomb(); // sps_max_dec_pic_buffering_minus1[i] + (void)bv.get_expGolomb(); // sps_max_num_reorder_pics[i] + (void)bv.get_expGolomb(); // sps_max_latency_increase[i] + } + (void)bv.get_expGolomb(); // log2_min_luma_coding_block_size_minus3 + (void)bv.get_expGolomb(); // log2_diff_max_min_luma_coding_block_size + (void)bv.get_expGolomb(); // log2_min_transform_block_size_minus2 + (void)bv.get_expGolomb(); // log2_diff_max_min_transform_block_size + (void)bv.get_expGolomb(); // max_transform_hierarchy_depth_inter + (void)bv.get_expGolomb(); // max_transform_hierarchy_depth_intra + Boolean scaling_list_enabled_flag = bv.get1BitBoolean(); + DEBUG_PRINT(scaling_list_enabled_flag); + if (scaling_list_enabled_flag) { + DEBUG_TAB; + Boolean sps_scaling_list_data_present_flag = bv.get1BitBoolean(); + DEBUG_PRINT(sps_scaling_list_data_present_flag); + if (sps_scaling_list_data_present_flag) { + // scaling_list_data() + DEBUG_TAB; + for (unsigned sizeId = 0; sizeId < 4; ++sizeId) { + DEBUG_PRINT(sizeId); + for (unsigned matrixId = 0; matrixId < (sizeId == 3 ? 2 : 6); ++matrixId) { + DEBUG_TAB; + DEBUG_PRINT(matrixId); + Boolean scaling_list_pred_mode_flag = bv.get1BitBoolean(); + DEBUG_PRINT(scaling_list_pred_mode_flag); + if (!scaling_list_pred_mode_flag) { + (void)bv.get_expGolomb(); // scaling_list_pred_matrix_id_delta[sizeId][matrixId] + } else { + unsigned const c = 1 << (4+(sizeId<<1)); + unsigned coefNum = c < 64 ? 
c : 64; + if (sizeId > 1) { + (void)bv.get_expGolomb(); // scaling_list_dc_coef_minus8[sizeId][matrixId] + } + for (i = 0; i < coefNum; ++i) { + (void)bv.get_expGolomb(); // scaling_list_delta_coef + } + } + } + } + } + } + bv.skipBits(2); // amp_enabled_flag, sample_adaptive_offset_enabled_flag + Boolean pcm_enabled_flag = bv.get1BitBoolean(); + DEBUG_PRINT(pcm_enabled_flag); + if (pcm_enabled_flag) { + bv.skipBits(8); // pcm_sample_bit_depth_luma_minus1, pcm_sample_bit_depth_chroma_minus1 + (void)bv.get_expGolomb(); // log2_min_pcm_luma_coding_block_size_minus3 + (void)bv.get_expGolomb(); // log2_diff_max_min_pcm_luma_coding_block_size + bv.skipBits(1); // pcm_loop_filter_disabled_flag + } + unsigned num_short_term_ref_pic_sets = bv.get_expGolomb(); + DEBUG_PRINT(num_short_term_ref_pic_sets); + unsigned num_negative_pics = 0, prev_num_negative_pics = 0; + unsigned num_positive_pics = 0, prev_num_positive_pics = 0; + for (i = 0; i < num_short_term_ref_pic_sets; ++i) { + // short_term_ref_pic_set(i): + DEBUG_TAB; + DEBUG_PRINT(i); + Boolean inter_ref_pic_set_prediction_flag = False; + if (i != 0) { + inter_ref_pic_set_prediction_flag = bv.get1BitBoolean(); + } + DEBUG_PRINT(inter_ref_pic_set_prediction_flag); + if (inter_ref_pic_set_prediction_flag) { + DEBUG_TAB; + if (i == num_short_term_ref_pic_sets) { + // This can't happen here, but it's in the spec, so we include it for completeness + (void)bv.get_expGolomb(); // delta_idx_minus1 + } + bv.skipBits(1); // delta_rps_sign + (void)bv.get_expGolomb(); // abs_delta_rps_minus1 + unsigned NumDeltaPocs = prev_num_negative_pics + prev_num_positive_pics; // correct??? 
+ for (unsigned j = 0; j < NumDeltaPocs; ++j) { + DEBUG_PRINT(j); + Boolean used_by_curr_pic_flag = bv.get1BitBoolean(); + DEBUG_PRINT(used_by_curr_pic_flag); + if (!used_by_curr_pic_flag) bv.skipBits(1); // use_delta_flag[j] + } + } else { + prev_num_negative_pics = num_negative_pics; + num_negative_pics = bv.get_expGolomb(); + DEBUG_PRINT(num_negative_pics); + prev_num_positive_pics = num_positive_pics; + num_positive_pics = bv.get_expGolomb(); + DEBUG_PRINT(num_positive_pics); + unsigned k; + for (k = 0; k < num_negative_pics; ++k) { + (void)bv.get_expGolomb(); // delta_poc_s0_minus1[k] + bv.skipBits(1); // used_by_curr_pic_s0_flag[k] + } + for (k = 0; k < num_positive_pics; ++k) { + (void)bv.get_expGolomb(); // delta_poc_s1_minus1[k] + bv.skipBits(1); // used_by_curr_pic_s1_flag[k] + } + } + } + Boolean long_term_ref_pics_present_flag = bv.get1BitBoolean(); + DEBUG_PRINT(long_term_ref_pics_present_flag); + if (long_term_ref_pics_present_flag) { + DEBUG_TAB; + unsigned num_long_term_ref_pics_sps = bv.get_expGolomb(); + DEBUG_PRINT(num_long_term_ref_pics_sps); + for (i = 0; i < num_long_term_ref_pics_sps; ++i) { + bv.skipBits(log2_max_pic_order_cnt_lsb_minus4); // lt_ref_pic_poc_lsb_sps[i] + bv.skipBits(1); // used_by_curr_pic_lt_sps_flag[1] + } + } + bv.skipBits(2); // sps_temporal_mvp_enabled_flag, strong_intra_smoothing_enabled_flag + Boolean vui_parameters_present_flag = bv.get1BitBoolean(); + DEBUG_PRINT(vui_parameters_present_flag); + if (vui_parameters_present_flag) { + DEBUG_TAB; + analyze_vui_parameters(bv, num_units_in_tick, time_scale); + } + Boolean sps_extension_flag = bv.get1BitBoolean(); + DEBUG_PRINT(sps_extension_flag); + } +} + +#define SEI_MAX_SIZE 5000 // larger than the largest possible SEI NAL unit + +#ifdef DEBUG +#define MAX_SEI_PAYLOAD_TYPE_DESCRIPTION_H264 46 +char const* sei_payloadType_description_h264[MAX_SEI_PAYLOAD_TYPE_DESCRIPTION_H264+1] = { + "buffering_period", //0 + "pic_timing", //1 + "pan_scan_rect", //2 + "filler_payload", 
//3 + "user_data_registered_itu_t_t35", //4 + "user_data_unregistered", //5 + "recovery_point", //6 + "dec_ref_pic_marking_repetition", //7 + "spare_pic", //8 + "scene_info", //9 + "sub_seq_info", //10 + "sub_seq_layer_characteristics", //11 + "sub_seq_characteristics", //12 + "full_frame_freeze", //13 + "full_frame_freeze_release", //14 + "full_frame_snapshot", //15 + "progressive_refinement_segment_start", //16 + "progressive_refinement_segment_end", //17 + "motion_constrained_slice_group_set", //18 + "film_grain_characteristics", //19 + "deblocking_filter_display_preference", //20 + "stereo_video_info", //21 + "post_filter_hint", //22 + "tone_mapping_info", //23 + "scalability_info", //24 + "sub_pic_scalable_layer", //25 + "non_required_layer_rep", //26 + "priority_layer_info", //27 + "layers_not_present", //28 + "layer_dependency_change", //29 + "scalable_nesting", //30 + "base_layer_temporal_hrd", //31 + "quality_layer_integrity_check", //32 + "redundant_pic_property", //33 + "tl0_dep_rep_index", //34 + "tl_switching_point", //35 + "parallel_decoding_info", //36 + "mvc_scalable_nesting", //37 + "view_scalability_info", //38 + "multiview_scene_info", //39 + "multiview_acquisition_info", //40 + "non_required_view_component", //41 + "view_dependency_change", //42 + "operation_points_not_present", //43 + "base_view_temporal_hrd", //44 + "frame_packing_arrangement", //45 + "reserved_sei_message" // 46 or higher +}; +#endif + +void H264or5VideoStreamParser::analyze_sei_data(u_int8_t nal_unit_type) { + // Begin by making a copy of the NAL unit data, removing any 'emulation prevention' bytes: + u_int8_t sei[SEI_MAX_SIZE]; + unsigned seiSize; + removeEmulationBytes(sei, sizeof sei, seiSize); + + unsigned j = 1; // skip the initial byte (forbidden_zero_bit; nal_ref_idc; nal_unit_type); we've already seen it + while (j < seiSize) { + unsigned payloadType = 0; + do { + payloadType += sei[j]; + } while (sei[j++] == 255 && j < seiSize); + if (j >= seiSize) break; + + 
unsigned payloadSize = 0; + do { + payloadSize += sei[j]; + } while (sei[j++] == 255 && j < seiSize); + if (j >= seiSize) break; + +#ifdef DEBUG + char const* description; + if (fHNumber == 264) { + unsigned descriptionNum = payloadType <= MAX_SEI_PAYLOAD_TYPE_DESCRIPTION_H264 + ? payloadType : MAX_SEI_PAYLOAD_TYPE_DESCRIPTION_H264; + description = sei_payloadType_description_h264[descriptionNum]; + } else { // 265 + description = + payloadType == 3 ? "filler_payload" : + payloadType == 4 ? "user_data_registered_itu_t_t35" : + payloadType == 5 ? "user_data_unregistered" : + payloadType == 17 ? "progressive_refinement_segment_end" : + payloadType == 22 ? "post_filter_hint" : + (payloadType == 132 && nal_unit_type == SUFFIX_SEI_NUT) ? "decoded_picture_hash" : + nal_unit_type == SUFFIX_SEI_NUT ? "reserved_sei_message" : + payloadType == 0 ? "buffering_period" : + payloadType == 1 ? "pic_timing" : + payloadType == 2 ? "pan_scan_rect" : + payloadType == 6 ? "recovery_point" : + payloadType == 9 ? "scene_info" : + payloadType == 15 ? "picture_snapshot" : + payloadType == 16 ? "progressive_refinement_segment_start" : + payloadType == 19 ? "film_grain_characteristics" : + payloadType == 23 ? "tone_mapping_info" : + payloadType == 45 ? "frame_packing_arrangement" : + payloadType == 47 ? "display_orientation" : + payloadType == 128 ? "structure_of_pictures_info" : + payloadType == 129 ? "active_parameter_sets" : + payloadType == 130 ? "decoding_unit_info" : + payloadType == 131 ? "temporal_sub_layer_zero_index" : + payloadType == 133 ? "scalable_nesting" : + payloadType == 134 ? 
"region_refresh_info" : "reserved_sei_message"; + } + fprintf(stderr, "\tpayloadType %d (\"%s\"); payloadSize %d\n", payloadType, description, payloadSize); +#endif + j += payloadSize; + } +} + +void H264or5VideoStreamParser::flushInput() { + fHaveSeenFirstStartCode = False; + fHaveSeenFirstByteOfNALUnit = False; + + StreamParser::flushInput(); +} + +#define NUM_NEXT_SLICE_HEADER_BYTES_TO_ANALYZE 12 + +unsigned H264or5VideoStreamParser::parse() { + try { + // The stream must start with a 0x00000001: + if (!fHaveSeenFirstStartCode) { + // Skip over any input bytes that precede the first 0x00000001: + u_int32_t first4Bytes; + while ((first4Bytes = test4Bytes()) != 0x00000001) { + get1Byte(); setParseState(); // ensures that we progress over bad data + } + skipBytes(4); // skip this initial code + + setParseState(); + fHaveSeenFirstStartCode = True; // from now on + } + + if (fOutputStartCodeSize > 0 && curFrameSize() == 0 && !haveSeenEOF()) { + // Include a start code in the output: + save4Bytes(0x00000001); + } + + // Then save everything up until the next 0x00000001 (4 bytes) or 0x000001 (3 bytes), or we hit EOF. 
+ // Also make note of the first byte, because it contains the "nal_unit_type": + if (haveSeenEOF()) { + // We hit EOF the last time that we tried to parse this data, so we know that any remaining unparsed data + // forms a complete NAL unit, and that there's no 'start code' at the end: + unsigned remainingDataSize = totNumValidBytes() - curOffset(); +#ifdef DEBUG + unsigned const trailingNALUnitSize = remainingDataSize; +#endif + while (remainingDataSize > 0) { + u_int8_t nextByte = get1Byte(); + if (!fHaveSeenFirstByteOfNALUnit) { + fFirstByteOfNALUnit = nextByte; + fHaveSeenFirstByteOfNALUnit = True; + } + saveByte(nextByte); + --remainingDataSize; + } + +#ifdef DEBUG + if (fHNumber == 264) { + u_int8_t nal_ref_idc = (fFirstByteOfNALUnit&0x60)>>5; + u_int8_t nal_unit_type = fFirstByteOfNALUnit&0x1F; + fprintf(stderr, "Parsed trailing %d-byte NAL-unit (nal_ref_idc: %d, nal_unit_type: %d (\"%s\"))\n", + trailingNALUnitSize, nal_ref_idc, nal_unit_type, nal_unit_type_description_h264[nal_unit_type]); + } else { // 265 + u_int8_t nal_unit_type = (fFirstByteOfNALUnit&0x7E)>>1; + fprintf(stderr, "Parsed trailing %d-byte NAL-unit (nal_unit_type: %d (\"%s\"))\n", + trailingNALUnitSize, nal_unit_type, nal_unit_type_description_h265[nal_unit_type]); + } +#endif + + (void)get1Byte(); // forces another read, which will cause EOF to get handled for real this time + return 0; + } else { + u_int32_t next4Bytes = test4Bytes(); + if (!fHaveSeenFirstByteOfNALUnit) { + fFirstByteOfNALUnit = next4Bytes>>24; + fHaveSeenFirstByteOfNALUnit = True; + } + while (next4Bytes != 0x00000001 && (next4Bytes&0xFFFFFF00) != 0x00000100) { + // We save at least some of "next4Bytes". 
+ if ((unsigned)(next4Bytes&0xFF) > 1) { + // Common case: 0x00000001 or 0x000001 definitely doesn't begin anywhere in "next4Bytes", so we save all of it: + save4Bytes(next4Bytes); + skipBytes(4); + } else { + // Save the first byte, and continue testing the rest: + saveByte(next4Bytes>>24); + skipBytes(1); + } + setParseState(); // ensures forward progress + next4Bytes = test4Bytes(); + } + // Assert: next4Bytes starts with 0x00000001 or 0x000001, and we've saved all previous bytes (forming a complete NAL unit). + // Skip over these remaining bytes, up until the start of the next NAL unit: + if (next4Bytes == 0x00000001) { + skipBytes(4); + } else { + skipBytes(3); + } + } + + fHaveSeenFirstByteOfNALUnit = False; // for the next NAL unit that we'll parse + u_int8_t nal_unit_type; + if (fHNumber == 264) { + nal_unit_type = fFirstByteOfNALUnit&0x1F; +#ifdef DEBUG + u_int8_t nal_ref_idc = (fFirstByteOfNALUnit&0x60)>>5; + fprintf(stderr, "Parsed %d-byte NAL-unit (nal_ref_idc: %d, nal_unit_type: %d (\"%s\"))\n", + curFrameSize()-fOutputStartCodeSize, nal_ref_idc, nal_unit_type, nal_unit_type_description_h264[nal_unit_type]); +#endif + } else { // 265 + nal_unit_type = (fFirstByteOfNALUnit&0x7E)>>1; +#ifdef DEBUG + fprintf(stderr, "Parsed %d-byte NAL-unit (nal_unit_type: %d (\"%s\"))\n", + curFrameSize()-fOutputStartCodeSize, nal_unit_type, nal_unit_type_description_h265[nal_unit_type]); +#endif + } + + // Now that we have found (& copied) a NAL unit, process it if it's of special interest to us: + if (isVPS(nal_unit_type)) { // Video parameter set + // First, save a copy of this NAL unit, in case the downstream object wants to see it: + usingSource()->saveCopyOfVPS(fStartOfFrame + fOutputStartCodeSize, curFrameSize() - fOutputStartCodeSize); + + if (fParsedFrameRate == 0.0) { + // We haven't yet parsed a frame rate from the stream. 
+ // So parse this NAL unit to check whether frame rate information is present: + unsigned num_units_in_tick, time_scale; + analyze_video_parameter_set_data(num_units_in_tick, time_scale); + if (time_scale > 0 && num_units_in_tick > 0) { + usingSource()->fFrameRate = fParsedFrameRate = time_scale/(2.0*num_units_in_tick); +#ifdef DEBUG + fprintf(stderr, "Set frame rate to %f fps\n", usingSource()->fFrameRate); +#endif + } else { +#ifdef DEBUG + fprintf(stderr, "\tThis \"Video Parameter Set\" NAL unit contained no frame rate information, so we use a default frame rate of %f fps\n", usingSource()->fFrameRate); +#endif + } + } + } else if (isSPS(nal_unit_type)) { // Sequence parameter set + // First, save a copy of this NAL unit, in case the downstream object wants to see it: + usingSource()->saveCopyOfSPS(fStartOfFrame + fOutputStartCodeSize, curFrameSize() - fOutputStartCodeSize); + + if (fParsedFrameRate == 0.0) { + // We haven't yet parsed a frame rate from the stream. + // So parse this NAL unit to check whether frame rate information is present: + unsigned num_units_in_tick, time_scale; + analyze_seq_parameter_set_data(num_units_in_tick, time_scale); + if (time_scale > 0 && num_units_in_tick > 0) { + usingSource()->fFrameRate = fParsedFrameRate = time_scale/(2.0*num_units_in_tick); +#ifdef DEBUG + fprintf(stderr, "Set frame rate to %f fps\n", usingSource()->fFrameRate); +#endif + } else { +#ifdef DEBUG + fprintf(stderr, "\tThis \"Sequence Parameter Set\" NAL unit contained no frame rate information, so we use a default frame rate of %f fps\n", usingSource()->fFrameRate); +#endif + } + } + } else if (isPPS(nal_unit_type)) { // Picture parameter set + // Save a copy of this NAL unit, in case the downstream object wants to see it: + usingSource()->saveCopyOfPPS(fStartOfFrame + fOutputStartCodeSize, curFrameSize() - fOutputStartCodeSize); + } else if (isSEI(nal_unit_type)) { // Supplemental enhancement information (SEI) + analyze_sei_data(nal_unit_type); + // Later, 
perhaps adjust "fPresentationTime" if we saw a "pic_timing" SEI payload??? ##### + } + + usingSource()->setPresentationTime(); +#ifdef DEBUG + unsigned long secs = (unsigned long)usingSource()->fPresentationTime.tv_sec; + unsigned uSecs = (unsigned)usingSource()->fPresentationTime.tv_usec; + fprintf(stderr, "\tPresentation time: %lu.%06u\n", secs, uSecs); +#endif + + // Now, check whether this NAL unit ends an 'access unit'. + // (RTP streamers need to know this in order to figure out whether or not to set the "M" bit.) + Boolean thisNALUnitEndsAccessUnit; + if (haveSeenEOF() || isEOF(nal_unit_type)) { + // There is no next NAL unit, so we assume that this one ends the current 'access unit': + thisNALUnitEndsAccessUnit = True; + } else if (usuallyBeginsAccessUnit(nal_unit_type)) { + // These NAL units usually *begin* an access unit, so assume that they don't end one here: + thisNALUnitEndsAccessUnit = False; + } else { + // We need to check the *next* NAL unit to figure out whether + // the current NAL unit ends an 'access unit': + u_int8_t firstBytesOfNextNALUnit[3]; + testBytes(firstBytesOfNextNALUnit, 3); + + u_int8_t const& next_nal_unit_type = fHNumber == 264 + ? (firstBytesOfNextNALUnit[0]&0x1F) : ((firstBytesOfNextNALUnit[0]&0x7E)>>1); + if (isVCL(next_nal_unit_type)) { + // The high-order bit of the byte after the "nal_unit_header" tells us whether it's + // the start of a new 'access unit' (and thus the current NAL unit ends an 'access unit'): + u_int8_t const byteAfter_nal_unit_header + = fHNumber == 264 ? 
firstBytesOfNextNALUnit[1] : firstBytesOfNextNALUnit[2]; + thisNALUnitEndsAccessUnit = (byteAfter_nal_unit_header&0x80) != 0; + } else if (usuallyBeginsAccessUnit(next_nal_unit_type)) { + // The next NAL unit's type is one that usually appears at the start of an 'access unit', + // so we assume that the current NAL unit ends an 'access unit': + thisNALUnitEndsAccessUnit = True; + } else { + // The next NAL unit definitely doesn't start a new 'access unit', + // which means that the current NAL unit doesn't end one: + thisNALUnitEndsAccessUnit = False; + } + } + + if (thisNALUnitEndsAccessUnit) { +#ifdef DEBUG + fprintf(stderr, "*****This NAL unit ends the current access unit*****\n"); +#endif + usingSource()->fPictureEndMarker = True; + ++usingSource()->fPictureCount; + + // Note that the presentation time for the next NAL unit will be different: + struct timeval& nextPT = usingSource()->fNextPresentationTime; // alias + nextPT = usingSource()->fPresentationTime; + double nextFraction = nextPT.tv_usec/1000000.0 + 1/usingSource()->fFrameRate; + unsigned nextSecsIncrement = (long)nextFraction; + nextPT.tv_sec += (long)nextSecsIncrement; + nextPT.tv_usec = (long)((nextFraction - nextSecsIncrement)*1000000); + } + setParseState(); + + return curFrameSize(); + } catch (int /*e*/) { +#ifdef DEBUG + fprintf(stderr, "H264or5VideoStreamParser::parse() EXCEPTION (This is normal behavior - *not* an error)\n"); +#endif + return 0; // the parsing got interrupted + } +} + +unsigned removeH264or5EmulationBytes(u_int8_t* to, unsigned toMaxSize, + u_int8_t* from, unsigned fromSize) { + unsigned toSize = 0; + unsigned i = 0; + while (i < fromSize && toSize+1 < toMaxSize) { + if (i+2 < fromSize && from[i] == 0 && from[i+1] == 0 && from[i+2] == 3) { + to[toSize] = to[toSize+1] = 0; + toSize += 2; + i += 3; + } else { + to[toSize] = from[i]; + toSize += 1; + i += 1; + } + } + + return toSize; +} diff --git a/AnyCore/lib_rtsp/liveMedia/H265VideoFileServerMediaSubsession.cpp 
b/AnyCore/lib_rtsp/liveMedia/H265VideoFileServerMediaSubsession.cpp new file mode 100644 index 0000000..62a3bdd --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H265VideoFileServerMediaSubsession.cpp @@ -0,0 +1,120 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a H265 video file. 
+// Implementation + +#include "H265VideoFileServerMediaSubsession.hh" +#include "H265VideoRTPSink.hh" +#include "ByteStreamFileSource.hh" +#include "H265VideoStreamFramer.hh" + +H265VideoFileServerMediaSubsession* +H265VideoFileServerMediaSubsession::createNew(UsageEnvironment& env, + char const* fileName, + Boolean reuseFirstSource) { + return new H265VideoFileServerMediaSubsession(env, fileName, reuseFirstSource); +} + +H265VideoFileServerMediaSubsession::H265VideoFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, Boolean reuseFirstSource) + : FileServerMediaSubsession(env, fileName, reuseFirstSource), + fAuxSDPLine(NULL), fDoneFlag(0), fDummyRTPSink(NULL) { +} + +H265VideoFileServerMediaSubsession::~H265VideoFileServerMediaSubsession() { + delete[] fAuxSDPLine; +} + +static void afterPlayingDummy(void* clientData) { + H265VideoFileServerMediaSubsession* subsess = (H265VideoFileServerMediaSubsession*)clientData; + subsess->afterPlayingDummy1(); +} + +void H265VideoFileServerMediaSubsession::afterPlayingDummy1() { + // Unschedule any pending 'checking' task: + envir().taskScheduler().unscheduleDelayedTask(nextTask()); + // Signal the event loop that we're done: + setDoneFlag(); +} + +static void checkForAuxSDPLine(void* clientData) { + H265VideoFileServerMediaSubsession* subsess = (H265VideoFileServerMediaSubsession*)clientData; + subsess->checkForAuxSDPLine1(); +} + +void H265VideoFileServerMediaSubsession::checkForAuxSDPLine1() { + char const* dasl; + + if (fAuxSDPLine != NULL) { + // Signal the event loop that we're done: + setDoneFlag(); + } else if (fDummyRTPSink != NULL && (dasl = fDummyRTPSink->auxSDPLine()) != NULL) { + fAuxSDPLine = strDup(dasl); + fDummyRTPSink = NULL; + + // Signal the event loop that we're done: + setDoneFlag(); + } else if (!fDoneFlag) { + // try again after a brief delay: + int uSecsToDelay = 100000; // 100 ms + nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay, + 
(TaskFunc*)checkForAuxSDPLine, this); + } +} + +char const* H265VideoFileServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) { + if (fAuxSDPLine != NULL) return fAuxSDPLine; // it's already been set up (for a previous client) + + if (fDummyRTPSink == NULL) { // we're not already setting it up for another, concurrent stream + // Note: For H265 video files, the 'config' information (used for several payload-format + // specific parameters in the SDP description) isn't known until we start reading the file. + // This means that "rtpSink"s "auxSDPLine()" will be NULL initially, + // and we need to start reading data from our file until this changes. + fDummyRTPSink = rtpSink; + + // Start reading the file: + fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this); + + // Check whether the sink's 'auxSDPLine()' is ready: + checkForAuxSDPLine(this); + } + + envir().taskScheduler().doEventLoop(&fDoneFlag); + + return fAuxSDPLine; +} + +FramedSource* H265VideoFileServerMediaSubsession::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) { + estBitrate = 500; // kbps, estimate + + // Create the video source: + ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(envir(), fFileName); + if (fileSource == NULL) return NULL; + fFileSize = fileSource->fileSize(); + + // Create a framer for the Video Elementary Stream: + return H265VideoStreamFramer::createNew(envir(), fileSource); +} + +RTPSink* H265VideoFileServerMediaSubsession +::createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* /*inputSource*/) { + return H265VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic); +} diff --git a/AnyCore/lib_rtsp/liveMedia/H265VideoFileSink.cpp b/AnyCore/lib_rtsp/liveMedia/H265VideoFileSink.cpp new file mode 100644 index 0000000..1f01fcc --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H265VideoFileSink.cpp @@ -0,0 +1,63 @@ +/********** +This library is 
free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// H.265 Video File sinks +// Implementation + +#include "H265VideoFileSink.hh" +#include "OutputFile.hh" + +////////// H265VideoFileSink ////////// + +H265VideoFileSink +::H265VideoFileSink(UsageEnvironment& env, FILE* fid, + char const* sPropVPSStr, + char const* sPropSPSStr, + char const* sPropPPSStr, + unsigned bufferSize, char const* perFrameFileNamePrefix) + : H264or5VideoFileSink(env, fid, bufferSize, perFrameFileNamePrefix, + sPropVPSStr, sPropSPSStr, sPropPPSStr) { +} + +H265VideoFileSink::~H265VideoFileSink() { +} + +H265VideoFileSink* +H265VideoFileSink::createNew(UsageEnvironment& env, char const* fileName, + char const* sPropVPSStr, + char const* sPropSPSStr, + char const* sPropPPSStr, + unsigned bufferSize, Boolean oneFilePerFrame) { + do { + FILE* fid; + char const* perFrameFileNamePrefix; + if (oneFilePerFrame) { + // Create the fid for each frame + fid = NULL; + perFrameFileNamePrefix = fileName; + } else { + // Normal case: create the fid once + fid = OpenOutputFile(env, fileName); + if (fid == NULL) break; + perFrameFileNamePrefix = NULL; + } + + return new H265VideoFileSink(env, fid, sPropVPSStr, sPropSPSStr, sPropPPSStr, bufferSize, 
perFrameFileNamePrefix); + } while (0); + + return NULL; +} diff --git a/AnyCore/lib_rtsp/liveMedia/H265VideoRTPSink.cpp b/AnyCore/lib_rtsp/liveMedia/H265VideoRTPSink.cpp new file mode 100644 index 0000000..a621302 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H265VideoRTPSink.cpp @@ -0,0 +1,182 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP sink for H.265 video +// Implementation + +#include "H265VideoRTPSink.hh" +#include "H265VideoStreamFramer.hh" +#include "Base64.hh" +#include "BitVector.hh" +#include "H264VideoRTPSource.hh" // for "parseSPropParameterSets()" + +////////// H265VideoRTPSink implementation ////////// + +H265VideoRTPSink +::H265VideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, + u_int8_t const* vps, unsigned vpsSize, + u_int8_t const* sps, unsigned spsSize, + u_int8_t const* pps, unsigned ppsSize) + : H264or5VideoRTPSink(265, env, RTPgs, rtpPayloadFormat, + vps, vpsSize, sps, spsSize, pps, ppsSize) { +} + +H265VideoRTPSink::~H265VideoRTPSink() { +} + +H265VideoRTPSink* H265VideoRTPSink +::createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat) { + return new H265VideoRTPSink(env, RTPgs, rtpPayloadFormat); +} + +H265VideoRTPSink* H265VideoRTPSink +::createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, + u_int8_t const* vps, unsigned vpsSize, + u_int8_t const* sps, unsigned spsSize, + u_int8_t const* pps, unsigned ppsSize) { + return new H265VideoRTPSink(env, RTPgs, rtpPayloadFormat, + vps, vpsSize, sps, spsSize, pps, ppsSize); +} + +H265VideoRTPSink* H265VideoRTPSink +::createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, + char const* sPropVPSStr, char const* sPropSPSStr, char const* sPropPPSStr) { + u_int8_t* vps = NULL; unsigned vpsSize = 0; + u_int8_t* sps = NULL; unsigned spsSize = 0; + u_int8_t* pps = NULL; unsigned ppsSize = 0; + + // Parse each 'sProp' string, extracting and then classifying the NAL unit(s) from each one. + // We're 'liberal in what we accept'; it's OK if the strings don't contain the NAL unit type + // implied by their names (or if one or more of the strings encode multiple NAL units). 
+ SPropRecord* sPropRecords[3]; + unsigned numSPropRecords[3]; + sPropRecords[0] = parseSPropParameterSets(sPropVPSStr, numSPropRecords[0]); + sPropRecords[1] = parseSPropParameterSets(sPropSPSStr, numSPropRecords[1]); + sPropRecords[2] = parseSPropParameterSets(sPropPPSStr, numSPropRecords[2]); + + for (unsigned j = 0; j < 3; ++j) { + SPropRecord* records = sPropRecords[j]; + unsigned numRecords = numSPropRecords[j]; + + for (unsigned i = 0; i < numRecords; ++i) { + if (records[i].sPropLength == 0) continue; // bad data + u_int8_t nal_unit_type = ((records[i].sPropBytes[0])&0x7E)>>1; + if (nal_unit_type == 32/*VPS*/) { + vps = records[i].sPropBytes; + vpsSize = records[i].sPropLength; + } else if (nal_unit_type == 33/*SPS*/) { + sps = records[i].sPropBytes; + spsSize = records[i].sPropLength; + } else if (nal_unit_type == 34/*PPS*/) { + pps = records[i].sPropBytes; + ppsSize = records[i].sPropLength; + } + } + } + + H265VideoRTPSink* result = new H265VideoRTPSink(env, RTPgs, rtpPayloadFormat, + vps, vpsSize, sps, spsSize, pps, ppsSize); + delete[] sPropRecords[0]; delete[] sPropRecords[1]; delete[] sPropRecords[2]; + + return result; +} + +Boolean H265VideoRTPSink::sourceIsCompatibleWithUs(MediaSource& source) { + // Our source must be an appropriate framer: + return source.isH265VideoStreamFramer(); +} + +char const* H265VideoRTPSink::auxSDPLine() { + // Generate a new "a=fmtp:" line each time, using our VPS, SPS and PPS (if we have them), + // otherwise parameters from our framer source (in case they've changed since the last time that + // we were called): + H264or5VideoStreamFramer* framerSource = NULL; + u_int8_t* vps = fVPS; unsigned vpsSize = fVPSSize; + u_int8_t* sps = fSPS; unsigned spsSize = fSPSSize; + u_int8_t* pps = fPPS; unsigned ppsSize = fPPSSize; + if (vps == NULL || sps == NULL || pps == NULL) { + // We need to get VPS, SPS and PPS from our framer source: + if (fOurFragmenter == NULL) return NULL; // we don't yet have a fragmenter (and therefore 
not a source) + framerSource = (H264or5VideoStreamFramer*)(fOurFragmenter->inputSource()); + if (framerSource == NULL) return NULL; // we don't yet have a source + + framerSource->getVPSandSPSandPPS(vps, vpsSize, sps, spsSize, pps, ppsSize); + if (vps == NULL || sps == NULL || pps == NULL) { + return NULL; // our source isn't ready + } + } + + // Set up the "a=fmtp:" SDP line for this stream. + u_int8_t* vpsWEB = new u_int8_t[vpsSize]; // "WEB" means "Without Emulation Bytes" + unsigned vpsWEBSize = removeH264or5EmulationBytes(vpsWEB, vpsSize, vps, vpsSize); + if (vpsWEBSize < 6/*'profile_tier_level' offset*/ + 12/*num 'profile_tier_level' bytes*/) { + // Bad VPS size => assume our source isn't ready + delete[] vpsWEB; + return NULL; + } + u_int8_t const* profileTierLevelHeaderBytes = &vpsWEB[6]; + unsigned profileSpace = profileTierLevelHeaderBytes[0]>>6; // general_profile_space + unsigned profileId = profileTierLevelHeaderBytes[0]&0x1F; // general_profile_idc + unsigned tierFlag = (profileTierLevelHeaderBytes[0]>>5)&0x1; // general_tier_flag + unsigned levelId = profileTierLevelHeaderBytes[11]; // general_level_idc + u_int8_t const* interop_constraints = &profileTierLevelHeaderBytes[5]; + char interopConstraintsStr[100]; + sprintf(interopConstraintsStr, "%02X%02X%02X%02X%02X%02X", + interop_constraints[0], interop_constraints[1], interop_constraints[2], + interop_constraints[3], interop_constraints[4], interop_constraints[5]); + delete[] vpsWEB; + + char* sprop_vps = base64Encode((char*)vps, vpsSize); + char* sprop_sps = base64Encode((char*)sps, spsSize); + char* sprop_pps = base64Encode((char*)pps, ppsSize); + + char const* fmtpFmt = + "a=fmtp:%d profile-space=%u" + ";profile-id=%u" + ";tier-flag=%u" + ";level-id=%u" + ";interop-constraints=%s" + ";sprop-vps=%s" + ";sprop-sps=%s" + ";sprop-pps=%s\r\n"; + unsigned fmtpFmtSize = strlen(fmtpFmt) + + 3 /* max num chars: rtpPayloadType */ + 20 /* max num chars: profile_space */ + + 20 /* max num chars: profile_id */ 
+ + 20 /* max num chars: tier_flag */ + + 20 /* max num chars: level_id */ + + strlen(interopConstraintsStr) + + strlen(sprop_vps) + + strlen(sprop_sps) + + strlen(sprop_pps); + char* fmtp = new char[fmtpFmtSize]; + sprintf(fmtp, fmtpFmt, + rtpPayloadType(), profileSpace, + profileId, + tierFlag, + levelId, + interopConstraintsStr, + sprop_vps, + sprop_sps, + sprop_pps); + + delete[] sprop_vps; + delete[] sprop_sps; + delete[] sprop_pps; + + delete[] fFmtpSDPLine; fFmtpSDPLine = fmtp; + return fFmtpSDPLine; +} diff --git a/AnyCore/lib_rtsp/liveMedia/H265VideoRTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/H265VideoRTPSource.cpp new file mode 100644 index 0000000..d6a497c --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H265VideoRTPSource.cpp @@ -0,0 +1,218 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// H.265 Video RTP Sources +// Implementation + +#include "H265VideoRTPSource.hh" + +////////// H265BufferedPacket and H265BufferedPacketFactory ////////// + +class H265BufferedPacket: public BufferedPacket { +public: + H265BufferedPacket(H265VideoRTPSource& ourSource); + virtual ~H265BufferedPacket(); + +private: // redefined virtual functions + virtual unsigned nextEnclosedFrameSize(unsigned char*& framePtr, + unsigned dataSize); +private: + H265VideoRTPSource& fOurSource; +}; + +class H265BufferedPacketFactory: public BufferedPacketFactory { +private: // redefined virtual functions + virtual BufferedPacket* createNewPacket(MultiFramedRTPSource* ourSource); +}; + + +///////// H265VideoRTPSource implementation //////// + +H265VideoRTPSource* +H265VideoRTPSource::createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + Boolean expectDONFields, + unsigned rtpTimestampFrequency) { + return new H265VideoRTPSource(env, RTPgs, rtpPayloadFormat, + expectDONFields, rtpTimestampFrequency); +} + +H265VideoRTPSource +::H265VideoRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + Boolean expectDONFields, + unsigned rtpTimestampFrequency) + : MultiFramedRTPSource(env, RTPgs, rtpPayloadFormat, rtpTimestampFrequency, + new H265BufferedPacketFactory), + fExpectDONFields(expectDONFields), + fPreviousNALUnitDON(0), fCurrentNALUnitAbsDon((u_int64_t)(~0)) { +} + +H265VideoRTPSource::~H265VideoRTPSource() { +} + +Boolean H265VideoRTPSource +::processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize) { + unsigned char* headerStart = packet->data(); + unsigned packetSize = packet->dataSize(); + u_int16_t DONL = 0; + unsigned numBytesToSkip; + + // Check the Payload Header's 'nal_unit_type' for special aggregation or fragmentation packets: + if (packetSize < 2) return False; + fCurPacketNALUnitType = (headerStart[0]&0x7E)>>1; + switch (fCurPacketNALUnitType) { + case 48: { // Aggregation Packet (AP) 
+ // We skip over the 2-byte Payload Header, and the DONL header (if any). + if (fExpectDONFields) { + if (packetSize < 4) return False; + DONL = (headerStart[2]<<8)|headerStart[3]; + numBytesToSkip = 4; + } else { + numBytesToSkip = 2; + } + break; + } + case 49: { // Fragmentation Unit (FU) + // This NALU begins with the 2-byte Payload Header, the 1-byte FU header, and (optionally) + // the 2-byte DONL header. + // If the start bit is set, we reconstruct the original NAL header at the end of these + // 3 (or 5) bytes, and skip over the first 1 (or 3) bytes. + if (packetSize < 3) return False; + u_int8_t startBit = headerStart[2]&0x80; // from the FU header + u_int8_t endBit = headerStart[2]&0x40; // from the FU header + if (startBit) { + fCurrentPacketBeginsFrame = True; + + u_int8_t nal_unit_type = headerStart[2]&0x3F; // the last 6 bits of the FU header + u_int8_t newNALHeader[2]; + newNALHeader[0] = (headerStart[0]&0x81)|(nal_unit_type<<1); + newNALHeader[1] = headerStart[1]; + + if (fExpectDONFields) { + if (packetSize < 5) return False; + DONL = (headerStart[3]<<8)|headerStart[4]; + headerStart[3] = newNALHeader[0]; + headerStart[4] = newNALHeader[1]; + numBytesToSkip = 3; + } else { + headerStart[1] = newNALHeader[0]; + headerStart[2] = newNALHeader[1]; + numBytesToSkip = 1; + } + } else { + // The start bit is not set, so we skip over all headers: + fCurrentPacketBeginsFrame = False; + if (fExpectDONFields) { + if (packetSize < 5) return False; + DONL = (headerStart[3]<<8)|headerStart[4]; + numBytesToSkip = 5; + } else { + numBytesToSkip = 3; + } + } + fCurrentPacketCompletesFrame = (endBit != 0); + break; + } + default: { + // This packet contains one complete NAL unit: + fCurrentPacketBeginsFrame = fCurrentPacketCompletesFrame = True; + numBytesToSkip = 0; + break; + } + } + + computeAbsDonFromDON(DONL); + resultSpecialHeaderSize = numBytesToSkip; + return True; +} + +char const* H265VideoRTPSource::MIMEtype() const { + return "video/H265"; +} + +void 
H265VideoRTPSource::computeAbsDonFromDON(u_int16_t DON) { + if (!fExpectDONFields) { + // Without DON fields in the input stream, we just increment our "AbsDon" count each time: + ++fCurrentNALUnitAbsDon; + } else { + if (fCurrentNALUnitAbsDon == (u_int64_t)(~0)) { + // This is the very first NAL unit, so "AbsDon" is just "DON": + fCurrentNALUnitAbsDon = (u_int64_t)DON; + } else { + // Use the previous NAL unit's DON and the current DON to compute "AbsDon": + // AbsDon[n] = AbsDon[n-1] + (DON[n] - DON[n-1]) mod 2^16 + short signedDiff16 = (short)(DON - fPreviousNALUnitDON); + int64_t signedDiff64 = (int64_t)signedDiff16; + fCurrentNALUnitAbsDon += signedDiff64; + } + + fPreviousNALUnitDON = DON; // for next time + } +} + + +////////// H265BufferedPacket and H265BufferedPacketFactory implementation ////////// + +H265BufferedPacket::H265BufferedPacket(H265VideoRTPSource& ourSource) + : fOurSource(ourSource) { +} + +H265BufferedPacket::~H265BufferedPacket() { +} + +unsigned H265BufferedPacket +::nextEnclosedFrameSize(unsigned char*& framePtr, unsigned dataSize) { + unsigned resultNALUSize = 0; // if an error occurs + + switch (fOurSource.fCurPacketNALUnitType) { + case 48: { // Aggregation Packet (AP) + if (useCount() > 0) { + // We're other than the first NAL unit inside this Aggregation Packet. + // Update our 'decoding order number': + u_int16_t DONL = 0; + if (fOurSource.fExpectDONFields) { + // There's a 1-byte DOND field next: + if (dataSize < 1) break; + u_int8_t DOND = framePtr[0]; + DONL = fOurSource.fPreviousNALUnitDON + (u_int16_t)(DOND + 1); + ++framePtr; + --dataSize; + } + fOurSource.computeAbsDonFromDON(DONL); + } + + // The next 2 bytes are the NAL unit size: + if (dataSize < 2) break; + resultNALUSize = (framePtr[0]<<8)|framePtr[1]; + framePtr += 2; + break; + } + default: { + // Common case: We use the entire packet data: + return dataSize; + } + } + + return (resultNALUSize <= dataSize) ? 
resultNALUSize : dataSize; +} + +BufferedPacket* H265BufferedPacketFactory +::createNewPacket(MultiFramedRTPSource* ourSource) { + return new H265BufferedPacket((H265VideoRTPSource&)(*ourSource)); +} diff --git a/AnyCore/lib_rtsp/liveMedia/H265VideoStreamDiscreteFramer.cpp b/AnyCore/lib_rtsp/liveMedia/H265VideoStreamDiscreteFramer.cpp new file mode 100644 index 0000000..3611e2b --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H265VideoStreamDiscreteFramer.cpp @@ -0,0 +1,41 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A simplified version of "H265VideoStreamFramer" that takes only complete, +// discrete frames (rather than an arbitrary byte stream) as input. +// This avoids the parsing and data copying overhead of the full +// "H265VideoStreamFramer". 
+// Implementation + +#include "H265VideoStreamDiscreteFramer.hh" + +H265VideoStreamDiscreteFramer* +H265VideoStreamDiscreteFramer::createNew(UsageEnvironment& env, FramedSource* inputSource) { + return new H265VideoStreamDiscreteFramer(env, inputSource); +} + +H265VideoStreamDiscreteFramer +::H265VideoStreamDiscreteFramer(UsageEnvironment& env, FramedSource* inputSource) + : H264or5VideoStreamDiscreteFramer(265, env, inputSource) { +} + +H265VideoStreamDiscreteFramer::~H265VideoStreamDiscreteFramer() { +} + +Boolean H265VideoStreamDiscreteFramer::isH265VideoStreamFramer() const { + return True; +} diff --git a/AnyCore/lib_rtsp/liveMedia/H265VideoStreamFramer.cpp b/AnyCore/lib_rtsp/liveMedia/H265VideoStreamFramer.cpp new file mode 100644 index 0000000..f69f0ec --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/H265VideoStreamFramer.cpp @@ -0,0 +1,38 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that breaks up a H.265 Video Elementary Stream into NAL units. 
+// Implementation + +#include "H265VideoStreamFramer.hh" + +H265VideoStreamFramer* H265VideoStreamFramer +::createNew(UsageEnvironment& env, FramedSource* inputSource, Boolean includeStartCodeInOutput) { + return new H265VideoStreamFramer(env, inputSource, True, includeStartCodeInOutput); +} + +H265VideoStreamFramer +::H265VideoStreamFramer(UsageEnvironment& env, FramedSource* inputSource, Boolean createParser, Boolean includeStartCodeInOutput) + : H264or5VideoStreamFramer(265, env, inputSource, createParser, includeStartCodeInOutput) { +} + +H265VideoStreamFramer::~H265VideoStreamFramer() { +} + +Boolean H265VideoStreamFramer::isH265VideoStreamFramer() const { + return True; +} diff --git a/AnyCore/lib_rtsp/liveMedia/InputFile.cpp b/AnyCore/lib_rtsp/liveMedia/InputFile.cpp new file mode 100644 index 0000000..96ee20c --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/InputFile.cpp @@ -0,0 +1,112 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Common routines for opening/closing named input files +// Implementation + +#include "InputFile.hh" +#include + +FILE* OpenInputFile(UsageEnvironment& env, char const* fileName) { + FILE* fid; + + // Check for a special case file name: "stdin" + if (strcmp(fileName, "stdin") == 0) { + fid = stdin; +#if (defined(__WIN32__) || defined(_WIN32)) && !defined(_WIN32_WCE) + _setmode(_fileno(stdin), _O_BINARY); // convert to binary mode +#endif + } else { + fid = fopen(fileName, "rb"); + if (fid == NULL) { + env.setResultMsg("unable to open file \"",fileName, "\""); + } + } + + return fid; +} + +void CloseInputFile(FILE* fid) { + // Don't close 'stdin', in case we want to use it again later. + if (fid != NULL && fid != stdin) fclose(fid); +} + +u_int64_t GetFileSize(char const* fileName, FILE* fid) { + u_int64_t fileSize = 0; // by default + + if (fid != stdin) { +#if !defined(_WIN32_WCE) + if (fileName == NULL) { +#endif + if (fid != NULL && SeekFile64(fid, 0, SEEK_END) >= 0) { + fileSize = (u_int64_t)TellFile64(fid); + if (fileSize == (u_int64_t)-1) fileSize = 0; // TellFile64() failed + SeekFile64(fid, 0, SEEK_SET); + } +#if !defined(_WIN32_WCE) + } else { + struct stat sb; + if (stat(fileName, &sb) == 0) { + fileSize = sb.st_size; + } + } +#endif + } + + return fileSize; +} + +int64_t SeekFile64(FILE *fid, int64_t offset, int whence) { + if (fid == NULL) return -1; + + clearerr(fid); + fflush(fid); +#if (defined(__WIN32__) || defined(_WIN32)) && !defined(_WIN32_WCE) + return _lseeki64(_fileno(fid), offset, whence) == (int64_t)-1 ? 
-1 : 0; +#else +#if defined(_WIN32_WCE) + return fseek(fid, (long)(offset), whence); +#else + return fseeko(fid, (off_t)(offset), whence); +#endif +#endif +} + +int64_t TellFile64(FILE *fid) { + if (fid == NULL) return -1; + + clearerr(fid); + fflush(fid); +#if (defined(__WIN32__) || defined(_WIN32)) && !defined(_WIN32_WCE) + return _telli64(_fileno(fid)); +#else +#if defined(_WIN32_WCE) + return ftell(fid); +#else + return ftello(fid); +#endif +#endif +} + +Boolean FileIsSeekable(FILE *fid) { + if (SeekFile64(fid, 1, SEEK_CUR) < 0) { + return False; + } + + SeekFile64(fid, -1, SEEK_CUR); // seek back to where we were + return True; +} diff --git a/AnyCore/lib_rtsp/liveMedia/JPEGVideoRTPSink.cpp b/AnyCore/lib_rtsp/liveMedia/JPEGVideoRTPSink.cpp new file mode 100644 index 0000000..60b694f --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/JPEGVideoRTPSink.cpp @@ -0,0 +1,145 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP sink for JPEG video (RFC 2435) +// Implementation + +#include "JPEGVideoRTPSink.hh" +#include "JPEGVideoSource.hh" + +JPEGVideoRTPSink +::JPEGVideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs) + : VideoRTPSink(env, RTPgs, 26, 90000, "JPEG") { +} + +JPEGVideoRTPSink::~JPEGVideoRTPSink() { +} + +JPEGVideoRTPSink* +JPEGVideoRTPSink::createNew(UsageEnvironment& env, Groupsock* RTPgs) { + return new JPEGVideoRTPSink(env, RTPgs); +} + +Boolean JPEGVideoRTPSink::sourceIsCompatibleWithUs(MediaSource& source) { + return source.isJPEGVideoSource(); +} + +Boolean JPEGVideoRTPSink +::frameCanAppearAfterPacketStart(unsigned char const* /*frameStart*/, + unsigned /*numBytesInFrame*/) const { + // A packet can contain only one frame + return False; +} + +void JPEGVideoRTPSink +::doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* /*frameStart*/, + unsigned /*numBytesInFrame*/, + struct timeval framePresentationTime, + unsigned numRemainingBytes) { + // Our source is known to be a JPEGVideoSource + JPEGVideoSource* source = (JPEGVideoSource*)fSource; + if (source == NULL) return; // sanity check + + u_int8_t mainJPEGHeader[8]; // the special header + u_int8_t const type = source->type(); + + mainJPEGHeader[0] = 0; // Type-specific + mainJPEGHeader[1] = fragmentationOffset >> 16; + mainJPEGHeader[2] = fragmentationOffset >> 8; + mainJPEGHeader[3] = fragmentationOffset; + mainJPEGHeader[4] = type; + mainJPEGHeader[5] = source->qFactor(); + mainJPEGHeader[6] = source->width(); + mainJPEGHeader[7] = source->height(); + setSpecialHeaderBytes(mainJPEGHeader, sizeof mainJPEGHeader); + + unsigned restartMarkerHeaderSize = 0; // by default + if (type >= 64 && type <= 127) { + // There is also a Restart Marker Header: + restartMarkerHeaderSize = 4; + u_int16_t const restartInterval = source->restartInterval(); // should be non-zero + + u_int8_t restartMarkerHeader[4]; + restartMarkerHeader[0] = restartInterval>>8; + restartMarkerHeader[1] = restartInterval&0xFF; 
+ restartMarkerHeader[2] = restartMarkerHeader[3] = 0xFF; // F=L=1; Restart Count = 0x3FFF + + setSpecialHeaderBytes(restartMarkerHeader, restartMarkerHeaderSize, + sizeof mainJPEGHeader/* start position */); + } + + if (fragmentationOffset == 0 && source->qFactor() >= 128) { + // There is also a Quantization Header: + u_int8_t precision; + u_int16_t length; + u_int8_t const* quantizationTables + = source->quantizationTables(precision, length); + + unsigned const quantizationHeaderSize = 4 + length; + u_int8_t* quantizationHeader = new u_int8_t[quantizationHeaderSize]; + + quantizationHeader[0] = 0; // MBZ + quantizationHeader[1] = precision; + quantizationHeader[2] = length >> 8; + quantizationHeader[3] = length&0xFF; + if (quantizationTables != NULL) { // sanity check + for (u_int16_t i = 0; i < length; ++i) { + quantizationHeader[4+i] = quantizationTables[i]; + } + } + + setSpecialHeaderBytes(quantizationHeader, quantizationHeaderSize, + sizeof mainJPEGHeader + restartMarkerHeaderSize/* start position */); + delete[] quantizationHeader; + } + + if (numRemainingBytes == 0) { + // This packet contains the last (or only) fragment of the frame. 
+ // Set the RTP 'M' ('marker') bit: + setMarkerBit(); + } + + // Also set the RTP timestamp: + setTimestamp(framePresentationTime); +} + + +unsigned JPEGVideoRTPSink::specialHeaderSize() const { + // Our source is known to be a JPEGVideoSource + JPEGVideoSource* source = (JPEGVideoSource*)fSource; + if (source == NULL) return 0; // sanity check + + unsigned headerSize = 8; // by default + + u_int8_t const type = source->type(); + if (type >= 64 && type <= 127) { + // There is also a Restart Marker Header: + headerSize += 4; + } + + if (curFragmentationOffset() == 0 && source->qFactor() >= 128) { + // There is also a Quantization Header: + u_int8_t dummy; + u_int16_t quantizationTablesSize; + (void)(source->quantizationTables(dummy, quantizationTablesSize)); + + headerSize += 4 + quantizationTablesSize; + } + + return headerSize; +} diff --git a/AnyCore/lib_rtsp/liveMedia/JPEGVideoRTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/JPEGVideoRTPSource.cpp new file mode 100644 index 0000000..8b4dde3 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/JPEGVideoRTPSource.cpp @@ -0,0 +1,465 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// JPEG Video (RFC 2435) RTP Sources +// Implementation + +#include "JPEGVideoRTPSource.hh" + +////////// JPEGBufferedPacket and JPEGBufferedPacketFactory ////////// + +class JPEGBufferedPacket: public BufferedPacket { +public: + Boolean completesFrame; + +private: + // Redefined virtual functions: + virtual void reset(); + virtual unsigned nextEnclosedFrameSize(unsigned char*& framePtr, + unsigned dataSize); +}; + +class JPEGBufferedPacketFactory: public BufferedPacketFactory { +private: // redefined virtual functions + virtual BufferedPacket* createNewPacket(MultiFramedRTPSource* ourSource); +}; + + +////////// JPEGVideoRTPSource implementation ////////// + +#define BYTE unsigned char +#define WORD unsigned +#define DWORD unsigned long + +JPEGVideoRTPSource* +JPEGVideoRTPSource::createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency, + unsigned defaultWidth, unsigned defaultHeight) { + return new JPEGVideoRTPSource(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency, defaultWidth, defaultHeight); +} + +JPEGVideoRTPSource::JPEGVideoRTPSource(UsageEnvironment& env, + Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency, + unsigned defaultWidth, unsigned defaultHeight) + : MultiFramedRTPSource(env, RTPgs, + rtpPayloadFormat, rtpTimestampFrequency, + new JPEGBufferedPacketFactory), + fDefaultWidth(defaultWidth), fDefaultHeight(defaultHeight) { +} + +JPEGVideoRTPSource::~JPEGVideoRTPSource() { +} + +enum { + MARKER_SOF0 = 0xc0, // start-of-frame, baseline scan + MARKER_SOI = 0xd8, // start of image + MARKER_EOI = 0xd9, // end of image + MARKER_SOS = 0xda, // start of scan + MARKER_DRI = 0xdd, // restart interval + MARKER_DQT = 0xdb, // define quantization tables + MARKER_DHT = 0xc4, // huffman tables + MARKER_APP_FIRST = 0xe0, + MARKER_APP_LAST = 0xef, + MARKER_COMMENT = 0xfe, +}; + +static unsigned char const lum_dc_codelens[] = { + 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 
0, 0, 0, +}; + +static unsigned char const lum_dc_symbols[] = { + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, +}; + +static unsigned char const lum_ac_codelens[] = { + 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d, +}; + +static unsigned char const lum_ac_symbols[] = { + 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12, + 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07, + 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08, + 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0, + 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16, + 0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28, + 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, + 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, + 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, + 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, + 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, + 0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, + 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, + 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, + 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, + 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5, + 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4, + 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2, + 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, + 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, + 0xf9, 0xfa, +}; + +static unsigned char const chm_dc_codelens[] = { + 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, +}; + +static unsigned char const chm_dc_symbols[] = { + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, +}; + +static unsigned char const chm_ac_codelens[] = { + 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77, +}; + +static unsigned char const chm_ac_symbols[] = { + 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21, + 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71, + 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91, + 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0, + 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34, + 0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26, + 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38, + 
0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, + 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, + 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, + 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, + 0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, + 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, + 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, + 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, + 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, + 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, + 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, + 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, + 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, + 0xf9, 0xfa, +}; + +static void createHuffmanHeader(unsigned char*& p, + unsigned char const* codelens, + int ncodes, + unsigned char const* symbols, + int nsymbols, + int tableNo, int tableClass) { + *p++ = 0xff; *p++ = MARKER_DHT; + *p++ = 0; /* length msb */ + *p++ = 3 + ncodes + nsymbols; /* length lsb */ + *p++ = (tableClass << 4) | tableNo; + memcpy(p, codelens, ncodes); + p += ncodes; + memcpy(p, symbols, nsymbols); + p += nsymbols; +} + +static unsigned computeJPEGHeaderSize(unsigned qtlen, unsigned dri) { + unsigned qtlen_half = qtlen/2; // in case qtlen is odd; shouldn't happen + qtlen = qtlen_half*2; + + unsigned numQtables = qtlen > 64 ? 2 : 1; + return 485 + numQtables*5 + qtlen + (dri > 0 ? 6 : 0); +} + +static void createJPEGHeader(unsigned char* buf, unsigned type, + unsigned w, unsigned h, + unsigned char const* qtables, unsigned qtlen, + unsigned dri) { + unsigned char *ptr = buf; + unsigned numQtables = qtlen > 64 ? 
2 : 1; + + // MARKER_SOI: + *ptr++ = 0xFF; *ptr++ = MARKER_SOI; + + // MARKER_APP_FIRST: + *ptr++ = 0xFF; *ptr++ = MARKER_APP_FIRST; + *ptr++ = 0x00; *ptr++ = 0x10; // size of chunk + *ptr++ = 'J'; *ptr++ = 'F'; *ptr++ = 'I'; *ptr++ = 'F'; *ptr++ = 0x00; + *ptr++ = 0x01; *ptr++ = 0x01; // JFIF format version (1.1) + *ptr++ = 0x00; // no units + *ptr++ = 0x00; *ptr++ = 0x01; // Horizontal pixel aspect ratio + *ptr++ = 0x00; *ptr++ = 0x01; // Vertical pixel aspect ratio + *ptr++ = 0x00; *ptr++ = 0x00; // no thumbnail + + // MARKER_DRI: + if (dri > 0) { + *ptr++ = 0xFF; *ptr++ = MARKER_DRI; + *ptr++ = 0x00; *ptr++ = 0x04; // size of chunk + *ptr++ = (BYTE)(dri >> 8); *ptr++ = (BYTE)(dri); // restart interval + } + + // MARKER_DQT (luma): + unsigned tableSize = numQtables == 1 ? qtlen : qtlen/2; + *ptr++ = 0xFF; *ptr++ = MARKER_DQT; + *ptr++ = 0x00; *ptr++ = tableSize + 3; // size of chunk + *ptr++ = 0x00; // precision(0), table id(0) + memcpy(ptr, qtables, tableSize); + qtables += tableSize; + ptr += tableSize; + + if (numQtables > 1) { + unsigned tableSize = qtlen - qtlen/2; + // MARKER_DQT (chroma): + *ptr++ = 0xFF; *ptr++ = MARKER_DQT; + *ptr++ = 0x00; *ptr++ = tableSize + 3; // size of chunk + *ptr++ = 0x01; // precision(0), table id(1) + memcpy(ptr, qtables, tableSize); + qtables += tableSize; + ptr += tableSize; + } + + // MARKER_SOF0: + *ptr++ = 0xFF; *ptr++ = MARKER_SOF0; + *ptr++ = 0x00; *ptr++ = 0x11; // size of chunk + *ptr++ = 0x08; // sample precision + *ptr++ = (BYTE)(h >> 8); + *ptr++ = (BYTE)(h); // number of lines (must be a multiple of 8) + *ptr++ = (BYTE)(w >> 8); + *ptr++ = (BYTE)(w); // number of columns (must be a multiple of 8) + *ptr++ = 0x03; // number of components + *ptr++ = 0x01; // id of component + *ptr++ = type ? 0x22 : 0x21; // sampling ratio (h,v) + *ptr++ = 0x00; // quant table id + *ptr++ = 0x02; // id of component + *ptr++ = 0x11; // sampling ratio (h,v) + *ptr++ = numQtables == 1 ? 
0x00 : 0x01; // quant table id + *ptr++ = 0x03; // id of component + *ptr++ = 0x11; // sampling ratio (h,v) + *ptr++ = numQtables == 1 ? 0x00 : 0x01; // quant table id + + createHuffmanHeader(ptr, lum_dc_codelens, sizeof lum_dc_codelens, + lum_dc_symbols, sizeof lum_dc_symbols, 0, 0); + createHuffmanHeader(ptr, lum_ac_codelens, sizeof lum_ac_codelens, + lum_ac_symbols, sizeof lum_ac_symbols, 0, 1); + createHuffmanHeader(ptr, chm_dc_codelens, sizeof chm_dc_codelens, + chm_dc_symbols, sizeof chm_dc_symbols, 1, 0); + createHuffmanHeader(ptr, chm_ac_codelens, sizeof chm_ac_codelens, + chm_ac_symbols, sizeof chm_ac_symbols, 1, 1); + + // MARKER_SOS: + *ptr++ = 0xFF; *ptr++ = MARKER_SOS; + *ptr++ = 0x00; *ptr++ = 0x0C; // size of chunk + *ptr++ = 0x03; // number of components + *ptr++ = 0x01; // id of component + *ptr++ = 0x00; // huffman table id (DC, AC) + *ptr++ = 0x02; // id of component + *ptr++ = 0x11; // huffman table id (DC, AC) + *ptr++ = 0x03; // id of component + *ptr++ = 0x11; // huffman table id (DC, AC) + *ptr++ = 0x00; // start of spectral + *ptr++ = 0x3F; // end of spectral + *ptr++ = 0x00; // successive approximation bit position (high, low) +} + +// The default 'luma' and 'chroma' quantizer tables, in zigzag order: +static unsigned char const defaultQuantizers[128] = { + // luma table: + 16, 11, 12, 14, 12, 10, 16, 14, + 13, 14, 18, 17, 16, 19, 24, 40, + 26, 24, 22, 22, 24, 49, 35, 37, + 29, 40, 58, 51, 61, 60, 57, 51, + 56, 55, 64, 72, 92, 78, 64, 68, + 87, 69, 55, 56, 80, 109, 81, 87, + 95, 98, 103, 104, 103, 62, 77, 113, + 121, 112, 100, 120, 92, 101, 103, 99, + // chroma table: + 17, 18, 18, 24, 21, 24, 47, 26, + 26, 47, 99, 66, 56, 66, 99, 99, + 99, 99, 99, 99, 99, 99, 99, 99, + 99, 99, 99, 99, 99, 99, 99, 99, + 99, 99, 99, 99, 99, 99, 99, 99, + 99, 99, 99, 99, 99, 99, 99, 99, + 99, 99, 99, 99, 99, 99, 99, 99, + 99, 99, 99, 99, 99, 99, 99, 99 +}; + +static void makeDefaultQtables(unsigned char* resultTables, unsigned Q) { + int factor = Q; + int q; 
+ + if (Q < 1) factor = 1; + else if (Q > 99) factor = 99; + + if (Q < 50) { + q = 5000 / factor; + } else { + q = 200 - factor*2; + } + + for (int i = 0; i < 128; ++i) { + int newVal = (defaultQuantizers[i]*q + 50)/100; + if (newVal < 1) newVal = 1; + else if (newVal > 255) newVal = 255; + resultTables[i] = newVal; + } +} + +Boolean JPEGVideoRTPSource +::processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize) { + unsigned char* headerStart = packet->data(); + unsigned packetSize = packet->dataSize(); + + unsigned char* qtables = NULL; + unsigned qtlen = 0; + unsigned dri = 0; + + // There's at least 8-byte video-specific header + /* +0 1 2 3 +0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 ++-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +| Type-specific | Fragment Offset | ++-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +| Type | Q | Width | Height | ++-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + */ + if (packetSize < 8) return False; + + resultSpecialHeaderSize = 8; + + unsigned Offset = (unsigned)((DWORD)headerStart[1] << 16 | (DWORD)headerStart[2] << 8 | (DWORD)headerStart[3]); + unsigned Type = (unsigned)headerStart[4]; + unsigned type = Type & 1; + unsigned Q = (unsigned)headerStart[5]; + unsigned width = (unsigned)headerStart[6] * 8; + unsigned height = (unsigned)headerStart[7] * 8; + if ((width == 0 || height == 0) && fDefaultWidth != 0 && fDefaultHeight != 0) { + // Use the default width and height parameters instead: + width = fDefaultWidth; + height = fDefaultHeight; + } + if (width == 0) width = 256*8; // special case + if (height == 0) height = 256*8; // special case + + if (Type > 63) { + // Restart Marker header present + /* +0 1 2 3 +0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 ++-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +| Restart Interval |F|L| Restart Count | 
++-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + */ + if (packetSize < resultSpecialHeaderSize + 4) return False; + + unsigned RestartInterval = (unsigned)((WORD)headerStart[resultSpecialHeaderSize] << 8 | (WORD)headerStart[resultSpecialHeaderSize + 1]); + dri = RestartInterval; + resultSpecialHeaderSize += 4; + } + + if (Offset == 0) { + if (Q > 127) { + // Quantization Table header present +/* +0 1 2 3 +0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 ++-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +| MBZ | Precision | Length | ++-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +| Quantization Table Data | +| ... | ++-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +*/ + if (packetSize < resultSpecialHeaderSize + 4) return False; + + unsigned MBZ = (unsigned)headerStart[resultSpecialHeaderSize]; + if (MBZ == 0) { + // unsigned Precision = (unsigned)headerStart[resultSpecialHeaderSize + 1]; + unsigned Length = (unsigned)((WORD)headerStart[resultSpecialHeaderSize + 2] << 8 | (WORD)headerStart[resultSpecialHeaderSize + 3]); + + //ASSERT(Length == 128); + + resultSpecialHeaderSize += 4; + + if (packetSize < resultSpecialHeaderSize + Length) return False; + + qtlen = Length; + qtables = &headerStart[resultSpecialHeaderSize]; + + resultSpecialHeaderSize += Length; + } + } + } + + // If this is the first (or only) fragment of a JPEG frame, then we need + // to synthesize a JPEG header, and prepend it to the incoming data. + // Hack: We can do this because we allowed space for it in + // our special "JPEGBufferedPacket" subclass. We also adjust + // "resultSpecialHeaderSize" to compensate for this, by subtracting + // the size of the synthesized header. Note that this will cause + // "resultSpecialHeaderSize" to become negative, but the code that called + // us (in "MultiFramedRTPSource") will handle this properly. 
+ if (Offset == 0) { + unsigned char newQtables[128]; + if (qtlen == 0) { + // A quantization table was not present in the RTP JPEG header, + // so use the default tables, scaled according to the "Q" factor: + makeDefaultQtables(newQtables, Q); + qtables = newQtables; + qtlen = sizeof newQtables; + } + + unsigned hdrlen = computeJPEGHeaderSize(qtlen, dri); + resultSpecialHeaderSize -= hdrlen; // goes negative + headerStart += (int)resultSpecialHeaderSize; // goes backward + createJPEGHeader(headerStart, type, width, height, qtables, qtlen, dri); + } + + fCurrentPacketBeginsFrame = (Offset == 0); + + // The RTP "M" (marker) bit indicates the last fragment of a frame: + ((JPEGBufferedPacket*)packet)->completesFrame + = fCurrentPacketCompletesFrame = packet->rtpMarkerBit(); + + return True; +} + +char const* JPEGVideoRTPSource::MIMEtype() const { + return "video/JPEG"; +} + +////////// JPEGBufferedPacket and JPEGBufferedPacketFactory implementation + +void JPEGBufferedPacket::reset() { + BufferedPacket::reset(); + + // Move our "fHead" and "fTail" forward, to allow space for a synthesized + // JPEG header to precede the RTP data that comes in over the network. + unsigned offset = MAX_JPEG_HEADER_SIZE; + if (offset > fPacketSize) offset = fPacketSize; // shouldn't happen + fHead = fTail = offset; +} + +unsigned JPEGBufferedPacket +::nextEnclosedFrameSize(unsigned char*& framePtr, unsigned dataSize) { + // Normally, the enclosed frame size is just "dataSize". 
If, however, + // the frame does not end with the "EOI" marker, then add this now: + if (completesFrame && dataSize >= 2 && + !(framePtr[dataSize-2] == 0xFF && framePtr[dataSize-1] == MARKER_EOI)) { + framePtr[dataSize++] = 0xFF; + framePtr[dataSize++] = MARKER_EOI; + } + return dataSize; +} + +BufferedPacket* JPEGBufferedPacketFactory +::createNewPacket(MultiFramedRTPSource* /*ourSource*/) { + return new JPEGBufferedPacket; +} diff --git a/AnyCore/lib_rtsp/liveMedia/JPEGVideoSource.cpp b/AnyCore/lib_rtsp/liveMedia/JPEGVideoSource.cpp new file mode 100644 index 0000000..1ff853f --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/JPEGVideoSource.cpp @@ -0,0 +1,45 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// JPEG video sources +// Implementation + +#include "JPEGVideoSource.hh" + +JPEGVideoSource::JPEGVideoSource(UsageEnvironment& env) + : FramedSource(env) { +} + +JPEGVideoSource::~JPEGVideoSource() { +} + +u_int8_t const* JPEGVideoSource::quantizationTables(u_int8_t& precision, + u_int16_t& length) { + // Default implementation + precision = 0; + length = 0; + return NULL; +} + +u_int16_t JPEGVideoSource::restartInterval() { + // Default implementation + return 0; +} + +Boolean JPEGVideoSource::isJPEGVideoSource() const { + return True; +} diff --git a/AnyCore/lib_rtsp/liveMedia/Locale.cpp b/AnyCore/lib_rtsp/liveMedia/Locale.cpp new file mode 100644 index 0000000..0bf1963 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/Locale.cpp @@ -0,0 +1,60 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Support for temporarily setting the locale (e.g., to "C" or "POSIX") for (e.g.) parsing or printing +// floating-point numbers in protocol headers, or calling toupper()/tolower() on human-input strings. 
+// Implementation + +#include "Locale.hh" +#include + +Locale::Locale(char const* newLocale, LocaleCategory category) { +#ifndef LOCALE_NOT_USED +#ifndef XLOCALE_NOT_USED + int categoryMask; + switch (category) { + case All: { categoryMask = LC_ALL_MASK; break; } + case Numeric: { categoryMask = LC_NUMERIC_MASK; break; } + } + fLocale = newlocale(categoryMask, newLocale, NULL); + fPrevLocale = uselocale(fLocale); +#else + switch (category) { + case All: { fCategoryNum = LC_ALL; break; } + case Numeric: { fCategoryNum = LC_NUMERIC; break; } + } + fPrevLocale = strDup(setlocale(fCategoryNum, NULL)); + setlocale(fCategoryNum, newLocale); +#endif +#endif +} + +Locale::~Locale() { +#ifndef LOCALE_NOT_USED +#ifndef XLOCALE_NOT_USED + if (fLocale != (locale_t)0) { + uselocale(fPrevLocale); + freelocale(fLocale); + } +#else + if (fPrevLocale != NULL) { + setlocale(fCategoryNum, fPrevLocale); + delete[] fPrevLocale; + } +#endif +#endif +} diff --git a/AnyCore/lib_rtsp/liveMedia/MP3ADU.cpp b/AnyCore/lib_rtsp/liveMedia/MP3ADU.cpp new file mode 100644 index 0000000..5057338 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3ADU.cpp @@ -0,0 +1,634 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// 'ADU' MP3 streams (for improved loss-tolerance) +// Implementation + +#include "MP3ADU.hh" +#include "MP3ADUdescriptor.hh" +#include "MP3Internals.hh" +#include + +#ifdef TEST_LOSS +#include "GroupsockHelper.hh" +#endif + +// Segment data structures, used in the implementation below: + +#define SegmentBufSize 2000 /* conservatively high */ + +class Segment { +public: + unsigned char buf[SegmentBufSize]; + unsigned char* dataStart() { return &buf[descriptorSize]; } + unsigned frameSize; // if it's a non-ADU frame + unsigned dataHere(); // if it's a non-ADU frame + + unsigned descriptorSize; + static unsigned const headerSize; + unsigned sideInfoSize, aduSize; + unsigned backpointer; + + struct timeval presentationTime; + unsigned durationInMicroseconds; +}; + +unsigned const Segment::headerSize = 4; + +#define SegmentQueueSize 20 + +class SegmentQueue { +public: + SegmentQueue(Boolean directionIsToADU, Boolean includeADUdescriptors) + : fDirectionIsToADU(directionIsToADU), + fIncludeADUdescriptors(includeADUdescriptors) { + reset(); + } + + Segment s[SegmentQueueSize]; + + unsigned headIndex() {return fHeadIndex;} + Segment& headSegment() {return s[fHeadIndex];} + + unsigned nextFreeIndex() {return fNextFreeIndex;} + Segment& nextFreeSegment() {return s[fNextFreeIndex];} + Boolean isEmpty() {return isEmptyOrFull() && totalDataSize() == 0;} + Boolean isFull() {return isEmptyOrFull() && totalDataSize() > 0;} + + static unsigned nextIndex(unsigned ix) {return (ix+1)%SegmentQueueSize;} + static unsigned prevIndex(unsigned ix) {return (ix+SegmentQueueSize-1)%SegmentQueueSize;} + + unsigned totalDataSize() {return fTotalDataSize;} + + void enqueueNewSegment(FramedSource* inputSource, FramedSource* usingSource); + + Boolean dequeue(); + + Boolean insertDummyBeforeTail(unsigned backpointer); + + void reset() { fHeadIndex = fNextFreeIndex = fTotalDataSize = 0; } + +private: + static void sqAfterGettingSegment(void* clientData, + unsigned numBytesRead, + unsigned 
numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + + Boolean sqAfterGettingCommon(Segment& seg, unsigned numBytesRead); + Boolean isEmptyOrFull() {return headIndex() == nextFreeIndex();} + + unsigned fHeadIndex, fNextFreeIndex, fTotalDataSize; + + // The following is used for asynchronous reads: + FramedSource* fUsingSource; + + // This tells us whether the direction in which we're being used + // is MP3->ADU, or vice-versa. (This flag is used for debugging output.) + Boolean fDirectionIsToADU; + + // The following is true iff we're used to enqueue incoming + // ADU frames, and these have an ADU descriptor in front + Boolean fIncludeADUdescriptors; +}; + +////////// ADUFromMP3Source ////////// + +ADUFromMP3Source::ADUFromMP3Source(UsageEnvironment& env, + FramedSource* inputSource, + Boolean includeADUdescriptors) + : FramedFilter(env, inputSource), + fAreEnqueueingMP3Frame(False), + fSegments(new SegmentQueue(True /* because we're MP3->ADU */, + False /*no descriptors in incoming frames*/)), + fIncludeADUdescriptors(includeADUdescriptors), + fTotalDataSizeBeforePreviousRead(0), fScale(1), fFrameCounter(0) { +} + +ADUFromMP3Source::~ADUFromMP3Source() { + delete fSegments; +} + + +char const* ADUFromMP3Source::MIMEtype() const { + return "audio/MPA-ROBUST"; +} + +ADUFromMP3Source* ADUFromMP3Source::createNew(UsageEnvironment& env, + FramedSource* inputSource, + Boolean includeADUdescriptors) { + // The source must be a MPEG audio source: + if (strcmp(inputSource->MIMEtype(), "audio/MPEG") != 0) { + env.setResultMsg(inputSource->name(), " is not an MPEG audio source"); + return NULL; + } + + return new ADUFromMP3Source(env, inputSource, includeADUdescriptors); +} + +void ADUFromMP3Source::resetInput() { + fSegments->reset(); +} + +Boolean ADUFromMP3Source::setScaleFactor(int scale) { + if (scale < 1) return False; + fScale = scale; + return True; +} + +void ADUFromMP3Source::doGetNextFrame() { + if (!fAreEnqueueingMP3Frame) { 
+ // Arrange to enqueue a new MP3 frame: + fTotalDataSizeBeforePreviousRead = fSegments->totalDataSize(); + fAreEnqueueingMP3Frame = True; + fSegments->enqueueNewSegment(fInputSource, this); + } else { + // Deliver an ADU from a previously-read MP3 frame: + fAreEnqueueingMP3Frame = False; + + if (!doGetNextFrame1()) { + // An internal error occurred; act as if our source went away: + handleClosure(); + } + } +} + +Boolean ADUFromMP3Source::doGetNextFrame1() { + // First, check whether we have enough previously-read data to output an + // ADU for the last-read MP3 frame: + unsigned tailIndex; + Segment* tailSeg; + Boolean needMoreData; + + if (fSegments->isEmpty()) { + needMoreData = True; + tailSeg = NULL; tailIndex = 0; // unneeded, but stops compiler warnings + } else { + tailIndex = SegmentQueue::prevIndex(fSegments->nextFreeIndex()); + tailSeg = &(fSegments->s[tailIndex]); + + needMoreData + = fTotalDataSizeBeforePreviousRead < tailSeg->backpointer // bp points back too far + || tailSeg->backpointer + tailSeg->dataHere() < tailSeg->aduSize; // not enough data + } + + if (needMoreData) { + // We don't have enough data to output an ADU from the last-read MP3 + // frame, so need to read another one and try again: + doGetNextFrame(); + return True; + } + + // Output an ADU from the tail segment: + fFrameSize = tailSeg->headerSize+tailSeg->sideInfoSize+tailSeg->aduSize; + fPresentationTime = tailSeg->presentationTime; + fDurationInMicroseconds = tailSeg->durationInMicroseconds; + unsigned descriptorSize + = fIncludeADUdescriptors ? 
ADUdescriptor::computeSize(fFrameSize) : 0; +#ifdef DEBUG + fprintf(stderr, "m->a:outputting ADU %d<-%d, nbr:%d, sis:%d, dh:%d, (descriptor size: %d)\n", tailSeg->aduSize, tailSeg->backpointer, fFrameSize, tailSeg->sideInfoSize, tailSeg->dataHere(), descriptorSize); +#endif + if (descriptorSize + fFrameSize > fMaxSize) { + envir() << "ADUFromMP3Source::doGetNextFrame1(): not enough room (" + << descriptorSize + fFrameSize << ">" + << fMaxSize << ")\n"; + fFrameSize = 0; + return False; + } + + unsigned char* toPtr = fTo; + // output the ADU descriptor: + if (fIncludeADUdescriptors) { + fFrameSize += ADUdescriptor::generateDescriptor(toPtr, fFrameSize); + } + + // output header and side info: + memmove(toPtr, tailSeg->dataStart(), + tailSeg->headerSize + tailSeg->sideInfoSize); + toPtr += tailSeg->headerSize + tailSeg->sideInfoSize; + + // go back to the frame that contains the start of our data: + unsigned offset = 0; + unsigned i = tailIndex; + unsigned prevBytes = tailSeg->backpointer; + while (prevBytes > 0) { + i = SegmentQueue::prevIndex(i); + unsigned dataHere = fSegments->s[i].dataHere(); + if (dataHere < prevBytes) { + prevBytes -= dataHere; + } else { + offset = dataHere - prevBytes; + break; + } + } + + // dequeue any segments that we no longer need: + while (fSegments->headIndex() != i) { + fSegments->dequeue(); // we're done with it + } + + unsigned bytesToUse = tailSeg->aduSize; + while (bytesToUse > 0) { + Segment& seg = fSegments->s[i]; + unsigned char* fromPtr + = &seg.dataStart()[seg.headerSize + seg.sideInfoSize + offset]; + unsigned dataHere = seg.dataHere() - offset; + unsigned bytesUsedHere = dataHere < bytesToUse ? dataHere : bytesToUse; + memmove(toPtr, fromPtr, bytesUsedHere); + bytesToUse -= bytesUsedHere; + toPtr += bytesUsedHere; + offset = 0; + i = SegmentQueue::nextIndex(i); + } + + + if (fFrameCounter++%fScale == 0) { + // Call our own 'after getting' function. 
Because we're not a 'leaf' + // source, we can call this directly, without risking infinite recursion. + afterGetting(this); + } else { + // Don't use this frame; get another one: + doGetNextFrame(); + } + + return True; +} + + +////////// MP3FromADUSource ////////// + +MP3FromADUSource::MP3FromADUSource(UsageEnvironment& env, + FramedSource* inputSource, + Boolean includeADUdescriptors) + : FramedFilter(env, inputSource), + fAreEnqueueingADU(False), + fSegments(new SegmentQueue(False /* because we're ADU->MP3 */, + includeADUdescriptors)) { +} + +MP3FromADUSource::~MP3FromADUSource() { + delete fSegments; +} + +char const* MP3FromADUSource::MIMEtype() const { + return "audio/MPEG"; +} + +MP3FromADUSource* MP3FromADUSource::createNew(UsageEnvironment& env, + FramedSource* inputSource, + Boolean includeADUdescriptors) { + // The source must be an MP3 ADU source: + if (strcmp(inputSource->MIMEtype(), "audio/MPA-ROBUST") != 0) { + env.setResultMsg(inputSource->name(), " is not an MP3 ADU source"); + return NULL; + } + + return new MP3FromADUSource(env, inputSource, includeADUdescriptors); +} + + +void MP3FromADUSource::doGetNextFrame() { + if (fAreEnqueueingADU) insertDummyADUsIfNecessary(); + fAreEnqueueingADU = False; + + if (needToGetAnADU()) { + // Before returning a frame, we must enqueue at least one ADU: +#ifdef TEST_LOSS + NOTE: This code no longer works, because it uses synchronous reads, + which are no longer supported. 
+ static unsigned const framesPerPacket = 10; + static unsigned const frameCount = 0; + static Boolean packetIsLost; + while (1) { + if ((frameCount++)%framesPerPacket == 0) { + packetIsLost = (our_random()%10 == 0); // simulate 10% packet loss ##### + } + + if (packetIsLost) { + // Read and discard the next input frame (that would be part of + // a lost packet): + Segment dummySegment; + unsigned numBytesRead; + struct timeval presentationTime; + // (this works only if the source can be read synchronously) + fInputSource->syncGetNextFrame(dummySegment.buf, + sizeof dummySegment.buf, numBytesRead, + presentationTime); + } else { + break; // from while (1) + } + } +#endif + + fAreEnqueueingADU = True; + fSegments->enqueueNewSegment(fInputSource, this); + } else { + // Return a frame now: + generateFrameFromHeadADU(); + // sets fFrameSize, fPresentationTime, and fDurationInMicroseconds + + // Call our own 'after getting' function. Because we're not a 'leaf' + // source, we can call this directly, without risking infinite recursion. + afterGetting(this); + } +} + +Boolean MP3FromADUSource::needToGetAnADU() { + // Check whether we need to first enqueue a new ADU before we + // can generate a frame for our head ADU. 
+ Boolean needToEnqueue = True; + + if (!fSegments->isEmpty()) { + unsigned index = fSegments->headIndex(); + Segment* seg = &(fSegments->headSegment()); + int const endOfHeadFrame = (int) seg->dataHere(); + unsigned frameOffset = 0; + + while (1) { + int endOfData = frameOffset - seg->backpointer + seg->aduSize; + if (endOfData >= endOfHeadFrame) { + // We already have enough data to generate a frame + needToEnqueue = False; + break; + } + + frameOffset += seg->dataHere(); + index = SegmentQueue::nextIndex(index); + if (index == fSegments->nextFreeIndex()) break; + seg = &(fSegments->s[index]); + } + } + + return needToEnqueue; +} + +void MP3FromADUSource::insertDummyADUsIfNecessary() { + if (fSegments->isEmpty()) return; // shouldn't happen + + // The tail segment (ADU) is assumed to have been recently + // enqueued. If its backpointer would overlap the data + // of the previous ADU, then we need to insert one or more + // empty, 'dummy' ADUs ahead of it. (This situation should occur + // only if an intermediate ADU was lost.) 
+ + unsigned tailIndex + = SegmentQueue::prevIndex(fSegments->nextFreeIndex()); + Segment* tailSeg = &(fSegments->s[tailIndex]); + + while (1) { + unsigned prevADUend; // relative to the start of the new ADU + if (fSegments->headIndex() != tailIndex) { + // there is a previous segment + unsigned prevIndex = SegmentQueue::prevIndex(tailIndex); + Segment& prevSegment = fSegments->s[prevIndex]; + prevADUend = prevSegment.dataHere() + prevSegment.backpointer; + if (prevSegment.aduSize > prevADUend) { + // shouldn't happen if the previous ADU was well-formed + prevADUend = 0; + } else { + prevADUend -= prevSegment.aduSize; + } + } else { + prevADUend = 0; + } + + if (tailSeg->backpointer > prevADUend) { + // We need to insert a dummy ADU in front of the tail +#ifdef DEBUG + fprintf(stderr, "a->m:need to insert a dummy ADU (%d, %d, %d) [%d, %d]\n", tailSeg->backpointer, prevADUend, tailSeg->dataHere(), fSegments->headIndex(), fSegments->nextFreeIndex()); +#endif + tailIndex = fSegments->nextFreeIndex(); + if (!fSegments->insertDummyBeforeTail(prevADUend)) return; + tailSeg = &(fSegments->s[tailIndex]); + } else { + break; // no more dummy ADUs need to be inserted + } + } +} + +Boolean MP3FromADUSource::generateFrameFromHeadADU() { + // Output a frame for the head ADU: + if (fSegments->isEmpty()) return False; + unsigned index = fSegments->headIndex(); + Segment* seg = &(fSegments->headSegment()); +#ifdef DEBUG + fprintf(stderr, "a->m:outputting frame for %d<-%d (fs %d, dh %d), (descriptorSize: %d)\n", seg->aduSize, seg->backpointer, seg->frameSize, seg->dataHere(), seg->descriptorSize); +#endif + unsigned char* toPtr = fTo; + + // output header and side info: + fFrameSize = seg->frameSize; + fPresentationTime = seg->presentationTime; + fDurationInMicroseconds = seg->durationInMicroseconds; + memmove(toPtr, seg->dataStart(), seg->headerSize + seg->sideInfoSize); + toPtr += seg->headerSize + seg->sideInfoSize; + + // zero out the rest of the frame, in case ADU data doesn't 
fill it all in + unsigned bytesToZero = seg->dataHere(); + for (unsigned i = 0; i < bytesToZero; ++i) { + toPtr[i] = '\0'; + } + + // Fill in the frame with appropriate ADU data from this and + // subsequent ADUs: + unsigned frameOffset = 0; + unsigned toOffset = 0; + unsigned const endOfHeadFrame = seg->dataHere(); + + while (toOffset < endOfHeadFrame) { + int startOfData = frameOffset - seg->backpointer; + if (startOfData > (int)endOfHeadFrame) break; // no more ADUs needed + + int endOfData = startOfData + seg->aduSize; + if (endOfData > (int)endOfHeadFrame) { + endOfData = endOfHeadFrame; + } + + unsigned fromOffset; + if (startOfData <= (int)toOffset) { + fromOffset = toOffset - startOfData; + startOfData = toOffset; + if (endOfData < startOfData) endOfData = startOfData; + } else { + fromOffset = 0; + + // we may need some padding bytes beforehand + unsigned bytesToZero = startOfData - toOffset; +#ifdef DEBUG + if (bytesToZero > 0) fprintf(stderr, "a->m:outputting %d zero bytes (%d, %d, %d, %d)\n", bytesToZero, startOfData, toOffset, frameOffset, seg->backpointer); +#endif + toOffset += bytesToZero; + } + + unsigned char* fromPtr + = &seg->dataStart()[seg->headerSize + seg->sideInfoSize + fromOffset]; + unsigned bytesUsedHere = endOfData - startOfData; +#ifdef DEBUG + if (bytesUsedHere > 0) fprintf(stderr, "a->m:outputting %d bytes from %d<-%d\n", bytesUsedHere, seg->aduSize, seg->backpointer); +#endif + memmove(toPtr + toOffset, fromPtr, bytesUsedHere); + toOffset += bytesUsedHere; + + frameOffset += seg->dataHere(); + index = SegmentQueue::nextIndex(index); + if (index == fSegments->nextFreeIndex()) break; + seg = &(fSegments->s[index]); + } + + fSegments->dequeue(); + + return True; +} + + +////////// Segment ////////// + +unsigned Segment::dataHere() { + int result = frameSize - (headerSize + sideInfoSize); + if (result < 0) { + return 0; + } + + return (unsigned)result; +} + +////////// SegmentQueue ////////// + +void 
SegmentQueue::enqueueNewSegment(FramedSource* inputSource, + FramedSource* usingSource) { + if (isFull()) { + usingSource->envir() << "SegmentQueue::enqueueNewSegment() overflow\n"; + usingSource->handleClosure(); + return; + } + + fUsingSource = usingSource; + + Segment& seg = nextFreeSegment(); + inputSource->getNextFrame(seg.buf, sizeof seg.buf, + sqAfterGettingSegment, this, + FramedSource::handleClosure, usingSource); +} + +void SegmentQueue::sqAfterGettingSegment(void* clientData, + unsigned numBytesRead, + unsigned /*numTruncatedBytes*/, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + SegmentQueue* segQueue = (SegmentQueue*)clientData; + Segment& seg = segQueue->nextFreeSegment(); + + seg.presentationTime = presentationTime; + seg.durationInMicroseconds = durationInMicroseconds; + + if (segQueue->sqAfterGettingCommon(seg, numBytesRead)) { +#ifdef DEBUG + char const* direction = segQueue->fDirectionIsToADU ? "m->a" : "a->m"; + fprintf(stderr, "%s:read frame %d<-%d, fs:%d, sis:%d, dh:%d, (descriptor size: %d)\n", direction, seg.aduSize, seg.backpointer, seg.frameSize, seg.sideInfoSize, seg.dataHere(), seg.descriptorSize); +#endif + } + + // Continue our original calling source where it left off: + segQueue->fUsingSource->doGetNextFrame(); +} + +// Common code called after a new segment is enqueued +Boolean SegmentQueue::sqAfterGettingCommon(Segment& seg, + unsigned numBytesRead) { + unsigned char* fromPtr = seg.buf; + + if (fIncludeADUdescriptors) { + // The newly-read data is assumed to be an ADU with a descriptor + // in front + (void)ADUdescriptor::getRemainingFrameSize(fromPtr); + seg.descriptorSize = (unsigned)(fromPtr-seg.buf); + } else { + seg.descriptorSize = 0; + } + + // parse the MP3-specific info in the frame to get the ADU params + unsigned hdr; + MP3SideInfo sideInfo; + if (!GetADUInfoFromMP3Frame(fromPtr, numBytesRead, + hdr, seg.frameSize, + sideInfo, seg.sideInfoSize, + seg.backpointer, seg.aduSize)) { + return False; 
+ } + + // If we've just read an ADU (rather than a regular MP3 frame), then use the + // entire "numBytesRead" data for the 'aduSize', so that we include any + // 'ancillary data' that may be present at the end of the ADU: + if (!fDirectionIsToADU) { + unsigned newADUSize + = numBytesRead - seg.descriptorSize - 4/*header size*/ - seg.sideInfoSize; + if (newADUSize > seg.aduSize) seg.aduSize = newADUSize; + } + fTotalDataSize += seg.dataHere(); + fNextFreeIndex = nextIndex(fNextFreeIndex); + + return True; +} + +Boolean SegmentQueue::dequeue() { + if (isEmpty()) { + fUsingSource->envir() << "SegmentQueue::dequeue(): underflow!\n"; + return False; + } + + Segment& seg = s[headIndex()]; + fTotalDataSize -= seg.dataHere(); + fHeadIndex = nextIndex(fHeadIndex); + return True; +} + +Boolean SegmentQueue::insertDummyBeforeTail(unsigned backpointer) { + if (isEmptyOrFull()) return False; + + // Copy the current tail segment to its new position, then modify the + // old tail segment to be a 'dummy' ADU + + unsigned newTailIndex = nextFreeIndex(); + Segment& newTailSeg = s[newTailIndex]; + + unsigned oldTailIndex = prevIndex(newTailIndex); + Segment& oldTailSeg = s[oldTailIndex]; + + newTailSeg = oldTailSeg; // structure copy + + // Begin by setting (replacing) the ADU descriptor of the dummy ADU: + unsigned char* ptr = oldTailSeg.buf; + if (fIncludeADUdescriptors) { + unsigned remainingFrameSize + = oldTailSeg.headerSize + oldTailSeg.sideInfoSize + 0 /* 0-size ADU */; + unsigned currentDescriptorSize = oldTailSeg.descriptorSize; + + if (currentDescriptorSize == 2) { + ADUdescriptor::generateTwoByteDescriptor(ptr, remainingFrameSize); + } else { + (void)ADUdescriptor::generateDescriptor(ptr, remainingFrameSize); + } + } + + // Then zero out the side info of the dummy frame: + if (!ZeroOutMP3SideInfo(ptr, oldTailSeg.frameSize, + backpointer)) return False; + + unsigned dummyNumBytesRead + = oldTailSeg.descriptorSize + 4/*header size*/ + oldTailSeg.sideInfoSize; + return 
sqAfterGettingCommon(oldTailSeg, dummyNumBytesRead); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MP3ADURTPSink.cpp b/AnyCore/lib_rtsp/liveMedia/MP3ADURTPSink.cpp new file mode 100644 index 0000000..483c0b6 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3ADURTPSink.cpp @@ -0,0 +1,119 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP sink for 'ADUized' MP3 frames ("mpa-robust") +// Implementation + +#include "MP3ADURTPSink.hh" + +MP3ADURTPSink::MP3ADURTPSink(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char RTPPayloadType) + : AudioRTPSink(env, RTPgs, RTPPayloadType, 90000, "MPA-ROBUST") { +} + +MP3ADURTPSink::~MP3ADURTPSink() { +} + +MP3ADURTPSink* +MP3ADURTPSink::createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char RTPPayloadType) { + return new MP3ADURTPSink(env, RTPgs, RTPPayloadType); +} + +static void badDataSize(UsageEnvironment& env, unsigned numBytesInFrame) { + env << "MP3ADURTPSink::doSpecialFrameHandling(): invalid size (" + << numBytesInFrame << ") of non-fragmented input ADU!\n"; +} + +void MP3ADURTPSink::doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes) { + // If this is the first (or only) fragment of an ADU, then + // check the "ADU descriptor" (that should be at the front) for validity: + if (fragmentationOffset == 0) { + unsigned aduDescriptorSize; + + if (numBytesInFrame < 1) { + badDataSize(envir(), numBytesInFrame); + return; + } + if (frameStart[0]&0x40) { + // We have a 2-byte ADU descriptor + aduDescriptorSize = 2; + if (numBytesInFrame < 2) { + badDataSize(envir(), numBytesInFrame); + return; + } + fCurADUSize = ((frameStart[0]&~0xC0)<<8) | frameStart[1]; + } else { + // We have a 1-byte ADU descriptor + aduDescriptorSize = 1; + fCurADUSize = frameStart[0]&~0x80; + } + + if (frameStart[0]&0x80) { + envir() << "Unexpected \"C\" bit seen on non-fragment input ADU!\n"; + return; + } + + // Now, check whether the ADU size in the ADU descriptor is consistent + // with the total data size of (all fragments of) the input frame: + unsigned expectedADUSize = + fragmentationOffset + numBytesInFrame + numRemainingBytes + - aduDescriptorSize; + if (fCurADUSize != expectedADUSize) { + envir() << 
"MP3ADURTPSink::doSpecialFrameHandling(): Warning: Input ADU size " + << expectedADUSize << " (=" << fragmentationOffset + << "+" << numBytesInFrame << "+" << numRemainingBytes + << "-" << aduDescriptorSize + << ") did not match the value (" << fCurADUSize + << ") in the ADU descriptor!\n"; + fCurADUSize = expectedADUSize; + } + } else { + // This is the second (or subsequent) fragment. + // Insert a new ADU descriptor: + unsigned char aduDescriptor[2]; + aduDescriptor[0] = 0xC0|(fCurADUSize>>8); + aduDescriptor[1] = fCurADUSize&0xFF; + setSpecialHeaderBytes(aduDescriptor, 2); + } + + // Important: Also call our base class's doSpecialFrameHandling(), + // to set the packet's timestamp: + MultiFramedRTPSink::doSpecialFrameHandling(fragmentationOffset, + frameStart, numBytesInFrame, + framePresentationTime, + numRemainingBytes); +} + +unsigned MP3ADURTPSink::specialHeaderSize() const { + // Normally there's no special header. + // (The "ADU descriptor" is already present in the data.) + unsigned specialHeaderSize = 0; + + // However, if we're about to output the second (or subsequent) fragment + // of a fragmented ADU, then we need to insert a new ADU descriptor at + // the front of the packet: + if (curFragmentationOffset() > 0) { + specialHeaderSize = 2; + } + + return specialHeaderSize; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MP3ADURTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/MP3ADURTPSource.cpp new file mode 100644 index 0000000..4229e98 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3ADURTPSource.cpp @@ -0,0 +1,80 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP source for 'ADUized' MP3 frames ("mpa-robust") +// Implementation + +#include "MP3ADURTPSource.hh" +#include "MP3ADUdescriptor.hh" + +////////// ADUBufferedPacket and ADUBufferedPacketFactory ////////// + +class ADUBufferedPacket: public BufferedPacket { +private: // redefined virtual functions + virtual unsigned nextEnclosedFrameSize(unsigned char*& framePtr, + unsigned dataSize); +}; + +class ADUBufferedPacketFactory: public BufferedPacketFactory { +private: // redefined virtual functions + virtual BufferedPacket* createNewPacket(MultiFramedRTPSource* ourSource); +}; + +///////// MP3ADURTPSource implementation //////// + +MP3ADURTPSource* +MP3ADURTPSource::createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) { + return new MP3ADURTPSource(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency); +} + +MP3ADURTPSource::MP3ADURTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) + : MultiFramedRTPSource(env, RTPgs, + rtpPayloadFormat, rtpTimestampFrequency, + new ADUBufferedPacketFactory) { +} + +MP3ADURTPSource::~MP3ADURTPSource() { +} + +char const* MP3ADURTPSource::MIMEtype() const { + return "audio/MPA-ROBUST"; +} + +////////// ADUBufferedPacket and ADUBufferredPacketFactory implementation + +unsigned ADUBufferedPacket +::nextEnclosedFrameSize(unsigned char*& framePtr, unsigned 
dataSize) { + // Return the size of the next MP3 'ADU', on the assumption that + // the input data is ADU-encoded MP3 frames. + unsigned char* frameDataPtr = framePtr; + unsigned remainingFrameSize + = ADUdescriptor::getRemainingFrameSize(frameDataPtr); + unsigned descriptorSize = (unsigned)(frameDataPtr - framePtr); + unsigned fullADUSize = descriptorSize + remainingFrameSize; + + return (fullADUSize <= dataSize) ? fullADUSize : dataSize; +} + +BufferedPacket* ADUBufferedPacketFactory +::createNewPacket(MultiFramedRTPSource* /*ourSource*/) { + return new ADUBufferedPacket; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MP3ADUTranscoder.cpp b/AnyCore/lib_rtsp/liveMedia/MP3ADUTranscoder.cpp new file mode 100644 index 0000000..161db8e --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3ADUTranscoder.cpp @@ -0,0 +1,92 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Transcoder for ADUized MP3 frames +// Implementation + +#include "MP3ADUTranscoder.hh" +#include "MP3Internals.hh" +#include + +MP3ADUTranscoder::MP3ADUTranscoder(UsageEnvironment& env, + unsigned outBitrate /* in kbps */, + FramedSource* inputSource) + : FramedFilter(env, inputSource), + fOutBitrate(outBitrate), + fAvailableBytesForBackpointer(0), + fOrigADU(new unsigned char[MAX_MP3_FRAME_SIZE]) { +} + +MP3ADUTranscoder::~MP3ADUTranscoder() { + delete[] fOrigADU; +} + +MP3ADUTranscoder* MP3ADUTranscoder::createNew(UsageEnvironment& env, + unsigned outBitrate /* in kbps */, + FramedSource* inputSource) { + // The source must be an MP3 ADU source: + if (strcmp(inputSource->MIMEtype(), "audio/MPA-ROBUST") != 0) { + env.setResultMsg(inputSource->name(), " is not an MP3 ADU source"); + return NULL; + } + + return new MP3ADUTranscoder(env, outBitrate, inputSource); +} + +void MP3ADUTranscoder::getAttributes() const { + // Begin by getting the attributes from our input source: + fInputSource->getAttributes(); + + // Then modify them by appending the corrected bandwidth + char buffer[30]; + sprintf(buffer, " bandwidth %d", outBitrate()); + envir().appendToResultMsg(buffer); +} + +void MP3ADUTranscoder::doGetNextFrame() { + fInputSource->getNextFrame(fOrigADU, MAX_MP3_FRAME_SIZE, + afterGettingFrame, this, handleClosure, this); +} + +void MP3ADUTranscoder::afterGettingFrame(void* clientData, + unsigned numBytesRead, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + MP3ADUTranscoder* transcoder = (MP3ADUTranscoder*)clientData; + transcoder->afterGettingFrame1(numBytesRead, numTruncatedBytes, + presentationTime, durationInMicroseconds); +} + +void MP3ADUTranscoder::afterGettingFrame1(unsigned numBytesRead, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + fNumTruncatedBytes = numTruncatedBytes; // but can we handle this being >0? 
##### + fPresentationTime = presentationTime; + fDurationInMicroseconds = durationInMicroseconds; + fFrameSize = TranscodeMP3ADU(fOrigADU, numBytesRead, fOutBitrate, + fTo, fMaxSize, fAvailableBytesForBackpointer); + if (fFrameSize == 0) { // internal error - bad ADU data? + handleClosure(); + return; + } + + // Call our own 'after getting' function. Because we're not a 'leaf' + // source, we can call this directly, without risking infinite recursion. + afterGetting(this); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MP3ADUdescriptor.cpp b/AnyCore/lib_rtsp/liveMedia/MP3ADUdescriptor.cpp new file mode 100644 index 0000000..fffa087 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3ADUdescriptor.cpp @@ -0,0 +1,65 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Descriptor preceding frames of 'ADU' MP3 streams (for improved loss-tolerance) +// Implementation + +#include "MP3ADUdescriptor.hh" + +////////// ADUdescriptor ////////// + +//##### NOTE: For now, ignore fragmentation. Fix this later! 
##### + +#define TWO_BYTE_DESCR_FLAG 0x40 + +unsigned ADUdescriptor::generateDescriptor(unsigned char*& toPtr, + unsigned remainingFrameSize) { + unsigned descriptorSize = ADUdescriptor::computeSize(remainingFrameSize); + switch (descriptorSize) { + case 1: { + *toPtr++ = (unsigned char)remainingFrameSize; + break; + } + case 2: { + generateTwoByteDescriptor(toPtr, remainingFrameSize); + break; + } + } + + return descriptorSize; +} + +void ADUdescriptor::generateTwoByteDescriptor(unsigned char*& toPtr, + unsigned remainingFrameSize) { + *toPtr++ = (TWO_BYTE_DESCR_FLAG|(unsigned char)(remainingFrameSize>>8)); + *toPtr++ = (unsigned char)(remainingFrameSize&0xFF); +} + +unsigned ADUdescriptor::getRemainingFrameSize(unsigned char*& fromPtr) { + unsigned char firstByte = *fromPtr++; + + if (firstByte&TWO_BYTE_DESCR_FLAG) { + // This is a 2-byte descriptor + unsigned char secondByte = *fromPtr++; + + return ((firstByte&0x3F)<<8) | secondByte; + } else { + // This is a 1-byte descriptor + return (firstByte&0x3F); + } +} + diff --git a/AnyCore/lib_rtsp/liveMedia/MP3ADUdescriptor.hh b/AnyCore/lib_rtsp/liveMedia/MP3ADUdescriptor.hh new file mode 100644 index 0000000..a87fbf3 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3ADUdescriptor.hh @@ -0,0 +1,43 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Descriptor preceding frames of 'ADU' MP3 streams (for improved loss-tolerance) +// C++ header + +#ifndef _MP3_ADU_DESCRIPTOR_HH +#define _MP3_ADU_DESCRIPTOR_HH + +// A class for handling the descriptor that begins each ADU frame: +// (Note: We don't yet implement fragmentation) +class ADUdescriptor { +public: + // Operations for generating a new descriptor + static unsigned computeSize(unsigned remainingFrameSize) { + return remainingFrameSize >= 64 ? 2 : 1; + } + static unsigned generateDescriptor(unsigned char*& toPtr, unsigned remainingFrameSize); + // returns descriptor size; increments "toPtr" afterwards + static void generateTwoByteDescriptor(unsigned char*& toPtr, unsigned remainingFrameSize); + // always generates a 2-byte descriptor, even if "remainingFrameSize" is + // small enough for a 1-byte descriptor + + // Operations for reading a descriptor + static unsigned getRemainingFrameSize(unsigned char*& fromPtr); + // increments "fromPtr" afterwards +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/MP3ADUinterleaving.cpp b/AnyCore/lib_rtsp/liveMedia/MP3ADUinterleaving.cpp new file mode 100644 index 0000000..723740d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3ADUinterleaving.cpp @@ -0,0 +1,517 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Interleaving of MP3 ADUs +// Implementation + +#include "MP3ADUinterleaving.hh" +#include "MP3ADUdescriptor.hh" + +#include + +#ifdef TEST_LOSS +#include "GroupsockHelper.hh" +#endif + +////////// Interleaving ////////// + +Interleaving::Interleaving(unsigned cycleSize, + unsigned char const* cycleArray) + : fCycleSize(cycleSize) { + for (unsigned i = 0; i < fCycleSize; ++i) { + fInverseCycle[cycleArray[i]] = i; + } +} + +Interleaving::~Interleaving() { +} + +////////// MP3ADUinterleaverBase ////////// + +MP3ADUinterleaverBase::MP3ADUinterleaverBase(UsageEnvironment& env, + FramedSource* inputSource) + : FramedFilter(env, inputSource) { +} +MP3ADUinterleaverBase::~MP3ADUinterleaverBase() { +} + +FramedSource* MP3ADUinterleaverBase::getInputSource(UsageEnvironment& env, + char const* inputSourceName) { + FramedSource* inputSource; + if (!FramedSource::lookupByName(env, inputSourceName, inputSource)) + return NULL; + + if (strcmp(inputSource->MIMEtype(), "audio/MPA-ROBUST") != 0) { + env.setResultMsg(inputSourceName, " is not an MP3 ADU source"); + return NULL; + } + + return inputSource; +} + +void MP3ADUinterleaverBase::afterGettingFrame(void* clientData, + unsigned numBytesRead, + unsigned /*numTruncatedBytes*/, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + MP3ADUinterleaverBase* interleaverBase = (MP3ADUinterleaverBase*)clientData; + // Finish up after reading: + 
interleaverBase->afterGettingFrame(numBytesRead, + presentationTime, durationInMicroseconds); + + // Then, continue to deliver an outgoing frame: + interleaverBase->doGetNextFrame(); +} + + +////////// InterleavingFrames (definition) ////////// + +class InterleavingFrames { +public: + InterleavingFrames(unsigned maxCycleSize); + virtual ~InterleavingFrames(); + + Boolean haveReleaseableFrame(); + void getIncomingFrameParams(unsigned char index, + unsigned char*& dataPtr, + unsigned& bytesAvailable); + void getReleasingFrameParams(unsigned char index, + unsigned char*& dataPtr, + unsigned& bytesInUse, + struct timeval& presentationTime, + unsigned& durationInMicroseconds); + void setFrameParams(unsigned char index, + unsigned char icc, unsigned char ii, + unsigned frameSize, struct timeval presentationTime, + unsigned durationInMicroseconds); + unsigned nextIndexToRelease() {return fNextIndexToRelease;} + void releaseNext(); + +private: + unsigned fMaxCycleSize; + unsigned fNextIndexToRelease; + class InterleavingFrameDescriptor* fDescriptors; +}; + +////////// MP3ADUinterleaver ////////// + + +MP3ADUinterleaver::MP3ADUinterleaver(UsageEnvironment& env, + Interleaving const& interleaving, + FramedSource* inputSource) + : MP3ADUinterleaverBase(env, inputSource), + fInterleaving(interleaving), + fFrames(new InterleavingFrames(interleaving.cycleSize())), + fII(0), fICC(0) { +} + +MP3ADUinterleaver::~MP3ADUinterleaver() { + delete fFrames; +} + +MP3ADUinterleaver* MP3ADUinterleaver::createNew(UsageEnvironment& env, + Interleaving const& interleaving, + FramedSource* inputSource) { + return new MP3ADUinterleaver(env, interleaving, inputSource); +} + +void MP3ADUinterleaver::doGetNextFrame() { + // If there's a frame immediately available, deliver it, otherwise get new + // frames from the source until one's available: + if (fFrames->haveReleaseableFrame()) { + releaseOutgoingFrame(); + + // Call our own 'after getting' function. 
Because we're not a 'leaf' + // source, we can call this directly, without risking infinite recursion. + afterGetting(this); + } else { + fPositionOfNextIncomingFrame = fInterleaving.lookupInverseCycle(fII); + unsigned char* dataPtr; + unsigned bytesAvailable; + fFrames->getIncomingFrameParams(fPositionOfNextIncomingFrame, + dataPtr, bytesAvailable); + + // Read the next incoming frame (asynchronously) + fInputSource->getNextFrame(dataPtr, bytesAvailable, + &MP3ADUinterleaverBase::afterGettingFrame, this, + handleClosure, this); + } +} + +void MP3ADUinterleaver::releaseOutgoingFrame() { + unsigned char* fromPtr; + fFrames->getReleasingFrameParams(fFrames->nextIndexToRelease(), + fromPtr, fFrameSize, + fPresentationTime, fDurationInMicroseconds); + + if (fFrameSize > fMaxSize) { + fNumTruncatedBytes = fFrameSize - fMaxSize; + fFrameSize = fMaxSize; + } + memmove(fTo, fromPtr, fFrameSize); + + fFrames->releaseNext(); +} + +void MP3ADUinterleaver::afterGettingFrame(unsigned numBytesRead, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + // Set the (icc,ii) and frame size of the newly-read frame: + fFrames->setFrameParams(fPositionOfNextIncomingFrame, + fICC, fII, numBytesRead, + presentationTime, durationInMicroseconds); + + // Prepare our counters for the next frame: + if (++fII == fInterleaving.cycleSize()) { + fII = 0; + fICC = (fICC+1)%8; + } +} + +////////// DeinterleavingFrames (definition) ////////// + +class DeinterleavingFrames { +public: + DeinterleavingFrames(); + virtual ~DeinterleavingFrames(); + + Boolean haveReleaseableFrame(); + void getIncomingFrameParams(unsigned char*& dataPtr, + unsigned& bytesAvailable); + void getIncomingFrameParamsAfter(unsigned frameSize, + struct timeval presentationTime, + unsigned durationInMicroseconds, + unsigned char& icc, unsigned char& ii); + void getReleasingFrameParams(unsigned char*& dataPtr, + unsigned& bytesInUse, + struct timeval& presentationTime, + unsigned& durationInMicroseconds); + 
void moveIncomingFrameIntoPlace(); + void releaseNext(); + void startNewCycle(); + +private: + unsigned fNextIndexToRelease; + Boolean fHaveEndedCycle; + unsigned fIIlastSeen; + unsigned fMinIndexSeen, fMaxIndexSeen; // actually, max+1 + class DeinterleavingFrameDescriptor* fDescriptors; +}; + +////////// MP3ADUdeinterleaver ////////// + +MP3ADUdeinterleaver::MP3ADUdeinterleaver(UsageEnvironment& env, + FramedSource* inputSource) + : MP3ADUinterleaverBase(env, inputSource), + fFrames(new DeinterleavingFrames), + fIIlastSeen(~0), fICClastSeen(~0) { +} + +MP3ADUdeinterleaver::~MP3ADUdeinterleaver() { + delete fFrames; +} + +MP3ADUdeinterleaver* MP3ADUdeinterleaver::createNew(UsageEnvironment& env, + FramedSource* inputSource) { + return new MP3ADUdeinterleaver(env, inputSource); +} + +void MP3ADUdeinterleaver::doGetNextFrame() { + // If there's a frame immediately available, deliver it, otherwise get new + // frames from the source until one's available: + if (fFrames->haveReleaseableFrame()) { + releaseOutgoingFrame(); + + // Call our own 'after getting' function. Because we're not a 'leaf' + // source, we can call this directly, without risking infinite recursion. + afterGetting(this); + } else { +#ifdef TEST_LOSS + NOTE: This code no longer works, because it uses synchronous reads, + which are no longer supported. 
+ static unsigned const framesPerPacket = 3; + static unsigned const frameCount = 0; + static Boolean packetIsLost; + while (1) { + unsigned packetCount = frameCount/framesPerPacket; + if ((frameCount++)%framesPerPacket == 0) { + packetIsLost = (our_random()%10 == 0); // simulate 10% packet loss ##### + } + + if (packetIsLost) { + // Read and discard the next input frame (that would be part of + // a lost packet): + unsigned char dummyBuf[2000]; + unsigned numBytesRead; + struct timeval presentationTime; + // (this works only if the source can be read synchronously) + fInputSource->syncGetNextFrame(dummyBuf, sizeof dummyBuf, + numBytesRead, presentationTime); + } else { + break; // from while (1) + } + } +#endif + unsigned char* dataPtr; + unsigned bytesAvailable; + fFrames->getIncomingFrameParams(dataPtr, bytesAvailable); + + // Read the next incoming frame (asynchronously) + fInputSource->getNextFrame(dataPtr, bytesAvailable, + &MP3ADUinterleaverBase::afterGettingFrame, this, + handleClosure, this); + } +} + +void MP3ADUdeinterleaver::afterGettingFrame(unsigned numBytesRead, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + // Get the (icc,ii) and set the frame size of the newly-read frame: + unsigned char icc, ii; + fFrames->getIncomingFrameParamsAfter(numBytesRead, + presentationTime, durationInMicroseconds, + icc, ii); + + // Compare these to the values we saw last: + if (icc != fICClastSeen || ii == fIIlastSeen) { + // We've started a new interleave cycle + // (or interleaving was not used). Release all + // pending ADU frames to the ADU->MP3 conversion step: + fFrames->startNewCycle(); + } else { + // We're still in the same cycle as before. 
+ // Move the newly-read frame into place, so it can be used: + fFrames->moveIncomingFrameIntoPlace(); + } + + fICClastSeen = icc; + fIIlastSeen = ii; +} + +void MP3ADUdeinterleaver::releaseOutgoingFrame() { + unsigned char* fromPtr; + fFrames->getReleasingFrameParams(fromPtr, fFrameSize, + fPresentationTime, fDurationInMicroseconds); + + if (fFrameSize > fMaxSize) { + fNumTruncatedBytes = fFrameSize - fMaxSize; + fFrameSize = fMaxSize; + } + memmove(fTo, fromPtr, fFrameSize); + + fFrames->releaseNext(); +} + +////////// InterleavingFrames (implementation) ////////// + +#define MAX_FRAME_SIZE 2000 /* conservatively high */ + +class InterleavingFrameDescriptor { +public: + InterleavingFrameDescriptor() {frameDataSize = 0;} + + unsigned frameDataSize; // includes ADU descriptor and (modified) MPEG hdr + struct timeval presentationTime; + unsigned durationInMicroseconds; + unsigned char frameData[MAX_FRAME_SIZE]; // ditto +}; + +InterleavingFrames::InterleavingFrames(unsigned maxCycleSize) + : fMaxCycleSize(maxCycleSize), fNextIndexToRelease(0), + fDescriptors(new InterleavingFrameDescriptor[maxCycleSize]) { +} +InterleavingFrames::~InterleavingFrames() { + delete[] fDescriptors; +} + +Boolean InterleavingFrames::haveReleaseableFrame() { + return fDescriptors[fNextIndexToRelease].frameDataSize > 0; +} + +void InterleavingFrames::getIncomingFrameParams(unsigned char index, + unsigned char*& dataPtr, + unsigned& bytesAvailable) { + InterleavingFrameDescriptor& desc = fDescriptors[index]; + dataPtr = &desc.frameData[0]; + bytesAvailable = MAX_FRAME_SIZE; +} + +void InterleavingFrames::getReleasingFrameParams(unsigned char index, + unsigned char*& dataPtr, + unsigned& bytesInUse, + struct timeval& presentationTime, + unsigned& durationInMicroseconds) { + InterleavingFrameDescriptor& desc = fDescriptors[index]; + dataPtr = &desc.frameData[0]; + bytesInUse = desc.frameDataSize; + presentationTime = desc.presentationTime; + durationInMicroseconds = 
desc.durationInMicroseconds; +} + +void InterleavingFrames::setFrameParams(unsigned char index, + unsigned char icc, + unsigned char ii, + unsigned frameSize, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + InterleavingFrameDescriptor& desc = fDescriptors[index]; + desc.frameDataSize = frameSize; + desc.presentationTime = presentationTime; + desc.durationInMicroseconds = durationInMicroseconds; + + // Advance over the ADU descriptor, to get to the MPEG 'syncword': + unsigned char* ptr = &desc.frameData[0]; + (void)ADUdescriptor::getRemainingFrameSize(ptr); + + // Replace the next 11 bits with (ii,icc): + *ptr++ = ii; + *ptr &=~ 0xE0; + *ptr |= (icc<<5); +} + +void InterleavingFrames::releaseNext() { + fDescriptors[fNextIndexToRelease].frameDataSize = 0; + fNextIndexToRelease = (fNextIndexToRelease+1)%fMaxCycleSize; +} + +////////// DeinterleavingFrames (implementation) ////////// + +class DeinterleavingFrameDescriptor { +public: + DeinterleavingFrameDescriptor() {frameDataSize = 0; frameData = NULL;} + virtual ~DeinterleavingFrameDescriptor() {delete[] frameData;} + + unsigned frameDataSize; // includes ADU descriptor and (modified) MPEG hdr + struct timeval presentationTime; + unsigned durationInMicroseconds; + unsigned char* frameData; +}; + +DeinterleavingFrames::DeinterleavingFrames() + : fNextIndexToRelease(0), fHaveEndedCycle(False), + fMinIndexSeen(MAX_CYCLE_SIZE), fMaxIndexSeen(0), + fDescriptors(new DeinterleavingFrameDescriptor[MAX_CYCLE_SIZE+1]) { +} +DeinterleavingFrames::~DeinterleavingFrames() { + delete[] fDescriptors; +} + +Boolean DeinterleavingFrames::haveReleaseableFrame() { + if (!fHaveEndedCycle) { + // Check just the next frame in the sequence + return fDescriptors[fNextIndexToRelease].frameDataSize > 0; + } else { + // We've just ended a cycle, so we can skip over frames that didn't + // get filled in (due to packet loss): + if (fNextIndexToRelease < fMinIndexSeen) { + fNextIndexToRelease = fMinIndexSeen; + } + while 
(fNextIndexToRelease < fMaxIndexSeen + && fDescriptors[fNextIndexToRelease].frameDataSize == 0) { + ++fNextIndexToRelease; + } + if (fNextIndexToRelease >= fMaxIndexSeen) { + // No more frames are available from the cycle that we just ended, so + // clear out all previously stored frames, then make available + // the last-read frame, and return false for now: + for (unsigned i = fMinIndexSeen; i < fMaxIndexSeen; ++i) { + fDescriptors[i].frameDataSize = 0; + } + + fMinIndexSeen = MAX_CYCLE_SIZE; fMaxIndexSeen = 0; + moveIncomingFrameIntoPlace(); + + fHaveEndedCycle = False; + fNextIndexToRelease = 0; + return False; + } + + return True; + } +} + +void DeinterleavingFrames::getIncomingFrameParams(unsigned char*& dataPtr, + unsigned& bytesAvailable) { + // Use fDescriptors[MAX_CYCLE_SIZE] to store the incoming frame, + // prior to figuring out its real position: + DeinterleavingFrameDescriptor& desc = fDescriptors[MAX_CYCLE_SIZE]; + if (desc.frameData == NULL) { + // There's no buffer yet, so allocate a new one: + desc.frameData = new unsigned char[MAX_FRAME_SIZE]; + } + dataPtr = desc.frameData; + bytesAvailable = MAX_FRAME_SIZE; +} + +void DeinterleavingFrames +::getIncomingFrameParamsAfter(unsigned frameSize, + struct timeval presentationTime, + unsigned durationInMicroseconds, + unsigned char& icc, unsigned char& ii) { + DeinterleavingFrameDescriptor& desc = fDescriptors[MAX_CYCLE_SIZE]; + desc.frameDataSize = frameSize; + desc.presentationTime = presentationTime; + desc.durationInMicroseconds = durationInMicroseconds; + + // Advance over the ADU descriptor, to get to the MPEG 'syncword': + unsigned char* ptr = desc.frameData; + (void)ADUdescriptor::getRemainingFrameSize(ptr); + + // Read the next 11 bits into (ii,icc), and replace them with all-1s: + fIIlastSeen = ii = *ptr; *ptr++ = 0xFF; + icc = (*ptr&0xE0)>>5; *ptr |= 0xE0; +} + +void DeinterleavingFrames::getReleasingFrameParams(unsigned char*& dataPtr, + unsigned& bytesInUse, + struct timeval& 
presentationTime, + unsigned& durationInMicroseconds) { + DeinterleavingFrameDescriptor& desc = fDescriptors[fNextIndexToRelease]; + dataPtr = desc.frameData; + bytesInUse = desc.frameDataSize; + presentationTime = desc.presentationTime; + durationInMicroseconds = desc.durationInMicroseconds; +} + +void DeinterleavingFrames::moveIncomingFrameIntoPlace() { + DeinterleavingFrameDescriptor& fromDesc = fDescriptors[MAX_CYCLE_SIZE]; + DeinterleavingFrameDescriptor& toDesc = fDescriptors[fIIlastSeen]; + + toDesc.frameDataSize = fromDesc.frameDataSize; + toDesc.presentationTime = fromDesc.presentationTime; + + // Move the data pointer into place by swapping the data pointers: + unsigned char* tmp = toDesc.frameData; + toDesc.frameData = fromDesc.frameData; + fromDesc.frameData = tmp; + + if (fIIlastSeen < fMinIndexSeen) { + fMinIndexSeen = fIIlastSeen; + } + if (fIIlastSeen + 1 > fMaxIndexSeen) { + fMaxIndexSeen = fIIlastSeen + 1; + } +} + +void DeinterleavingFrames::releaseNext() { + fDescriptors[fNextIndexToRelease].frameDataSize = 0; + fNextIndexToRelease = (fNextIndexToRelease+1)%MAX_CYCLE_SIZE; +} + +void DeinterleavingFrames::startNewCycle() { + fHaveEndedCycle = True; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MP3AudioFileServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/MP3AudioFileServerMediaSubsession.cpp new file mode 100644 index 0000000..f5b0cba --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3AudioFileServerMediaSubsession.cpp @@ -0,0 +1,179 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a MP3 audio file. +// (Actually, any MPEG-1 or MPEG-2 audio file should work.) +// Implementation + +#include "MP3AudioFileServerMediaSubsession.hh" +#include "MPEG1or2AudioRTPSink.hh" +#include "MP3ADURTPSink.hh" +#include "MP3FileSource.hh" +#include "MP3ADU.hh" + +MP3AudioFileServerMediaSubsession* MP3AudioFileServerMediaSubsession +::createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource, + Boolean generateADUs, Interleaving* interleaving) { + return new MP3AudioFileServerMediaSubsession(env, fileName, reuseFirstSource, + generateADUs, interleaving); +} + +MP3AudioFileServerMediaSubsession +::MP3AudioFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, Boolean reuseFirstSource, + Boolean generateADUs, + Interleaving* interleaving) + : FileServerMediaSubsession(env, fileName, reuseFirstSource), + fGenerateADUs(generateADUs), fInterleaving(interleaving), fFileDuration(0.0) { +} + +MP3AudioFileServerMediaSubsession +::~MP3AudioFileServerMediaSubsession() { + delete fInterleaving; +} + +FramedSource* MP3AudioFileServerMediaSubsession +::createNewStreamSourceCommon(FramedSource* baseMP3Source, unsigned mp3NumBytes, unsigned& estBitrate) { + FramedSource* streamSource; + do { + streamSource = baseMP3Source; // by default + if (streamSource == NULL) break; + + // Use the MP3 file size, plus the duration, to estimate the stream's bitrate: + if (mp3NumBytes > 0 && fFileDuration > 0.0) { + estBitrate = (unsigned)(mp3NumBytes/(125*fFileDuration) + 0.5); // kbps, 
rounded + } else { + estBitrate = 128; // kbps, estimate + } + + if (fGenerateADUs) { + // Add a filter that converts the source MP3s to ADUs: + streamSource = ADUFromMP3Source::createNew(envir(), streamSource); + if (streamSource == NULL) break; + + if (fInterleaving != NULL) { + // Add another filter that interleaves the ADUs before packetizing: + streamSource = MP3ADUinterleaver::createNew(envir(), *fInterleaving, + streamSource); + if (streamSource == NULL) break; + } + } else if (fFileDuration > 0.0) { + // Because this is a seekable file, insert a pair of filters: one that + // converts the input MP3 stream to ADUs; another that converts these + // ADUs back to MP3. This allows us to seek within the input stream without + // tripping over the MP3 'bit reservoir': + streamSource = ADUFromMP3Source::createNew(envir(), streamSource); + if (streamSource == NULL) break; + + streamSource = MP3FromADUSource::createNew(envir(), streamSource); + if (streamSource == NULL) break; + } + } while (0); + + return streamSource; +} + +void MP3AudioFileServerMediaSubsession::getBaseStreams(FramedSource* frontStream, + FramedSource*& sourceMP3Stream, ADUFromMP3Source*& aduStream/*if any*/) { + if (fGenerateADUs) { + // There's an ADU stream. + if (fInterleaving != NULL) { + // There's an interleaving filter in front of the ADU stream. So go back one, to reach the ADU stream: + aduStream = (ADUFromMP3Source*)(((FramedFilter*)frontStream)->inputSource()); + } else { + aduStream = (ADUFromMP3Source*)frontStream; + } + + // Then, go back one more, to reach the MP3 source: + sourceMP3Stream = (MP3FileSource*)(aduStream->inputSource()); + } else if (fFileDuration > 0.0) { + // There are a pair of filters - MP3->ADU and ADU->MP3 - in front of the + // original MP3 source. 
So, go back one, to reach the ADU source: + aduStream = (ADUFromMP3Source*)(((FramedFilter*)frontStream)->inputSource()); + + // Then, go back one more, to reach the MP3 source: + sourceMP3Stream = (MP3FileSource*)(aduStream->inputSource()); + } else { + // There's no filter in front of the source MP3 stream (and there's no ADU stream): + aduStream = NULL; + sourceMP3Stream = frontStream; + } +} + + +void MP3AudioFileServerMediaSubsession +::seekStreamSource(FramedSource* inputSource, double& seekNPT, double streamDuration, u_int64_t& /*numBytes*/) { + FramedSource* sourceMP3Stream; + ADUFromMP3Source* aduStream; + getBaseStreams(inputSource, sourceMP3Stream, aduStream); + + if (aduStream != NULL) aduStream->resetInput(); // because we're about to seek within its source + ((MP3FileSource*)sourceMP3Stream)->seekWithinFile(seekNPT, streamDuration); +} + +void MP3AudioFileServerMediaSubsession +::setStreamSourceScale(FramedSource* inputSource, float scale) { + + FramedSource* sourceMP3Stream; + ADUFromMP3Source* aduStream; + getBaseStreams(inputSource, sourceMP3Stream, aduStream); + + if (aduStream == NULL) return; // because, in this case, the stream's not scalable + + int iScale = (int)scale; + aduStream->setScaleFactor(iScale); + ((MP3FileSource*)sourceMP3Stream)->setPresentationTimeScale(iScale); +} + +FramedSource* MP3AudioFileServerMediaSubsession +::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) { + MP3FileSource* mp3Source = MP3FileSource::createNew(envir(), fFileName); + if (mp3Source == NULL) return NULL; + fFileDuration = mp3Source->filePlayTime(); + + return createNewStreamSourceCommon(mp3Source, mp3Source->fileSize(), estBitrate); +} + +RTPSink* MP3AudioFileServerMediaSubsession +::createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* /*inputSource*/) { + if (fGenerateADUs) { + return MP3ADURTPSink::createNew(envir(), rtpGroupsock, + rtpPayloadTypeIfDynamic); + } else { + return 
MPEG1or2AudioRTPSink::createNew(envir(), rtpGroupsock); + } +} + +void MP3AudioFileServerMediaSubsession::testScaleFactor(float& scale) { + if (fFileDuration <= 0.0) { + // The file is non-seekable, so is probably a live input source. + // We don't support scale factors other than 1 + scale = 1; + } else { + // We support any integral scale >= 1 + int iScale = (int)(scale + 0.5); // round + if (iScale < 1) iScale = 1; + scale = (float)iScale; + } +} + +float MP3AudioFileServerMediaSubsession::duration() const { + return fFileDuration; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MP3AudioMatroskaFileServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/MP3AudioMatroskaFileServerMediaSubsession.cpp new file mode 100644 index 0000000..d79998d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3AudioMatroskaFileServerMediaSubsession.cpp @@ -0,0 +1,57 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from an MP3 audio track within a Matroska file. +// (Actually, MPEG-1 or MPEG-2 audio file should also work.) 
+// Implementation + +#include "MP3AudioMatroskaFileServerMediaSubsession.hh" +#include "MatroskaDemuxedTrack.hh" + +MP3AudioMatroskaFileServerMediaSubsession* MP3AudioMatroskaFileServerMediaSubsession +::createNew(MatroskaFileServerDemux& demux, MatroskaTrack* track, + Boolean generateADUs, Interleaving* interleaving) { + return new MP3AudioMatroskaFileServerMediaSubsession(demux, track, generateADUs, interleaving); +} + +MP3AudioMatroskaFileServerMediaSubsession +::MP3AudioMatroskaFileServerMediaSubsession(MatroskaFileServerDemux& demux, MatroskaTrack* track, + Boolean generateADUs, Interleaving* interleaving) + : MP3AudioFileServerMediaSubsession(demux.envir(), demux.fileName(), False, generateADUs, interleaving), + fOurDemux(demux), fTrackNumber(track->trackNumber) { + fFileDuration = fOurDemux.fileDuration(); +} + +MP3AudioMatroskaFileServerMediaSubsession::~MP3AudioMatroskaFileServerMediaSubsession() { +} + +void MP3AudioMatroskaFileServerMediaSubsession +::seekStreamSource(FramedSource* inputSource, double& seekNPT, double /*streamDuration*/, u_int64_t& /*numBytes*/) { + FramedSource* sourceMP3Stream; + ADUFromMP3Source* aduStream; + getBaseStreams(inputSource, sourceMP3Stream, aduStream); + + if (aduStream != NULL) aduStream->resetInput(); // because we're about to seek within its source + ((MatroskaDemuxedTrack*)sourceMP3Stream)->seekToTime(seekNPT); +} + +FramedSource* MP3AudioMatroskaFileServerMediaSubsession +::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate) { + FramedSource* baseMP3Source = fOurDemux.newDemuxedTrack(clientSessionId, fTrackNumber); + return createNewStreamSourceCommon(baseMP3Source, 0, estBitrate); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MP3AudioMatroskaFileServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/MP3AudioMatroskaFileServerMediaSubsession.hh new file mode 100644 index 0000000..92d9436 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3AudioMatroskaFileServerMediaSubsession.hh @@ -0,0 +1,57 @@ 
+/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from an MP3 audio track within a Matroska file. +// (Actually, MPEG-1 or MPEG-2 audio should also work.) 
+// C++ header + +#ifndef _MP3_AUDIO_MATROSKA_FILE_SERVER_MEDIA_SUBSESSION_HH +#define _MP3_AUDIO_MATROSKA_FILE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _MP3_AUDIO_FILE_SERVER_MEDIA_SUBSESSION_HH +#include "MP3AudioFileServerMediaSubsession.hh" +#endif +#ifndef _MATROSKA_FILE_SERVER_DEMUX_HH +#include "MatroskaFileServerDemux.hh" +#endif + +class MP3AudioMatroskaFileServerMediaSubsession: public MP3AudioFileServerMediaSubsession { +public: + static MP3AudioMatroskaFileServerMediaSubsession* + createNew(MatroskaFileServerDemux& demux, MatroskaTrack* track, + Boolean generateADUs = False, Interleaving* interleaving = NULL); + // Note: "interleaving" is used only if "generateADUs" is True, + // (and a value of NULL means 'no interleaving') + +private: + MP3AudioMatroskaFileServerMediaSubsession(MatroskaFileServerDemux& demux, MatroskaTrack* track, + Boolean generateADUs, Interleaving* interleaving); + // called only by createNew(); + virtual ~MP3AudioMatroskaFileServerMediaSubsession(); + +private: // redefined virtual functions + virtual void seekStreamSource(FramedSource* inputSource, double& seekNPT, double streamDuration, u_int64_t& numBytes); + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + +private: + MatroskaFileServerDemux& fOurDemux; + unsigned fTrackNumber; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/MP3FileSource.cpp b/AnyCore/lib_rtsp/liveMedia/MP3FileSource.cpp new file mode 100644 index 0000000..7551a9e --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3FileSource.cpp @@ -0,0 +1,178 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// MP3 File Sources +// Implementation + +#include "MP3FileSource.hh" +#include "MP3StreamState.hh" +#include "InputFile.hh" + +////////// MP3FileSource ////////// + +MP3FileSource::MP3FileSource(UsageEnvironment& env, FILE* fid) + : FramedFileSource(env, fid), + fStreamState(new MP3StreamState(env)) { +} + +MP3FileSource::~MP3FileSource() { + delete fStreamState; +} + +char const* MP3FileSource::MIMEtype() const { + return "audio/MPEG"; +} + +MP3FileSource* MP3FileSource::createNew(UsageEnvironment& env, char const* fileName) { + MP3FileSource* newSource = NULL; + + do { + FILE* fid; + + fid = OpenInputFile(env, fileName); + if (fid == NULL) break; + + newSource = new MP3FileSource(env, fid); + if (newSource == NULL) break; + + unsigned fileSize = (unsigned)GetFileSize(fileName, fid); + newSource->assignStream(fid, fileSize); + if (!newSource->initializeStream()) break; + + return newSource; + } while (0); + + Medium::close(newSource); + return NULL; +} + +float MP3FileSource::filePlayTime() const { + return fStreamState->filePlayTime(); +} + +unsigned MP3FileSource::fileSize() const { + return fStreamState->fileSize(); +} + +void MP3FileSource::setPresentationTimeScale(unsigned scale) { + fStreamState->setPresentationTimeScale(scale); +} + +void MP3FileSource::seekWithinFile(double seekNPT, double streamDuration) { + float fileDuration = filePlayTime(); + + // First, make sure that 0.0 <= seekNPT <= seekNPT + 
streamDuration <= fileDuration + if (seekNPT < 0.0) { + seekNPT = 0.0; + } else if (seekNPT > fileDuration) { + seekNPT = fileDuration; + } + if (streamDuration < 0.0) { + streamDuration = 0.0; + } else if (seekNPT + streamDuration > fileDuration) { + streamDuration = fileDuration - seekNPT; + } + + float seekFraction = (float)seekNPT/fileDuration; + unsigned seekByteNumber = fStreamState->getByteNumberFromPositionFraction(seekFraction); + fStreamState->seekWithinFile(seekByteNumber); + + fLimitNumBytesToStream = False; // by default + if (streamDuration > 0.0) { + float endFraction = (float)(seekNPT + streamDuration)/fileDuration; + unsigned endByteNumber = fStreamState->getByteNumberFromPositionFraction(endFraction); + if (endByteNumber > seekByteNumber) { // sanity check + fNumBytesToStream = endByteNumber - seekByteNumber; + fLimitNumBytesToStream = True; + } + } +} + +void MP3FileSource::getAttributes() const { + char buffer[200]; + fStreamState->getAttributes(buffer, sizeof buffer); + envir().setResultMsg(buffer); +} + +void MP3FileSource::doGetNextFrame() { + if (!doGetNextFrame1()) { + handleClosure(); + return; + } + + // Switch to another task: +#if defined(__WIN32__) || defined(_WIN32) + // HACK: liveCaster/lc uses an implementation of scheduleDelayedTask() + // that performs very badly (chewing up lots of CPU time, apparently polling) + // on Windows. Until this is fixed, we just call our "afterGetting()" + // function directly. This avoids infinite recursion, as long as our sink + // is discontinuous, which is the case for the RTP sink that liveCaster/lc + // uses. 
##### + afterGetting(this); +#else + nextTask() = envir().taskScheduler().scheduleDelayedTask(0, + (TaskFunc*)afterGetting, this); +#endif +} + +Boolean MP3FileSource::doGetNextFrame1() { + if (fLimitNumBytesToStream && fNumBytesToStream == 0) return False; // we've already streamed as much as we were asked for + + if (!fHaveJustInitialized) { + if (fStreamState->findNextHeader(fPresentationTime) == 0) return False; + } else { + fPresentationTime = fFirstFramePresentationTime; + fHaveJustInitialized = False; + } + + if (!fStreamState->readFrame(fTo, fMaxSize, fFrameSize, fDurationInMicroseconds)) { + char tmp[200]; + sprintf(tmp, + "Insufficient buffer size %d for reading MPEG audio frame (needed %d)\n", + fMaxSize, fFrameSize); + envir().setResultMsg(tmp); + fFrameSize = fMaxSize; + return False; + } + if (fNumBytesToStream > fFrameSize) fNumBytesToStream -= fFrameSize; else fNumBytesToStream = 0; + + return True; +} + +void MP3FileSource::assignStream(FILE* fid, unsigned fileSize) { + fStreamState->assignStream(fid, fileSize); +} + + +Boolean MP3FileSource::initializeStream() { + // Make sure the file has an appropriate header near the start: + if (fStreamState->findNextHeader(fFirstFramePresentationTime) == 0) { + envir().setResultMsg("not an MPEG audio file"); + return False; + } + + fStreamState->checkForXingHeader(); // in case this is a VBR file + + fHaveJustInitialized = True; + fLimitNumBytesToStream = False; + fNumBytesToStream = 0; + + // Hack: It's possible that our environment's 'result message' has been + // reset within this function, so set it again to our name now: + envir().setResultMsg(name()); + return True; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MP3Internals.cpp b/AnyCore/lib_rtsp/liveMedia/MP3Internals.cpp new file mode 100644 index 0000000..2bb71f2 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3Internals.cpp @@ -0,0 +1,808 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the 
GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// MP3 internal implementation details +// Implementation + +#include "MP3InternalsHuffman.hh" + +#include +#include +#include +#include + +// This is crufty old code that needs to be cleaned up ##### + +static unsigned const live_tabsel[2][3][16] = { + { {32,32,64,96,128,160,192,224,256,288,320,352,384,416,448,448}, + {32,32,48,56, 64, 80, 96,112,128,160,192,224,256,320,384,384}, + {32,32,40,48, 56, 64, 80, 96,112,128,160,192,224,256,320,320} }, + + { {32,32,48,56,64,80,96,112,128,144,160,176,192,224,256,256}, + {8,8,16,24,32,40,48,56,64,80,96,112,128,144,160,160}, + {8,8,16,24,32,40,48,56,64,80,96,112,128,144,160,160} } +}; +/* Note: live_tabsel[*][*][0 or 15] shouldn't occur; use dummy values there */ + +static long const live_freqs[] += { 44100, 48000, 32000, 22050, 24000, 16000, 11025, 12000, 8000, 0 }; + +struct bandInfoStruct { + int longIdx[23]; + int longDiff[22]; + int shortIdx[14]; + int shortDiff[13]; +}; + +static struct bandInfoStruct const bandInfo[7] = { +/* MPEG 1.0 */ + { {0,4,8,12,16,20,24,30,36,44,52,62,74, 90,110,134,162,196,238,288,342,418,576}, + {4,4,4,4,4,4,6,6,8, 8,10,12,16,20,24,28,34,42,50,54, 76,158}, + {0,4*3,8*3,12*3,16*3,22*3,30*3,40*3,52*3,66*3, 84*3,106*3,136*3,192*3}, + {4,4,4,4,6,8,10,12,14,18,22,30,56} } , + + { 
{0,4,8,12,16,20,24,30,36,42,50,60,72, 88,106,128,156,190,230,276,330,384,576}, + {4,4,4,4,4,4,6,6,6, 8,10,12,16,18,22,28,34,40,46,54, 54,192}, + {0,4*3,8*3,12*3,16*3,22*3,28*3,38*3,50*3,64*3, 80*3,100*3,126*3,192*3}, + {4,4,4,4,6,6,10,12,14,16,20,26,66} } , + + { {0,4,8,12,16,20,24,30,36,44,54,66,82,102,126,156,194,240,296,364,448,550,576} , + {4,4,4,4,4,4,6,6,8,10,12,16,20,24,30,38,46,56,68,84,102, 26} , + {0,4*3,8*3,12*3,16*3,22*3,30*3,42*3,58*3,78*3,104*3,138*3,180*3,192*3} , + {4,4,4,4,6,8,12,16,20,26,34,42,12} } , + +/* MPEG 2.0 */ + { {0,6,12,18,24,30,36,44,54,66,80,96,116,140,168,200,238,284,336,396,464,522,576}, + {6,6,6,6,6,6,8,10,12,14,16,20,24,28,32,38,46,52,60,68,58,54 } , + {0,4*3,8*3,12*3,18*3,24*3,32*3,42*3,56*3,74*3,100*3,132*3,174*3,192*3} , + {4,4,4,6,6,8,10,14,18,26,32,42,18 } } , + + { {0,6,12,18,24,30,36,44,54,66,80,96,114,136,162,194,232,278,330,394,464,540,576}, + {6,6,6,6,6,6,8,10,12,14,16,18,22,26,32,38,46,52,64,70,76,36 } , + {0,4*3,8*3,12*3,18*3,26*3,36*3,48*3,62*3,80*3,104*3,136*3,180*3,192*3} , + {4,4,4,6,8,10,12,14,18,24,32,44,12 } } , + + { {0,6,12,18,24,30,36,44,54,66,80,96,116,140,168,200,238,284,336,396,464,522,576}, + {6,6,6,6,6,6,8,10,12,14,16,20,24,28,32,38,46,52,60,68,58,54 }, + {0,4*3,8*3,12*3,18*3,26*3,36*3,48*3,62*3,80*3,104*3,134*3,174*3,192*3}, + {4,4,4,6,8,10,12,14,18,24,30,40,18 } } , + +/* MPEG 2.5, wrong! 
table (it's just a copy of MPEG 2.0/44.1kHz) */ + { {0,6,12,18,24,30,36,44,54,66,80,96,116,140,168,200,238,284,336,396,464,522,576}, + {6,6,6,6,6,6,8,10,12,14,16,20,24,28,32,38,46,52,60,68,58,54 } , + {0,4*3,8*3,12*3,18*3,24*3,32*3,42*3,56*3,74*3,100*3,132*3,174*3,192*3} , + {4,4,4,6,6,8,10,14,18,26,32,42,18 } } , +}; + +unsigned int n_slen2[512]; /* MPEG 2.0 slen for 'normal' mode */ +unsigned int i_slen2[256]; /* MPEG 2.0 slen for intensity stereo */ + +#define MPG_MD_MONO 3 + + +////////// MP3FrameParams ////////// + +MP3FrameParams::MP3FrameParams() + : bv(frameBytes, 0, sizeof frameBytes) /* by default */ { + oldHdr = firstHdr = 0; + + static Boolean doneInit = False; + if (doneInit) return; + + int i,j,k,l; + + for (i=0;i<5;i++) { + for (j=0;j<6;j++) { + for (k=0;k<6;k++) { + int n = k + j * 6 + i * 36; + i_slen2[n] = i|(j<<3)|(k<<6)|(3<<12); + } + } + } + for (i=0;i<4;i++) { + for (j=0;j<4;j++) { + for (k=0;k<4;k++) { + int n = k + j * 4 + i * 16; + i_slen2[n+180] = i|(j<<3)|(k<<6)|(4<<12); + } + } + } + for (i=0;i<4;i++) { + for (j=0;j<3;j++) { + int n = j + i * 3; + i_slen2[n+244] = i|(j<<3) | (5<<12); + n_slen2[n+500] = i|(j<<3) | (2<<12) | (1<<15); + } + } + + for (i=0;i<5;i++) { + for (j=0;j<5;j++) { + for (k=0;k<4;k++) { + for (l=0;l<4;l++) { + int n = l + k * 4 + j * 16 + i * 80; + n_slen2[n] = i|(j<<3)|(k<<6)|(l<<9)|(0<<12); + } + } + } + } + for (i=0;i<5;i++) { + for (j=0;j<5;j++) { + for (k=0;k<4;k++) { + int n = k + j * 4 + i * 20; + n_slen2[n+400] = i|(j<<3)|(k<<6)|(1<<12); + } + } + } + doneInit = True; +} + +MP3FrameParams::~MP3FrameParams() { +} + +void MP3FrameParams::setParamsFromHeader() { + if (hdr & (1<<20)) { + isMPEG2 = (hdr & (1<<19)) ? 
0x0 : 0x1; + isMPEG2_5 = 0; + } + else { + isMPEG2 = 1; + isMPEG2_5 = 1; + } + + layer = 4-((hdr>>17)&3); + if (layer == 4) layer = 3; // layer==4 is not allowed + bitrateIndex = ((hdr>>12)&0xf); + + if (isMPEG2_5) { + samplingFreqIndex = ((hdr>>10)&0x3) + 6; + } else { + samplingFreqIndex = ((hdr>>10)&0x3) + (isMPEG2*3); + } + + hasCRC = (hdr & 0x10000) == 0; + + padding = ((hdr>>9)&0x1); + extension = ((hdr>>8)&0x1); + mode = ((hdr>>6)&0x3); + mode_ext = ((hdr>>4)&0x3); + copyright = ((hdr>>3)&0x1); + original = ((hdr>>2)&0x1); + emphasis = hdr & 0x3; + + stereo = (mode == MPG_MD_MONO) ? 1 : 2; + + if (((hdr>>10)&0x3) == 0x3) { +#ifdef DEBUG_ERRORS + fprintf(stderr,"Stream error - hdr: 0x%08x\n", hdr); +#endif + } + + bitrate = live_tabsel[isMPEG2][layer-1][bitrateIndex]; + samplingFreq = live_freqs[samplingFreqIndex]; + isStereo = (stereo > 1); + isFreeFormat = (bitrateIndex == 0); + frameSize + = ComputeFrameSize(bitrate, samplingFreq, padding, isMPEG2, layer); + sideInfoSize = computeSideInfoSize(); + } + +unsigned MP3FrameParams::computeSideInfoSize() { + unsigned size; + + if (isMPEG2) { + size = isStereo ? 17 : 9; + } else { + size = isStereo ? 32 : 17; + } + + if (hasCRC) { + size += 2; + } + + return size; +} + +unsigned ComputeFrameSize(unsigned bitrate, unsigned samplingFreq, + Boolean usePadding, Boolean isMPEG2, + unsigned char layer) { + if (samplingFreq == 0) return 0; + unsigned const bitrateMultiplier = (layer == 1) ? 12000*4 : 144000; + unsigned framesize; + + framesize = bitrate*bitrateMultiplier; + framesize /= samplingFreq<<(isMPEG2 ? 
1 : 0); + framesize = framesize + usePadding - 4; + + return framesize; +} + +#define TRUNC_FAIRLY +static unsigned updateSideInfoSizes(MP3SideInfo& sideInfo, Boolean isMPEG2, + unsigned char const* mainDataPtr, + unsigned allowedNumBits, + unsigned& part23Length0a, + unsigned& part23Length0aTruncation, + unsigned& part23Length0b, + unsigned& part23Length0bTruncation, + unsigned& part23Length1a, + unsigned& part23Length1aTruncation, + unsigned& part23Length1b, + unsigned& part23Length1bTruncation) { + unsigned p23L0, p23L1 = 0, p23L0Trunc = 0, p23L1Trunc = 0; + + p23L0 = sideInfo.ch[0].gr[0].part2_3_length; + p23L1 = isMPEG2 ? 0 : sideInfo.ch[0].gr[1].part2_3_length; +#ifdef TRUNC_ONLY0 + if (p23L0 < allowedNumBits) + allowedNumBits = p23L0; +#endif +#ifdef TRUNC_ONLY1 + if (p23L1 < allowedNumBits) + allowedNumBits = p23L1; +#endif + if (p23L0 + p23L1 > allowedNumBits) { + /* We need to shorten one or both fields */ + unsigned truncation = p23L0 + p23L1 - allowedNumBits; +#ifdef TRUNC_FAIRLY + p23L0Trunc = (truncation*p23L0)/(p23L0 + p23L1); + p23L1Trunc = truncation - p23L0Trunc; +#endif +#if defined(TRUNC_FAVOR0) || defined(TRUNC_ONLY0) + p23L1Trunc = (truncation>p23L1) ? p23L1 : truncation; + p23L0Trunc = truncation - p23L1Trunc; +#endif +#if defined(TRUNC_FAVOR1) || defined(TRUNC_ONLY1) + p23L0Trunc = (truncation>p23L0) ? p23L0 : truncation; + p23L1Trunc = truncation - p23L0Trunc; +#endif + } + + /* ASSERT: (p23L0Trunc <= p23L0) && (p23l1Trunc <= p23L1) */ + p23L0 -= p23L0Trunc; p23L1 -= p23L1Trunc; +#ifdef DEBUG + fprintf(stderr, "updateSideInfoSizes (allowed: %d): %d->%d, %d->%d\n", allowedNumBits, p23L0+p23L0Trunc, p23L0, p23L1+p23L1Trunc, p23L1); +#endif + + // The truncations computed above are still estimates. 
We need to + // adjust them so that the new fields will continue to end on + // Huffman-encoded sample boundaries: + updateSideInfoForHuffman(sideInfo, isMPEG2, mainDataPtr, + p23L0, p23L1, + part23Length0a, part23Length0aTruncation, + part23Length0b, part23Length0bTruncation, + part23Length1a, part23Length1aTruncation, + part23Length1b, part23Length1bTruncation); + p23L0 = part23Length0a + part23Length0b; + p23L1 = part23Length1a + part23Length1b; + + sideInfo.ch[0].gr[0].part2_3_length = p23L0; + sideInfo.ch[0].gr[1].part2_3_length = p23L1; + part23Length0bTruncation + += sideInfo.ch[1].gr[0].part2_3_length; /* allow for stereo */ + sideInfo.ch[1].gr[0].part2_3_length = 0; /* output mono */ + sideInfo.ch[1].gr[1].part2_3_length = 0; /* output mono */ + + return p23L0 + p23L1; +} + + +Boolean GetADUInfoFromMP3Frame(unsigned char const* framePtr, + unsigned totFrameSize, + unsigned& hdr, unsigned& frameSize, + MP3SideInfo& sideInfo, unsigned& sideInfoSize, + unsigned& backpointer, unsigned& aduSize) { + if (totFrameSize < 4) return False; // there's not enough data + + MP3FrameParams fr; + fr.hdr = ((unsigned)framePtr[0] << 24) | ((unsigned)framePtr[1] << 16) + | ((unsigned)framePtr[2] << 8) | (unsigned)framePtr[3]; + fr.setParamsFromHeader(); + fr.setBytePointer(framePtr + 4, totFrameSize - 4); // skip hdr + + frameSize = 4 + fr.frameSize; + + if (fr.layer != 3) { + // Special case for non-layer III frames + backpointer = 0; + sideInfoSize = 0; + aduSize = fr.frameSize; + return True; + } + + sideInfoSize = fr.sideInfoSize; + if (totFrameSize < 4 + sideInfoSize) return False; // not enough data + + fr.getSideInfo(sideInfo); + + hdr = fr.hdr; + backpointer = sideInfo.main_data_begin; + unsigned numBits = sideInfo.ch[0].gr[0].part2_3_length; + numBits += sideInfo.ch[0].gr[1].part2_3_length; + numBits += sideInfo.ch[1].gr[0].part2_3_length; + numBits += sideInfo.ch[1].gr[1].part2_3_length; + aduSize = (numBits+7)/8; +#ifdef DEBUG + fprintf(stderr, 
"mp3GetADUInfoFromFrame: hdr: %08x, frameSize: %d, part2_3_lengths: %d,%d,%d,%d, aduSize: %d, backpointer: %d\n", hdr, frameSize, sideInfo.ch[0].gr[0].part2_3_length, sideInfo.ch[0].gr[1].part2_3_length, sideInfo.ch[1].gr[0].part2_3_length, sideInfo.ch[1].gr[1].part2_3_length, aduSize, backpointer); +#endif + + return True; +} + + +static void getSideInfo1(MP3FrameParams& fr, MP3SideInfo& si, + int stereo, int ms_stereo, long sfreq, + int /*single*/) { + int ch, gr; +#if 0 + int powdiff = (single == 3) ? 4 : 0; +#endif + + /* initialize all four "part2_3_length" fields to zero: */ + si.ch[0].gr[0].part2_3_length = 0; si.ch[1].gr[0].part2_3_length = 0; + si.ch[0].gr[1].part2_3_length = 0; si.ch[1].gr[1].part2_3_length = 0; + + si.main_data_begin = fr.getBits(9); + if (stereo == 1) + si.private_bits = fr.getBits(5); + else + si.private_bits = fr.getBits(3); + + for (ch=0; ch win-sw-flag = 0 */ + gr_info.window_switching_flag = fr.get1Bit(); + if (gr_info.window_switching_flag) { + int i; + gr_info.block_type = fr.getBits(2); + gr_info.mixed_block_flag = fr.get1Bit(); + gr_info.table_select[0] = fr.getBits(5); + gr_info.table_select[1] = fr.getBits(5); + /* + * table_select[2] not needed, because there is no region2, + * but to satisfy some verifications tools we set it either. + */ + gr_info.table_select[2] = 0; + for (i=0;i<3;i++) { + gr_info.subblock_gain[i] = fr.getBits(3); + gr_info.full_gain[i] + = gr_info.pow2gain + ((gr_info.subblock_gain[i])<<3); + } + +#ifdef DEBUG_ERRORS + if (gr_info.block_type == 0) { + fprintf(stderr,"Blocktype == 0 and window-switching == 1 not allowed.\n"); + } +#endif + /* region_count/start parameters are implicit in this case. 
*/ + gr_info.region1start = 36>>1; + gr_info.region2start = 576>>1; + } + else + { + int i,r0c,r1c; + for (i=0; i<3; i++) { + gr_info.table_select[i] = fr.getBits(5); + } + r0c = gr_info.region0_count = fr.getBits(4); + r1c = gr_info.region1_count = fr.getBits(3); + gr_info.region1start = bandInfo[sfreq].longIdx[r0c+1] >> 1 ; + gr_info.region2start = bandInfo[sfreq].longIdx[r0c+1+r1c+1] >> 1; + gr_info.block_type = 0; + gr_info.mixed_block_flag = 0; + } + gr_info.preflag = fr.get1Bit(); + gr_info.scalefac_scale = fr.get1Bit(); + gr_info.count1table_select = fr.get1Bit(); + } + } +} + +static void getSideInfo2(MP3FrameParams& fr, MP3SideInfo& si, + int stereo, int ms_stereo, long sfreq, + int /*single*/) { + int ch; +#if 0 + int powdiff = (single == 3) ? 4 : 0; +#endif + + /* initialize all four "part2_3_length" fields to zero: */ + si.ch[0].gr[0].part2_3_length = 0; si.ch[1].gr[0].part2_3_length = 0; + si.ch[0].gr[1].part2_3_length = 0; si.ch[1].gr[1].part2_3_length = 0; + + si.main_data_begin = fr.getBits(8); + if (stereo == 1) + si.private_bits = fr.get1Bit(); + else + si.private_bits = fr.getBits(2); + + for (ch=0; ch win-sw-flag = 0 */ + gr_info.window_switching_flag = fr.get1Bit(); + if (gr_info.window_switching_flag) { + int i; + gr_info.block_type = fr.getBits(2); + gr_info.mixed_block_flag = fr.get1Bit(); + gr_info.table_select[0] = fr.getBits(5); + gr_info.table_select[1] = fr.getBits(5); + /* + * table_select[2] not needed, because there is no region2, + * but to satisfy some verifications tools we set it either. + */ + gr_info.table_select[2] = 0; + for (i=0;i<3;i++) { + gr_info.subblock_gain[i] = fr.getBits(3); + gr_info.full_gain[i] + = gr_info.pow2gain + ((gr_info.subblock_gain[i])<<3); + } + +#ifdef DEBUG_ERRORS + if (gr_info.block_type == 0) { + fprintf(stderr,"Blocktype == 0 and window-switching == 1 not allowed.\n"); + } +#endif + /* region_count/start parameters are implicit in this case. */ +/* check this again! 
*/ + if (gr_info.block_type == 2) + gr_info.region1start = 36>>1; + else { + gr_info.region1start = 54>>1; + } + gr_info.region2start = 576>>1; + } + else + { + int i,r0c,r1c; + for (i=0; i<3; i++) { + gr_info.table_select[i] = fr.getBits(5); + } + r0c = gr_info.region0_count = fr.getBits(4); + r1c = gr_info.region1_count = fr.getBits(3); + gr_info.region1start = bandInfo[sfreq].longIdx[r0c+1] >> 1 ; + gr_info.region2start = bandInfo[sfreq].longIdx[r0c+1+r1c+1] >> 1; + gr_info.block_type = 0; + gr_info.mixed_block_flag = 0; + } + gr_info.scalefac_scale = fr.get1Bit(); + gr_info.count1table_select = fr.get1Bit(); + } +} + + +#define MPG_MD_JOINT_STEREO 1 + +void MP3FrameParams::getSideInfo(MP3SideInfo& si) { + // First skip over the CRC if present: + if (hasCRC) getBits(16); + + int single = -1; + int ms_stereo; + int sfreq = samplingFreqIndex; + + if (stereo == 1) { + single = 0; + } + + ms_stereo = (mode == MPG_MD_JOINT_STEREO) && (mode_ext & 0x2); + + if (isMPEG2) { + getSideInfo2(*this, si, stereo, ms_stereo, sfreq, single); + } else { + getSideInfo1(*this, si, stereo, ms_stereo, sfreq, single); + } +} + +static void putSideInfo1(BitVector& bv, + MP3SideInfo const& si, Boolean isStereo) { + int ch, gr, i; + int stereo = isStereo ? 
2 : 1; + + bv.putBits(si.main_data_begin,9); + if (stereo == 1) + bv.putBits(si.private_bits, 5); + else + bv.putBits(si.private_bits, 3); + + for (ch=0; ch= bitrate) + return i; + } + + // "bitrate" was larger than any possible, so return the largest possible: + return 14; +} + +static void outputHeader(unsigned char* toPtr, unsigned hdr) { + toPtr[0] = (unsigned char)(hdr>>24); + toPtr[1] = (unsigned char)(hdr>>16); + toPtr[2] = (unsigned char)(hdr>>8); + toPtr[3] = (unsigned char)(hdr); +} + +static void assignADUBackpointer(MP3FrameParams const& fr, + unsigned aduSize, + MP3SideInfo& sideInfo, + unsigned& availableBytesForBackpointer) { + // Give the ADU as large a backpointer as possible: + unsigned maxBackpointerSize = fr.isMPEG2 ? 255 : 511; + + unsigned backpointerSize = availableBytesForBackpointer; + if (backpointerSize > maxBackpointerSize) { + backpointerSize = maxBackpointerSize; + } + + // Store the new backpointer now: + sideInfo.main_data_begin = backpointerSize; + + // Figure out how many bytes are available for the *next* ADU's backpointer: + availableBytesForBackpointer + = backpointerSize + fr.frameSize - fr.sideInfoSize ; + if (availableBytesForBackpointer < aduSize) { + availableBytesForBackpointer = 0; + } else { + availableBytesForBackpointer -= aduSize; + } +} + +unsigned TranscodeMP3ADU(unsigned char const* fromPtr, unsigned fromSize, + unsigned toBitrate, + unsigned char* toPtr, unsigned toMaxSize, + unsigned& availableBytesForBackpointer) { + // Begin by parsing the input ADU's parameters: + unsigned hdr, inFrameSize, inSideInfoSize, backpointer, inAduSize; + MP3SideInfo sideInfo; + if (!GetADUInfoFromMP3Frame(fromPtr, fromSize, + hdr, inFrameSize, sideInfo, inSideInfoSize, + backpointer, inAduSize)) { + return 0; + } + fromPtr += (4+inSideInfoSize); // skip to 'main data' + + // Alter the 4-byte MPEG header to reflect the output ADU: + // (different bitrate; mono; no CRC) + Boolean isMPEG2 = ((hdr&0x00080000) == 0); + unsigned 
toBitrateIndex = MP3BitrateToBitrateIndex(toBitrate, isMPEG2); + hdr &=~ 0xF000; hdr |= (toBitrateIndex<<12); // set bitrate index + hdr |= 0x10200; // turn on !error-prot and padding bits + hdr &=~ 0xC0; hdr |= 0xC0; // set mode to 3 (mono) + + // Set up the rest of the parameters of the new ADU: + MP3FrameParams outFr; + outFr.hdr = hdr; + outFr.setParamsFromHeader(); + + // Figure out how big to make the output ADU: + unsigned inAveAduSize = inFrameSize - inSideInfoSize; + unsigned outAveAduSize = outFr.frameSize - outFr.sideInfoSize; + unsigned desiredOutAduSize /*=inAduSize*outAveAduSize/inAveAduSize*/ + = (2*inAduSize*outAveAduSize + inAveAduSize)/(2*inAveAduSize); + // this rounds to the nearest integer + + if (toMaxSize < (4 + outFr.sideInfoSize)) return 0; + unsigned maxOutAduSize = toMaxSize - (4 + outFr.sideInfoSize); + if (desiredOutAduSize > maxOutAduSize) { + desiredOutAduSize = maxOutAduSize; + } + + // Figure out the new sizes of the various 'part23 lengths', + // and how much they are truncated: + unsigned part23Length0a, part23Length0aTruncation; + unsigned part23Length0b, part23Length0bTruncation; + unsigned part23Length1a, part23Length1aTruncation; + unsigned part23Length1b, part23Length1bTruncation; + unsigned numAduBits + = updateSideInfoSizes(sideInfo, outFr.isMPEG2, + fromPtr, 8*desiredOutAduSize, + part23Length0a, part23Length0aTruncation, + part23Length0b, part23Length0bTruncation, + part23Length1a, part23Length1aTruncation, + part23Length1b, part23Length1bTruncation); +#ifdef DEBUG +fprintf(stderr, "shrinkage %d->%d [(%d,%d),(%d,%d)] (trunc: [(%d,%d),(%d,%d)]) {%d}\n", inAduSize, (numAduBits+7)/8, + part23Length0a, part23Length0b, part23Length1a, part23Length1b, + part23Length0aTruncation, part23Length0bTruncation, + part23Length1aTruncation, part23Length1bTruncation, + maxOutAduSize); +#endif + unsigned actualOutAduSize = (numAduBits+7)/8; + + // Give the new ADU an appropriate 'backpointer': + assignADUBackpointer(outFr, 
actualOutAduSize, sideInfo, availableBytesForBackpointer); + + ///// Now output the new ADU: + + // 4-byte header + outputHeader(toPtr, hdr); toPtr += 4; + + // side info + PutMP3SideInfoIntoFrame(sideInfo, outFr, toPtr); toPtr += outFr.sideInfoSize; + + // 'main data', using the new lengths + unsigned toBitOffset = 0; + unsigned fromBitOffset = 0; + + /* rebuild portion 0a: */ + memmove(toPtr, fromPtr, (part23Length0a+7)/8); + toBitOffset += part23Length0a; + fromBitOffset += part23Length0a + part23Length0aTruncation; + + /* rebuild portion 0b: */ + shiftBits(toPtr, toBitOffset, fromPtr, fromBitOffset, part23Length0b); + toBitOffset += part23Length0b; + fromBitOffset += part23Length0b + part23Length0bTruncation; + + /* rebuild portion 1a: */ + shiftBits(toPtr, toBitOffset, fromPtr, fromBitOffset, part23Length1a); + toBitOffset += part23Length1a; + fromBitOffset += part23Length1a + part23Length1aTruncation; + + /* rebuild portion 1b: */ + shiftBits(toPtr, toBitOffset, fromPtr, fromBitOffset, part23Length1b); + toBitOffset += part23Length1b; + + /* zero out any remaining bits (probably unnecessary, but...) */ + unsigned char const zero = '\0'; + shiftBits(toPtr, toBitOffset, &zero, 0, + actualOutAduSize*8 - numAduBits); + + return 4 + outFr.sideInfoSize + actualOutAduSize; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MP3Internals.hh b/AnyCore/lib_rtsp/liveMedia/MP3Internals.hh new file mode 100644 index 0000000..28472de --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3Internals.hh @@ -0,0 +1,143 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// MP3 internal implementation details +// C++ header + +#ifndef _MP3_INTERNALS_HH +#define _MP3_INTERNALS_HH + +#ifndef _BOOLEAN_HH +#include "Boolean.hh" +#endif +#ifndef _BIT_VECTOR_HH +#include "BitVector.hh" +#endif + +typedef struct MP3SideInfo { + unsigned main_data_begin; + unsigned private_bits; + typedef struct gr_info_s { + int scfsi; + unsigned part2_3_length; + unsigned big_values; + unsigned global_gain; + unsigned scalefac_compress; + unsigned window_switching_flag; + unsigned block_type; + unsigned mixed_block_flag; + unsigned table_select[3]; + unsigned region0_count; + unsigned region1_count; + unsigned subblock_gain[3]; + unsigned maxband[3]; + unsigned maxbandl; + unsigned maxb; + unsigned region1start; + unsigned region2start; + unsigned preflag; + unsigned scalefac_scale; + unsigned count1table_select; + double *full_gain[3]; + double *pow2gain; + } gr_info_s_t; + struct { + gr_info_s_t gr[2]; + } ch[2]; +} MP3SideInfo_t; + +#define SBLIMIT 32 +#define MAX_MP3_FRAME_SIZE 2500 /* also big enough for an 'ADU'ized frame */ + +class MP3FrameParams { +public: + MP3FrameParams(); + ~MP3FrameParams(); + + // 4-byte MPEG header: + unsigned hdr; + + // a buffer that can be used to hold the rest of the frame: + unsigned char frameBytes[MAX_MP3_FRAME_SIZE]; + + // public parameters derived from the header + void setParamsFromHeader(); // this sets them + Boolean isMPEG2; + unsigned layer; // currently only 3 is supported + unsigned bitrate; // in kbps + unsigned samplingFreq; + Boolean isStereo; + Boolean isFreeFormat; + unsigned frameSize; // doesn't include the initial 4-byte header + 
unsigned sideInfoSize; + Boolean hasCRC; + + void setBytePointer(unsigned char const* restOfFrame, + unsigned totNumBytes) {// called during setup + bv.setup((unsigned char*)restOfFrame, 0, 8*totNumBytes); + } + + // other, public parameters used when parsing input (perhaps get rid of) + unsigned oldHdr, firstHdr; + + // Extract (unpack) the side info from the frame into a struct: + void getSideInfo(MP3SideInfo& si); + + // The bit pointer used for reading data from frame data + unsigned getBits(unsigned numBits) { return bv.getBits(numBits); } + unsigned get1Bit() { return bv.get1Bit(); } + +private: + BitVector bv; + + // other, private parameters derived from the header + unsigned bitrateIndex; + unsigned samplingFreqIndex; + Boolean isMPEG2_5; + Boolean padding; + Boolean extension; + unsigned mode; + unsigned mode_ext; + Boolean copyright; + Boolean original; + unsigned emphasis; + unsigned stereo; + +private: + unsigned computeSideInfoSize(); +}; + +unsigned ComputeFrameSize(unsigned bitrate, unsigned samplingFreq, + Boolean usePadding, Boolean isMPEG2, + unsigned char layer); + +Boolean GetADUInfoFromMP3Frame(unsigned char const* framePtr, + unsigned totFrameSize, + unsigned& hdr, unsigned& frameSize, + MP3SideInfo& sideInfo, unsigned& sideInfoSize, + unsigned& backpointer, unsigned& aduSize); + +Boolean ZeroOutMP3SideInfo(unsigned char* framePtr, unsigned totFrameSize, + unsigned newBackpointer); + +unsigned TranscodeMP3ADU(unsigned char const* fromPtr, unsigned fromSize, + unsigned toBitrate, + unsigned char* toPtr, unsigned toMaxSize, + unsigned& availableBytesForBackpointer); + // returns the size of the resulting ADU (0 on failure) + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/MP3InternalsHuffman.cpp b/AnyCore/lib_rtsp/liveMedia/MP3InternalsHuffman.cpp new file mode 100644 index 0000000..e88bde3 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3InternalsHuffman.cpp @@ -0,0 +1,976 @@ +/********** +This library is free software; you can redistribute 
it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// MP3 internal implementation details (Huffman encoding) +// Implementation + +#include "MP3InternalsHuffman.hh" +#include +#include +#include + +MP3HuffmanEncodingInfo +::MP3HuffmanEncodingInfo(Boolean includeDecodedValues) { + if (includeDecodedValues) { + decodedValues = new unsigned[(SBLIMIT*SSLIMIT + 1)*4]; + } else { + decodedValues = NULL; + } +} + +MP3HuffmanEncodingInfo::~MP3HuffmanEncodingInfo() { + delete[] decodedValues; +} + +// This is crufty old code that needs to be cleaned up ##### + +static unsigned debugCount = 0; /* for debugging */ + +#define TRUNC_FAVORa + +void updateSideInfoForHuffman(MP3SideInfo& sideInfo, Boolean isMPEG2, + unsigned char const* mainDataPtr, + unsigned p23L0, unsigned p23L1, + unsigned& part23Length0a, + unsigned& part23Length0aTruncation, + unsigned& part23Length0b, + unsigned& part23Length0bTruncation, + unsigned& part23Length1a, + unsigned& part23Length1aTruncation, + unsigned& part23Length1b, + unsigned& part23Length1bTruncation) { + int i, j; + unsigned sfLength, origTotABsize, adjustment; + MP3SideInfo::gr_info_s_t* gr; + + /* First, Huffman-decode each part of the segment's main data, + to see at which bit-boundaries the samples appear: + */ + 
MP3HuffmanEncodingInfo hei; + + ++debugCount; +#ifdef DEBUG + fprintf(stderr, "usifh-start: p23L0: %d, p23L1: %d\n", p23L0, p23L1); +#endif + + /* Process granule 0 */ + { + gr = &(sideInfo.ch[0].gr[0]); + origTotABsize = gr->part2_3_length; + + MP3HuffmanDecode(gr, isMPEG2, mainDataPtr, 0, origTotABsize, sfLength, hei); + + /* Begin by computing new sizes for parts a & b (& their truncations) */ +#ifdef DEBUG + fprintf(stderr, "usifh-0: %d, %d:%d, %d:%d, %d:%d, %d:%d, %d:%d\n", + hei.numSamples, + sfLength/8, sfLength%8, + hei.reg1Start/8, hei.reg1Start%8, + hei.reg2Start/8, hei.reg2Start%8, + hei.bigvalStart/8, hei.bigvalStart%8, + origTotABsize/8, origTotABsize%8); +#endif + if (p23L0 < sfLength) { + /* We can't use this, so give it all to the next granule: */ + p23L1 += p23L0; + p23L0 = 0; + } + + part23Length0a = hei.bigvalStart; + part23Length0b = origTotABsize - hei.bigvalStart; + part23Length0aTruncation = part23Length0bTruncation = 0; + if (origTotABsize > p23L0) { + /* We need to shorten one or both of fields a & b */ + unsigned truncation = origTotABsize - p23L0; +#ifdef TRUNC_FAIRLY + part23Length0aTruncation = (truncation*(part23Length0a-sfLength)) + /(origTotABsize-sfLength); + part23Length0bTruncation = truncation - part23Length0aTruncation; +#endif +#ifdef TRUNC_FAVORa + part23Length0bTruncation + = (truncation > part23Length0b) ? part23Length0b : truncation; + part23Length0aTruncation = truncation - part23Length0bTruncation; +#endif +#ifdef TRUNC_FAVORb + part23Length0aTruncation = (truncation > part23Length0a-sfLength) + ? 
(part23Length0a-sfLength) : truncation; + part23Length0bTruncation = truncation - part23Length0aTruncation; +#endif + } + /* ASSERT: part23Length0xTruncation <= part23Length0x */ + part23Length0a -= part23Length0aTruncation; + part23Length0b -= part23Length0bTruncation; +#ifdef DEBUG + fprintf(stderr, "usifh-0: interim sizes: %d (%d), %d (%d)\n", + part23Length0a, part23Length0aTruncation, + part23Length0b, part23Length0bTruncation); +#endif + + /* Adjust these new lengths so they end on sample bit boundaries: */ + for (i = 0; i < (int)hei.numSamples; ++i) { + if (hei.allBitOffsets[i] == part23Length0a) break; + else if (hei.allBitOffsets[i] > part23Length0a) {--i; break;} + } + if (i < 0) { /* should happen only if we couldn't fit sfLength */ + i = 0; adjustment = 0; + } else { + adjustment = part23Length0a - hei.allBitOffsets[i]; + } +#ifdef DEBUG + fprintf(stderr, "%d usifh-0: adjustment 1: %d\n", debugCount, adjustment); +#endif + part23Length0a -= adjustment; + part23Length0aTruncation += adjustment; + /* Assign the bits we just shaved to field b and granule 1: */ + if (part23Length0bTruncation < adjustment) { + p23L1 += (adjustment - part23Length0bTruncation); + adjustment = part23Length0bTruncation; + } + part23Length0b += adjustment; + part23Length0bTruncation -= adjustment; + for (j = i; j < (int)hei.numSamples; ++j) { + if (hei.allBitOffsets[j] + == part23Length0a + part23Length0aTruncation + part23Length0b) + break; + else if (hei.allBitOffsets[j] + > part23Length0a + part23Length0aTruncation + part23Length0b) + {--j; break;} + } + if (j < 0) { /* should happen only if we couldn't fit sfLength */ + j = 0; adjustment = 0; + } else { + adjustment = part23Length0a+part23Length0aTruncation+part23Length0b + - hei.allBitOffsets[j]; + } +#ifdef DEBUG + fprintf(stderr, "%d usifh-0: adjustment 2: %d\n", debugCount, adjustment); +#endif + if (adjustment > part23Length0b) adjustment = part23Length0b; /*sanity*/ + part23Length0b -= adjustment; + 
part23Length0bTruncation += adjustment; + /* Assign the bits we just shaved to granule 1 */ + p23L1 += adjustment; + + if (part23Length0aTruncation > 0) { + /* Change the granule's 'big_values' field to reflect the truncation */ + gr->big_values = i; + } + } + + /* Process granule 1 (MPEG-1 only) */ + + if (isMPEG2) { + part23Length1a = part23Length1b = 0; + part23Length1aTruncation = part23Length1bTruncation = 0; + } else { + unsigned granule1Offset + = origTotABsize + sideInfo.ch[1].gr[0].part2_3_length; + + gr = &(sideInfo.ch[0].gr[1]); + origTotABsize = gr->part2_3_length; + + MP3HuffmanDecode(gr, isMPEG2, mainDataPtr, granule1Offset, + origTotABsize, sfLength, hei); + + /* Begin by computing new sizes for parts a & b (& their truncations) */ +#ifdef DEBUG + fprintf(stderr, "usifh-1: %d, %d:%d, %d:%d, %d:%d, %d:%d, %d:%d\n", + hei.numSamples, + sfLength/8, sfLength%8, + hei.reg1Start/8, hei.reg1Start%8, + hei.reg2Start/8, hei.reg2Start%8, + hei.bigvalStart/8, hei.bigvalStart%8, + origTotABsize/8, origTotABsize%8); +#endif + if (p23L1 < sfLength) { + /* We can't use this, so give up on this granule: */ + p23L1 = 0; + } + + part23Length1a = hei.bigvalStart; + part23Length1b = origTotABsize - hei.bigvalStart; + part23Length1aTruncation = part23Length1bTruncation = 0; + if (origTotABsize > p23L1) { + /* We need to shorten one or both of fields a & b */ + unsigned truncation = origTotABsize - p23L1; +#ifdef TRUNC_FAIRLY + part23Length1aTruncation = (truncation*(part23Length1a-sfLength)) + /(origTotABsize-sfLength); + part23Length1bTruncation = truncation - part23Length1aTruncation; +#endif +#ifdef TRUNC_FAVORa + part23Length1bTruncation + = (truncation > part23Length1b) ? part23Length1b : truncation; + part23Length1aTruncation = truncation - part23Length1bTruncation; +#endif +#ifdef TRUNC_FAVORb + part23Length1aTruncation = (truncation > part23Length1a-sfLength) + ? 
(part23Length1a-sfLength) : truncation; + part23Length1bTruncation = truncation - part23Length1aTruncation; +#endif + } + /* ASSERT: part23Length1xTruncation <= part23Length1x */ + part23Length1a -= part23Length1aTruncation; + part23Length1b -= part23Length1bTruncation; +#ifdef DEBUG + fprintf(stderr, "usifh-1: interim sizes: %d (%d), %d (%d)\n", + part23Length1a, part23Length1aTruncation, + part23Length1b, part23Length1bTruncation); +#endif + + /* Adjust these new lengths so they end on sample bit boundaries: */ + for (i = 0; i < (int)hei.numSamples; ++i) { + if (hei.allBitOffsets[i] == part23Length1a) break; + else if (hei.allBitOffsets[i] > part23Length1a) {--i; break;} + } + if (i < 0) { /* should happen only if we couldn't fit sfLength */ + i = 0; adjustment = 0; + } else { + adjustment = part23Length1a - hei.allBitOffsets[i]; + } +#ifdef DEBUG + fprintf(stderr, "%d usifh-1: adjustment 0: %d\n", debugCount, adjustment); +#endif + part23Length1a -= adjustment; + part23Length1aTruncation += adjustment; + /* Assign the bits we just shaved to field b: */ + if (part23Length1bTruncation < adjustment) { + adjustment = part23Length1bTruncation; + } + part23Length1b += adjustment; + part23Length1bTruncation -= adjustment; + for (j = i; j < (int)hei.numSamples; ++j) { + if (hei.allBitOffsets[j] + == part23Length1a + part23Length1aTruncation + part23Length1b) + break; + else if (hei.allBitOffsets[j] + > part23Length1a + part23Length1aTruncation + part23Length1b) + {--j; break;} + } + if (j < 0) { /* should happen only if we couldn't fit sfLength */ + j = 0; adjustment = 0; + } else { + adjustment = part23Length1a+part23Length1aTruncation+part23Length1b + - hei.allBitOffsets[j]; + } +#ifdef DEBUG + fprintf(stderr, "%d usifh-1: adjustment 1: %d\n", debugCount, adjustment); +#endif + if (adjustment > part23Length1b) adjustment = part23Length1b; /*sanity*/ + part23Length1b -= adjustment; + part23Length1bTruncation += adjustment; + + if (part23Length1aTruncation > 0) { + /* 
Change the granule's 'big_values' field to reflect the truncation */ + gr->big_values = i; + } + } +#ifdef DEBUG + fprintf(stderr, "usifh-end, new vals: %d (%d), %d (%d), %d (%d), %d (%d)\n", + part23Length0a, part23Length0aTruncation, + part23Length0b, part23Length0bTruncation, + part23Length1a, part23Length1aTruncation, + part23Length1b, part23Length1bTruncation); +#endif +} + +static void rsf_getline(char* line, unsigned max, unsigned char**fi) { + unsigned i; + for (i = 0; i < max; ++i) { + line[i] = *(*fi)++; + if (line[i] == '\n') { + line[i++] = '\0'; + return; + } + } + line[i] = '\0'; +} + +static void rsfscanf(unsigned char **fi, unsigned int* v) { + while (sscanf((char*)*fi, "%x", v) == 0) { + /* skip past the next '\0' */ + while (*(*fi)++ != '\0') {} + } + + /* skip past any white-space before the value: */ + while (*(*fi) <= ' ') ++(*fi); + + /* skip past the value: */ + while (*(*fi) > ' ') ++(*fi); +} + +#define HUFFBITS unsigned long int +#define SIZEOF_HUFFBITS 4 +#define HTN 34 +#define MXOFF 250 + +struct huffcodetab { + char tablename[3]; /*string, containing table_description */ + unsigned int xlen; /*max. x-index+ */ + unsigned int ylen; /*max. 
y-index+ */ + unsigned int linbits; /*number of linbits */ + unsigned int linmax; /*max number to be stored in linbits */ + int ref; /*a positive value indicates a reference*/ + HUFFBITS *table; /*pointer to array[xlen][ylen] */ + unsigned char *hlen; /*pointer to array[xlen][ylen] */ + unsigned char(*val)[2];/*decoder tree */ + unsigned int treelen; /*length of decoder tree */ +}; + +static struct huffcodetab rsf_ht[HTN]; // array of all huffcodetable headers + /* 0..31 Huffman code table 0..31 */ + /* 32,33 count1-tables */ + +/* read the huffman decoder table */ +static int read_decoder_table(unsigned char* fi) { + int n,i,nn,t; + unsigned int v0,v1; + char command[100],line[100]; + for (n=0;nscalefac_compress]; + int num1 = slen[1][gr_info->scalefac_compress]; + + if (gr_info->block_type == 2) + { + numbits = (num0 + num1) * 18; + + if (gr_info->mixed_block_flag) { + numbits -= num0; /* num0 * 17 + num1 * 18 */ + } + } + else + { + int scfsi = gr_info->scfsi; + + if(scfsi < 0) { /* scfsi < 0 => granule == 0 */ + numbits = (num0 + num1) * 10 + num0; + } + else { + numbits = 0; + if(!(scfsi & 0x8)) { + numbits += num0 * 6; + } + else { + } + + if(!(scfsi & 0x4)) { + numbits += num0 * 5; + } + else { + } + + if(!(scfsi & 0x2)) { + numbits += num1 * 5; + } + else { + } + + if(!(scfsi & 0x1)) { + numbits += num1 * 5; + } + else { + } + } + } + + return numbits; +} + +extern unsigned n_slen2[]; +extern unsigned i_slen2[]; + +static unsigned rsf_get_scale_factors_2(MP3SideInfo::gr_info_s_t *gr_info) { + unsigned char const* pnt; + int i; + unsigned int slen; + int n = 0; + int numbits = 0; + + slen = n_slen2[gr_info->scalefac_compress]; + + gr_info->preflag = (slen>>15) & 0x1; + + n = 0; + if( gr_info->block_type == 2 ) { + n++; + if(gr_info->mixed_block_flag) + n++; + } + + pnt = stab[n][(slen>>12)&0x7]; + + for(i=0;i<4;i++) { + int num = slen & 0x7; + slen >>= 3; + numbits += pnt[i] * num; + } + + return numbits; +} + +static unsigned 
getScaleFactorsLength(MP3SideInfo::gr_info_s_t* gr, + Boolean isMPEG2) { + return isMPEG2 ? rsf_get_scale_factors_2(gr) + : rsf_get_scale_factors_1(gr); +} + +static int rsf_huffman_decoder(BitVector& bv, + struct huffcodetab const* h, + int* x, int* y, int* v, int* w); // forward + +void MP3HuffmanDecode(MP3SideInfo::gr_info_s_t* gr, Boolean isMPEG2, + unsigned char const* fromBasePtr, + unsigned fromBitOffset, unsigned fromLength, + unsigned& scaleFactorsLength, + MP3HuffmanEncodingInfo& hei) { + unsigned i; + int x, y, v, w; + struct huffcodetab *h; + BitVector bv((unsigned char*)fromBasePtr, fromBitOffset, fromLength); + + /* Compute the size of the scale factors (& also advance bv): */ + scaleFactorsLength = getScaleFactorsLength(gr, isMPEG2); + bv.skipBits(scaleFactorsLength); + + initialize_huffman(); + + hei.reg1Start = hei.reg2Start = hei.numSamples = 0; + + /* Read bigvalues area. */ + if (gr->big_values < gr->region1start + gr->region2start) { + gr->big_values = gr->region1start + gr->region2start; /* sanity check */ + } + for (i = 0; i < gr->big_values; ++i) { + if (i < gr->region1start) { + /* in region 0 */ + h = &rsf_ht[gr->table_select[0]]; + } else if (i < gr->region2start) { + /* in region 1 */ + h = &rsf_ht[gr->table_select[1]]; + if (hei.reg1Start == 0) { + hei.reg1Start = bv.curBitIndex(); + } + } else { + /* in region 2 */ + h = &rsf_ht[gr->table_select[2]]; + if (hei.reg2Start == 0) { + hei.reg2Start = bv.curBitIndex(); + } + } + + hei.allBitOffsets[i] = bv.curBitIndex(); + rsf_huffman_decoder(bv, h, &x, &y, &v, &w); + if (hei.decodedValues != NULL) { + // Record the decoded values: + unsigned* ptr = &hei.decodedValues[4*i]; + ptr[0] = x; ptr[1] = y; ptr[2] = v; ptr[3] = w; + } + } + hei.bigvalStart = bv.curBitIndex(); + + /* Read count1 area. 
*/ + h = &rsf_ht[gr->count1table_select+32]; + while (bv.curBitIndex() < bv.totNumBits() && i < SSLIMIT*SBLIMIT) { + hei.allBitOffsets[i] = bv.curBitIndex(); + rsf_huffman_decoder(bv, h, &x, &y, &v, &w); + if (hei.decodedValues != NULL) { + // Record the decoded values: + unsigned* ptr = &hei.decodedValues[4*i]; + ptr[0] = x; ptr[1] = y; ptr[2] = v; ptr[3] = w; + } + ++i; + } + + hei.allBitOffsets[i] = bv.curBitIndex(); + hei.numSamples = i; +} + +HUFFBITS dmask = 1 << (SIZEOF_HUFFBITS*8-1); +unsigned int hs = SIZEOF_HUFFBITS*8; + +/* do the huffman-decoding */ +static int rsf_huffman_decoder(BitVector& bv, + struct huffcodetab const* h, // ptr to huffman code record + /* unsigned */ int *x, // returns decoded x value + /* unsigned */ int *y, // returns decoded y value + int* v, int* w) { + HUFFBITS level; + unsigned point = 0; + int error = 1; + level = dmask; + *x = *y = *v = *w = 0; + if (h->val == NULL) return 2; + + /* table 0 needs no bits */ + if (h->treelen == 0) return 0; + + /* Lookup in Huffman table. */ + + do { + if (h->val[point][0]==0) { /*end of tree*/ + *x = h->val[point][1] >> 4; + *y = h->val[point][1] & 0xf; + + error = 0; + break; + } + if (bv.get1Bit()) { + while (h->val[point][1] >= MXOFF) point += h->val[point][1]; + point += h->val[point][1]; + } + else { + while (h->val[point][0] >= MXOFF) point += h->val[point][0]; + point += h->val[point][0]; + } + level >>= 1; + } while (level || (point < h->treelen) ); +///// } while (level || (point < rsf_ht->treelen) ); + + /* Check for error. */ + + if (error) { /* set x and y to a medium value as a simple concealment */ + printf("Illegal Huffman code in data.\n"); + *x = ((h->xlen-1) << 1); + *y = ((h->ylen-1) << 1); + } + + /* Process sign encodings for quadruples tables. 
*/ + + if (h->tablename[0] == '3' + && (h->tablename[1] == '2' || h->tablename[1] == '3')) { + *v = (*y>>3) & 1; + *w = (*y>>2) & 1; + *x = (*y>>1) & 1; + *y = *y & 1; + + if (*v) + if (bv.get1Bit() == 1) *v = -*v; + if (*w) + if (bv.get1Bit() == 1) *w = -*w; + if (*x) + if (bv.get1Bit() == 1) *x = -*x; + if (*y) + if (bv.get1Bit() == 1) *y = -*y; + } + + /* Process sign and escape encodings for dual tables. */ + + else { + if (h->linbits) + if ((h->xlen-1) == (unsigned)*x) + *x += bv.getBits(h->linbits); + if (*x) + if (bv.get1Bit() == 1) *x = -*x; + if (h->linbits) + if ((h->ylen-1) == (unsigned)*y) + *y += bv.getBits(h->linbits); + if (*y) + if (bv.get1Bit() == 1) *y = -*y; + } + + return error; +} + +#ifdef DO_HUFFMAN_ENCODING +inline int getNextSample(unsigned char const*& fromPtr) { + int sample +#ifdef FOUR_BYTE_SAMPLES + = (fromPtr[0]<<24) | (fromPtr[1]<<16) | (fromPtr[2]<<8) | fromPtr[3]; +#else +#ifdef TWO_BYTE_SAMPLES + = (fromPtr[0]<<8) | fromPtr[1]; +#else + // ONE_BYTE_SAMPLES + = fromPtr[0]; +#endif +#endif + fromPtr += BYTES_PER_SAMPLE_VALUE; + return sample; +} + +static void rsf_huffman_encoder(BitVector& bv, + struct huffcodetab* h, + int x, int y, int v, int w); // forward + +unsigned MP3HuffmanEncode(MP3SideInfo::gr_info_s_t const* gr, + unsigned char const* fromPtr, + unsigned char* toPtr, unsigned toBitOffset, + unsigned numHuffBits) { + unsigned i; + struct huffcodetab *h; + int x, y, v, w; + BitVector bv(toPtr, toBitOffset, numHuffBits); + + initialize_huffman(); + + // Encode big_values area: + unsigned big_values = gr->big_values; + if (big_values < gr->region1start + gr->region2start) { + big_values = gr->region1start + gr->region2start; /* sanity check */ + } + for (i = 0; i < big_values; ++i) { + if (i < gr->region1start) { + /* in region 0 */ + h = &rsf_ht[gr->table_select[0]]; + } else if (i < gr->region2start) { + /* in region 1 */ + h = &rsf_ht[gr->table_select[1]]; + } else { + /* in region 2 */ + h = &rsf_ht[gr->table_select[2]]; 
+ } + + x = getNextSample(fromPtr); + y = getNextSample(fromPtr); + v = getNextSample(fromPtr); + w = getNextSample(fromPtr); + rsf_huffman_encoder(bv, h, x, y, v, w); + } + + // Encode count1 area: + h = &rsf_ht[gr->count1table_select+32]; + while (bv.curBitIndex() < bv.totNumBits() && i < SSLIMIT*SBLIMIT) { + x = getNextSample(fromPtr); + y = getNextSample(fromPtr); + v = getNextSample(fromPtr); + w = getNextSample(fromPtr); + rsf_huffman_encoder(bv, h, x, y, v, w); + ++i; + } + + return i; +} + +static Boolean lookupHuffmanTableEntry(struct huffcodetab const* h, + HUFFBITS bits, unsigned bitsLength, + unsigned char& xy) { + unsigned point = 0; + unsigned mask = 1; + unsigned numBitsTestedSoFar = 0; + do { + if (h->val[point][0]==0) { // end of tree + xy = h->val[point][1]; + if (h->hlen[xy] == 0) { // this entry hasn't already been used + h->table[xy] = bits; + h->hlen[xy] = bitsLength; + return True; + } else { // this entry has already been seen + return False; + } + } + + if (numBitsTestedSoFar++ == bitsLength) { + // We don't yet have enough bits for this prefix + return False; + } + if (bits&mask) { + while (h->val[point][1] >= MXOFF) point += h->val[point][1]; + point += h->val[point][1]; + } else { + while (h->val[point][0] >= MXOFF) point += h->val[point][0]; + point += h->val[point][0]; + } + mask <<= 1; + } while (mask || (point < h->treelen)); + + return False; +} + +static void buildHuffmanEncodingTable(struct huffcodetab* h) { + h->table = new unsigned long[256]; + h->hlen = new unsigned char[256]; + if (h->table == NULL || h->hlen == NULL) { h->table = NULL; return; } + for (unsigned i = 0; i < 256; ++i) { + h->table[i] = 0; h->hlen[i] = 0; + } + + // Look up entries for each possible bit sequence length: + unsigned maxNumEntries = h->xlen * h->ylen; + unsigned numEntries = 0; + unsigned powerOf2 = 1; + for (unsigned bitsLength = 1; + bitsLength <= 8*SIZEOF_HUFFBITS; ++bitsLength) { + powerOf2 *= 2; + for (HUFFBITS bits = 0; bits < powerOf2; 
++bits) { + // Find the table value - if any - for 'bits' (length 'bitsLength'): + unsigned char xy; + if (lookupHuffmanTableEntry(h, bits, bitsLength, xy)) { + ++numEntries; + if (numEntries == maxNumEntries) return; // we're done + } + } + } +#ifdef DEBUG + fprintf(stderr, "Didn't find enough entries!\n"); // shouldn't happen +#endif +} + +static void lookupXYandPutBits(BitVector& bv, struct huffcodetab const* h, + unsigned char xy) { + HUFFBITS bits = h->table[xy]; + unsigned bitsLength = h->hlen[xy]; + + // Note that "bits" is in reverse order, so read them from right-to-left: + while (bitsLength-- > 0) { + bv.put1Bit(bits&0x00000001); + bits >>= 1; + } +} + +static void putLinbits(BitVector& bv, struct huffcodetab const* h, + HUFFBITS bits) { + bv.putBits(bits, h->linbits); +} + +static void rsf_huffman_encoder(BitVector& bv, + struct huffcodetab* h, + int x, int y, int v, int w) { + if (h->val == NULL) return; + + /* table 0 produces no bits */ + if (h->treelen == 0) return; + + if (h->table == NULL) { + // We haven't yet built the encoding array for this table; do it now: + buildHuffmanEncodingTable(h); + if (h->table == NULL) return; + } + + Boolean xIsNeg = False, yIsNeg = False, vIsNeg = False, wIsNeg = False; + unsigned char xy; + +#ifdef FOUR_BYTE_SAMPLES +#else +#ifdef TWO_BYTE_SAMPLES + // Convert 2-byte negative numbers to their 4-byte equivalents: + if (x&0x8000) x |= 0xFFFF0000; + if (y&0x8000) y |= 0xFFFF0000; + if (v&0x8000) v |= 0xFFFF0000; + if (w&0x8000) w |= 0xFFFF0000; +#else + // ONE_BYTE_SAMPLES + // Convert 1-byte negative numbers to their 4-byte equivalents: + if (x&0x80) x |= 0xFFFFFF00; + if (y&0x80) y |= 0xFFFFFF00; + if (v&0x80) v |= 0xFFFFFF00; + if (w&0x80) w |= 0xFFFFFF00; +#endif +#endif + + if (h->tablename[0] == '3' + && (h->tablename[1] == '2' || h->tablename[1] == '3')) {// quad tables + if (x < 0) { xIsNeg = True; x = -x; } + if (y < 0) { yIsNeg = True; y = -y; } + if (v < 0) { vIsNeg = True; v = -v; } + if (w < 0) { wIsNeg 
= True; w = -w; } + + // Sanity check: x,y,v,w must all be 0 or 1: + if (x>1 || y>1 || v>1 || w>1) { +#ifdef DEBUG + fprintf(stderr, "rsf_huffman_encoder quad sanity check fails: %x,%x,%x,%x\n", x, y, v, w); +#endif + } + + xy = (v<<3)|(w<<2)|(x<<1)|y; + lookupXYandPutBits(bv, h, xy); + + if (v) bv.put1Bit(vIsNeg); + if (w) bv.put1Bit(wIsNeg); + if (x) bv.put1Bit(xIsNeg); + if (y) bv.put1Bit(yIsNeg); + } else { // dual tables + // Sanity check: v and w must be 0: + if (v != 0 || w != 0) { +#ifdef DEBUG + fprintf(stderr, "rsf_huffman_encoder dual sanity check 1 fails: %x,%x,%x,%x\n", x, y, v, w); +#endif + } + + if (x < 0) { xIsNeg = True; x = -x; } + if (y < 0) { yIsNeg = True; y = -y; } + + // Sanity check: x and y must be <= 255: + if (x > 255 || y > 255) { +#ifdef DEBUG + fprintf(stderr, "rsf_huffman_encoder dual sanity check 2 fails: %x,%x,%x,%x\n", x, y, v, w); +#endif + } + + int xl1 = h->xlen-1; + int yl1 = h->ylen-1; + unsigned linbitsX = 0; unsigned linbitsY = 0; + + if (((x < xl1) || (xl1 == 0)) && (y < yl1)) { + // normal case + xy = (x<<4)|y; + lookupXYandPutBits(bv, h, xy); + if (x) bv.put1Bit(xIsNeg); + if (y) bv.put1Bit(yIsNeg); + } else if (x >= xl1) { + linbitsX = (unsigned)(x - xl1); + if (linbitsX > h->linmax) { +#ifdef DEBUG + fprintf(stderr,"warning: Huffman X table overflow\n"); +#endif + linbitsX = h->linmax; + }; + + if (y >= yl1) { + xy = (xl1<<4)|yl1; + lookupXYandPutBits(bv, h, xy); + linbitsY = (unsigned)(y - yl1); + if (linbitsY > h->linmax) { +#ifdef DEBUG + fprintf(stderr,"warning: Huffman Y table overflow\n"); +#endif + linbitsY = h->linmax; + }; + + if (h->linbits) putLinbits(bv, h, linbitsX); + if (x) bv.put1Bit(xIsNeg); + if (h->linbits) putLinbits(bv, h, linbitsY); + if (y) bv.put1Bit(yIsNeg); + } else { /* x >= h->xlen, y < h->ylen */ + xy = (xl1<<4)|y; + lookupXYandPutBits(bv, h, xy); + if (h->linbits) putLinbits(bv, h, linbitsX); + if (x) bv.put1Bit(xIsNeg); + if (y) bv.put1Bit(yIsNeg); + } + } else { /* ((x < h->xlen) && (y 
>= h->ylen)) */ + xy = (x<<4)|yl1; + lookupXYandPutBits(bv, h, xy); + linbitsY = y-yl1; + if (linbitsY > h->linmax) { +#ifdef DEBUG + fprintf(stderr,"warning: Huffman Y table overflow\n"); +#endif + linbitsY = h->linmax; + }; + if (x) bv.put1Bit(xIsNeg); + if (h->linbits) putLinbits(bv, h, linbitsY); + if (y) bv.put1Bit(yIsNeg); + } + } +} +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/MP3InternalsHuffman.hh b/AnyCore/lib_rtsp/liveMedia/MP3InternalsHuffman.hh new file mode 100644 index 0000000..926cfab --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3InternalsHuffman.hh @@ -0,0 +1,82 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// MP3 internal implementation details (Huffman encoding) +// C++ header + +#ifndef _MP3_INTERNALS_HUFFMAN_HH +#define _MP3_INTERNALS_HUFFMAN_HH + +#ifndef _MP3_INTERNALS_HH +#include "MP3Internals.hh" +#endif + +void updateSideInfoForHuffman(MP3SideInfo& sideInfo, Boolean isMPEG2, + unsigned char const* mainDataPtr, + unsigned p23L0, unsigned p23L1, + unsigned& part23Length0a, + unsigned& part23Length0aTruncation, + unsigned& part23Length0b, + unsigned& part23Length0bTruncation, + unsigned& part23Length1a, + unsigned& part23Length1aTruncation, + unsigned& part23Length1b, + unsigned& part23Length1bTruncation); + +#define SSLIMIT 18 + +class MP3HuffmanEncodingInfo { +public: + MP3HuffmanEncodingInfo(Boolean includeDecodedValues = False); + ~MP3HuffmanEncodingInfo(); + +public: + unsigned numSamples; + unsigned allBitOffsets[SBLIMIT*SSLIMIT + 1]; + unsigned reg1Start, reg2Start, bigvalStart; /* special bit offsets */ + unsigned* decodedValues; +}; + +/* forward */ +void MP3HuffmanDecode(MP3SideInfo::gr_info_s_t* gr, Boolean isMPEG2, + unsigned char const* fromBasePtr, + unsigned fromBitOffset, unsigned fromLength, + unsigned& scaleFactorsLength, + MP3HuffmanEncodingInfo& hei); + +extern unsigned char huffdec[]; // huffman table data + +// The following are used if we process Huffman-decoded values +#ifdef FOUR_BYTE_SAMPLES +#define BYTES_PER_SAMPLE_VALUE 4 +#else +#ifdef TWO_BYTE_SAMPLES +#define BYTES_PER_SAMPLE_VALUE 2 +#else +// ONE_BYTE_SAMPLES +#define BYTES_PER_SAMPLE_VALUE 1 +#endif +#endif + +#ifdef DO_HUFFMAN_ENCODING +unsigned MP3HuffmanEncode(MP3SideInfo::gr_info_s_t const* gr, + unsigned char const* fromPtr, + unsigned char* toPtr, unsigned toBitOffset, + unsigned numHuffBits); +#endif + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/MP3InternalsHuffmanTable.cpp b/AnyCore/lib_rtsp/liveMedia/MP3InternalsHuffmanTable.cpp new file mode 100644 index 0000000..8ba869d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3InternalsHuffmanTable.cpp @@ -0,0 +1,1548 
@@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// MP3 internal implementation details (Huffman encoding) +// Table + +#include "MP3InternalsHuffman.hh" + +unsigned char huffdec[] = { +0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x20, 0x30, 0x20, 0x20, +0x20, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x0a, +0x2e, 0x74, 0x72, 0x65, 0x65, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x0a, 0x2e, +0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x20, 0x31, 0x20, 0x20, 0x20, 0x37, +0x20, 0x20, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x30, 0x0a, 0x2e, 0x74, +0x72, 0x65, 0x65, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, +0x31, 0x20, 0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x37, 0x20, 0x20, 0x33, 0x20, 0x20, 0x33, 0x20, +0x20, 0x30, 0x0a, 0x2e, 0x74, 0x72, 0x65, 0x65, 0x64, 0x61, 0x74, 0x61, +0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 
+0x20, 0x20, 0x30, 0x20, 0x31, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x31, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x32, 0x30, 0x20, 0x20, 0x30, 0x20, 0x32, 0x31, +0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, +0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, +0x32, 0x20, 0x20, 0x30, 0x20, 0x32, 0x32, 0x20, 0x0a, 0x0a, 0x2e, 0x74, +0x61, 0x62, 0x6c, 0x65, 0x20, 0x20, 0x33, 0x20, 0x20, 0x31, 0x37, 0x20, +0x20, 0x33, 0x20, 0x20, 0x33, 0x20, 0x20, 0x30, 0x0a, 0x2e, 0x74, 0x72, +0x65, 0x65, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, +0x20, 0x20, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x31, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x31, 0x30, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x30, +0x20, 0x20, 0x30, 0x20, 0x32, 0x31, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, +0x32, 0x20, 0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x20, +0x34, 0x20, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, +0x20, 0x30, 0x0a, 0x2e, 0x74, 0x72, 0x65, 0x65, 0x64, 0x61, 0x74, 0x61, +0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x20, 0x35, 0x20, +0x20, 0x33, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x34, 0x20, 0x20, 0x30, +0x0a, 0x2e, 0x74, 0x72, 0x65, 0x65, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x31, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 
0x20, 0x20, 0x30, 0x20, 0x31, 0x31, 0x20, 0x20, +0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x30, 0x20, 0x0a, +0x20, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x32, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x32, 0x20, +0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x32, 0x20, +0x20, 0x30, 0x20, 0x33, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x20, 0x33, 0x20, 0x20, 0x30, 0x20, 0x31, 0x33, 0x20, +0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x32, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x33, +0x20, 0x20, 0x30, 0x20, 0x33, 0x33, 0x20, 0x0a, 0x0a, 0x2e, 0x74, 0x61, +0x62, 0x6c, 0x65, 0x20, 0x20, 0x36, 0x20, 0x20, 0x33, 0x31, 0x20, 0x20, +0x34, 0x20, 0x20, 0x34, 0x20, 0x20, 0x30, 0x0a, 0x2e, 0x74, 0x72, 0x65, +0x65, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, 0x31, 0x30, 0x20, +0x20, 0x30, 0x20, 0x31, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x30, 0x20, +0x20, 0x30, 0x20, 0x32, 0x31, 0x20, 0x0a, 0x20, 0x36, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x32, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x32, +0x20, 0x20, 0x30, 0x20, 0x32, 0x32, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x31, +0x20, 0x20, 0x30, 0x20, 0x31, 0x33, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 
0x20, 0x30, 0x20, 0x33, +0x30, 0x20, 0x20, 0x30, 0x20, 0x33, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x33, 0x20, 0x20, 0x30, 0x20, 0x33, +0x33, 0x20, 0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x20, +0x37, 0x20, 0x20, 0x37, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x36, 0x20, +0x20, 0x30, 0x0a, 0x2e, 0x74, 0x72, 0x65, 0x65, 0x64, 0x61, 0x74, 0x61, +0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x31, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x31, +0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x31, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x30, +0x20, 0x0a, 0x20, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, +0x31, 0x20, 0x31, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, +0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, +0x32, 0x20, 0x20, 0x30, 0x20, 0x33, 0x30, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, +0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x31, 0x33, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x20, 0x33, 0x20, 0x20, 0x30, 0x20, 0x33, 0x32, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x33, 0x20, 0x20, 0x30, 0x20, +0x20, 0x34, 0x20, 0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x34, 0x30, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x34, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x34, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x32, 0x20, 0x20, 0x30, 
+0x20, 0x32, 0x34, 0x20, 0x20, 0x63, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, +0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x33, 0x20, 0x20, 0x30, +0x20, 0x34, 0x33, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x35, 0x30, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x33, 0x34, 0x20, 0x20, 0x30, 0x20, 0x20, 0x35, 0x20, 0x20, +0x30, 0x20, 0x35, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x35, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x32, 0x20, 0x20, +0x30, 0x20, 0x32, 0x35, 0x20, 0x0a, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x34, 0x20, +0x20, 0x30, 0x20, 0x33, 0x35, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x33, 0x20, +0x20, 0x30, 0x20, 0x35, 0x34, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x34, 0x35, 0x20, 0x20, 0x30, 0x20, 0x35, 0x35, 0x20, +0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x20, 0x38, 0x20, +0x20, 0x37, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x36, 0x20, 0x20, 0x30, +0x0a, 0x2e, 0x74, 0x72, 0x65, 0x65, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, +0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x31, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x31, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x32, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x32, 0x20, 0x0a, +0x20, 0x65, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x30, 0x20, +0x20, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x32, 
0x32, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x30, 0x20, +0x20, 0x30, 0x20, 0x20, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x0a, 0x20, 0x30, 0x20, 0x33, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x33, +0x20, 0x20, 0x65, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x33, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, 0x33, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x30, +0x20, 0x20, 0x30, 0x20, 0x20, 0x34, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x0a, 0x20, 0x30, 0x20, 0x34, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x34, 0x20, 0x20, 0x30, 0x20, 0x34, +0x32, 0x20, 0x20, 0x63, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, +0x34, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, +0x33, 0x20, 0x20, 0x30, 0x20, 0x35, 0x30, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x34, 0x33, 0x20, 0x20, 0x30, 0x20, 0x33, 0x34, 0x20, 0x20, 0x30, 0x20, +0x35, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x35, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x35, 0x20, 0x20, 0x30, 0x20, +0x35, 0x32, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x32, 0x35, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x34, 0x20, 0x20, 0x30, +0x20, 0x33, 0x35, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x35, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x34, 0x35, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x35, 0x34, 0x20, 0x20, 0x30, 0x20, 0x35, 0x35, 0x20, 0x0a, 0x0a, +0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x20, 
0x39, 0x20, 0x20, 0x37, +0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x36, 0x20, 0x20, 0x30, 0x0a, 0x2e, +0x74, 0x72, 0x65, 0x65, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x38, 0x20, +0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, +0x31, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x31, 0x20, 0x20, 0x61, 0x20, +0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x30, 0x20, 0x0a, 0x20, 0x30, +0x20, 0x32, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x31, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x20, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, 0x32, 0x20, 0x20, 0x63, +0x20, 0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x33, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x33, 0x20, 0x0a, 0x20, +0x30, 0x20, 0x33, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x31, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x33, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, 0x33, 0x20, 0x20, +0x63, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x31, 0x20, 0x20, +0x30, 0x20, 0x31, 0x34, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x0a, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x30, 0x20, +0x20, 0x30, 0x20, 0x33, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x34, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, 0x34, 0x20, +0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x20, 0x34, 0x20, 0x20, 0x30, 0x20, 0x35, 0x30, 0x20, +0x0a, 0x20, 0x30, 0x20, 0x34, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 
+0x20, 0x20, 0x30, 0x20, 0x33, 0x34, 0x20, 0x20, 0x30, 0x20, 0x35, 0x31, +0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x35, +0x20, 0x20, 0x30, 0x20, 0x35, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x32, 0x35, 0x20, 0x20, 0x30, 0x20, 0x34, 0x34, +0x20, 0x0a, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, +0x35, 0x20, 0x20, 0x30, 0x20, 0x35, 0x34, 0x20, 0x20, 0x30, 0x20, 0x35, +0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, +0x35, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, +0x35, 0x20, 0x20, 0x30, 0x20, 0x35, 0x35, 0x20, 0x0a, 0x0a, 0x2e, 0x74, +0x61, 0x62, 0x6c, 0x65, 0x20, 0x31, 0x30, 0x20, 0x31, 0x32, 0x37, 0x20, +0x20, 0x38, 0x20, 0x20, 0x38, 0x20, 0x20, 0x30, 0x0a, 0x2e, 0x74, 0x72, +0x65, 0x65, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x30, +0x20, 0x20, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x61, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x31, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x32, 0x30, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x20, +0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, +0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x32, 0x20, 0x31, 0x63, 0x20, 0x20, +0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, +0x32, 0x20, 0x20, 0x30, 0x20, 0x33, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, +0x31, 0x33, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 
0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x20, 0x33, 0x20, 0x20, 0x30, 0x20, 0x33, 0x32, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x33, 0x20, 0x20, 0x30, 0x20, +0x34, 0x30, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x31, 0x20, 0x0a, 0x20, 0x30, +0x20, 0x31, 0x34, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x34, 0x20, 0x20, 0x30, +0x20, 0x33, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x34, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, 0x34, 0x20, 0x31, 0x63, +0x20, 0x20, 0x31, 0x20, 0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, +0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x30, 0x20, 0x20, +0x30, 0x20, 0x20, 0x35, 0x20, 0x20, 0x30, 0x20, 0x36, 0x30, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x31, 0x20, 0x20, +0x30, 0x20, 0x31, 0x36, 0x20, 0x20, 0x63, 0x20, 0x20, 0x31, 0x20, 0x20, +0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x33, 0x20, 0x0a, +0x20, 0x30, 0x20, 0x33, 0x34, 0x20, 0x20, 0x30, 0x20, 0x35, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x35, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x32, 0x20, +0x20, 0x30, 0x20, 0x32, 0x35, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x36, 0x20, +0x20, 0x30, 0x20, 0x33, 0x36, 0x20, 0x20, 0x30, 0x20, 0x37, 0x31, 0x20, +0x0a, 0x31, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x37, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x34, 0x34, 0x20, 0x20, 0x30, 0x20, 0x35, 0x33, +0x20, 0x20, 0x30, 0x20, 0x20, 0x36, 0x20, 0x20, 
0x36, 0x20, 0x20, 0x31, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x0a, 0x20, 0x30, 0x20, 0x33, 0x35, 0x20, 0x20, 0x30, 0x20, 0x34, +0x35, 0x20, 0x20, 0x30, 0x20, 0x36, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x37, 0x20, 0x20, 0x30, 0x20, 0x36, +0x34, 0x20, 0x20, 0x65, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, +0x32, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x32, 0x37, 0x20, 0x20, 0x36, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x36, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x35, 0x34, 0x20, 0x20, 0x30, 0x20, 0x35, 0x35, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x36, 0x20, 0x20, 0x30, 0x20, +0x37, 0x33, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x33, 0x37, 0x20, 0x20, 0x30, 0x20, 0x36, 0x35, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x36, 0x20, 0x20, 0x30, +0x20, 0x37, 0x34, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x37, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x36, 0x20, 0x20, 0x30, +0x20, 0x37, 0x35, 0x20, 0x0a, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x37, 0x20, 0x20, +0x30, 0x20, 0x37, 0x36, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x36, 0x37, 0x20, 0x20, 0x30, 0x20, 0x37, 0x37, 0x20, 0x0a, +0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x31, 0x31, 0x20, 0x31, +0x32, 0x37, 0x20, 0x20, 0x38, 0x20, 0x20, 0x38, 0x20, 0x20, 0x30, 0x0a, +0x2e, 0x74, 0x72, 0x65, 0x65, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x36, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 
+0x20, 0x20, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x31, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x31, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x30, 0x20, 0x0a, 0x20, +0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x30, 0x20, 0x31, 0x32, 0x20, 0x31, +0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x32, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x30, 0x20, 0x20, +0x30, 0x20, 0x20, 0x33, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x0a, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x31, 0x20, +0x20, 0x30, 0x20, 0x31, 0x33, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x32, 0x20, +0x20, 0x30, 0x20, 0x32, 0x33, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x30, 0x20, +0x20, 0x30, 0x20, 0x20, 0x34, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x0a, 0x20, 0x30, 0x20, 0x34, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x34, +0x20, 0x31, 0x65, 0x20, 0x20, 0x31, 0x20, 0x31, 0x30, 0x20, 0x20, 0x31, +0x20, 0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x32, +0x20, 0x20, 0x30, 0x20, 0x32, 0x34, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x33, +0x20, 0x0a, 0x20, 0x30, 0x20, 0x34, 0x33, 0x20, 0x20, 0x30, 0x20, 0x35, +0x30, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x34, 0x20, 0x20, 0x30, 0x20, 0x35, +0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 
0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, +0x36, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, +0x36, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x32, 0x36, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x32, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x35, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x35, 0x20, 0x20, 0x30, 0x20, +0x35, 0x32, 0x20, 0x31, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x61, 0x20, +0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x32, 0x35, 0x20, 0x20, 0x30, 0x20, 0x34, 0x34, 0x20, 0x20, 0x30, +0x20, 0x36, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x36, 0x33, 0x20, 0x20, 0x30, 0x20, 0x33, 0x36, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x37, 0x30, 0x20, 0x20, 0x30, 0x20, 0x31, 0x37, 0x20, 0x20, 0x30, +0x20, 0x37, 0x31, 0x20, 0x0a, 0x31, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, +0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x37, 0x20, 0x20, +0x30, 0x20, 0x36, 0x34, 0x20, 0x20, 0x30, 0x20, 0x37, 0x32, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x37, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x35, 0x33, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x33, 0x35, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x34, 0x20, +0x20, 0x30, 0x20, 0x34, 0x35, 0x20, 0x20, 0x61, 0x20, 0x20, 0x31, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x34, 0x36, 0x20, 0x20, 0x30, 0x20, 0x37, 0x33, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x37, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x36, 0x35, +0x20, 0x20, 0x30, 0x20, 0x35, 0x36, 0x20, 0x20, 
0x61, 0x20, 0x20, 0x31, +0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x35, +0x20, 0x20, 0x30, 0x20, 0x35, 0x37, 0x20, 0x20, 0x30, 0x20, 0x37, 0x34, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x37, +0x20, 0x20, 0x30, 0x20, 0x36, 0x36, 0x20, 0x0a, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, +0x35, 0x20, 0x20, 0x30, 0x20, 0x37, 0x36, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x37, 0x20, 0x20, 0x30, 0x20, 0x37, +0x37, 0x20, 0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x31, +0x32, 0x20, 0x31, 0x32, 0x37, 0x20, 0x20, 0x38, 0x20, 0x20, 0x38, 0x20, +0x20, 0x30, 0x0a, 0x2e, 0x74, 0x72, 0x65, 0x65, 0x64, 0x61, 0x74, 0x61, +0x0a, 0x20, 0x63, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x30, +0x20, 0x20, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x31, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x32, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x32, +0x20, 0x0a, 0x31, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, +0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x32, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, +0x32, 0x20, 0x20, 0x30, 0x20, 0x33, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x33, 0x30, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x33, 0x20, 0x20, 0x30, 0x20, +0x34, 0x30, 0x20, 0x31, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, +0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 
+0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x32, 0x20, 0x20, 0x30, 0x20, +0x32, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x34, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x33, 0x33, 0x20, 0x20, 0x61, +0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x34, 0x20, 0x20, 0x30, +0x20, 0x34, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x32, 0x34, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x20, 0x34, 0x20, 0x20, 0x30, 0x20, 0x35, 0x30, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x34, 0x33, 0x20, 0x20, 0x30, 0x20, 0x33, 0x34, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x31, 0x20, 0x20, +0x30, 0x20, 0x31, 0x35, 0x20, 0x31, 0x63, 0x20, 0x20, 0x31, 0x20, 0x20, +0x65, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x35, 0x32, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x32, 0x35, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x33, 0x20, +0x20, 0x30, 0x20, 0x33, 0x35, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x30, 0x20, +0x20, 0x30, 0x20, 0x31, 0x36, 0x20, 0x20, 0x30, 0x20, 0x36, 0x31, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x36, 0x32, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x32, 0x36, +0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x35, +0x20, 0x20, 0x30, 0x20, 0x20, 0x36, 0x20, 0x20, 0x30, 0x20, 0x34, 0x34, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x34, +0x20, 0x20, 0x30, 0x20, 0x34, 0x35, 0x20, 0x31, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 
0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, +0x33, 0x20, 0x20, 0x30, 0x20, 0x33, 0x36, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, +0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x37, 0x20, 0x20, 0x30, 0x20, 0x37, +0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x37, 0x20, 0x0a, 0x20, 0x30, 0x20, +0x36, 0x34, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x34, 0x36, 0x20, 0x20, 0x30, 0x20, 0x37, 0x32, 0x20, 0x20, 0x61, 0x20, +0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x37, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x35, 0x20, 0x20, 0x30, 0x20, +0x37, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, +0x20, 0x33, 0x37, 0x20, 0x20, 0x30, 0x20, 0x35, 0x36, 0x20, 0x20, 0x38, +0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x35, 0x20, 0x20, 0x30, +0x20, 0x37, 0x34, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x34, 0x37, 0x20, 0x20, 0x30, 0x20, 0x36, 0x36, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, +0x30, 0x20, 0x37, 0x35, 0x20, 0x20, 0x30, 0x20, 0x35, 0x37, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x36, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x37, 0x20, 0x20, +0x30, 0x20, 0x37, 0x37, 0x20, 0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, +0x65, 0x20, 0x31, 0x33, 0x20, 0x35, 0x31, 0x31, 0x20, 0x31, 0x36, 0x20, +0x31, 0x36, 0x20, 0x20, 0x30, 0x0a, 0x2e, 0x74, 0x72, 0x65, 0x65, 0x64, +0x61, 0x74, 0x61, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x20, 0x30, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x30, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 
0x31, 0x20, 0x20, 0x30, +0x20, 0x31, 0x31, 0x20, 0x31, 0x63, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, +0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x32, 0x30, 0x20, 0x20, +0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x32, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x32, 0x20, 0x20, +0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x32, 0x20, 0x20, +0x30, 0x20, 0x33, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x20, 0x33, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x33, 0x31, 0x20, +0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x31, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x33, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, 0x33, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x34, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x34, 0x20, +0x20, 0x30, 0x20, 0x34, 0x31, 0x20, 0x0a, 0x34, 0x36, 0x20, 0x20, 0x31, +0x20, 0x31, 0x63, 0x20, 0x20, 0x31, 0x20, 0x20, 0x65, 0x20, 0x20, 0x31, +0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x31, 0x34, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x33, 0x33, 0x20, 0x20, 0x30, 0x20, 0x34, 0x32, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x32, 0x34, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x35, +0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, +0x33, 0x20, 0x20, 0x30, 0x20, 0x33, 0x34, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, +0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x35, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, +0x35, 0x20, 0x20, 0x30, 0x20, 0x35, 0x32, 0x20, 0x0a, 0x20, 0x32, 0x20, 
+0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x35, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x34, 0x20, 0x20, 0x30, 0x20, +0x35, 0x33, 0x20, 0x20, 0x65, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, +0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x30, 0x20, 0x20, 0x30, 0x20, +0x20, 0x36, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, +0x20, 0x36, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x36, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x38, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x38, 0x20, 0x20, 0x30, +0x20, 0x38, 0x31, 0x20, 0x31, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, +0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x35, 0x20, 0x0a, 0x20, +0x30, 0x20, 0x36, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x32, 0x36, 0x20, 0x20, 0x30, 0x20, 0x35, 0x34, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x34, 0x35, 0x20, 0x20, 0x30, 0x20, 0x36, 0x33, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x36, 0x20, 0x20, +0x30, 0x20, 0x37, 0x30, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x0a, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x20, 0x37, 0x20, 0x20, 0x30, 0x20, 0x35, 0x35, 0x20, +0x20, 0x30, 0x20, 0x37, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x31, 0x37, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x32, 0x37, 0x20, 0x20, 0x30, 0x20, 0x33, 0x37, 0x20, +0x34, 0x38, 0x20, 0x20, 0x31, 0x20, 0x31, 0x38, 0x20, 0x20, 0x31, 0x20, +0x0a, 0x20, 0x63, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x38, +0x20, 0x20, 0x30, 0x20, 0x38, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 
0x32, 0x38, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x34, +0x20, 0x20, 0x30, 0x20, 0x34, 0x36, 0x20, 0x20, 0x30, 0x20, 0x37, 0x32, +0x20, 0x0a, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x38, +0x34, 0x20, 0x20, 0x30, 0x20, 0x34, 0x38, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, +0x39, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, +0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x39, 0x20, 0x31, 0x38, 0x20, 0x20, +0x31, 0x20, 0x0a, 0x20, 0x65, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, +0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x33, 0x20, 0x20, 0x30, 0x20, +0x36, 0x35, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x35, 0x36, 0x20, 0x20, 0x30, 0x20, 0x37, 0x34, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x34, 0x37, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x36, 0x36, 0x20, 0x20, 0x30, +0x20, 0x38, 0x33, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x38, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x35, 0x20, 0x20, 0x30, +0x20, 0x35, 0x37, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x39, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, 0x39, 0x20, 0x20, 0x65, +0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x36, 0x37, 0x20, 0x20, 0x30, 0x20, 0x38, 0x35, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x38, 0x20, 0x20, +0x30, 0x20, 0x33, 0x39, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x39, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x34, 0x39, 0x20, 0x0a, 0x20, 0x30, 
0x20, 0x38, 0x36, 0x20, +0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x61, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x36, 0x38, 0x20, 0x20, 0x30, 0x20, 0x20, 0x61, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x31, 0x20, +0x20, 0x30, 0x20, 0x31, 0x61, 0x20, 0x34, 0x34, 0x20, 0x20, 0x31, 0x20, +0x31, 0x38, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x63, 0x20, 0x20, 0x31, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x61, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, 0x61, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x39, 0x35, 0x20, 0x20, 0x30, 0x20, 0x35, 0x39, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x33, +0x20, 0x20, 0x30, 0x20, 0x33, 0x61, 0x20, 0x0a, 0x20, 0x38, 0x20, 0x20, +0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x61, 0x20, 0x20, 0x30, 0x20, 0x39, +0x36, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x62, +0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x62, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x62, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, +0x62, 0x20, 0x31, 0x34, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x38, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x62, 0x32, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x36, 0x20, 0x20, 0x30, 0x20, +0x37, 0x37, 0x20, 0x20, 0x30, 0x20, 0x39, 0x34, 0x20, 0x20, 0x36, 0x20, +0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x37, 0x20, 0x0a, 0x20, 0x30, +0x20, 0x37, 0x38, 0x20, 0x20, 0x30, 0x20, 0x61, 0x34, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x36, 0x39, 0x20, 0x20, 0x30, 0x20, 0x61, 0x35, 0x20, 0x20, 0x30, 
+0x20, 0x32, 0x62, 0x20, 0x20, 0x63, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, +0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x61, 0x20, 0x0a, 0x20, +0x30, 0x20, 0x38, 0x38, 0x20, 0x20, 0x30, 0x20, 0x62, 0x33, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x62, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x39, 0x20, 0x20, +0x30, 0x20, 0x61, 0x36, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x36, 0x61, 0x20, 0x20, 0x30, 0x20, 0x62, 0x34, 0x20, 0x0a, +0x20, 0x30, 0x20, 0x63, 0x30, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x63, 0x20, +0x20, 0x30, 0x20, 0x39, 0x38, 0x20, 0x20, 0x30, 0x20, 0x63, 0x31, 0x20, +0x33, 0x63, 0x20, 0x20, 0x31, 0x20, 0x31, 0x36, 0x20, 0x20, 0x31, 0x20, +0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x63, 0x20, +0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x39, +0x20, 0x20, 0x30, 0x20, 0x62, 0x35, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x35, 0x62, 0x20, 0x20, 0x30, 0x20, 0x63, 0x32, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x32, 0x63, 0x20, 0x20, 0x30, 0x20, 0x33, 0x63, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x0a, 0x20, 0x30, 0x20, 0x62, 0x36, 0x20, 0x20, 0x30, 0x20, 0x36, +0x62, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x63, +0x34, 0x20, 0x20, 0x30, 0x20, 0x34, 0x63, 0x20, 0x31, 0x30, 0x20, 0x20, +0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, +0x38, 0x20, 0x20, 0x30, 0x20, 0x38, 0x61, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x0a, 0x20, 
0x30, 0x20, 0x64, 0x30, 0x20, 0x20, 0x30, 0x20, +0x20, 0x64, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x64, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x34, 0x62, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x39, 0x37, 0x20, 0x20, 0x30, 0x20, 0x61, 0x37, 0x20, 0x20, 0x63, 0x20, +0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x63, 0x33, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x61, 0x20, 0x20, 0x30, +0x20, 0x39, 0x39, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x63, 0x35, 0x20, 0x20, 0x30, +0x20, 0x35, 0x63, 0x20, 0x20, 0x30, 0x20, 0x62, 0x37, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x31, 0x64, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x64, 0x32, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x64, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x62, 0x20, 0x20, +0x30, 0x20, 0x64, 0x33, 0x20, 0x33, 0x34, 0x20, 0x20, 0x31, 0x20, 0x31, +0x63, 0x20, 0x20, 0x31, 0x20, 0x20, 0x63, 0x20, 0x20, 0x31, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x33, 0x64, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x63, 0x36, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x36, 0x63, 0x20, 0x20, 0x30, 0x20, 0x61, 0x39, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x61, 0x20, +0x20, 0x30, 0x20, 0x64, 0x34, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x62, 0x38, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x38, 0x62, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x64, +0x20, 0x20, 0x30, 0x20, 0x63, 0x37, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 
0x30, 0x20, 0x37, 0x63, +0x20, 0x20, 0x30, 0x20, 0x64, 0x35, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x35, 0x64, 0x20, 0x20, 0x30, 0x20, 0x65, 0x30, +0x20, 0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x65, +0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x65, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, +0x65, 0x20, 0x20, 0x30, 0x20, 0x32, 0x65, 0x20, 0x20, 0x30, 0x20, 0x65, +0x32, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, +0x65, 0x33, 0x20, 0x20, 0x30, 0x20, 0x36, 0x64, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x63, 0x20, 0x20, 0x30, 0x20, +0x65, 0x34, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x65, 0x35, 0x20, 0x20, 0x30, 0x20, +0x62, 0x61, 0x20, 0x20, 0x30, 0x20, 0x66, 0x30, 0x20, 0x32, 0x36, 0x20, +0x20, 0x31, 0x20, 0x31, 0x30, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x66, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x66, 0x20, 0x20, 0x36, +0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x61, 0x20, 0x20, 0x30, +0x20, 0x39, 0x62, 0x20, 0x20, 0x30, 0x20, 0x62, 0x39, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x65, 0x20, 0x0a, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x36, 0x20, 0x20, +0x30, 0x20, 0x63, 0x38, 0x20, 0x20, 0x63, 0x20, 0x20, 0x31, 0x20, 0x20, +0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x34, 0x65, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x64, 0x37, 0x20, 0x20, 0x30, 0x20, 0x37, 0x64, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x62, 0x20, 0x0a, 
+0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x65, 0x20, +0x20, 0x30, 0x20, 0x63, 0x39, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x66, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x63, 0x20, +0x20, 0x30, 0x20, 0x36, 0x65, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x66, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, 0x66, 0x20, +0x0a, 0x32, 0x30, 0x20, 0x20, 0x31, 0x20, 0x31, 0x30, 0x20, 0x20, 0x31, +0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x38, +0x20, 0x20, 0x30, 0x20, 0x38, 0x64, 0x20, 0x20, 0x30, 0x20, 0x33, 0x66, +0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x66, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x0a, 0x20, 0x30, 0x20, 0x65, 0x36, 0x20, 0x20, 0x30, 0x20, 0x63, +0x61, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, +0x34, 0x20, 0x20, 0x30, 0x20, 0x34, 0x66, 0x20, 0x20, 0x38, 0x20, 0x20, +0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x62, 0x62, 0x20, 0x20, 0x30, 0x20, 0x61, +0x63, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x65, +0x37, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x66, 0x35, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x64, 0x39, 0x20, 0x20, 0x30, 0x20, 0x39, 0x64, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x66, 0x20, 0x20, 0x30, 0x20, +0x65, 0x38, 0x20, 0x31, 0x65, 0x20, 0x20, 0x31, 0x20, 0x20, 0x63, 0x20, +0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x36, 0x66, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, 0x36, 0x20, 0x20, 0x30, +0x20, 0x63, 0x62, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 
0x20, 0x30, 0x20, 0x62, 0x63, 0x20, 0x20, 0x30, +0x20, 0x61, 0x64, 0x20, 0x20, 0x30, 0x20, 0x64, 0x61, 0x20, 0x20, 0x38, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x66, 0x37, 0x20, 0x0a, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x65, 0x20, 0x20, +0x30, 0x20, 0x37, 0x66, 0x20, 0x20, 0x30, 0x20, 0x38, 0x65, 0x20, 0x20, +0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x65, 0x20, 0x20, +0x30, 0x20, 0x61, 0x65, 0x20, 0x20, 0x30, 0x20, 0x63, 0x63, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x66, 0x38, 0x20, +0x20, 0x30, 0x20, 0x38, 0x66, 0x20, 0x31, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x62, 0x20, +0x20, 0x30, 0x20, 0x62, 0x64, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x65, 0x61, 0x20, 0x20, 0x30, 0x20, 0x66, 0x39, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x39, 0x66, 0x20, 0x20, 0x30, 0x20, 0x65, 0x62, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x62, 0x65, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x63, 0x64, +0x20, 0x20, 0x30, 0x20, 0x66, 0x61, 0x20, 0x20, 0x65, 0x20, 0x20, 0x31, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x64, 0x64, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x65, +0x63, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x65, +0x39, 0x20, 0x20, 0x30, 0x20, 0x61, 0x66, 0x20, 0x20, 0x30, 0x20, 0x64, +0x63, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x63, +0x65, 0x20, 0x20, 0x30, 0x20, 0x66, 0x62, 0x20, 0x20, 0x38, 0x20, 0x20, +0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 
0x0a, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x62, 0x66, 0x20, 0x20, 0x30, 0x20, +0x64, 0x65, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x63, 0x66, 0x20, 0x20, 0x30, 0x20, 0x65, 0x65, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x64, 0x66, 0x20, 0x20, 0x30, 0x20, 0x65, 0x66, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, 0x66, 0x20, 0x0a, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x65, 0x64, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, 0x64, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, 0x63, 0x20, 0x20, 0x30, +0x20, 0x66, 0x65, 0x20, 0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, +0x20, 0x31, 0x34, 0x20, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, +0x30, 0x20, 0x20, 0x30, 0x0a, 0x2e, 0x74, 0x72, 0x65, 0x65, 0x64, 0x61, +0x74, 0x61, 0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x31, +0x35, 0x20, 0x35, 0x31, 0x31, 0x20, 0x31, 0x36, 0x20, 0x31, 0x36, 0x20, +0x20, 0x30, 0x0a, 0x2e, 0x74, 0x72, 0x65, 0x65, 0x64, 0x61, 0x74, 0x61, +0x0a, 0x31, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x30, +0x20, 0x20, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x31, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x30, +0x20, 0x0a, 0x20, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, +0x32, 0x20, 0x33, 0x32, 0x20, 0x20, 0x31, 0x20, 0x31, 0x30, 0x20, 0x20, +0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x30, 0x20, 0x20, 0x30, 0x20, 0x33, 
+0x31, 0x20, 0x0a, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x33, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x33, 0x20, 0x20, 0x30, 0x20, +0x34, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x33, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, 0x33, 0x20, 0x20, 0x65, 0x20, +0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x20, 0x34, 0x20, 0x20, 0x30, 0x20, 0x31, 0x34, 0x20, 0x20, 0x30, +0x20, 0x34, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x33, 0x20, 0x20, 0x30, +0x20, 0x34, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x32, 0x34, 0x20, 0x20, 0x30, 0x20, 0x34, 0x33, 0x20, 0x20, 0x61, +0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x34, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x30, 0x20, 0x20, +0x30, 0x20, 0x20, 0x35, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x35, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x35, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x35, 0x32, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x32, 0x35, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x34, 0x34, 0x20, 0x20, 0x30, 0x20, 0x35, 0x33, 0x20, +0x20, 0x30, 0x20, 0x36, 0x31, 0x20, 0x35, 0x61, 0x20, 0x20, 0x31, 0x20, +0x32, 0x34, 0x20, 0x20, 0x31, 0x20, 0x31, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x33, 0x35, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x30, +0x20, 0x20, 0x30, 0x20, 0x20, 0x36, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 
0x31, 0x36, 0x20, 0x20, 0x30, 0x20, 0x36, 0x32, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x32, 0x36, 0x20, 0x20, 0x30, 0x20, 0x35, 0x34, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x34, +0x35, 0x20, 0x20, 0x30, 0x20, 0x36, 0x33, 0x20, 0x20, 0x61, 0x20, 0x20, +0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x36, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, +0x37, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, +0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x35, 0x20, 0x0a, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x31, 0x37, 0x20, 0x20, 0x30, 0x20, 0x36, 0x34, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x32, 0x20, 0x20, 0x30, 0x20, +0x32, 0x37, 0x20, 0x31, 0x38, 0x20, 0x20, 0x31, 0x20, 0x31, 0x30, 0x20, +0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, +0x20, 0x34, 0x36, 0x20, 0x20, 0x30, 0x20, 0x37, 0x33, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x37, 0x20, 0x20, 0x30, +0x20, 0x36, 0x35, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x36, 0x20, 0x20, 0x30, +0x20, 0x38, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x20, 0x38, 0x20, 0x20, 0x30, 0x20, 0x37, 0x34, 0x20, 0x0a, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x38, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x38, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x32, 0x20, 0x20, +0x30, 0x20, 0x32, 0x38, 0x20, 0x31, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, +0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 
0x34, 0x37, 0x20, 0x0a, +0x20, 0x30, 0x20, 0x36, 0x36, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x38, 0x33, 0x20, 0x20, 0x30, 0x20, 0x33, 0x38, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x37, 0x35, 0x20, 0x20, 0x30, 0x20, 0x35, 0x37, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x34, 0x20, +0x20, 0x30, 0x20, 0x34, 0x38, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, +0x0a, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x39, 0x30, 0x20, 0x20, 0x30, 0x20, 0x31, 0x39, +0x20, 0x20, 0x30, 0x20, 0x39, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x32, +0x20, 0x20, 0x30, 0x20, 0x37, 0x36, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x36, 0x37, 0x20, 0x20, 0x30, 0x20, 0x32, 0x39, +0x20, 0x0a, 0x35, 0x63, 0x20, 0x20, 0x31, 0x20, 0x32, 0x34, 0x20, 0x20, +0x31, 0x20, 0x31, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x61, 0x20, 0x20, +0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x35, 0x20, 0x20, 0x30, 0x20, 0x35, +0x38, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x39, 0x20, 0x20, 0x30, 0x20, 0x37, +0x37, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x39, 0x33, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x33, 0x39, 0x20, 0x20, 0x30, 0x20, 0x39, 0x34, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x39, 0x20, 0x20, 0x30, 0x20, +0x38, 0x36, 0x20, 0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x36, 0x38, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x61, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x61, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x31, 0x20, 0x20, 0x30, 
+0x20, 0x31, 0x61, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x32, 0x20, 0x20, 0x30, +0x20, 0x32, 0x61, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x39, 0x35, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x35, 0x39, 0x20, 0x31, +0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x65, 0x20, 0x20, 0x31, 0x20, 0x20, +0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x61, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x33, 0x61, 0x20, 0x20, 0x30, 0x20, 0x38, 0x37, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x37, 0x38, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x61, 0x34, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x61, 0x20, +0x20, 0x30, 0x20, 0x39, 0x36, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x36, 0x39, 0x20, 0x20, 0x30, 0x20, 0x62, 0x30, 0x20, +0x20, 0x30, 0x20, 0x62, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x31, 0x62, +0x20, 0x20, 0x30, 0x20, 0x61, 0x35, 0x20, 0x20, 0x30, 0x20, 0x62, 0x32, +0x20, 0x20, 0x65, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x35, 0x61, 0x20, 0x20, 0x30, 0x20, 0x32, 0x62, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x38, +0x20, 0x20, 0x30, 0x20, 0x39, 0x37, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x62, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x39, 0x20, 0x20, 0x30, 0x20, 0x33, +0x62, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, +0x61, 0x20, 0x20, 0x30, 0x20, 0x62, 0x34, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 
0x20, 0x34, 0x62, 0x20, 0x0a, 0x20, 0x30, 0x20, +0x63, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x38, 0x20, 0x20, 0x30, 0x20, +0x38, 0x39, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x31, 0x63, 0x20, 0x20, 0x30, 0x20, 0x62, 0x35, 0x20, 0x35, 0x30, 0x20, +0x20, 0x31, 0x20, 0x32, 0x32, 0x20, 0x20, 0x31, 0x20, 0x31, 0x30, 0x20, +0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x35, 0x62, 0x20, 0x20, 0x30, 0x20, 0x32, 0x63, 0x20, 0x20, 0x30, +0x20, 0x63, 0x32, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x20, 0x62, 0x20, 0x20, 0x30, 0x20, 0x63, 0x30, 0x20, 0x20, 0x30, +0x20, 0x61, 0x36, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, +0x30, 0x20, 0x61, 0x37, 0x20, 0x20, 0x30, 0x20, 0x37, 0x61, 0x20, 0x20, +0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x63, 0x33, 0x20, 0x20, +0x30, 0x20, 0x33, 0x63, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x63, 0x20, 0x20, +0x30, 0x20, 0x39, 0x39, 0x20, 0x20, 0x30, 0x20, 0x62, 0x36, 0x20, 0x0a, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x36, 0x62, 0x20, 0x20, 0x30, 0x20, 0x63, 0x34, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x63, 0x20, +0x20, 0x30, 0x20, 0x61, 0x38, 0x20, 0x31, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x61, 0x20, +0x0a, 0x20, 0x30, 0x20, 0x63, 0x35, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x30, +0x20, 0x20, 0x30, 0x20, 0x35, 0x63, 0x20, 0x20, 
0x30, 0x20, 0x64, 0x31, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x62, 0x37, 0x20, 0x20, 0x30, 0x20, 0x37, 0x62, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x64, +0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, +0x64, 0x20, 0x20, 0x30, 0x20, 0x32, 0x64, 0x20, 0x20, 0x63, 0x20, 0x20, +0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x32, 0x20, 0x20, 0x30, 0x20, 0x64, +0x33, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x64, 0x20, 0x20, 0x30, 0x20, 0x63, +0x36, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x36, 0x63, 0x20, 0x20, 0x30, 0x20, 0x61, 0x39, 0x20, 0x20, 0x36, 0x20, +0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x61, 0x20, 0x20, 0x30, 0x20, +0x62, 0x38, 0x20, 0x20, 0x30, 0x20, 0x64, 0x34, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x38, 0x62, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x34, 0x64, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x63, 0x37, 0x20, 0x20, 0x30, +0x20, 0x37, 0x63, 0x20, 0x34, 0x34, 0x20, 0x20, 0x31, 0x20, 0x32, 0x32, +0x20, 0x20, 0x31, 0x20, 0x31, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x61, +0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x35, 0x20, 0x20, 0x30, +0x20, 0x35, 0x64, 0x20, 0x0a, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x65, 0x30, 0x20, 0x20, +0x30, 0x20, 0x20, 0x65, 0x20, 0x20, 0x30, 0x20, 0x65, 0x31, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x31, 0x65, 0x20, 0x20, 0x30, 0x20, 0x65, 0x32, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x61, 0x20, 0x20, 
+0x30, 0x20, 0x32, 0x65, 0x20, 0x0a, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x62, 0x39, 0x20, 0x20, 0x30, 0x20, 0x39, 0x62, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x65, 0x33, 0x20, +0x20, 0x30, 0x20, 0x64, 0x36, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x64, 0x20, +0x20, 0x30, 0x20, 0x33, 0x65, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x63, 0x38, 0x20, 0x20, 0x30, 0x20, 0x38, 0x63, +0x20, 0x31, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x65, 0x34, 0x20, 0x20, 0x30, 0x20, 0x34, 0x65, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x37, +0x20, 0x20, 0x30, 0x20, 0x37, 0x64, 0x20, 0x0a, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x65, +0x35, 0x20, 0x20, 0x30, 0x20, 0x62, 0x61, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x62, 0x20, 0x20, 0x30, 0x20, 0x35, +0x65, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x63, +0x39, 0x20, 0x20, 0x30, 0x20, 0x39, 0x63, 0x20, 0x0a, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, 0x31, 0x20, 0x20, 0x30, 0x20, +0x31, 0x66, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x66, 0x30, 0x20, 0x20, 0x30, 0x20, 0x36, 0x65, 0x20, 0x20, 0x30, 0x20, +0x66, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x32, 0x66, 0x20, 0x20, 0x30, 0x20, 0x65, 0x36, 0x20, 0x0a, 0x32, 0x36, +0x20, 0x20, 0x31, 0x20, 0x31, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, +0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 
0x20, 0x30, 0x20, 0x64, 0x38, 0x20, 0x20, 0x30, +0x20, 0x66, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x33, 0x66, 0x20, 0x20, 0x30, 0x20, 0x66, 0x34, 0x20, 0x20, 0x36, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, +0x30, 0x20, 0x34, 0x66, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x38, 0x64, 0x20, 0x20, 0x30, 0x20, 0x64, 0x39, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x62, 0x62, 0x20, 0x20, +0x30, 0x20, 0x63, 0x61, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x61, 0x63, 0x20, 0x20, 0x30, 0x20, 0x65, 0x37, 0x20, 0x0a, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x65, 0x20, +0x20, 0x30, 0x20, 0x66, 0x35, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x39, 0x64, 0x20, 0x20, 0x30, 0x20, 0x35, 0x66, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x65, 0x38, 0x20, +0x20, 0x30, 0x20, 0x38, 0x65, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x0a, 0x20, 0x30, 0x20, 0x66, 0x36, 0x20, 0x20, 0x30, 0x20, 0x63, 0x62, +0x20, 0x32, 0x32, 0x20, 0x20, 0x31, 0x20, 0x31, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x20, 0x66, 0x20, 0x20, 0x30, 0x20, 0x61, 0x65, +0x20, 0x20, 0x30, 0x20, 0x36, 0x66, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x0a, 0x20, 0x30, 0x20, 0x62, 0x63, 0x20, 0x20, 0x30, 0x20, 0x64, +0x61, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x64, 0x20, 0x20, 0x30, 0x20, 0x66, +0x37, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, +0x66, 0x20, 0x20, 0x30, 0x20, 0x65, 0x39, 0x20, 0x20, 0x38, 0x20, 0x20, +0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 
0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x39, 0x65, 0x20, 0x20, 0x30, 0x20, +0x63, 0x63, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x66, 0x38, 0x20, 0x20, 0x30, 0x20, 0x38, 0x66, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x64, 0x62, 0x20, 0x20, 0x30, 0x20, 0x62, 0x64, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x65, 0x61, 0x20, 0x20, 0x30, 0x20, +0x66, 0x39, 0x20, 0x0a, 0x31, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, +0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x66, 0x20, 0x20, 0x30, +0x20, 0x64, 0x63, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x63, 0x64, 0x20, 0x20, 0x30, 0x20, 0x65, 0x62, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x62, 0x65, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x66, 0x61, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x66, 0x20, 0x20, +0x30, 0x20, 0x64, 0x64, 0x20, 0x20, 0x65, 0x20, 0x20, 0x31, 0x20, 0x20, +0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x65, 0x63, 0x20, 0x20, +0x30, 0x20, 0x63, 0x65, 0x20, 0x20, 0x30, 0x20, 0x66, 0x62, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x62, 0x66, 0x20, 0x20, 0x30, 0x20, 0x65, 0x64, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x65, 0x20, +0x20, 0x30, 0x20, 0x66, 0x63, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x63, 0x66, 0x20, 0x20, 0x30, 0x20, 0x66, 0x64, 0x20, +0x20, 0x30, 0x20, 0x65, 0x65, 0x20, 0x0a, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x66, +0x20, 0x20, 0x30, 0x20, 0x66, 0x65, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 
+0x20, 0x20, 0x30, 0x20, 0x65, 0x66, 0x20, 0x20, 0x30, 0x20, 0x66, 0x66, +0x20, 0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x31, 0x36, +0x20, 0x35, 0x31, 0x31, 0x20, 0x31, 0x36, 0x20, 0x31, 0x36, 0x20, 0x20, +0x31, 0x0a, 0x2e, 0x74, 0x72, 0x65, 0x65, 0x64, 0x61, 0x74, 0x61, 0x0a, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, +0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x31, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x31, 0x20, +0x32, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x0a, 0x20, 0x30, 0x20, 0x32, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x32, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x31, +0x20, 0x20, 0x30, 0x20, 0x31, 0x32, 0x20, 0x20, 0x61, 0x20, 0x20, 0x31, +0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x32, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x33, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x33, +0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, +0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x33, 0x20, 0x20, 0x61, 0x20, 0x20, +0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, +0x33, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, +0x34, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x34, 0x31, 0x20, 0x20, 0x36, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x31, 0x34, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x33, 0x33, 0x20, 0x20, 0x30, 0x20, 0x34, 0x32, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x32, 0x34, 0x20, 0x20, 
0x30, 0x20, 0x35, 0x30, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x34, 0x33, 0x20, 0x20, 0x30, +0x20, 0x33, 0x34, 0x20, 0x38, 0x61, 0x20, 0x20, 0x31, 0x20, 0x32, 0x38, +0x20, 0x20, 0x31, 0x20, 0x31, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, +0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x35, 0x20, 0x20, 0x30, +0x20, 0x31, 0x35, 0x20, 0x20, 0x30, 0x20, 0x35, 0x31, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x35, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, 0x35, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x34, 0x34, 0x20, 0x20, 0x30, 0x20, 0x33, 0x35, 0x20, 0x20, +0x30, 0x20, 0x35, 0x33, 0x20, 0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, +0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x36, 0x30, 0x20, +0x20, 0x30, 0x20, 0x20, 0x36, 0x20, 0x20, 0x30, 0x20, 0x36, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x36, 0x20, +0x20, 0x30, 0x20, 0x36, 0x32, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x32, 0x36, 0x20, 0x20, 0x30, 0x20, 0x35, 0x34, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x34, 0x35, +0x20, 0x20, 0x30, 0x20, 0x36, 0x33, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x36, +0x20, 0x20, 0x30, 0x20, 0x37, 0x30, 0x20, 0x20, 0x30, 0x20, 0x37, 0x31, +0x20, 0x32, 0x38, 0x20, 0x20, 0x31, 0x20, 0x31, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x31, 0x37, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x37, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x35, 0x20, 
0x20, 0x30, 0x20, 0x36, +0x34, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, +0x37, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x36, 0x20, 0x0a, 0x20, 0x30, 0x20, +0x36, 0x35, 0x20, 0x20, 0x30, 0x20, 0x37, 0x33, 0x20, 0x20, 0x61, 0x20, +0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x37, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x36, 0x20, 0x20, 0x30, 0x20, +0x20, 0x38, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x38, 0x30, 0x20, 0x20, 0x30, 0x20, 0x38, 0x31, 0x20, 0x0a, 0x20, 0x36, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x31, 0x38, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x37, 0x34, 0x20, 0x20, 0x30, 0x20, 0x34, 0x37, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x32, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x38, 0x20, 0x20, 0x30, +0x20, 0x36, 0x36, 0x20, 0x31, 0x38, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, +0x65, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x38, 0x33, 0x20, 0x20, 0x30, 0x20, 0x33, 0x38, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x35, 0x20, 0x20, +0x30, 0x20, 0x38, 0x34, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x38, 0x20, 0x0a, +0x20, 0x30, 0x20, 0x39, 0x30, 0x20, 0x20, 0x30, 0x20, 0x39, 0x31, 0x20, +0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x31, 0x39, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x20, 0x39, 0x20, 0x20, 0x30, 0x20, 0x37, 0x36, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x32, 0x20, 
+0x20, 0x30, 0x20, 0x32, 0x39, 0x20, 0x20, 0x65, 0x20, 0x20, 0x31, 0x20, +0x0a, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x35, +0x20, 0x20, 0x30, 0x20, 0x35, 0x38, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x39, 0x33, 0x20, 0x20, 0x30, 0x20, 0x33, 0x39, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x61, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x61, +0x20, 0x0a, 0x20, 0x30, 0x20, 0x31, 0x61, 0x20, 0x20, 0x38, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, +0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, +0x37, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, +0x37, 0x20, 0x20, 0x30, 0x20, 0x34, 0x39, 0x20, 0x20, 0x36, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, +0x34, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x37, 0x37, 0x20, 0x20, 0x30, 0x20, 0x38, 0x36, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x38, 0x20, 0x20, 0x30, 0x20, +0x39, 0x35, 0x20, 0x64, 0x63, 0x20, 0x20, 0x31, 0x20, 0x37, 0x65, 0x20, +0x20, 0x31, 0x20, 0x33, 0x32, 0x20, 0x20, 0x31, 0x20, 0x31, 0x61, 0x20, +0x20, 0x31, 0x20, 0x0a, 0x20, 0x63, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x32, 0x61, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x35, 0x39, 0x20, 0x20, 0x30, 0x20, 0x33, 0x61, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x33, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x37, 0x20, 0x20, 0x30, +0x20, 0x37, 0x38, 0x20, 0x0a, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x61, 0x34, 
0x20, 0x20, 0x30, 0x20, 0x34, 0x61, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x36, 0x20, 0x20, +0x30, 0x20, 0x36, 0x39, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x62, 0x30, 0x20, 0x20, +0x30, 0x20, 0x20, 0x62, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x62, 0x31, 0x20, +0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x62, 0x20, +0x20, 0x30, 0x20, 0x62, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x32, 0x62, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x61, 0x35, 0x20, 0x20, 0x30, 0x20, 0x35, 0x61, 0x20, +0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x62, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x61, 0x36, 0x20, 0x20, 0x30, 0x20, 0x36, 0x61, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x62, 0x34, 0x20, 0x20, 0x30, 0x20, 0x34, 0x62, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x63, +0x20, 0x20, 0x30, 0x20, 0x63, 0x31, 0x20, 0x0a, 0x31, 0x65, 0x20, 0x20, +0x31, 0x20, 0x20, 0x65, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, +0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x62, 0x35, 0x20, 0x20, 0x30, 0x20, 0x63, +0x32, 0x20, 0x20, 0x30, 0x20, 0x32, 0x63, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, +0x37, 0x20, 0x20, 0x30, 0x20, 0x63, 0x33, 0x20, 0x0a, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x62, 0x20, 0x20, 0x30, 0x20, +0x63, 0x34, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x64, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x38, 0x38, 0x20, 0x20, 0x30, 0x20, 0x39, 0x37, 
0x20, 0x20, 0x30, 0x20, +0x33, 0x62, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x31, 0x20, 0x20, 0x30, +0x20, 0x64, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x32, 0x64, 0x20, 0x20, 0x30, 0x20, 0x64, 0x33, 0x20, 0x31, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x31, 0x65, 0x20, 0x20, 0x30, 0x20, 0x32, 0x65, 0x20, 0x0a, 0x20, +0x30, 0x20, 0x65, 0x32, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x37, 0x39, 0x20, 0x20, 0x30, 0x20, 0x39, 0x38, 0x20, 0x20, +0x30, 0x20, 0x63, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x31, 0x63, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x38, 0x39, 0x20, 0x20, 0x30, 0x20, 0x35, 0x62, 0x20, 0x0a, +0x20, 0x65, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x63, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x61, 0x20, +0x20, 0x30, 0x20, 0x62, 0x36, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x63, 0x20, +0x20, 0x30, 0x20, 0x39, 0x39, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x0a, 0x20, 0x30, 0x20, 0x61, 0x38, 0x20, 0x20, 0x30, 0x20, 0x38, 0x61, +0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x20, 0x64, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x63, 0x35, 0x20, 0x20, 0x30, 0x20, 0x35, 0x63, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x33, 0x64, 0x20, 0x20, 0x30, 0x20, 0x63, 0x36, +0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, +0x63, 0x20, 0x20, 0x30, 0x20, 0x39, 0x61, 0x20, 0x35, 0x38, 0x20, 0x20, 
+0x31, 0x20, 0x35, 0x36, 0x20, 0x20, 0x31, 0x20, 0x32, 0x34, 0x20, 0x20, +0x31, 0x20, 0x31, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, +0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x62, 0x20, 0x20, 0x30, 0x20, 0x34, +0x64, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x63, 0x37, 0x20, 0x20, 0x30, 0x20, 0x37, 0x63, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x64, 0x35, 0x20, 0x20, 0x30, 0x20, 0x35, 0x64, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x65, 0x30, 0x20, 0x20, 0x30, 0x20, +0x20, 0x65, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x65, 0x33, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x64, 0x30, 0x20, 0x20, 0x30, 0x20, 0x62, 0x37, 0x20, 0x20, 0x30, +0x20, 0x37, 0x62, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x61, 0x39, 0x20, 0x20, 0x30, 0x20, 0x62, 0x38, 0x20, 0x20, 0x30, +0x20, 0x64, 0x34, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x65, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x61, 0x61, 0x20, 0x20, 0x30, 0x20, 0x62, 0x39, 0x20, 0x31, +0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, +0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x62, 0x20, 0x20, +0x30, 0x20, 0x64, 0x36, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x36, 0x64, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x65, 0x20, +0x20, 0x30, 0x20, 0x63, 0x38, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x38, 0x63, 0x20, 0x20, 0x30, 0x20, 0x65, 0x34, 0x20, +0x20, 0x30, 0x20, 0x34, 
0x65, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x64, 0x37, +0x20, 0x20, 0x30, 0x20, 0x65, 0x35, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x62, 0x61, 0x20, 0x20, 0x30, 0x20, 0x61, 0x62, +0x20, 0x20, 0x63, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x63, +0x20, 0x20, 0x30, 0x20, 0x65, 0x36, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x36, +0x65, 0x20, 0x20, 0x30, 0x20, 0x64, 0x38, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x64, 0x20, 0x20, 0x30, 0x20, 0x62, +0x62, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x65, +0x37, 0x20, 0x20, 0x30, 0x20, 0x39, 0x64, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x65, 0x38, 0x20, 0x0a, 0x20, 0x30, 0x20, +0x38, 0x65, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x63, 0x62, 0x20, 0x20, 0x30, 0x20, +0x62, 0x63, 0x20, 0x20, 0x30, 0x20, 0x39, 0x65, 0x20, 0x20, 0x30, 0x20, +0x66, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x31, 0x66, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x20, 0x66, 0x20, 0x20, 0x30, 0x20, 0x32, 0x66, 0x20, 0x0a, 0x34, 0x32, +0x20, 0x20, 0x31, 0x20, 0x33, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, 0x32, 0x20, 0x33, 0x34, +0x20, 0x20, 0x31, 0x20, 0x33, 0x32, 0x20, 0x20, 0x31, 0x20, 0x31, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x62, 0x64, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x65, 0x20, 0x0a, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x64, 0x20, 0x20, +0x30, 0x20, 0x63, 0x39, 0x20, 0x20, 0x36, 0x20, 
0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x63, 0x61, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x63, 0x20, 0x20, +0x30, 0x20, 0x37, 0x65, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x61, 0x20, 0x0a, +0x20, 0x30, 0x20, 0x61, 0x64, 0x20, 0x20, 0x30, 0x20, 0x63, 0x63, 0x20, +0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x65, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x62, 0x20, +0x20, 0x30, 0x20, 0x64, 0x63, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x63, 0x64, 0x20, 0x20, 0x30, 0x20, 0x62, 0x65, 0x20, +0x0a, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x65, 0x62, +0x20, 0x20, 0x30, 0x20, 0x65, 0x64, 0x20, 0x20, 0x30, 0x20, 0x65, 0x65, +0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x39, +0x20, 0x20, 0x30, 0x20, 0x65, 0x61, 0x20, 0x20, 0x30, 0x20, 0x65, 0x39, +0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x64, +0x65, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x64, 0x20, 0x20, 0x30, 0x20, 0x65, +0x63, 0x20, 0x20, 0x30, 0x20, 0x63, 0x65, 0x20, 0x20, 0x30, 0x20, 0x33, +0x66, 0x20, 0x20, 0x30, 0x20, 0x66, 0x30, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, +0x33, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x66, 0x34, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x66, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, 0x35, 0x20, 0x20, 0x30, 0x20, +0x35, 0x66, 0x20, 0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, 0x66, 0x20, 0x20, 0x34, 0x20, 
+0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x66, 0x36, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x36, 0x66, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, 0x37, 0x20, 0x20, 0x30, +0x20, 0x37, 0x66, 0x20, 0x20, 0x63, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x38, 0x66, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x66, 0x38, 0x20, 0x20, 0x30, 0x20, 0x66, 0x39, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x39, 0x66, 0x20, 0x20, 0x30, 0x20, 0x66, 0x61, 0x20, 0x20, +0x30, 0x20, 0x61, 0x66, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x66, 0x62, 0x20, 0x20, 0x30, 0x20, 0x62, 0x66, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, 0x63, 0x20, 0x20, +0x30, 0x20, 0x63, 0x66, 0x20, 0x0a, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, 0x64, 0x20, +0x20, 0x30, 0x20, 0x64, 0x66, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x66, 0x65, 0x20, 0x20, 0x30, 0x20, 0x65, 0x66, 0x20, +0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x31, 0x37, 0x20, +0x35, 0x31, 0x31, 0x20, 0x31, 0x36, 0x20, 0x31, 0x36, 0x20, 0x20, 0x32, +0x0a, 0x2e, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x20, +0x31, 0x36, 0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x31, +0x38, 0x20, 0x35, 0x31, 0x31, 0x20, 0x31, 0x36, 0x20, 0x31, 0x36, 0x20, +0x20, 0x33, 0x0a, 0x2e, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, +0x65, 0x20, 0x31, 0x36, 0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, +0x20, 0x31, 0x39, 0x20, 0x35, 0x31, 0x31, 0x20, 0x31, 0x36, 0x20, 0x31, +0x36, 0x20, 0x20, 0x34, 0x0a, 0x2e, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, +0x6e, 0x63, 0x65, 0x20, 0x31, 0x36, 0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, +0x6c, 0x65, 0x20, 0x32, 
0x30, 0x20, 0x35, 0x31, 0x31, 0x20, 0x31, 0x36, +0x20, 0x31, 0x36, 0x20, 0x20, 0x36, 0x0a, 0x2e, 0x72, 0x65, 0x66, 0x65, +0x72, 0x65, 0x6e, 0x63, 0x65, 0x20, 0x31, 0x36, 0x0a, 0x0a, 0x2e, 0x74, +0x61, 0x62, 0x6c, 0x65, 0x20, 0x32, 0x31, 0x20, 0x35, 0x31, 0x31, 0x20, +0x31, 0x36, 0x20, 0x31, 0x36, 0x20, 0x20, 0x38, 0x0a, 0x2e, 0x72, 0x65, +0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x20, 0x31, 0x36, 0x0a, 0x0a, +0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x32, 0x32, 0x20, 0x35, 0x31, +0x31, 0x20, 0x31, 0x36, 0x20, 0x31, 0x36, 0x20, 0x31, 0x30, 0x0a, 0x2e, +0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x20, 0x31, 0x36, +0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x32, 0x33, 0x20, +0x35, 0x31, 0x31, 0x20, 0x31, 0x36, 0x20, 0x31, 0x36, 0x20, 0x31, 0x33, +0x0a, 0x2e, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x20, +0x31, 0x36, 0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x32, +0x34, 0x20, 0x35, 0x31, 0x32, 0x20, 0x31, 0x36, 0x20, 0x31, 0x36, 0x20, +0x20, 0x34, 0x0a, 0x2e, 0x74, 0x72, 0x65, 0x65, 0x64, 0x61, 0x74, 0x61, +0x0a, 0x33, 0x63, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, 0x31, 0x30, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x31, 0x31, 0x20, 0x20, 0x65, 0x20, 0x20, 0x31, +0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, +0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, +0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, +0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, +0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x33, 0x20, 0x20, 0x65, 0x20, 0x20, +0x31, 0x20, 0x0a, 0x20, 0x34, 0x20, 0x20, 0x31, 
0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x31, 0x20, 0x20, 0x30, 0x20, +0x31, 0x33, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x32, 0x20, 0x20, 0x30, 0x20, +0x32, 0x33, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x30, 0x20, 0x20, 0x30, 0x20, +0x20, 0x34, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x34, 0x31, 0x20, 0x20, 0x38, +0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x34, 0x20, 0x20, 0x30, +0x20, 0x33, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x34, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, 0x34, 0x20, 0x20, 0x36, +0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x34, 0x33, 0x20, 0x20, +0x30, 0x20, 0x33, 0x34, 0x20, 0x20, 0x30, 0x20, 0x35, 0x31, 0x20, 0x20, +0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x30, 0x20, 0x20, +0x30, 0x20, 0x20, 0x35, 0x20, 0x20, 0x30, 0x20, 0x31, 0x35, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x32, 0x20, 0x20, +0x30, 0x20, 0x32, 0x35, 0x20, 0x0a, 0x66, 0x61, 0x20, 0x20, 0x31, 0x20, +0x36, 0x32, 0x20, 0x20, 0x31, 0x20, 0x32, 0x32, 0x20, 0x20, 0x31, 0x20, +0x31, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x61, 0x20, 0x20, 0x31, 0x20, +0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x34, 0x34, 0x20, 0x20, 0x30, 0x20, 0x35, 0x33, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x35, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x36, 0x30, +0x20, 0x20, 0x30, 0x20, 0x20, 0x36, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x31, +0x20, 0x20, 0x30, 0x20, 0x31, 0x36, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 
+0x20, 0x20, 0x30, 0x20, 0x36, 0x32, 0x20, 0x20, 0x30, 0x20, 0x32, 0x36, +0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x35, +0x34, 0x20, 0x20, 0x30, 0x20, 0x34, 0x35, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x33, 0x20, 0x20, 0x30, 0x20, 0x33, +0x36, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, +0x35, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, +0x34, 0x20, 0x20, 0x30, 0x20, 0x34, 0x36, 0x20, 0x0a, 0x32, 0x30, 0x20, +0x20, 0x31, 0x20, 0x20, 0x65, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x37, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x32, 0x37, 0x20, 0x20, 0x30, 0x20, 0x33, 0x37, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x33, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, +0x20, 0x37, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x37, 0x20, 0x20, 0x30, +0x20, 0x31, 0x37, 0x20, 0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x36, 0x35, 0x20, 0x20, 0x30, 0x20, 0x35, 0x36, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x38, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x38, 0x20, 0x0a, 0x20, +0x30, 0x20, 0x38, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x34, 0x20, 0x20, +0x30, 0x20, 0x34, 0x37, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x31, 0x38, 0x20, 0x20, 0x30, 0x20, 0x38, 0x32, 0x20, 0x31, +0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, +0x20, 0x30, 0x20, 0x32, 
0x38, 0x20, 0x20, 0x30, 0x20, 0x36, 0x36, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x33, 0x20, +0x20, 0x30, 0x20, 0x33, 0x38, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x35, 0x20, +0x20, 0x30, 0x20, 0x35, 0x37, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x38, 0x34, 0x20, 0x20, 0x30, 0x20, 0x34, 0x38, 0x20, +0x0a, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x31, +0x20, 0x20, 0x30, 0x20, 0x31, 0x39, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x39, 0x32, 0x20, 0x20, 0x30, 0x20, 0x37, 0x36, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x36, 0x37, 0x20, 0x20, 0x30, 0x20, 0x32, 0x39, +0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x38, +0x35, 0x20, 0x20, 0x30, 0x20, 0x35, 0x38, 0x20, 0x35, 0x63, 0x20, 0x20, +0x31, 0x20, 0x32, 0x32, 0x20, 0x20, 0x31, 0x20, 0x31, 0x30, 0x20, 0x20, +0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, +0x33, 0x20, 0x20, 0x30, 0x20, 0x33, 0x39, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x39, 0x34, 0x20, 0x20, 0x30, 0x20, +0x34, 0x39, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x37, 0x20, 0x20, 0x30, 0x20, +0x38, 0x36, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x36, 0x38, 0x20, 0x20, 0x30, 0x20, 0x61, 0x31, 0x20, 0x20, 0x38, 0x20, +0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x61, 0x32, 0x20, 0x20, 0x30, +0x20, 0x32, 0x61, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x39, 0x35, 0x20, 0x20, 0x30, 0x20, 0x35, 0x39, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 
0x31, 0x20, 0x20, 0x30, +0x20, 0x61, 0x33, 0x20, 0x20, 0x30, 0x20, 0x33, 0x61, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x37, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x37, 0x38, 0x20, 0x20, +0x30, 0x20, 0x34, 0x61, 0x20, 0x31, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, +0x63, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x34, 0x20, 0x20, +0x30, 0x20, 0x39, 0x36, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x36, 0x39, 0x20, 0x20, +0x30, 0x20, 0x62, 0x31, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x31, 0x62, 0x20, 0x20, 0x30, 0x20, 0x61, 0x35, 0x20, +0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x62, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x35, 0x61, 0x20, 0x20, 0x30, 0x20, 0x32, 0x62, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x38, 0x20, +0x20, 0x30, 0x20, 0x62, 0x33, 0x20, 0x0a, 0x31, 0x30, 0x20, 0x20, 0x31, +0x20, 0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x30, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x39, +0x20, 0x20, 0x30, 0x20, 0x61, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x39, 0x37, 0x20, 0x20, 0x30, 0x20, 0x37, 0x39, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x36, 0x20, 0x20, 0x30, 0x20, 0x36, +0x61, 0x20, 0x20, 0x30, 0x20, 0x62, 0x34, 0x20, 0x20, 0x63, 0x20, 0x20, +0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x61, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x61, 0x20, 0x20, 0x30, 0x20, 0x62, +0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 
+0x33, 0x62, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x20, 0x62, 0x20, 0x20, 0x30, 0x20, 0x63, 0x30, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x34, 0x62, 0x20, 0x20, 0x30, 0x20, 0x63, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x38, 0x20, 0x20, 0x30, 0x20, +0x38, 0x39, 0x20, 0x34, 0x33, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x32, 0x32, +0x20, 0x20, 0x31, 0x20, 0x31, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, +0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x63, 0x20, 0x20, 0x30, +0x20, 0x62, 0x35, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x35, 0x62, 0x20, 0x20, 0x30, 0x20, 0x63, 0x32, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, +0x30, 0x20, 0x32, 0x63, 0x20, 0x20, 0x30, 0x20, 0x61, 0x37, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x61, 0x20, 0x20, +0x30, 0x20, 0x63, 0x33, 0x20, 0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, +0x36, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x33, 0x63, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x20, 0x63, 0x20, 0x20, 0x30, 0x20, 0x64, 0x30, 0x20, 0x0a, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x62, 0x36, 0x20, +0x20, 0x30, 0x20, 0x36, 0x62, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x63, 0x34, 0x20, +0x20, 0x30, 0x20, 0x34, 0x63, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x39, 0x39, 0x20, 0x20, 0x30, 0x20, 0x61, 0x38, 0x20, +0x31, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, +0x0a, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x38, 0x61, 0x20, 0x20, 0x30, 0x20, 0x63, 0x35, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x35, 0x63, +0x20, 0x20, 0x30, 0x20, 
0x64, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x62, 0x37, +0x20, 0x20, 0x30, 0x20, 0x37, 0x62, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x0a, 0x20, 0x30, 0x20, 0x31, 0x64, 0x20, 0x20, 0x30, 0x20, 0x64, +0x32, 0x20, 0x20, 0x39, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, +0x64, 0x20, 0x20, 0x30, 0x20, 0x64, 0x33, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x64, 0x20, 0x20, 0x30, 0x20, 0x63, +0x36, 0x20, 0x35, 0x35, 0x20, 0x66, 0x61, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x36, 0x63, 0x20, 0x20, 0x30, 0x20, 0x61, 0x39, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x61, 0x20, 0x20, 0x30, 0x20, +0x64, 0x34, 0x20, 0x32, 0x30, 0x20, 0x20, 0x31, 0x20, 0x31, 0x30, 0x20, +0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x62, 0x38, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x38, 0x62, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x34, 0x64, 0x20, 0x20, 0x30, +0x20, 0x63, 0x37, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x37, 0x63, 0x20, 0x20, 0x30, +0x20, 0x64, 0x35, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x35, 0x64, 0x20, 0x20, 0x30, 0x20, 0x65, 0x31, 0x20, 0x20, 0x38, +0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x31, 0x65, 0x20, 0x20, +0x30, 0x20, 0x65, 0x32, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x61, 0x61, 0x20, 0x20, 0x30, 0x20, 0x62, 0x39, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x39, 0x62, 0x20, 0x20, 0x30, 0x20, 0x65, 0x33, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 
0x20, 0x64, 0x36, 0x20, +0x20, 0x30, 0x20, 0x36, 0x64, 0x20, 0x31, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x36, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x33, 0x65, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x32, 0x65, 0x20, +0x20, 0x30, 0x20, 0x34, 0x65, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x63, 0x38, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x38, 0x63, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x65, 0x34, 0x20, 0x20, 0x30, 0x20, 0x64, 0x37, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x37, 0x64, 0x20, 0x20, 0x30, 0x20, 0x61, 0x62, +0x20, 0x20, 0x30, 0x20, 0x65, 0x35, 0x20, 0x20, 0x61, 0x20, 0x20, 0x31, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x62, 0x61, 0x20, 0x20, 0x30, 0x20, 0x35, +0x65, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x63, +0x39, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, +0x63, 0x20, 0x20, 0x30, 0x20, 0x36, 0x65, 0x20, 0x20, 0x38, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x65, +0x36, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, +0x20, 0x64, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x65, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x65, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x64, 0x38, 0x20, 0x20, 0x30, 0x20, 0x38, 0x64, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x62, 0x62, 0x20, 0x20, 0x30, 0x20, +0x63, 0x61, 0x20, 0x34, 0x61, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, 0x66, 0x20, 0x34, 0x30, +0x20, 0x20, 0x31, 0x20, 0x33, 0x61, 0x20, 0x20, 0x31, 0x20, 0x32, 0x30, +0x20, 0x20, 0x31, 0x20, 0x31, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 
+0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x63, 0x20, 0x20, 0x30, +0x20, 0x65, 0x37, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, +0x30, 0x20, 0x37, 0x65, 0x20, 0x20, 0x30, 0x20, 0x64, 0x39, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x39, 0x64, 0x20, 0x20, 0x30, 0x20, 0x65, 0x38, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x38, 0x65, 0x20, 0x20, +0x30, 0x20, 0x63, 0x62, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, +0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x0a, +0x20, 0x30, 0x20, 0x62, 0x63, 0x20, 0x20, 0x30, 0x20, 0x64, 0x61, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x61, 0x64, 0x20, +0x20, 0x30, 0x20, 0x65, 0x39, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x65, 0x20, +0x20, 0x30, 0x20, 0x63, 0x63, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x64, 0x62, 0x20, 0x20, 0x30, 0x20, 0x62, 0x64, 0x20, +0x0a, 0x31, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x65, 0x61, 0x20, 0x20, 0x30, 0x20, 0x61, 0x65, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x63, +0x20, 0x20, 0x30, 0x20, 0x63, 0x64, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x65, 0x62, +0x20, 0x0a, 0x20, 0x30, 0x20, 0x62, 0x65, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x64, 0x20, 0x20, 0x30, 0x20, 0x65, +0x63, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, +0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x63, +0x65, 0x20, 0x20, 0x30, 0x20, 0x65, 0x64, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x64, 0x65, 0x20, 0x20, 0x30, 0x20, 0x65, +0x65, 0x20, 0x0a, 0x20, 
0x30, 0x20, 0x20, 0x66, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x66, 0x30, 0x20, 0x20, 0x30, 0x20, 0x31, 0x66, 0x20, 0x20, 0x30, 0x20, +0x66, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, 0x32, 0x20, 0x20, 0x30, 0x20, +0x32, 0x66, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x66, 0x33, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x33, 0x66, 0x20, 0x31, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x66, 0x34, 0x20, 0x20, 0x30, 0x20, 0x34, 0x66, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, 0x35, 0x20, 0x20, 0x30, +0x20, 0x35, 0x66, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x66, 0x36, 0x20, 0x20, +0x30, 0x20, 0x36, 0x66, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x66, 0x37, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x37, 0x66, 0x20, 0x20, 0x30, 0x20, 0x38, 0x66, 0x20, 0x20, +0x61, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, 0x38, 0x20, 0x20, +0x30, 0x20, 0x66, 0x39, 0x20, 0x0a, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x39, 0x66, 0x20, +0x20, 0x30, 0x20, 0x61, 0x66, 0x20, 0x20, 0x30, 0x20, 0x66, 0x61, 0x20, +0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, +0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x66, 0x62, 0x20, +0x20, 0x30, 0x20, 0x62, 0x66, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, +0x20, 0x30, 0x20, 0x66, 0x63, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x63, 0x66, +0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, +0x20, 0x20, 0x30, 0x20, 0x66, 0x64, 0x20, 0x20, 0x30, 0x20, 0x64, 0x66, +0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 
0x30, 0x20, 0x66, 0x65, +0x20, 0x20, 0x30, 0x20, 0x65, 0x66, 0x20, 0x0a, 0x0a, 0x2e, 0x74, 0x61, +0x62, 0x6c, 0x65, 0x20, 0x32, 0x35, 0x20, 0x35, 0x31, 0x32, 0x20, 0x31, +0x36, 0x20, 0x31, 0x36, 0x20, 0x20, 0x35, 0x0a, 0x2e, 0x72, 0x65, 0x66, +0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x20, 0x32, 0x34, 0x0a, 0x0a, 0x2e, +0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x32, 0x36, 0x20, 0x35, 0x31, 0x32, +0x20, 0x31, 0x36, 0x20, 0x31, 0x36, 0x20, 0x20, 0x36, 0x0a, 0x2e, 0x72, +0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x20, 0x32, 0x34, 0x0a, +0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x32, 0x37, 0x20, 0x35, +0x31, 0x32, 0x20, 0x31, 0x36, 0x20, 0x31, 0x36, 0x20, 0x20, 0x37, 0x0a, +0x2e, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x20, 0x32, +0x34, 0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x32, 0x38, +0x20, 0x35, 0x31, 0x32, 0x20, 0x31, 0x36, 0x20, 0x31, 0x36, 0x20, 0x20, +0x38, 0x0a, 0x2e, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, +0x20, 0x32, 0x34, 0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, +0x32, 0x39, 0x20, 0x35, 0x31, 0x32, 0x20, 0x31, 0x36, 0x20, 0x31, 0x36, +0x20, 0x20, 0x39, 0x0a, 0x2e, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, +0x63, 0x65, 0x20, 0x32, 0x34, 0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, +0x65, 0x20, 0x33, 0x30, 0x20, 0x35, 0x31, 0x32, 0x20, 0x31, 0x36, 0x20, +0x31, 0x36, 0x20, 0x31, 0x31, 0x0a, 0x2e, 0x72, 0x65, 0x66, 0x65, 0x72, +0x65, 0x6e, 0x63, 0x65, 0x20, 0x32, 0x34, 0x0a, 0x0a, 0x2e, 0x74, 0x61, +0x62, 0x6c, 0x65, 0x20, 0x33, 0x31, 0x20, 0x35, 0x31, 0x32, 0x20, 0x31, +0x36, 0x20, 0x31, 0x36, 0x20, 0x31, 0x33, 0x0a, 0x2e, 0x72, 0x65, 0x66, +0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x20, 0x32, 0x34, 0x0a, 0x0a, 0x2e, +0x74, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x33, 0x32, 0x20, 0x20, 0x33, 0x31, +0x20, 0x20, 0x31, 0x20, 0x31, 0x36, 0x20, 0x20, 0x30, 0x0a, 0x2e, 0x74, +0x72, 0x65, 0x65, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x38, 0x20, 0x20, 
+0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x38, 0x20, 0x20, 0x30, 0x20, 0x20, +0x34, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, +0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x32, 0x20, 0x20, 0x38, 0x20, 0x20, +0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x0a, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x63, 0x20, 0x20, 0x30, 0x20, +0x20, 0x61, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x20, 0x33, 0x20, 0x20, 0x30, 0x20, 0x20, 0x36, 0x20, 0x20, 0x36, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x20, 0x39, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x20, 0x35, 0x20, 0x20, 0x30, 0x20, 0x20, 0x37, 0x20, 0x0a, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x20, 0x65, 0x20, 0x20, 0x30, 0x20, 0x20, 0x64, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x66, 0x20, 0x20, 0x30, +0x20, 0x20, 0x62, 0x20, 0x0a, 0x0a, 0x2e, 0x74, 0x61, 0x62, 0x6c, 0x65, +0x20, 0x33, 0x33, 0x20, 0x20, 0x33, 0x31, 0x20, 0x20, 0x31, 0x20, 0x31, +0x36, 0x20, 0x20, 0x30, 0x0a, 0x2e, 0x74, 0x72, 0x65, 0x65, 0x64, 0x61, +0x74, 0x61, 0x0a, 0x31, 0x30, 0x20, 0x20, 0x31, 0x20, 0x20, 0x38, 0x20, +0x20, 0x31, 0x20, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, +0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, 0x20, 0x30, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x20, 0x32, 0x20, 0x20, 0x30, 0x20, 0x20, 0x33, 0x20, 0x20, 0x34, 0x20, +0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, +0x20, 0x34, 0x20, 0x0a, 0x20, 0x30, 0x20, 0x20, 0x35, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x36, 0x20, 0x20, 0x30, +0x20, 0x20, 0x37, 0x20, 0x20, 0x38, 0x20, 0x20, 0x31, 0x20, 0x20, 0x34, +0x20, 0x20, 0x31, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, +0x20, 0x20, 0x38, 0x20, 
0x20, 0x30, 0x20, 0x20, 0x39, 0x20, 0x20, 0x32, +0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x61, 0x20, 0x20, 0x30, +0x20, 0x20, 0x62, 0x20, 0x0a, 0x20, 0x34, 0x20, 0x20, 0x31, 0x20, 0x20, +0x32, 0x20, 0x20, 0x31, 0x20, 0x20, 0x30, 0x20, 0x20, 0x63, 0x20, 0x20, +0x30, 0x20, 0x20, 0x64, 0x20, 0x20, 0x32, 0x20, 0x20, 0x31, 0x20, 0x20, +0x30, 0x20, 0x20, 0x65, 0x20, 0x20, 0x30, 0x20, 0x20, 0x66, 0x20, 0x0a, +0x0a, 0x2e, 0x65, 0x6e, 0x64, 0x0a +}; diff --git a/AnyCore/lib_rtsp/liveMedia/MP3StreamState.cpp b/AnyCore/lib_rtsp/liveMedia/MP3StreamState.cpp new file mode 100644 index 0000000..ddcbf52 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3StreamState.cpp @@ -0,0 +1,438 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A class encapsulating the state of a MP3 stream +// Implementation + +#include "MP3StreamState.hh" +#include "InputFile.hh" +#include "GroupsockHelper.hh" + +#if defined(__WIN32__) || defined(_WIN32) +#define snprintf _snprintf +#if _MSC_VER >= 1400 // 1400 == vs2005 +#define fileno _fileno +#endif +#endif + +#define MILLION 1000000 + +MP3StreamState::MP3StreamState(UsageEnvironment& env) + : fEnv(env), fFid(NULL), fPresentationTimeScale(1) { +} + +MP3StreamState::~MP3StreamState() { + // Close our open file or socket: + if (fFid != NULL && fFid != stdin) { + if (fFidIsReallyASocket) { + intptr_t fid_long = (intptr_t)fFid; + closeSocket((int)fid_long); + } else { + CloseInputFile(fFid); + } + } +} + +void MP3StreamState::assignStream(FILE* fid, unsigned fileSize) { + fFid = fid; + + if (fileSize == (unsigned)(-1)) { /*HACK#####*/ + fFidIsReallyASocket = 1; + fFileSize = 0; + } else { + fFidIsReallyASocket = 0; + fFileSize = fileSize; + } + fNumFramesInFile = 0; // until we know otherwise + fIsVBR = fHasXingTOC = False; // ditto + + // Set the first frame's 'presentation time' to the current wall time: + gettimeofday(&fNextFramePresentationTime, NULL); +} + +struct timeval MP3StreamState::currentFramePlayTime() const { + unsigned const numSamples = 1152; + unsigned const freq = fr().samplingFreq*(1 + fr().isMPEG2); + + // result is numSamples/freq + unsigned const uSeconds + = ((numSamples*2*MILLION)/freq + 1)/2; // rounds to nearest integer + + struct timeval result; + result.tv_sec = uSeconds/MILLION; + result.tv_usec = uSeconds%MILLION; + return result; +} + +float MP3StreamState::filePlayTime() const { + unsigned numFramesInFile = fNumFramesInFile; + if (numFramesInFile == 0) { + // Estimate the number of frames from the file size, and the + // size of the current frame: + numFramesInFile = fFileSize/(4 + fCurrentFrame.frameSize); + } + + struct timeval const pt = currentFramePlayTime(); + return numFramesInFile*(pt.tv_sec + pt.tv_usec/(float)MILLION); +} + 
+unsigned MP3StreamState::getByteNumberFromPositionFraction(float fraction) { + if (fHasXingTOC) { + // The file is VBR, with a Xing TOC; use it to determine which byte to seek to: + float percent = fraction*100.0f; + unsigned a = (unsigned)percent; + if (a > 99) a = 99; + + unsigned fa = fXingTOC[a]; + unsigned fb; + if (a < 99) { + fb = fXingTOC[a+1]; + } else { + fb = 256; + } + fraction = (fa + (fb-fa)*(percent-a))/256.0f; + } + + return (unsigned)(fraction*fFileSize); +} + +void MP3StreamState::seekWithinFile(unsigned seekByteNumber) { + if (fFidIsReallyASocket) return; // it's not seekable + + SeekFile64(fFid, seekByteNumber, SEEK_SET); +} + +unsigned MP3StreamState::findNextHeader(struct timeval& presentationTime) { + presentationTime = fNextFramePresentationTime; + + if (!findNextFrame()) return 0; + + // From this frame, figure out the *next* frame's presentation time: + struct timeval framePlayTime = currentFramePlayTime(); + if (fPresentationTimeScale > 1) { + // Scale this value + unsigned secondsRem = framePlayTime.tv_sec % fPresentationTimeScale; + framePlayTime.tv_sec -= secondsRem; + framePlayTime.tv_usec += secondsRem*MILLION; + framePlayTime.tv_sec /= fPresentationTimeScale; + framePlayTime.tv_usec /= fPresentationTimeScale; + } + fNextFramePresentationTime.tv_usec += framePlayTime.tv_usec; + fNextFramePresentationTime.tv_sec + += framePlayTime.tv_sec + fNextFramePresentationTime.tv_usec/MILLION; + fNextFramePresentationTime.tv_usec %= MILLION; + + return fr().hdr; +} + +Boolean MP3StreamState::readFrame(unsigned char* outBuf, unsigned outBufSize, + unsigned& resultFrameSize, + unsigned& resultDurationInMicroseconds) { + /* We assume that "mp3FindNextHeader()" has already been called */ + + resultFrameSize = 4 + fr().frameSize; + + if (outBufSize < resultFrameSize) { +#ifdef DEBUG_ERRORS + fprintf(stderr, "Insufficient buffer size for reading input frame (%d, need %d)\n", + outBufSize, resultFrameSize); +#endif + if (outBufSize < 4) outBufSize = 
0; + resultFrameSize = outBufSize; + + return False; + } + + if (resultFrameSize >= 4) { + unsigned& hdr = fr().hdr; + *outBuf++ = (unsigned char)(hdr>>24); + *outBuf++ = (unsigned char)(hdr>>16); + *outBuf++ = (unsigned char)(hdr>>8); + *outBuf++ = (unsigned char)(hdr); + + memmove(outBuf, fr().frameBytes, resultFrameSize-4); + } + + struct timeval const pt = currentFramePlayTime(); + resultDurationInMicroseconds = pt.tv_sec*MILLION + pt.tv_usec; + + return True; +} + +void MP3StreamState::getAttributes(char* buffer, unsigned bufferSize) const { + char const* formatStr + = "bandwidth %d MPEGnumber %d MPEGlayer %d samplingFrequency %d isStereo %d playTime %d isVBR %d"; + unsigned fpt = (unsigned)(filePlayTime() + 0.5); // rounds to nearest integer +#if defined(IRIX) || defined(ALPHA) || defined(_QNX4) || defined(IMN_PIM) || defined(CRIS) + /* snprintf() isn't defined, so just use sprintf() - ugh! */ + sprintf(buffer, formatStr, + fr().bitrate, fr().isMPEG2 ? 2 : 1, fr().layer, fr().samplingFreq, fr().isStereo, + fpt, fIsVBR); +#else + snprintf(buffer, bufferSize, formatStr, + fr().bitrate, fr().isMPEG2 ? 
2 : 1, fr().layer, fr().samplingFreq, fr().isStereo, + fpt, fIsVBR); +#endif +} + +// This is crufty old code that needs to be cleaned up ##### +#define HDRCMPMASK 0xfffffd00 + +Boolean MP3StreamState::findNextFrame() { + unsigned char hbuf[8]; + unsigned l; int i; + int attempt = 0; + + read_again: + if (readFromStream(hbuf, 4) != 4) return False; + + fr().hdr = ((unsigned long) hbuf[0] << 24) + | ((unsigned long) hbuf[1] << 16) + | ((unsigned long) hbuf[2] << 8) + | (unsigned long) hbuf[3]; + +#ifdef DEBUG_PARSE + fprintf(stderr, "fr().hdr: 0x%08x\n", fr().hdr); +#endif + if (fr().oldHdr != fr().hdr || !fr().oldHdr) { + i = 0; + init_resync: +#ifdef DEBUG_PARSE + fprintf(stderr, "init_resync: fr().hdr: 0x%08x\n", fr().hdr); +#endif + if ( (fr().hdr & 0xffe00000) != 0xffe00000 + || (fr().hdr & 0x00060000) == 0 // undefined 'layer' field + || (fr().hdr & 0x0000F000) == 0 // 'free format' bitrate index + || (fr().hdr & 0x0000F000) == 0x0000F000 // undefined bitrate index + || (fr().hdr & 0x00000C00) == 0x00000C00 // undefined frequency index + || (fr().hdr & 0x00000003) != 0x00000000 // 'emphasis' field unexpectedly set + ) { + /* RSF: Do the following test even if we're not at the + start of the file, in case we have two or more + separate MP3 files cat'ed together: + */ + /* Check for RIFF hdr */ + if (fr().hdr == ('R'<<24)+('I'<<16)+('F'<<8)+'F') { + unsigned char buf[70 /*was: 40*/]; +#ifdef DEBUG_ERRORS + fprintf(stderr,"Skipped RIFF header\n"); +#endif + readFromStream(buf, 66); /* already read 4 */ + goto read_again; + } + /* Check for ID3 hdr */ + if ((fr().hdr&0xFFFFFF00) == ('I'<<24)+('D'<<16)+('3'<<8)) { + unsigned tagSize, bytesToSkip; + unsigned char buf[1000]; + readFromStream(buf, 6); /* already read 4 */ + tagSize = ((buf[2]&0x7F)<<21) + ((buf[3]&0x7F)<<14) + ((buf[4]&0x7F)<<7) + (buf[5]&0x7F); + bytesToSkip = tagSize; + while (bytesToSkip > 0) { + unsigned bytesToRead = sizeof buf; + if (bytesToRead > bytesToSkip) { + bytesToRead = bytesToSkip; + } 
+ readFromStream(buf, bytesToRead); + bytesToSkip -= bytesToRead; + } +#ifdef DEBUG_ERRORS + fprintf(stderr,"Skipped %d-byte ID3 header\n", tagSize); +#endif + goto read_again; + } + /* give up after 20,000 bytes */ + if (i++ < 20000/*4096*//*1024*/) { + memmove (&hbuf[0], &hbuf[1], 3); + if (readFromStream(hbuf+3,1) != 1) { + return False; + } + fr().hdr <<= 8; + fr().hdr |= hbuf[3]; + fr().hdr &= 0xffffffff; +#ifdef DEBUG_PARSE + fprintf(stderr, "calling init_resync %d\n", i); +#endif + goto init_resync; + } +#ifdef DEBUG_ERRORS + fprintf(stderr,"Giving up searching valid MPEG header\n"); +#endif + return False; + +#ifdef DEBUG_ERRORS + fprintf(stderr,"Illegal Audio-MPEG-Header 0x%08lx at offset 0x%lx.\n", + fr().hdr,tell_stream(str)-4); +#endif + /* Read more bytes until we find something that looks + reasonably like a valid header. This is not a + perfect strategy, but it should get us back on the + track within a short time (and hopefully without + too much distortion in the audio output). */ + do { + attempt++; + memmove (&hbuf[0], &hbuf[1], 7); + if (readFromStream(&hbuf[3],1) != 1) { + return False; + } + + /* This is faster than combining fr().hdr from scratch */ + fr().hdr = ((fr().hdr << 8) | hbuf[3]) & 0xffffffff; + + if (!fr().oldHdr) + goto init_resync; /* "considered harmful", eh? 
*/ + + } while ((fr().hdr & HDRCMPMASK) != (fr().oldHdr & HDRCMPMASK) + && (fr().hdr & HDRCMPMASK) != (fr().firstHdr & HDRCMPMASK)); +#ifdef DEBUG_ERRORS + fprintf (stderr, "Skipped %d bytes in input.\n", attempt); +#endif + } + if (!fr().firstHdr) { + fr().firstHdr = fr().hdr; + } + + fr().setParamsFromHeader(); + fr().setBytePointer(fr().frameBytes, fr().frameSize); + + fr().oldHdr = fr().hdr; + + if (fr().isFreeFormat) { +#ifdef DEBUG_ERRORS + fprintf(stderr,"Free format not supported.\n"); +#endif + return False; + } + +#ifdef MP3_ONLY + if (fr().layer != 3) { +#ifdef DEBUG_ERRORS + fprintf(stderr, "MPEG layer %d is not supported!\n", fr().layer); +#endif + return False; + } +#endif + } + + if ((l = readFromStream(fr().frameBytes, fr().frameSize)) + != fr().frameSize) { + if (l == 0) return False; + memset(fr().frameBytes+1, 0, fr().frameSize-1); + } + + return True; +} + +static Boolean socketIsReadable(int socket) { + const unsigned numFds = socket+1; + fd_set rd_set; + FD_ZERO(&rd_set); + FD_SET((unsigned)socket, &rd_set); + struct timeval timeout; + timeout.tv_sec = timeout.tv_usec = 0; + + int result = select(numFds, &rd_set, NULL, NULL, &timeout); + return result != 0; // not > 0, because windows can return -1 for file sockets +} + +static char watchVariable; + +static void checkFunc(void* /*clientData*/) { + watchVariable = ~0; +} + +static void waitUntilSocketIsReadable(UsageEnvironment& env, int socket) { + while (!socketIsReadable(socket)) { + // Delay a short period of time before checking again. 
+ unsigned usecsToDelay = 1000; // 1 ms + env.taskScheduler().scheduleDelayedTask(usecsToDelay, + (TaskFunc*)checkFunc, (void*)NULL); + watchVariable = 0; + env.taskScheduler().doEventLoop(&watchVariable); + // This allows other tasks to run while we're waiting: + } +} + +unsigned MP3StreamState::readFromStream(unsigned char* buf, + unsigned numChars) { + // Hack for doing socket I/O instead of file I/O (e.g., on Windows) + if (fFidIsReallyASocket) { + intptr_t fid_long = (intptr_t)fFid; + int sock = (int)fid_long; + unsigned totBytesRead = 0; + do { + waitUntilSocketIsReadable(fEnv, sock); + int bytesRead + = recv(sock, &((char*)buf)[totBytesRead], numChars-totBytesRead, 0); + if (bytesRead < 0) return 0; + + totBytesRead += (unsigned)bytesRead; + } while (totBytesRead < numChars); + + return totBytesRead; + } else { +#ifndef _WIN32_WCE + waitUntilSocketIsReadable(fEnv, (int)fileno(fFid)); +#endif + return fread(buf, 1, numChars, fFid); + } +} + +#define XING_FRAMES_FLAG 0x0001 +#define XING_BYTES_FLAG 0x0002 +#define XING_TOC_FLAG 0x0004 +#define XING_VBR_SCALE_FLAG 0x0008 + +void MP3StreamState::checkForXingHeader() { + // Look for 'Xing' in the first 4 bytes after the 'side info': + if (fr().frameSize < fr().sideInfoSize) return; + unsigned bytesAvailable = fr().frameSize - fr().sideInfoSize; + unsigned char* p = &(fr().frameBytes[fr().sideInfoSize]); + + if (bytesAvailable < 8) return; + if (p[0] != 'X' || p[1] != 'i' || p[2] != 'n' || p[3] != 'g') return; + + // We found it. 
+ fIsVBR = True; + + u_int32_t flags = (p[4]<<24) | (p[5]<<16) | (p[6]<<8) | p[7]; + unsigned i = 8; + bytesAvailable -= 8; + + if (flags&XING_FRAMES_FLAG) { + // The next 4 bytes are the number of frames: + if (bytesAvailable < 4) return; + fNumFramesInFile = (p[i]<<24)|(p[i+1]<<16)|(p[i+2]<<8)|(p[i+3]); + i += 4; bytesAvailable -= 4; + } + + if (flags&XING_BYTES_FLAG) { + // The next 4 bytes is the file size: + if (bytesAvailable < 4) return; + fFileSize = (p[i]<<24)|(p[i+1]<<16)|(p[i+2]<<8)|(p[i+3]); + i += 4; bytesAvailable -= 4; + } + + if (flags&XING_TOC_FLAG) { + // Fill in the Xing 'table of contents': + if (bytesAvailable < XING_TOC_LENGTH) return; + fHasXingTOC = True; + for (unsigned j = 0; j < XING_TOC_LENGTH; ++j) { + fXingTOC[j] = p[i+j]; + } + i += XING_TOC_FLAG; bytesAvailable -= XING_TOC_FLAG; + } +} diff --git a/AnyCore/lib_rtsp/liveMedia/MP3StreamState.hh b/AnyCore/lib_rtsp/liveMedia/MP3StreamState.hh new file mode 100644 index 0000000..9ce5f49 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3StreamState.hh @@ -0,0 +1,90 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A class encapsulating the state of a MP3 stream +// C++ header + +#ifndef _MP3_STREAM_STATE_HH +#define _MP3_STREAM_STATE_HH + +#ifndef _USAGE_ENVIRONMENT_HH +#include "UsageEnvironment.hh" +#endif +#ifndef _BOOLEAN_HH +#include "Boolean.hh" +#endif +#ifndef _MP3_INTERNALS_HH +#include "MP3Internals.hh" +#endif +#ifndef _NET_COMMON_H +#include "NetCommon.h" +#endif + +#include + +#define XING_TOC_LENGTH 100 + +class MP3StreamState { +public: + MP3StreamState(UsageEnvironment& env); + virtual ~MP3StreamState(); + + void assignStream(FILE* fid, unsigned fileSize); + + unsigned findNextHeader(struct timeval& presentationTime); + Boolean readFrame(unsigned char* outBuf, unsigned outBufSize, + unsigned& resultFrameSize, + unsigned& resultDurationInMicroseconds); + // called after findNextHeader() + + void getAttributes(char* buffer, unsigned bufferSize) const; + + float filePlayTime() const; // in seconds + unsigned fileSize() const { return fFileSize; } + void setPresentationTimeScale(unsigned scale) { fPresentationTimeScale = scale; } + unsigned getByteNumberFromPositionFraction(float fraction); // 0.0 <= fraction <= 1.0 + void seekWithinFile(unsigned seekByteNumber); + + void checkForXingHeader(); // hack for Xing VBR files + +protected: // private->protected requested by Pierre l'Hussiez + unsigned readFromStream(unsigned char* buf, unsigned numChars); + +private: + MP3FrameParams& fr() {return fCurrentFrame;} + MP3FrameParams const& fr() const {return fCurrentFrame;} + + struct timeval currentFramePlayTime() const; + + Boolean findNextFrame(); + +private: + UsageEnvironment& fEnv; + FILE* fFid; + Boolean fFidIsReallyASocket; + unsigned fFileSize; + unsigned fNumFramesInFile; + unsigned fPresentationTimeScale; + // used if we're streaming at other than the normal rate + Boolean fIsVBR, fHasXingTOC; + u_int8_t fXingTOC[XING_TOC_LENGTH]; // set iff "fHasXingTOC" is True + + MP3FrameParams fCurrentFrame; + struct timeval fNextFramePresentationTime; +}; + +#endif 
diff --git a/AnyCore/lib_rtsp/liveMedia/MP3Transcoder.cpp b/AnyCore/lib_rtsp/liveMedia/MP3Transcoder.cpp new file mode 100644 index 0000000..8c437d4 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MP3Transcoder.cpp @@ -0,0 +1,52 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// MP3 Transcoder +// Implementation + +#include "MP3Transcoder.hh" + +MP3Transcoder::MP3Transcoder(UsageEnvironment& env, + MP3ADUTranscoder* aduTranscoder) + : MP3FromADUSource(env, aduTranscoder, False) { +} + +MP3Transcoder::~MP3Transcoder() { +} + +MP3Transcoder* MP3Transcoder::createNew(UsageEnvironment& env, + unsigned outBitrate /* in kbps */, + FramedSource* inputSource) { + MP3Transcoder* newSource = NULL; + + do { + // Create the intermediate filters that help implement the transcoder: + ADUFromMP3Source* aduFromMP3 + = ADUFromMP3Source::createNew(env, inputSource, False); + // Note: This also checks that "inputSource" is an MP3 source + if (aduFromMP3 == NULL) break; + + MP3ADUTranscoder* aduTranscoder + = MP3ADUTranscoder::createNew(env, outBitrate, aduFromMP3); + if (aduTranscoder == NULL) break; + + // Then create the transcoder itself: + newSource = new MP3Transcoder(env, aduTranscoder); + } while (0); + + return newSource; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG1or2AudioRTPSink.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG1or2AudioRTPSink.cpp new file mode 100644 index 0000000..316cadb --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG1or2AudioRTPSink.cpp @@ -0,0 +1,63 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for MPEG audio (RFC 2250) +// Implementation + +#include "MPEG1or2AudioRTPSink.hh" + +MPEG1or2AudioRTPSink::MPEG1or2AudioRTPSink(UsageEnvironment& env, Groupsock* RTPgs) + : AudioRTPSink(env, RTPgs, 14, 90000, "MPA") { +} + +MPEG1or2AudioRTPSink::~MPEG1or2AudioRTPSink() { +} + +MPEG1or2AudioRTPSink* +MPEG1or2AudioRTPSink::createNew(UsageEnvironment& env, Groupsock* RTPgs) { + return new MPEG1or2AudioRTPSink(env, RTPgs); +} + +void MPEG1or2AudioRTPSink::doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes) { + // If this is the 1st frame in the 1st packet, set the RTP 'M' (marker) + // bit (because this is considered the start of a talk spurt): + if (isFirstPacket() && isFirstFrameInPacket()) { + setMarkerBit(); + } + + // If this is the first frame in the packet, set the lower half of the + // audio-specific header (to the "fragmentationOffset"): + if (isFirstFrameInPacket()) { + setSpecialHeaderWord(fragmentationOffset&0xFFFF); + } + + // Important: Also call our base class's doSpecialFrameHandling(), + // to set the packet's timestamp: + MultiFramedRTPSink::doSpecialFrameHandling(fragmentationOffset, + frameStart, numBytesInFrame, + framePresentationTime, + numRemainingBytes); +} + +unsigned MPEG1or2AudioRTPSink::specialHeaderSize() const { + // There's a 4 byte special audio header: + return 4; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG1or2AudioRTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG1or2AudioRTPSource.cpp new file mode 100644 index 0000000..c3773a5 --- /dev/null +++ 
b/AnyCore/lib_rtsp/liveMedia/MPEG1or2AudioRTPSource.cpp @@ -0,0 +1,62 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// MPEG-1 or MPEG-2 Audio RTP Sources +// Implementation + +#include "MPEG1or2AudioRTPSource.hh" + +MPEG1or2AudioRTPSource* +MPEG1or2AudioRTPSource::createNew(UsageEnvironment& env, + Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) { + return new MPEG1or2AudioRTPSource(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency); +} + +MPEG1or2AudioRTPSource::MPEG1or2AudioRTPSource(UsageEnvironment& env, + Groupsock* rtpGS, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) + : MultiFramedRTPSource(env, rtpGS, + rtpPayloadFormat, rtpTimestampFrequency) { +} + +MPEG1or2AudioRTPSource::~MPEG1or2AudioRTPSource() { +} + +Boolean MPEG1or2AudioRTPSource +::processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize) { + // There's a 4-byte header indicating fragmentation. + if (packet->dataSize() < 4) return False; + + // Note: This fragmentation header is actually useless to us, because + // it doesn't tell us whether or not this RTP packet *ends* a + // fragmented frame. 
Thus, we can't use it to properly set + // "fCurrentPacketCompletesFrame". Instead, we assume that even + // a partial audio frame will be usable to clients. + + resultSpecialHeaderSize = 4; + return True; +} + +char const* MPEG1or2AudioRTPSource::MIMEtype() const { + return "audio/MPEG"; +} + diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG1or2AudioStreamFramer.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG1or2AudioStreamFramer.cpp new file mode 100644 index 0000000..26fcee5 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG1or2AudioStreamFramer.cpp @@ -0,0 +1,210 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A filter that breaks up an MPEG (1,2) audio elementary stream into frames +// Implementation + +#include "MPEG1or2AudioStreamFramer.hh" +#include "StreamParser.hh" +#include "MP3Internals.hh" +#include + +////////// MPEG1or2AudioStreamParser definition ////////// + +class MPEG1or2AudioStreamParser: public StreamParser { +public: + MPEG1or2AudioStreamParser(MPEG1or2AudioStreamFramer* usingSource, + FramedSource* inputSource); + virtual ~MPEG1or2AudioStreamParser(); + +public: + unsigned parse(unsigned& numTruncatedBytes); + // returns the size of the frame that was acquired, or 0 if none was + + void registerReadInterest(unsigned char* to, unsigned maxSize); + + MP3FrameParams const& currentFrame() const { return fCurrentFrame; } + +private: + unsigned char* fTo; + unsigned fMaxSize; + + // Parameters of the most recently read frame: + MP3FrameParams fCurrentFrame; // also works for layer I or II +}; + + +////////// MPEG1or2AudioStreamFramer implementation ////////// + +MPEG1or2AudioStreamFramer +::MPEG1or2AudioStreamFramer(UsageEnvironment& env, FramedSource* inputSource, + Boolean syncWithInputSource) + : FramedFilter(env, inputSource), + fSyncWithInputSource(syncWithInputSource) { + reset(); + + fParser = new MPEG1or2AudioStreamParser(this, inputSource); +} + +MPEG1or2AudioStreamFramer::~MPEG1or2AudioStreamFramer() { + delete fParser; +} + +MPEG1or2AudioStreamFramer* +MPEG1or2AudioStreamFramer::createNew(UsageEnvironment& env, + FramedSource* inputSource, + Boolean syncWithInputSource) { + // Need to add source type checking here??? 
##### + return new MPEG1or2AudioStreamFramer(env, inputSource, syncWithInputSource); +} + +void MPEG1or2AudioStreamFramer::flushInput() { + reset(); + fParser->flushInput(); +} + +void MPEG1or2AudioStreamFramer::reset() { + // Use the current wallclock time as the initial 'presentation time': + struct timeval timeNow; + gettimeofday(&timeNow, NULL); + resetPresentationTime(timeNow); +} + +void MPEG1or2AudioStreamFramer +::resetPresentationTime(struct timeval newPresentationTime) { + fNextFramePresentationTime = newPresentationTime; +} + +void MPEG1or2AudioStreamFramer::doGetNextFrame() { + fParser->registerReadInterest(fTo, fMaxSize); + continueReadProcessing(); +} + +#define MILLION 1000000 + +static unsigned const numSamplesByLayer[4] = {0, 384, 1152, 1152}; + +struct timeval MPEG1or2AudioStreamFramer::currentFramePlayTime() const { + MP3FrameParams const& fr = fParser->currentFrame(); + unsigned const numSamples = numSamplesByLayer[fr.layer]; + + struct timeval result; + unsigned const freq = fr.samplingFreq*(1 + fr.isMPEG2); + if (freq == 0) { + result.tv_sec = 0; + result.tv_usec = 0; + return result; + } + + // result is numSamples/freq + unsigned const uSeconds + = ((numSamples*2*MILLION)/freq + 1)/2; // rounds to nearest integer + + result.tv_sec = uSeconds/MILLION; + result.tv_usec = uSeconds%MILLION; + return result; +} + +void MPEG1or2AudioStreamFramer +::continueReadProcessing(void* clientData, + unsigned char* /*ptr*/, unsigned /*size*/, + struct timeval presentationTime) { + MPEG1or2AudioStreamFramer* framer = (MPEG1or2AudioStreamFramer*)clientData; + if (framer->fSyncWithInputSource) { + framer->resetPresentationTime(presentationTime); + } + framer->continueReadProcessing(); +} + +void MPEG1or2AudioStreamFramer::continueReadProcessing() { + unsigned acquiredFrameSize = fParser->parse(fNumTruncatedBytes); + if (acquiredFrameSize > 0) { + // We were able to acquire a frame from the input. + // It has already been copied to the reader's space. 
+ fFrameSize = acquiredFrameSize; + + // Also set the presentation time, and increment it for next time, + // based on the length of this frame: + fPresentationTime = fNextFramePresentationTime; + struct timeval framePlayTime = currentFramePlayTime(); + fDurationInMicroseconds = framePlayTime.tv_sec*MILLION + framePlayTime.tv_usec; + fNextFramePresentationTime.tv_usec += framePlayTime.tv_usec; + fNextFramePresentationTime.tv_sec + += framePlayTime.tv_sec + fNextFramePresentationTime.tv_usec/MILLION; + fNextFramePresentationTime.tv_usec %= MILLION; + + // Call our own 'after getting' function. Because we're not a 'leaf' + // source, we can call this directly, without risking infinite recursion. + afterGetting(this); + } else { + // We were unable to parse a complete frame from the input, because: + // - we had to read more data from the source stream, or + // - the source stream has ended. + } +} + + +////////// MPEG1or2AudioStreamParser implementation ////////// + +MPEG1or2AudioStreamParser +::MPEG1or2AudioStreamParser(MPEG1or2AudioStreamFramer* usingSource, + FramedSource* inputSource) + : StreamParser(inputSource, FramedSource::handleClosure, usingSource, + &MPEG1or2AudioStreamFramer::continueReadProcessing, usingSource) { +} + +MPEG1or2AudioStreamParser::~MPEG1or2AudioStreamParser() { +} + +void MPEG1or2AudioStreamParser::registerReadInterest(unsigned char* to, + unsigned maxSize) { + fTo = to; + fMaxSize = maxSize; +} + +unsigned MPEG1or2AudioStreamParser::parse(unsigned& numTruncatedBytes) { + try { + saveParserState(); + + // We expect a MPEG audio header (first 11 bits set to 1) at the start: + while (((fCurrentFrame.hdr = test4Bytes())&0xFFE00000) != 0xFFE00000) { + skipBytes(1); + saveParserState(); + } + + fCurrentFrame.setParamsFromHeader(); + + // Copy the frame to the requested destination: + unsigned frameSize = fCurrentFrame.frameSize + 4; // include header + if (frameSize > fMaxSize) { + numTruncatedBytes = frameSize - fMaxSize; + frameSize = 
fMaxSize; + } else { + numTruncatedBytes = 0; + } + + getBytes(fTo, frameSize); + skipBytes(numTruncatedBytes); + + return frameSize; + } catch (int /*e*/) { +#ifdef DEBUG + fprintf(stderr, "MPEG1or2AudioStreamParser::parse() EXCEPTION (This is normal behavior - *not* an error)\n"); +#endif + return 0; // the parsing got interrupted + } +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG1or2Demux.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG1or2Demux.cpp new file mode 100644 index 0000000..dfcb308 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG1or2Demux.cpp @@ -0,0 +1,756 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Demultiplexer for a MPEG 1 or 2 Program Stream +// Implementation + +#include "MPEG1or2Demux.hh" +#include "MPEG1or2DemuxedElementaryStream.hh" +#include "StreamParser.hh" +#include + +////////// MPEGProgramStreamParser definition ////////// + +// An enum representing the current state of the parser: +enum MPEGParseState { + PARSING_PACK_HEADER, + PARSING_SYSTEM_HEADER, + PARSING_PES_PACKET +}; + +class MPEGProgramStreamParser: public StreamParser { +public: + MPEGProgramStreamParser(MPEG1or2Demux* usingDemux, FramedSource* inputSource); + virtual ~MPEGProgramStreamParser(); + +public: + unsigned char parse(); + // returns the stream id of a stream for which a frame was acquired, + // or 0 if no such frame was acquired. + +private: + void setParseState(MPEGParseState parseState); + + void parsePackHeader(); + void parseSystemHeader(); + unsigned char parsePESPacket(); // returns as does parse() + + Boolean isSpecialStreamId(unsigned char stream_id) const; + // for PES packet header parsing + +private: + MPEG1or2Demux* fUsingDemux; + MPEGParseState fCurrentParseState; +}; + + +////////// MPEG1or2Demux::OutputDescriptor::SavedData definition/implementation ////////// + +class MPEG1or2Demux::OutputDescriptor::SavedData { +public: + SavedData(unsigned char* buf, unsigned size) + : next(NULL), data(buf), dataSize(size), numBytesUsed(0) { + } + virtual ~SavedData() { + delete[] data; + delete next; + } + + SavedData* next; + unsigned char* data; + unsigned dataSize, numBytesUsed; +}; + + +////////// MPEG1or2Demux implementation ////////// + +MPEG1or2Demux +::MPEG1or2Demux(UsageEnvironment& env, + FramedSource* inputSource, Boolean reclaimWhenLastESDies) + : Medium(env), + fInputSource(inputSource), fMPEGversion(0), + fNextAudioStreamNumber(0), fNextVideoStreamNumber(0), + fReclaimWhenLastESDies(reclaimWhenLastESDies), fNumOutstandingESs(0), + fNumPendingReads(0), fHaveUndeliveredData(False) { + fParser = new MPEGProgramStreamParser(this, inputSource); + for (unsigned 
i = 0; i < 256; ++i) { + fOutput[i].savedDataHead = fOutput[i].savedDataTail = NULL; + fOutput[i].isPotentiallyReadable = False; + fOutput[i].isCurrentlyActive = False; + fOutput[i].isCurrentlyAwaitingData = False; + } +} + +MPEG1or2Demux::~MPEG1or2Demux() { + delete fParser; + for (unsigned i = 0; i < 256; ++i) delete fOutput[i].savedDataHead; + Medium::close(fInputSource); +} + +MPEG1or2Demux* MPEG1or2Demux +::createNew(UsageEnvironment& env, + FramedSource* inputSource, Boolean reclaimWhenLastESDies) { + // Need to add source type checking here??? ##### + + return new MPEG1or2Demux(env, inputSource, reclaimWhenLastESDies); +} + +MPEG1or2Demux::SCR::SCR() + : highBit(0), remainingBits(0), extension(0), isValid(False) { +} + +void MPEG1or2Demux +::noteElementaryStreamDeletion(MPEG1or2DemuxedElementaryStream* /*es*/) { + if (--fNumOutstandingESs == 0 && fReclaimWhenLastESDies) { + Medium::close(this); + } +} + +void MPEG1or2Demux::flushInput() { + fParser->flushInput(); +} + +MPEG1or2DemuxedElementaryStream* +MPEG1or2Demux::newElementaryStream(u_int8_t streamIdTag) { + ++fNumOutstandingESs; + fOutput[streamIdTag].isPotentiallyReadable = True; + return new MPEG1or2DemuxedElementaryStream(envir(), streamIdTag, *this); +} + +MPEG1or2DemuxedElementaryStream* MPEG1or2Demux::newAudioStream() { + unsigned char newAudioStreamTag = 0xC0 | (fNextAudioStreamNumber++&~0xE0); + // MPEG audio stream tags are 110x xxxx (binary) + return newElementaryStream(newAudioStreamTag); +} + +MPEG1or2DemuxedElementaryStream* MPEG1or2Demux::newVideoStream() { + unsigned char newVideoStreamTag = 0xE0 | (fNextVideoStreamNumber++&~0xF0); + // MPEG video stream tags are 1110 xxxx (binary) + return newElementaryStream(newVideoStreamTag); +} + +// Appropriate one of the reserved stream id tags to mean: return raw PES packets: +#define RAW_PES 0xFC + +MPEG1or2DemuxedElementaryStream* MPEG1or2Demux::newRawPESStream() { + return newElementaryStream(RAW_PES); +} + +void 
MPEG1or2Demux::registerReadInterest(u_int8_t streamIdTag, + unsigned char* to, unsigned maxSize, + FramedSource::afterGettingFunc* afterGettingFunc, + void* afterGettingClientData, + FramedSource::onCloseFunc* onCloseFunc, + void* onCloseClientData) { + struct OutputDescriptor& out = fOutput[streamIdTag]; + + // Make sure this stream is not already being read: + if (out.isCurrentlyAwaitingData) { + envir() << "MPEG1or2Demux::registerReadInterest(): attempt to read stream more than once!\n"; + envir().internalError(); + } + + out.to = to; out.maxSize = maxSize; + out.fAfterGettingFunc = afterGettingFunc; + out.afterGettingClientData = afterGettingClientData; + out.fOnCloseFunc = onCloseFunc; + out.onCloseClientData = onCloseClientData; + out.isCurrentlyActive = True; + out.isCurrentlyAwaitingData = True; + // out.frameSize and out.presentationTime will be set when a frame's read + + ++fNumPendingReads; +} + +Boolean MPEG1or2Demux::useSavedData(u_int8_t streamIdTag, + unsigned char* to, unsigned maxSize, + FramedSource::afterGettingFunc* afterGettingFunc, + void* afterGettingClientData) { + struct OutputDescriptor& out = fOutput[streamIdTag]; + if (out.savedDataHead == NULL) return False; // common case + + unsigned totNumBytesCopied = 0; + while (maxSize > 0 && out.savedDataHead != NULL) { + OutputDescriptor::SavedData& savedData = *(out.savedDataHead); + unsigned char* from = &savedData.data[savedData.numBytesUsed]; + unsigned numBytesToCopy = savedData.dataSize - savedData.numBytesUsed; + if (numBytesToCopy > maxSize) numBytesToCopy = maxSize; + memmove(to, from, numBytesToCopy); + to += numBytesToCopy; + maxSize -= numBytesToCopy; + out.savedDataTotalSize -= numBytesToCopy; + totNumBytesCopied += numBytesToCopy; + savedData.numBytesUsed += numBytesToCopy; + if (savedData.numBytesUsed == savedData.dataSize) { + out.savedDataHead = savedData.next; + if (out.savedDataHead == NULL) out.savedDataTail = NULL; + savedData.next = NULL; + delete &savedData; + } + } + + 
out.isCurrentlyActive = True; + if (afterGettingFunc != NULL) { + struct timeval presentationTime; + presentationTime.tv_sec = 0; presentationTime.tv_usec = 0; // should fix ##### + (*afterGettingFunc)(afterGettingClientData, totNumBytesCopied, + 0 /* numTruncatedBytes */, presentationTime, + 0 /* durationInMicroseconds ?????#####*/); + } + return True; +} + +void MPEG1or2Demux +::continueReadProcessing(void* clientData, + unsigned char* /*ptr*/, unsigned /*size*/, + struct timeval /*presentationTime*/) { + MPEG1or2Demux* demux = (MPEG1or2Demux*)clientData; + demux->continueReadProcessing(); +} + +void MPEG1or2Demux::continueReadProcessing() { + while (fNumPendingReads > 0) { + unsigned char acquiredStreamIdTag = fParser->parse(); + + if (acquiredStreamIdTag != 0) { + // We were able to acquire a frame from the input. + struct OutputDescriptor& newOut = fOutput[acquiredStreamIdTag]; + newOut.isCurrentlyAwaitingData = False; + // indicates that we can be read again + // (This needs to be set before the 'after getting' call below, + // in case it tries to read another frame) + + // Call our own 'after getting' function. Because we're not a 'leaf' + // source, we can call this directly, without risking infinite recursion. + if (newOut.fAfterGettingFunc != NULL) { + (*newOut.fAfterGettingFunc)(newOut.afterGettingClientData, + newOut.frameSize, 0 /* numTruncatedBytes */, + newOut.presentationTime, + 0 /* durationInMicroseconds ?????#####*/); + --fNumPendingReads; + } + } else { + // We were unable to parse a complete frame from the input, because: + // - we had to read more data from the source stream, or + // - we found a frame for a stream that was being read, but whose + // reader is not ready to get the frame right now, or + // - the source stream has ended. 
+ break; + } + } +} + +void MPEG1or2Demux::getNextFrame(u_int8_t streamIdTag, + unsigned char* to, unsigned maxSize, + FramedSource::afterGettingFunc* afterGettingFunc, + void* afterGettingClientData, + FramedSource::onCloseFunc* onCloseFunc, + void* onCloseClientData) { + // First, check whether we have saved data for this stream id: + if (useSavedData(streamIdTag, to, maxSize, + afterGettingFunc, afterGettingClientData)) { + return; + } + + // Then save the parameters of the specified stream id: + registerReadInterest(streamIdTag, to, maxSize, + afterGettingFunc, afterGettingClientData, + onCloseFunc, onCloseClientData); + + // Next, if we're the only currently pending read, continue looking for data: + if (fNumPendingReads == 1 || fHaveUndeliveredData) { + fHaveUndeliveredData = 0; + continueReadProcessing(); + } // otherwise the continued read processing has already been taken care of +} + +void MPEG1or2Demux::stopGettingFrames(u_int8_t streamIdTag) { + struct OutputDescriptor& out = fOutput[streamIdTag]; + + if (out.isCurrentlyAwaitingData && fNumPendingReads > 0) --fNumPendingReads; + + out.isCurrentlyActive = out.isCurrentlyAwaitingData = False; +} + +void MPEG1or2Demux::handleClosure(void* clientData) { + MPEG1or2Demux* demux = (MPEG1or2Demux*)clientData; + + demux->fNumPendingReads = 0; + + // Tell all pending readers that our source has closed. + // Note that we need to make a copy of our readers' close functions + // (etc.) before we start calling any of them, in case one of them + // ends up deleting this. 
+ struct { + FramedSource::onCloseFunc* fOnCloseFunc; + void* onCloseClientData; + } savedPending[256]; + unsigned i, numPending = 0; + for (i = 0; i < 256; ++i) { + struct OutputDescriptor& out = demux->fOutput[i]; + if (out.isCurrentlyAwaitingData) { + if (out.fOnCloseFunc != NULL) { + savedPending[numPending].fOnCloseFunc = out.fOnCloseFunc; + savedPending[numPending].onCloseClientData = out.onCloseClientData; + ++numPending; + } + } + delete out.savedDataHead; out.savedDataHead = out.savedDataTail = NULL; + out.savedDataTotalSize = 0; + out.isPotentiallyReadable = out.isCurrentlyActive = out.isCurrentlyAwaitingData + = False; + } + for (i = 0; i < numPending; ++i) { + (*savedPending[i].fOnCloseFunc)(savedPending[i].onCloseClientData); + } +} + + +////////// MPEGProgramStreamParser implementation ////////// + +#include + +MPEGProgramStreamParser::MPEGProgramStreamParser(MPEG1or2Demux* usingDemux, + FramedSource* inputSource) + : StreamParser(inputSource, MPEG1or2Demux::handleClosure, usingDemux, + &MPEG1or2Demux::continueReadProcessing, usingDemux), + fUsingDemux(usingDemux), fCurrentParseState(PARSING_PACK_HEADER) { +} + +MPEGProgramStreamParser::~MPEGProgramStreamParser() { +} + +void MPEGProgramStreamParser::setParseState(MPEGParseState parseState) { + fCurrentParseState = parseState; + saveParserState(); +} + +unsigned char MPEGProgramStreamParser::parse() { + unsigned char acquiredStreamTagId = 0; + + try { + do { + switch (fCurrentParseState) { + case PARSING_PACK_HEADER: { + parsePackHeader(); + break; + } + case PARSING_SYSTEM_HEADER: { + parseSystemHeader(); + break; + } + case PARSING_PES_PACKET: { + acquiredStreamTagId = parsePESPacket(); + break; + } + } + } while(acquiredStreamTagId == 0); + + return acquiredStreamTagId; + } catch (int /*e*/) { +#ifdef DEBUG + fprintf(stderr, "MPEGProgramStreamParser::parse() EXCEPTION (This is normal behavior - *not* an error)\n"); + fflush(stderr); +#endif + return 0; // the parsing got interrupted + } +} + 
+#define PACK_START_CODE 0x000001BA +#define SYSTEM_HEADER_START_CODE 0x000001BB +#define PACKET_START_CODE_PREFIX 0x00000100 + +static inline Boolean isPacketStartCode(unsigned code) { + return (code&0xFFFFFF00) == PACKET_START_CODE_PREFIX + && code > SYSTEM_HEADER_START_CODE; +} + +void MPEGProgramStreamParser::parsePackHeader() { +#ifdef DEBUG + fprintf(stderr, "parsing pack header\n"); fflush(stderr); +#endif + unsigned first4Bytes; + while (1) { + first4Bytes = test4Bytes(); + + // We're supposed to have a pack header here, but check also for + // a system header or a PES packet, just in case: + if (first4Bytes == PACK_START_CODE) { + skipBytes(4); + break; + } else if (first4Bytes == SYSTEM_HEADER_START_CODE) { +#ifdef DEBUG + fprintf(stderr, "found system header instead of pack header\n"); +#endif + setParseState(PARSING_SYSTEM_HEADER); + return; + } else if (isPacketStartCode(first4Bytes)) { +#ifdef DEBUG + fprintf(stderr, "found packet start code 0x%02x instead of pack header\n", first4Bytes); +#endif + setParseState(PARSING_PES_PACKET); + return; + } + + setParseState(PARSING_PACK_HEADER); // ensures we progress over bad data + if ((first4Bytes&0xFF) > 1) { // a system code definitely doesn't start here + skipBytes(4); + } else { + skipBytes(1); + } + } + + // The size of the pack header differs depending on whether it's + // MPEG-1 or MPEG-2. 
The next byte tells us this: + unsigned char nextByte = get1Byte(); + MPEG1or2Demux::SCR& scr = fUsingDemux->fLastSeenSCR; // alias + if ((nextByte&0xF0) == 0x20) { // MPEG-1 + fUsingDemux->fMPEGversion = 1; + scr.highBit = (nextByte&0x08)>>3; + scr.remainingBits = (nextByte&0x06)<<29; + unsigned next4Bytes = get4Bytes(); + scr.remainingBits |= (next4Bytes&0xFFFE0000)>>2; + scr.remainingBits |= (next4Bytes&0x0000FFFE)>>1; + scr.extension = 0; + scr.isValid = True; + skipBits(24); + +#if defined(DEBUG_TIMESTAMPS) || defined(DEBUG_SCR_TIMESTAMPS) + fprintf(stderr, "pack hdr system_clock_reference_base: 0x%x", + scr.highBit); + fprintf(stderr, "%08x\n", scr.remainingBits); +#endif + } else if ((nextByte&0xC0) == 0x40) { // MPEG-2 + fUsingDemux->fMPEGversion = 2; + scr.highBit = (nextByte&0x20)>>5; + scr.remainingBits = (nextByte&0x18)<<27; + scr.remainingBits |= (nextByte&0x03)<<28; + unsigned next4Bytes = get4Bytes(); + scr.remainingBits |= (next4Bytes&0xFFF80000)>>4; + scr.remainingBits |= (next4Bytes&0x0003FFF8)>>3; + scr.extension = (next4Bytes&0x00000003)<<7; + next4Bytes = get4Bytes(); + scr.extension |= (next4Bytes&0xFE000000)>>25; + scr.isValid = True; + skipBits(5); + +#if defined(DEBUG_TIMESTAMPS) || defined(DEBUG_SCR_TIMESTAMPS) + fprintf(stderr, "pack hdr system_clock_reference_base: 0x%x", + scr.highBit); + fprintf(stderr, "%08x\n", scr.remainingBits); + fprintf(stderr, "pack hdr system_clock_reference_extension: 0x%03x\n", + scr.extension); +#endif + unsigned char pack_stuffing_length = getBits(3); + skipBytes(pack_stuffing_length); + } else { // unknown + fUsingDemux->envir() << "StreamParser::parsePack() saw strange byte following pack_start_code\n"; + } + + // Check for a System Header next: + setParseState(PARSING_SYSTEM_HEADER); +} + +void MPEGProgramStreamParser::parseSystemHeader() { +#ifdef DEBUG + fprintf(stderr, "parsing system header\n"); fflush(stderr); +#endif + unsigned next4Bytes = test4Bytes(); + if (next4Bytes != 
SYSTEM_HEADER_START_CODE) { + // The system header was optional. Look for a PES Packet instead: + setParseState(PARSING_PES_PACKET); + return; + } + +#ifdef DEBUG + fprintf(stderr, "saw system_header_start_code\n"); fflush(stderr); +#endif + skipBytes(4); // we've already seen the system_header_start_code + + unsigned short remaining_header_length = get2Bytes(); + + // According to the MPEG-1 and MPEG-2 specs, "remaining_header_length" should be + // at least 6 bytes. Check this now: + if (remaining_header_length < 6) { + fUsingDemux->envir() << "StreamParser::parseSystemHeader(): saw strange header_length: " + << remaining_header_length << " < 6\n"; + } + skipBytes(remaining_header_length); + + // Check for a PES Packet next: + setParseState(PARSING_PES_PACKET); +} + +#define private_stream_1 0xBD +#define private_stream_2 0xBF + +// A test for stream ids that are exempt from normal PES packet header parsing +Boolean MPEGProgramStreamParser +::isSpecialStreamId(unsigned char stream_id) const { + if (stream_id == RAW_PES) return True; // hack + + if (fUsingDemux->fMPEGversion == 1) { + return stream_id == private_stream_2; + } else { // assume MPEG-2 + if (stream_id <= private_stream_2) { + return stream_id != private_stream_1; + } else if ((stream_id&0xF0) == 0xF0) { + unsigned char lower4Bits = stream_id&0x0F; + return lower4Bits <= 2 || lower4Bits == 0x8 || lower4Bits == 0xF; + } else { + return False; + } + } +} + +#define READER_NOT_READY 2 + +unsigned char MPEGProgramStreamParser::parsePESPacket() { +#ifdef DEBUG + fprintf(stderr, "parsing PES packet\n"); fflush(stderr); +#endif + unsigned next4Bytes = test4Bytes(); + if (!isPacketStartCode(next4Bytes)) { + // The PES Packet was optional. 
Look for a Pack Header instead: + setParseState(PARSING_PACK_HEADER); + return 0; + } + +#ifdef DEBUG + fprintf(stderr, "saw packet_start_code_prefix\n"); fflush(stderr); +#endif + skipBytes(3); // we've already seen the packet_start_code_prefix + + unsigned char stream_id = get1Byte(); +#if defined(DEBUG) || defined(DEBUG_TIMESTAMPS) + unsigned char streamNum = stream_id; + char const* streamTypeStr; + if ((stream_id&0xE0) == 0xC0) { + streamTypeStr = "audio"; + streamNum = stream_id&~0xE0; + } else if ((stream_id&0xF0) == 0xE0) { + streamTypeStr = "video"; + streamNum = stream_id&~0xF0; + } else if (stream_id == 0xbc) { + streamTypeStr = "reserved"; + } else if (stream_id == 0xbd) { + streamTypeStr = "private_1"; + } else if (stream_id == 0xbe) { + streamTypeStr = "padding"; + } else if (stream_id == 0xbf) { + streamTypeStr = "private_2"; + } else { + streamTypeStr = "unknown"; + } +#endif +#ifdef DEBUG + static unsigned frameCount = 1; + fprintf(stderr, "%d, saw %s stream: 0x%02x\n", frameCount, streamTypeStr, streamNum); fflush(stderr); +#endif + + unsigned short PES_packet_length = get2Bytes(); +#ifdef DEBUG + fprintf(stderr, "PES_packet_length: %d\n", PES_packet_length); fflush(stderr); +#endif + + // Parse over the rest of the header, until we get to the packet data itself. 
+ // This varies depending upon the MPEG version: + if (fUsingDemux->fOutput[RAW_PES].isPotentiallyReadable) { + // Hack: We've been asked to return raw PES packets, for every stream: + stream_id = RAW_PES; + } + unsigned savedParserOffset = curOffset(); +#ifdef DEBUG_TIMESTAMPS + unsigned char pts_highBit = 0; + unsigned pts_remainingBits = 0; + unsigned char dts_highBit = 0; + unsigned dts_remainingBits = 0; +#endif + if (fUsingDemux->fMPEGversion == 1) { + if (!isSpecialStreamId(stream_id)) { + unsigned char nextByte; + while ((nextByte = get1Byte()) == 0xFF) { // stuffing_byte + } + if ((nextByte&0xC0) == 0x40) { // '01' + skipBytes(1); + nextByte = get1Byte(); + } + if ((nextByte&0xF0) == 0x20) { // '0010' +#ifdef DEBUG_TIMESTAMPS + pts_highBit = (nextByte&0x08)>>3; + pts_remainingBits = (nextByte&0x06)<<29; + unsigned next4Bytes = get4Bytes(); + pts_remainingBits |= (next4Bytes&0xFFFE0000)>>2; + pts_remainingBits |= (next4Bytes&0x0000FFFE)>>1; +#else + skipBytes(4); +#endif + } else if ((nextByte&0xF0) == 0x30) { // '0011' +#ifdef DEBUG_TIMESTAMPS + pts_highBit = (nextByte&0x08)>>3; + pts_remainingBits = (nextByte&0x06)<<29; + unsigned next4Bytes = get4Bytes(); + pts_remainingBits |= (next4Bytes&0xFFFE0000)>>2; + pts_remainingBits |= (next4Bytes&0x0000FFFE)>>1; + + nextByte = get1Byte(); + dts_highBit = (nextByte&0x08)>>3; + dts_remainingBits = (nextByte&0x06)<<29; + next4Bytes = get4Bytes(); + dts_remainingBits |= (next4Bytes&0xFFFE0000)>>2; + dts_remainingBits |= (next4Bytes&0x0000FFFE)>>1; +#else + skipBytes(9); +#endif + } + } + } else { // assume MPEG-2 + if (!isSpecialStreamId(stream_id)) { + // Fields in the next 3 bytes determine the size of the rest: + unsigned next3Bytes = getBits(24); +#ifdef DEBUG_TIMESTAMPS + unsigned char PTS_DTS_flags = (next3Bytes&0x00C000)>>14; +#endif +#ifdef undef + unsigned char ESCR_flag = (next3Bytes&0x002000)>>13; + unsigned char ES_rate_flag = (next3Bytes&0x001000)>>12; + unsigned char DSM_trick_mode_flag = 
(next3Bytes&0x000800)>>11; +#endif + unsigned char PES_header_data_length = (next3Bytes&0x0000FF); +#ifdef DEBUG + fprintf(stderr, "PES_header_data_length: 0x%02x\n", PES_header_data_length); fflush(stderr); +#endif +#ifdef DEBUG_TIMESTAMPS + if (PTS_DTS_flags == 0x2 && PES_header_data_length >= 5) { + unsigned char nextByte = get1Byte(); + pts_highBit = (nextByte&0x08)>>3; + pts_remainingBits = (nextByte&0x06)<<29; + unsigned next4Bytes = get4Bytes(); + pts_remainingBits |= (next4Bytes&0xFFFE0000)>>2; + pts_remainingBits |= (next4Bytes&0x0000FFFE)>>1; + + skipBytes(PES_header_data_length-5); + } else if (PTS_DTS_flags == 0x3 && PES_header_data_length >= 10) { + unsigned char nextByte = get1Byte(); + pts_highBit = (nextByte&0x08)>>3; + pts_remainingBits = (nextByte&0x06)<<29; + unsigned next4Bytes = get4Bytes(); + pts_remainingBits |= (next4Bytes&0xFFFE0000)>>2; + pts_remainingBits |= (next4Bytes&0x0000FFFE)>>1; + + nextByte = get1Byte(); + dts_highBit = (nextByte&0x08)>>3; + dts_remainingBits = (nextByte&0x06)<<29; + next4Bytes = get4Bytes(); + dts_remainingBits |= (next4Bytes&0xFFFE0000)>>2; + dts_remainingBits |= (next4Bytes&0x0000FFFE)>>1; + + skipBytes(PES_header_data_length-10); + } +#else + skipBytes(PES_header_data_length); +#endif + } + } +#ifdef DEBUG_TIMESTAMPS + fprintf(stderr, "%s stream, ", streamTypeStr); + fprintf(stderr, "packet presentation_time_stamp: 0x%x", pts_highBit); + fprintf(stderr, "%08x\n", pts_remainingBits); + fprintf(stderr, "\t\tpacket decoding_time_stamp: 0x%x", dts_highBit); + fprintf(stderr, "%08x\n", dts_remainingBits); +#endif + + // The rest of the packet will be the "PES_packet_data_byte"s + // Make sure that "PES_packet_length" was consistent with where we are now: + unsigned char acquiredStreamIdTag = 0; + unsigned currentParserOffset = curOffset(); + unsigned bytesSkipped = currentParserOffset - savedParserOffset; + if (stream_id == RAW_PES) { + restoreSavedParserState(); // so we deliver from the beginning of the PES 
packet + PES_packet_length += 6; // to include the whole of the PES packet + bytesSkipped = 0; + } + if (PES_packet_length < bytesSkipped) { + fUsingDemux->envir() << "StreamParser::parsePESPacket(): saw inconsistent PES_packet_length " + << PES_packet_length << " < " + << bytesSkipped << "\n"; + } else { + PES_packet_length -= bytesSkipped; +#ifdef DEBUG + unsigned next4Bytes = test4Bytes(); +#endif + + // Check whether our using source is interested in this stream type. + // If so, deliver the frame to him: + MPEG1or2Demux::OutputDescriptor_t& out = fUsingDemux->fOutput[stream_id]; + if (out.isCurrentlyAwaitingData) { + unsigned numBytesToCopy; + if (PES_packet_length > out.maxSize) { + fUsingDemux->envir() << "MPEGProgramStreamParser::parsePESPacket() error: PES_packet_length (" + << PES_packet_length + << ") exceeds max frame size asked for (" + << out.maxSize << ")\n"; + numBytesToCopy = out.maxSize; + } else { + numBytesToCopy = PES_packet_length; + } + + getBytes(out.to, numBytesToCopy); + out.frameSize = numBytesToCopy; +#ifdef DEBUG + fprintf(stderr, "%d, %d bytes of PES_packet_data (out.maxSize: %d); first 4 bytes: 0x%08x\n", frameCount, numBytesToCopy, out.maxSize, next4Bytes); fflush(stderr); +#endif + // set out.presentationTime later ##### + acquiredStreamIdTag = stream_id; + PES_packet_length -= numBytesToCopy; + } else if (out.isCurrentlyActive) { + // Someone has been reading this stream, but isn't right now. + // We can't deliver this frame until he asks for it, so punt for now. + // The next time he asks for a frame, he'll get it. 
+#ifdef DEBUG + fprintf(stderr, "%d, currently undeliverable PES data; first 4 bytes: 0x%08x - currently undeliverable!\n", frameCount, next4Bytes); fflush(stderr); +#endif + restoreSavedParserState(); // so we read from the beginning next time + fUsingDemux->fHaveUndeliveredData = True; + throw READER_NOT_READY; + } else if (out.isPotentiallyReadable && + out.savedDataTotalSize + PES_packet_length < 1000000 /*limit*/) { + // Someone is interested in this stream, but hasn't begun reading it yet. + // Save this data, so that the reader will get it when he later asks for it. + unsigned char* buf = new unsigned char[PES_packet_length]; + getBytes(buf, PES_packet_length); + MPEG1or2Demux::OutputDescriptor::SavedData* savedData + = new MPEG1or2Demux::OutputDescriptor::SavedData(buf, PES_packet_length); + if (out.savedDataHead == NULL) { + out.savedDataHead = out.savedDataTail = savedData; + } else { + out.savedDataTail->next = savedData; + out.savedDataTail = savedData; + } + out.savedDataTotalSize += PES_packet_length; + PES_packet_length = 0; + } + skipBytes(PES_packet_length); + } + + // Check for another PES Packet next: + setParseState(PARSING_PES_PACKET); +#ifdef DEBUG + ++frameCount; +#endif + return acquiredStreamIdTag; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG1or2DemuxedElementaryStream.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG1or2DemuxedElementaryStream.cpp new file mode 100644 index 0000000..b0d9020 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG1or2DemuxedElementaryStream.cpp @@ -0,0 +1,88 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A MPEG 1 or 2 Elementary Stream, demultiplexed from a Program Stream +// Implementation + +#include "MPEG1or2DemuxedElementaryStream.hh" + +////////// MPEG1or2DemuxedElementaryStream ////////// + +MPEG1or2DemuxedElementaryStream:: +MPEG1or2DemuxedElementaryStream(UsageEnvironment& env, u_int8_t streamIdTag, + MPEG1or2Demux& sourceDemux) + : FramedSource(env), + fOurStreamIdTag(streamIdTag), fOurSourceDemux(sourceDemux), fMPEGversion(0) { + // Set our MIME type string for known media types: + if ((streamIdTag&0xE0) == 0xC0) { + fMIMEtype = "audio/MPEG"; + } else if ((streamIdTag&0xF0) == 0xE0) { + fMIMEtype = "video/MPEG"; + } else { + fMIMEtype = MediaSource::MIMEtype(); + } +} + +MPEG1or2DemuxedElementaryStream::~MPEG1or2DemuxedElementaryStream() { + fOurSourceDemux.noteElementaryStreamDeletion(this); +} + +void MPEG1or2DemuxedElementaryStream::doGetNextFrame() { + fOurSourceDemux.getNextFrame(fOurStreamIdTag, fTo, fMaxSize, + afterGettingFrame, this, + handleClosure, this); +} + +void MPEG1or2DemuxedElementaryStream::doStopGettingFrames() { + fOurSourceDemux.stopGettingFrames(fOurStreamIdTag); +} + +char const* MPEG1or2DemuxedElementaryStream::MIMEtype() const { + return fMIMEtype; +} + +unsigned MPEG1or2DemuxedElementaryStream::maxFrameSize() const { + return 6+65535; + // because the MPEG spec allows for PES packets as large as + // (6 + 65535) bytes (header + data) +} + +void MPEG1or2DemuxedElementaryStream +::afterGettingFrame(void* clientData, + unsigned frameSize, unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + 
MPEG1or2DemuxedElementaryStream* stream + = (MPEG1or2DemuxedElementaryStream*)clientData; + stream->afterGettingFrame1(frameSize, numTruncatedBytes, + presentationTime, durationInMicroseconds); +} + +void MPEG1or2DemuxedElementaryStream +::afterGettingFrame1(unsigned frameSize, unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + fFrameSize = frameSize; + fNumTruncatedBytes = numTruncatedBytes; + fPresentationTime = presentationTime; + fDurationInMicroseconds = durationInMicroseconds; + + fLastSeenSCR = fOurSourceDemux.lastSeenSCR(); + fMPEGversion = fOurSourceDemux.mpegVersion(); + + FramedSource::afterGetting(this); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG1or2DemuxedServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG1or2DemuxedServerMediaSubsession.cpp new file mode 100644 index 0000000..d6b32d8 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG1or2DemuxedServerMediaSubsession.cpp @@ -0,0 +1,134 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a MPEG-1 or 2 demuxer. 
+// Implementation + +#include "MPEG1or2DemuxedServerMediaSubsession.hh" +#include "MPEG1or2AudioStreamFramer.hh" +#include "MPEG1or2AudioRTPSink.hh" +#include "MPEG1or2VideoStreamFramer.hh" +#include "MPEG1or2VideoRTPSink.hh" +#include "AC3AudioStreamFramer.hh" +#include "AC3AudioRTPSink.hh" +#include "ByteStreamFileSource.hh" + +MPEG1or2DemuxedServerMediaSubsession* MPEG1or2DemuxedServerMediaSubsession +::createNew(MPEG1or2FileServerDemux& demux, u_int8_t streamIdTag, + Boolean reuseFirstSource, Boolean iFramesOnly, double vshPeriod) { + return new MPEG1or2DemuxedServerMediaSubsession(demux, streamIdTag, + reuseFirstSource, + iFramesOnly, vshPeriod); +} + +MPEG1or2DemuxedServerMediaSubsession +::MPEG1or2DemuxedServerMediaSubsession(MPEG1or2FileServerDemux& demux, + u_int8_t streamIdTag, Boolean reuseFirstSource, + Boolean iFramesOnly, double vshPeriod) + : OnDemandServerMediaSubsession(demux.envir(), reuseFirstSource), + fOurDemux(demux), fStreamIdTag(streamIdTag), + fIFramesOnly(iFramesOnly), fVSHPeriod(vshPeriod) { +} + +MPEG1or2DemuxedServerMediaSubsession::~MPEG1or2DemuxedServerMediaSubsession() { +} + +FramedSource* MPEG1or2DemuxedServerMediaSubsession +::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate) { + FramedSource* es = NULL; + do { + es = fOurDemux.newElementaryStream(clientSessionId, fStreamIdTag); + if (es == NULL) break; + + if ((fStreamIdTag&0xF0) == 0xC0 /*MPEG audio*/) { + estBitrate = 128; // kbps, estimate + return MPEG1or2AudioStreamFramer::createNew(envir(), es); + } else if ((fStreamIdTag&0xF0) == 0xE0 /*video*/) { + estBitrate = 500; // kbps, estimate + return MPEG1or2VideoStreamFramer::createNew(envir(), es, + fIFramesOnly, fVSHPeriod); + } else if (fStreamIdTag == 0xBD /*AC-3 audio*/) { + estBitrate = 192; // kbps, estimate + return AC3AudioStreamFramer::createNew(envir(), es, 0x80); + } else { // unknown stream type + break; + } + } while (0); + + // An error occurred: + Medium::close(es); + return NULL; +} + 
+RTPSink* MPEG1or2DemuxedServerMediaSubsession +::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource) { + if ((fStreamIdTag&0xF0) == 0xC0 /*MPEG audio*/) { + return MPEG1or2AudioRTPSink::createNew(envir(), rtpGroupsock); + } else if ((fStreamIdTag&0xF0) == 0xE0 /*video*/) { + return MPEG1or2VideoRTPSink::createNew(envir(), rtpGroupsock); + } else if (fStreamIdTag == 0xBD /*AC-3 audio*/) { + // Get the sampling frequency from the audio source; use it for the RTP frequency: + AC3AudioStreamFramer* audioSource + = (AC3AudioStreamFramer*)inputSource; + return AC3AudioRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + audioSource->samplingRate()); + } else { + return NULL; + } +} + +void MPEG1or2DemuxedServerMediaSubsession +::seekStreamSource(FramedSource* inputSource, double& seekNPT, double /*streamDuration*/, u_int64_t& /*numBytes*/) { + float const dur = duration(); + unsigned const size = fOurDemux.fileSize(); + unsigned absBytePosition = dur == 0.0 ? 
0 : (unsigned)((seekNPT/dur)*size); + + // "inputSource" is a 'framer' + // Flush its data, to account for the seek that we're about to do: + if ((fStreamIdTag&0xF0) == 0xC0 /*MPEG audio*/) { + MPEG1or2AudioStreamFramer* framer = (MPEG1or2AudioStreamFramer*)inputSource; + framer->flushInput(); + } else if ((fStreamIdTag&0xF0) == 0xE0 /*video*/) { + MPEG1or2VideoStreamFramer* framer = (MPEG1or2VideoStreamFramer*)inputSource; + framer->flushInput(); + } + + // "inputSource" is a filter; its input source is the original elem stream source: + MPEG1or2DemuxedElementaryStream* elemStreamSource + = (MPEG1or2DemuxedElementaryStream*)(((FramedFilter*)inputSource)->inputSource()); + + // Next, get the original source demux: + MPEG1or2Demux& sourceDemux = elemStreamSource->sourceDemux(); + + // and flush its input buffers: + sourceDemux.flushInput(); + + // Then, get the original input file stream from the source demux: + ByteStreamFileSource* inputFileSource + = (ByteStreamFileSource*)(sourceDemux.inputSource()); + // Note: We can make that cast, because we know that the demux was originally + // created from a "ByteStreamFileSource". + + // Do the appropriate seek within the input file stream: + inputFileSource->seekToByteAbsolute(absBytePosition); +} + +float MPEG1or2DemuxedServerMediaSubsession::duration() const { + return fOurDemux.fileDuration(); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG1or2FileServerDemux.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG1or2FileServerDemux.cpp new file mode 100644 index 0000000..036b128 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG1or2FileServerDemux.cpp @@ -0,0 +1,264 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A server demultiplexer for a MPEG 1 or 2 Program Stream +// Implementation + +#include "MPEG1or2FileServerDemux.hh" +#include "MPEG1or2DemuxedServerMediaSubsession.hh" +#include "ByteStreamFileSource.hh" + +MPEG1or2FileServerDemux* +MPEG1or2FileServerDemux::createNew(UsageEnvironment& env, char const* fileName, + Boolean reuseFirstSource) { + return new MPEG1or2FileServerDemux(env, fileName, reuseFirstSource); +} + +static float MPEG1or2ProgramStreamFileDuration(UsageEnvironment& env, + char const* fileName, + unsigned& fileSize); // forward +MPEG1or2FileServerDemux +::MPEG1or2FileServerDemux(UsageEnvironment& env, char const* fileName, + Boolean reuseFirstSource) + : Medium(env), + fReuseFirstSource(reuseFirstSource), + fSession0Demux(NULL), fLastCreatedDemux(NULL), fLastClientSessionId(~0) { + fFileName = strDup(fileName); + fFileDuration = MPEG1or2ProgramStreamFileDuration(env, fileName, fFileSize); +} + +MPEG1or2FileServerDemux::~MPEG1or2FileServerDemux() { + Medium::close(fSession0Demux); + delete[] (char*)fFileName; +} + +ServerMediaSubsession* +MPEG1or2FileServerDemux::newAudioServerMediaSubsession() { + return MPEG1or2DemuxedServerMediaSubsession::createNew(*this, 0xC0, fReuseFirstSource); +} + +ServerMediaSubsession* +MPEG1or2FileServerDemux::newVideoServerMediaSubsession(Boolean iFramesOnly, + double vshPeriod) { + return MPEG1or2DemuxedServerMediaSubsession::createNew(*this, 0xE0, fReuseFirstSource, + 
iFramesOnly, vshPeriod); +} + +ServerMediaSubsession* +MPEG1or2FileServerDemux::newAC3AudioServerMediaSubsession() { + return MPEG1or2DemuxedServerMediaSubsession::createNew(*this, 0xBD, fReuseFirstSource); + // because, in a VOB file, the AC3 audio has stream id 0xBD +} + +MPEG1or2DemuxedElementaryStream* +MPEG1or2FileServerDemux::newElementaryStream(unsigned clientSessionId, + u_int8_t streamIdTag) { + MPEG1or2Demux* demuxToUse; + if (clientSessionId == 0) { + // 'Session 0' is treated especially, because its audio & video streams + // are created and destroyed one-at-a-time, rather than both streams being + // created, and then (later) both streams being destroyed (as is the case + // for other ('real') session ids). Because of this, a separate demux is + // used for session 0, and its deletion is managed by us, rather than + // happening automatically. + if (fSession0Demux == NULL) { + // Open our input file as a 'byte-stream file source': + ByteStreamFileSource* fileSource + = ByteStreamFileSource::createNew(envir(), fFileName); + if (fileSource == NULL) return NULL; + fSession0Demux = MPEG1or2Demux::createNew(envir(), fileSource, False/*note!*/); + } + demuxToUse = fSession0Demux; + } else { + // First, check whether this is a new client session. If so, create a new + // demux for it: + if (clientSessionId != fLastClientSessionId) { + // Open our input file as a 'byte-stream file source': + ByteStreamFileSource* fileSource + = ByteStreamFileSource::createNew(envir(), fFileName); + if (fileSource == NULL) return NULL; + + fLastCreatedDemux = MPEG1or2Demux::createNew(envir(), fileSource, True); + // Note: We tell the demux to delete itself when its last + // elementary stream is deleted. + fLastClientSessionId = clientSessionId; + // Note: This code relies upon the fact that the creation of streams for + // different client sessions do not overlap - so one "MPEG1or2Demux" is used + // at a time. 
+ } + demuxToUse = fLastCreatedDemux; + } + + if (demuxToUse == NULL) return NULL; // shouldn't happen + + return demuxToUse->newElementaryStream(streamIdTag); +} + + +static Boolean getMPEG1or2TimeCode(FramedSource* dataSource, + MPEG1or2Demux& parentDemux, + Boolean returnFirstSeenCode, + float& timeCode); // forward + +static float MPEG1or2ProgramStreamFileDuration(UsageEnvironment& env, + char const* fileName, + unsigned& fileSize) { + FramedSource* dataSource = NULL; + float duration = 0.0; // until we learn otherwise + fileSize = 0; // ditto + + do { + // Open the input file as a 'byte-stream file source': + ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(env, fileName); + if (fileSource == NULL) break; + dataSource = fileSource; + + fileSize = (unsigned)(fileSource->fileSize()); + if (fileSize == 0) break; + + // Create a MPEG demultiplexor that reads from that source. + MPEG1or2Demux* baseDemux = MPEG1or2Demux::createNew(env, dataSource, True); + if (baseDemux == NULL) break; + + // Create, from this, a source that returns raw PES packets: + dataSource = baseDemux->newRawPESStream(); + + // Read the first time code from the file: + float firstTimeCode; + if (!getMPEG1or2TimeCode(dataSource, *baseDemux, True, firstTimeCode)) break; + + // Then, read the last time code from the file. + // (Before doing this, flush the demux's input buffers, + // and seek towards the end of the file, for efficiency.) + baseDemux->flushInput(); + unsigned const startByteFromEnd = 100000; + unsigned newFilePosition + = fileSize < startByteFromEnd ? 
0 : fileSize - startByteFromEnd; + if (newFilePosition > 0) fileSource->seekToByteAbsolute(newFilePosition); + + float lastTimeCode; + if (!getMPEG1or2TimeCode(dataSource, *baseDemux, False, lastTimeCode)) break; + + // Take the difference between these time codes as being the file duration: + float timeCodeDiff = lastTimeCode - firstTimeCode; + if (timeCodeDiff < 0) break; + duration = timeCodeDiff; + } while (0); + + Medium::close(dataSource); + return duration; +} + +#define MFSD_DUMMY_SINK_BUFFER_SIZE (6+65535) /* large enough for a PES packet */ + +class MFSD_DummySink: public MediaSink { +public: + MFSD_DummySink(MPEG1or2Demux& demux, Boolean returnFirstSeenCode); + virtual ~MFSD_DummySink(); + + char watchVariable; + +private: + // redefined virtual function: + virtual Boolean continuePlaying(); + +private: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(); + +private: + MPEG1or2Demux& fOurDemux; + Boolean fReturnFirstSeenCode; + unsigned char fBuf[MFSD_DUMMY_SINK_BUFFER_SIZE]; +}; + +static void afterPlayingMFSD_DummySink(MFSD_DummySink* sink); // forward +static float computeSCRTimeCode(MPEG1or2Demux::SCR const& scr); // forward + +static Boolean getMPEG1or2TimeCode(FramedSource* dataSource, + MPEG1or2Demux& parentDemux, + Boolean returnFirstSeenCode, + float& timeCode) { + // Start reading through "dataSource", until we see a SCR time code: + parentDemux.lastSeenSCR().isValid = False; + UsageEnvironment& env = dataSource->envir(); // alias + MFSD_DummySink sink(parentDemux, returnFirstSeenCode); + sink.startPlaying(*dataSource, + (MediaSink::afterPlayingFunc*)afterPlayingMFSD_DummySink, &sink); + env.taskScheduler().doEventLoop(&sink.watchVariable); + + timeCode = computeSCRTimeCode(parentDemux.lastSeenSCR()); + return parentDemux.lastSeenSCR().isValid; +} + + +////////// MFSD_DummySink implementation 
////////// + +MFSD_DummySink::MFSD_DummySink(MPEG1or2Demux& demux, Boolean returnFirstSeenCode) + : MediaSink(demux.envir()), + watchVariable(0), fOurDemux(demux), fReturnFirstSeenCode(returnFirstSeenCode) { +} + +MFSD_DummySink::~MFSD_DummySink() { +} + +Boolean MFSD_DummySink::continuePlaying() { + if (fSource == NULL) return False; // sanity check + + fSource->getNextFrame(fBuf, sizeof fBuf, + afterGettingFrame, this, + onSourceClosure, this); + return True; +} + +void MFSD_DummySink::afterGettingFrame(void* clientData, unsigned /*frameSize*/, + unsigned /*numTruncatedBytes*/, + struct timeval /*presentationTime*/, + unsigned /*durationInMicroseconds*/) { + MFSD_DummySink* sink = (MFSD_DummySink*)clientData; + sink->afterGettingFrame1(); +} + +void MFSD_DummySink::afterGettingFrame1() { + if (fReturnFirstSeenCode && fOurDemux.lastSeenSCR().isValid) { + // We were asked to return the first SCR that we saw, and we've seen one, + // so we're done. (Handle this as if the input source had closed.) 
+ onSourceClosure(); + return; + } + + continuePlaying(); +} + +static void afterPlayingMFSD_DummySink(MFSD_DummySink* sink) { + // Return from the "doEventLoop()" call: + sink->watchVariable = ~0; +} + +static float computeSCRTimeCode(MPEG1or2Demux::SCR const& scr) { + double result = scr.remainingBits/90000.0 + scr.extension/300.0; + if (scr.highBit) { + // Add (2^32)/90000 == (2^28)/5625 + double const highBitValue = (256*1024*1024)/5625.0; + result += highBitValue; + } + + return (float)result; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoFileServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoFileServerMediaSubsession.cpp new file mode 100644 index 0000000..ba86f13 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoFileServerMediaSubsession.cpp @@ -0,0 +1,69 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a MPEG-1 or 2 Elementary Stream video file. 
+// Implementation + +#include "MPEG1or2VideoFileServerMediaSubsession.hh" +#include "MPEG1or2VideoRTPSink.hh" +#include "ByteStreamFileSource.hh" +#include "MPEG1or2VideoStreamFramer.hh" + +MPEG1or2VideoFileServerMediaSubsession* +MPEG1or2VideoFileServerMediaSubsession::createNew(UsageEnvironment& env, + char const* fileName, + Boolean reuseFirstSource, + Boolean iFramesOnly, + double vshPeriod) { + return new MPEG1or2VideoFileServerMediaSubsession(env, fileName, reuseFirstSource, + iFramesOnly, vshPeriod); +} + +MPEG1or2VideoFileServerMediaSubsession +::MPEG1or2VideoFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, + Boolean reuseFirstSource, + Boolean iFramesOnly, + double vshPeriod) + : FileServerMediaSubsession(env, fileName, reuseFirstSource), + fIFramesOnly(iFramesOnly), fVSHPeriod(vshPeriod) { +} + +MPEG1or2VideoFileServerMediaSubsession +::~MPEG1or2VideoFileServerMediaSubsession() { +} + +FramedSource* MPEG1or2VideoFileServerMediaSubsession +::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) { + estBitrate = 500; // kbps, estimate + + ByteStreamFileSource* fileSource + = ByteStreamFileSource::createNew(envir(), fFileName); + if (fileSource == NULL) return NULL; + fFileSize = fileSource->fileSize(); + + return MPEG1or2VideoStreamFramer + ::createNew(envir(), fileSource, fIFramesOnly, fVSHPeriod); +} + +RTPSink* MPEG1or2VideoFileServerMediaSubsession +::createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char /*rtpPayloadTypeIfDynamic*/, + FramedSource* /*inputSource*/) { + return MPEG1or2VideoRTPSink::createNew(envir(), rtpGroupsock); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoRTPSink.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoRTPSink.cpp new file mode 100644 index 0000000..8b597ac --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoRTPSink.cpp @@ -0,0 +1,175 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser 
General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for MPEG video (RFC 2250) +// Implementation + +#include "MPEG1or2VideoRTPSink.hh" +#include "MPEG1or2VideoStreamFramer.hh" + +MPEG1or2VideoRTPSink::MPEG1or2VideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs) + : VideoRTPSink(env, RTPgs, 32, 90000, "MPV") { + fPictureState.temporal_reference = 0; + fPictureState.picture_coding_type = fPictureState.vector_code_bits = 0; +} + +MPEG1or2VideoRTPSink::~MPEG1or2VideoRTPSink() { +} + +MPEG1or2VideoRTPSink* +MPEG1or2VideoRTPSink::createNew(UsageEnvironment& env, Groupsock* RTPgs) { + return new MPEG1or2VideoRTPSink(env, RTPgs); +} + +Boolean MPEG1or2VideoRTPSink::sourceIsCompatibleWithUs(MediaSource& source) { + // Our source must be an appropriate framer: + return source.isMPEG1or2VideoStreamFramer(); +} + +Boolean MPEG1or2VideoRTPSink::allowFragmentationAfterStart() const { + return True; +} + +Boolean MPEG1or2VideoRTPSink +::frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const { + // A 'frame' (which in this context can mean a header or a slice as well as a + // complete picture) can appear at other than the first position in a packet + // in all situations, EXCEPT when it follows the end of (i.e., the last slice + // of) a picture. 
I.e., the headers at the beginning of a picture must + // appear at the start of a RTP packet. + if (!fPreviousFrameWasSlice) return True; + + // A slice is already packed into this packet. We allow this new 'frame' + // to be packed after it, provided that it is also a slice: + return numBytesInFrame >= 4 + && frameStart[0] == 0 && frameStart[1] == 0 && frameStart[2] == 1 + && frameStart[3] >= 1 && frameStart[3] <= 0xAF; +} + +#define VIDEO_SEQUENCE_HEADER_START_CODE 0x000001B3 +#define PICTURE_START_CODE 0x00000100 + +void MPEG1or2VideoRTPSink +::doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes) { + Boolean thisFrameIsASlice = False; // until we learn otherwise + if (isFirstFrameInPacket()) { + fSequenceHeaderPresent = fPacketBeginsSlice = fPacketEndsSlice = False; + } + + if (fragmentationOffset == 0) { + // Begin by inspecting the 4-byte code at the start of the frame: + if (numBytesInFrame < 4) return; // shouldn't happen + unsigned startCode = (frameStart[0]<<24) | (frameStart[1]<<16) + | (frameStart[2]<<8) | frameStart[3]; + + if (startCode == VIDEO_SEQUENCE_HEADER_START_CODE) { + // This is a video sequence header + fSequenceHeaderPresent = True; + } else if (startCode == PICTURE_START_CODE) { + // This is a picture header + + // Record the parameters of this picture: + if (numBytesInFrame < 8) return; // shouldn't happen + unsigned next4Bytes = (frameStart[4]<<24) | (frameStart[5]<<16) + | (frameStart[6]<<8) | frameStart[7]; + unsigned char byte8 = numBytesInFrame == 8 ? 
0 : frameStart[8]; + + fPictureState.temporal_reference = (next4Bytes&0xFFC00000)>>(32-10); + fPictureState.picture_coding_type = (next4Bytes&0x00380000)>>(32-(10+3)); + + unsigned char FBV, BFC, FFV, FFC; + FBV = BFC = FFV = FFC = 0; + switch (fPictureState.picture_coding_type) { + case 3: + FBV = (byte8&0x40)>>6; + BFC = (byte8&0x38)>>3; + // fall through to: + case 2: + FFV = (next4Bytes&0x00000004)>>2; + FFC = ((next4Bytes&0x00000003)<<1) | ((byte8&0x80)>>7); + } + + fPictureState.vector_code_bits = (FBV<<7) | (BFC<<4) | (FFV<<3) | FFC; + } else if ((startCode&0xFFFFFF00) == 0x00000100) { + unsigned char lastCodeByte = startCode&0xFF; + + if (lastCodeByte <= 0xAF) { + // This is (the start of) a slice + thisFrameIsASlice = True; + } else { + // This is probably a GOP header; we don't do anything with this + } + } else { + // The first 4 bytes aren't a code that we recognize. + envir() << "Warning: MPEG1or2VideoRTPSink::doSpecialFrameHandling saw strange first 4 bytes " + << (void*)startCode << ", but we're not a fragment\n"; + } + } else { + // We're a fragment (other than the first) of a slice. + thisFrameIsASlice = True; + } + + if (thisFrameIsASlice) { + // This packet begins a slice iff there's no fragmentation offset: + fPacketBeginsSlice = (fragmentationOffset == 0); + + // This packet also ends a slice iff there are no fragments remaining: + fPacketEndsSlice = (numRemainingBytes == 0); + } + + // Set the video-specific header based on the parameters that we've seen. + // Note that this may get done more than once, if several frames appear + // in the packet. That's OK, because this situation happens infrequently, + // and we want the video-specific header to reflect the most up-to-date + // information (in particular, from a Picture Header) anyway. 
+ unsigned videoSpecificHeader = + // T == 0 + (fPictureState.temporal_reference<<16) | + // AN == N == 0 + (fSequenceHeaderPresent<<13) | + (fPacketBeginsSlice<<12) | + (fPacketEndsSlice<<11) | + (fPictureState.picture_coding_type<<8) | + fPictureState.vector_code_bits; + setSpecialHeaderWord(videoSpecificHeader); + + // Also set the RTP timestamp. (As above, we do this for each frame + // in the packet.) + setTimestamp(framePresentationTime); + + // Set the RTP 'M' (marker) bit iff this frame ends (i.e., is the last + // slice of) a picture (and there are no fragments remaining). + // This relies on the source being a "MPEG1or2VideoStreamFramer". + MPEG1or2VideoStreamFramer* framerSource = (MPEG1or2VideoStreamFramer*)fSource; + if (framerSource != NULL && framerSource->pictureEndMarker() + && numRemainingBytes == 0) { + setMarkerBit(); + framerSource->pictureEndMarker() = False; + } + + fPreviousFrameWasSlice = thisFrameIsASlice; +} + +unsigned MPEG1or2VideoRTPSink::specialHeaderSize() const { + // There's a 4 byte special video header: + return 4; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoRTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoRTPSource.cpp new file mode 100644 index 0000000..8b7eed0 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoRTPSource.cpp @@ -0,0 +1,82 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// MPEG-1 or MPEG-2 Video RTP Sources +// Implementation + +#include "MPEG1or2VideoRTPSource.hh" + +MPEG1or2VideoRTPSource* +MPEG1or2VideoRTPSource::createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) { + return new MPEG1or2VideoRTPSource(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency); +} + +MPEG1or2VideoRTPSource::MPEG1or2VideoRTPSource(UsageEnvironment& env, + Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) + : MultiFramedRTPSource(env, RTPgs, + rtpPayloadFormat, rtpTimestampFrequency){ +} + +MPEG1or2VideoRTPSource::~MPEG1or2VideoRTPSource() { +} + +Boolean MPEG1or2VideoRTPSource +::processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize) { + // There's a 4-byte video-specific header + if (packet->dataSize() < 4) return False; + + u_int32_t header = ntohl(*(u_int32_t*)(packet->data())); + + u_int32_t sBit = header&0x00002000; // sequence-header-present + u_int32_t bBit = header&0x00001000; // beginning-of-slice + u_int32_t eBit = header&0x00000800; // end-of-slice + + fCurrentPacketBeginsFrame = (sBit|bBit) != 0; + fCurrentPacketCompletesFrame = ((sBit != 0) && (bBit == 0)) || (eBit != 0); + + resultSpecialHeaderSize = 4; + return True; +} + +Boolean MPEG1or2VideoRTPSource +::packetIsUsableInJitterCalculation(unsigned char* packet, + unsigned packetSize) { + // There's a 4-byte video-specific header + if (packetSize < 4) return False; + + // Extract the "Picture-Type" field from this, to determine whether + // this packet can be used in jitter calculations: + unsigned header = ntohl(*(u_int32_t*)packet); + + unsigned 
short pictureType = (header>>8)&0x7; + if (pictureType == 1) { // an I frame + return True; + } else { // a P, B, D, or other unknown frame type + return False; + } +} + +char const* MPEG1or2VideoRTPSource::MIMEtype() const { + return "video/MPEG"; +} + diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoStreamDiscreteFramer.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoStreamDiscreteFramer.cpp new file mode 100644 index 0000000..ded2d83 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoStreamDiscreteFramer.cpp @@ -0,0 +1,203 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A simplified version of "MPEG1or2VideoStreamFramer" that takes only +// complete, discrete frames (rather than an arbitrary byte stream) as input. +// This avoids the parsing and data copying overhead of the full +// "MPEG1or2VideoStreamFramer". +// Implementation + +#include "MPEG1or2VideoStreamDiscreteFramer.hh" + +MPEG1or2VideoStreamDiscreteFramer* +MPEG1or2VideoStreamDiscreteFramer::createNew(UsageEnvironment& env, + FramedSource* inputSource, + Boolean iFramesOnly, + double vshPeriod, + Boolean leavePresentationTimesUnmodified) { + // Need to add source type checking here??? 
##### + return new MPEG1or2VideoStreamDiscreteFramer(env, inputSource, + iFramesOnly, vshPeriod, leavePresentationTimesUnmodified); +} + +MPEG1or2VideoStreamDiscreteFramer +::MPEG1or2VideoStreamDiscreteFramer(UsageEnvironment& env, + FramedSource* inputSource, + Boolean iFramesOnly, double vshPeriod, Boolean leavePresentationTimesUnmodified) + : MPEG1or2VideoStreamFramer(env, inputSource, iFramesOnly, vshPeriod, + False/*don't create a parser*/), + fLeavePresentationTimesUnmodified(leavePresentationTimesUnmodified), + fLastNonBFrameTemporal_reference(0), + fSavedVSHSize(0), fSavedVSHTimestamp(0.0), + fIFramesOnly(iFramesOnly), fVSHPeriod(vshPeriod) { + fLastNonBFramePresentationTime.tv_sec = 0; + fLastNonBFramePresentationTime.tv_usec = 0; +} + +MPEG1or2VideoStreamDiscreteFramer::~MPEG1or2VideoStreamDiscreteFramer() { +} + +void MPEG1or2VideoStreamDiscreteFramer::doGetNextFrame() { + // Arrange to read data (which should be a complete MPEG-1 or 2 video frame) + // from our data source, directly into the client's input buffer. + // After reading this, we'll do some parsing on the frame. 
+ fInputSource->getNextFrame(fTo, fMaxSize, + afterGettingFrame, this, + FramedSource::handleClosure, this); +} + +void MPEG1or2VideoStreamDiscreteFramer +::afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + MPEG1or2VideoStreamDiscreteFramer* source + = (MPEG1or2VideoStreamDiscreteFramer*)clientData; + source->afterGettingFrame1(frameSize, numTruncatedBytes, + presentationTime, durationInMicroseconds); +} + +static double const frameRateFromCode[] = { + 0.0, // forbidden + 24000/1001.0, // approx 23.976 + 24.0, + 25.0, + 30000/1001.0, // approx 29.97 + 30.0, + 50.0, + 60000/1001.0, // approx 59.94 + 60.0, + 0.0, // reserved + 0.0, // reserved + 0.0, // reserved + 0.0, // reserved + 0.0, // reserved + 0.0, // reserved + 0.0 // reserved +}; + +#define MILLION 1000000 + +void MPEG1or2VideoStreamDiscreteFramer +::afterGettingFrame1(unsigned frameSize, unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + // Check that the first 4 bytes are a system code: + if (frameSize >= 4 && fTo[0] == 0 && fTo[1] == 0 && fTo[2] == 1) { + fPictureEndMarker = True; // Assume that we have a complete 'picture' here + + u_int8_t nextCode = fTo[3]; + if (nextCode == 0xB3) { // VIDEO_SEQUENCE_HEADER_START_CODE + // Note the following 'frame rate' code: + if (frameSize >= 8) { + u_int8_t frame_rate_code = fTo[7]&0x0F; + fFrameRate = frameRateFromCode[frame_rate_code]; + } + + // Also, save away this Video Sequence Header, in case we need it later: + // First, figure out how big it is: + unsigned vshSize; + for (vshSize = 4; vshSize < frameSize-3; ++vshSize) { + if (fTo[vshSize] == 0 && fTo[vshSize+1] == 0 && fTo[vshSize+2] == 1 && + (fTo[vshSize+3] == 0xB8 || fTo[vshSize+3] == 0x00)) break; + } + if (vshSize == frameSize-3) vshSize = frameSize; // There was nothing else following it + if (vshSize <= sizeof fSavedVSHBuffer) { + 
memmove(fSavedVSHBuffer, fTo, vshSize); + fSavedVSHSize = vshSize; + fSavedVSHTimestamp + = presentationTime.tv_sec + presentationTime.tv_usec/(double)MILLION; + } + } else if (nextCode == 0xB8) { // GROUP_START_CODE + // If necessary, insert a saved Video Sequence Header in front of this: + double pts = presentationTime.tv_sec + presentationTime.tv_usec/(double)MILLION; + if (pts > fSavedVSHTimestamp + fVSHPeriod && + fSavedVSHSize + frameSize <= fMaxSize) { + memmove(&fTo[fSavedVSHSize], &fTo[0], frameSize); // make room for the header + memmove(&fTo[0], fSavedVSHBuffer, fSavedVSHSize); // insert it + frameSize += fSavedVSHSize; + fSavedVSHTimestamp = pts; + } + } + + unsigned i = 3; + if (nextCode == 0xB3 /*VIDEO_SEQUENCE_HEADER_START_CODE*/ || + nextCode == 0xB8 /*GROUP_START_CODE*/) { + // Skip to the following PICTURE_START_CODE (if any): + for (i += 4; i < frameSize; ++i) { + if (fTo[i] == 0x00 /*PICTURE_START_CODE*/ + && fTo[i-1] == 1 && fTo[i-2] == 0 && fTo[i-3] == 0) { + nextCode = fTo[i]; + break; + } + } + } + + if (nextCode == 0x00 /*PICTURE_START_CODE*/ && i+2 < frameSize) { + // Get the 'temporal_reference' and 'picture_coding_type' from the + // following 2 bytes: + ++i; + unsigned short temporal_reference = (fTo[i]<<2)|(fTo[i+1]>>6); + unsigned char picture_coding_type = (fTo[i+1]&0x38)>>3; + + // If this is not an "I" frame, but we were asked for "I" frames only, then try again: + if (fIFramesOnly && picture_coding_type != 1) { + doGetNextFrame(); + return; + } + + // If this is a "B" frame, then we have to tweak "presentationTime": + if (!fLeavePresentationTimesUnmodified && picture_coding_type == 3/*B*/ + && (fLastNonBFramePresentationTime.tv_usec > 0 || + fLastNonBFramePresentationTime.tv_sec > 0)) { + int trIncrement + = fLastNonBFrameTemporal_reference - temporal_reference; + if (trIncrement < 0) trIncrement += 1024; // field is 10 bits in size + + unsigned usIncrement = fFrameRate == 0.0 ? 
0 + : (unsigned)((trIncrement*MILLION)/fFrameRate); + unsigned secondsToSubtract = usIncrement/MILLION; + unsigned uSecondsToSubtract = usIncrement%MILLION; + + presentationTime = fLastNonBFramePresentationTime; + if ((unsigned)presentationTime.tv_usec < uSecondsToSubtract) { + presentationTime.tv_usec += MILLION; + if (presentationTime.tv_sec > 0) --presentationTime.tv_sec; + } + presentationTime.tv_usec -= uSecondsToSubtract; + if ((unsigned)presentationTime.tv_sec > secondsToSubtract) { + presentationTime.tv_sec -= secondsToSubtract; + } else { + presentationTime.tv_sec = presentationTime.tv_usec = 0; + } + } else { + fLastNonBFramePresentationTime = presentationTime; + fLastNonBFrameTemporal_reference = temporal_reference; + } + } + } + + // ##### Later: + // - do "iFramesOnly" if requested + + // Complete delivery to the client: + fFrameSize = frameSize; + fNumTruncatedBytes = numTruncatedBytes; + fPresentationTime = presentationTime; + fDurationInMicroseconds = durationInMicroseconds; + afterGetting(this); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoStreamFramer.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoStreamFramer.cpp new file mode 100644 index 0000000..fa8f7ce --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG1or2VideoStreamFramer.cpp @@ -0,0 +1,478 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that breaks up an MPEG 1 or 2 video elementary stream into +// frames for: Video_Sequence_Header, GOP_Header, Picture_Header +// Implementation + +#include "MPEG1or2VideoStreamFramer.hh" +#include "MPEGVideoStreamParser.hh" +#include + +////////// MPEG1or2VideoStreamParser definition ////////// + +// An enum representing the current state of the parser: +enum MPEGParseState { + PARSING_VIDEO_SEQUENCE_HEADER, + PARSING_VIDEO_SEQUENCE_HEADER_SEEN_CODE, + PARSING_GOP_HEADER, + PARSING_GOP_HEADER_SEEN_CODE, + PARSING_PICTURE_HEADER, + PARSING_SLICE +}; + +#define VSH_MAX_SIZE 1000 + +class MPEG1or2VideoStreamParser: public MPEGVideoStreamParser { +public: + MPEG1or2VideoStreamParser(MPEG1or2VideoStreamFramer* usingSource, + FramedSource* inputSource, + Boolean iFramesOnly, double vshPeriod); + virtual ~MPEG1or2VideoStreamParser(); + +private: // redefined virtual functions: + virtual void flushInput(); + virtual unsigned parse(); + +private: + void reset(); + + MPEG1or2VideoStreamFramer* usingSource() { + return (MPEG1or2VideoStreamFramer*)fUsingSource; + } + void setParseState(MPEGParseState parseState); + + unsigned parseVideoSequenceHeader(Boolean haveSeenStartCode); + unsigned parseGOPHeader(Boolean haveSeenStartCode); + unsigned parsePictureHeader(); + unsigned parseSlice(); + +private: + MPEGParseState fCurrentParseState; + unsigned fPicturesSinceLastGOP; + // can be used to compute timestamp for a video_sequence_header + unsigned short fCurPicTemporalReference; + // used to compute slice timestamp + unsigned char fCurrentSliceNumber; // set when parsing a slice + + // A saved copy of the most recently seen 'video_sequence_header', + // in 
case we need to insert it into the stream periodically: + unsigned char fSavedVSHBuffer[VSH_MAX_SIZE]; + unsigned fSavedVSHSize; + double fSavedVSHTimestamp; + double fVSHPeriod; + Boolean fIFramesOnly, fSkippingCurrentPicture; + + void saveCurrentVSH(); + Boolean needToUseSavedVSH(); + unsigned useSavedVSH(); // returns the size of the saved VSH +}; + + +////////// MPEG1or2VideoStreamFramer implementation ////////// + +MPEG1or2VideoStreamFramer::MPEG1or2VideoStreamFramer(UsageEnvironment& env, + FramedSource* inputSource, + Boolean iFramesOnly, + double vshPeriod, + Boolean createParser) + : MPEGVideoStreamFramer(env, inputSource) { + fParser = createParser + ? new MPEG1or2VideoStreamParser(this, inputSource, + iFramesOnly, vshPeriod) + : NULL; +} + +MPEG1or2VideoStreamFramer::~MPEG1or2VideoStreamFramer() { +} + +MPEG1or2VideoStreamFramer* +MPEG1or2VideoStreamFramer::createNew(UsageEnvironment& env, + FramedSource* inputSource, + Boolean iFramesOnly, + double vshPeriod) { + // Need to add source type checking here??? 
##### + return new MPEG1or2VideoStreamFramer(env, inputSource, iFramesOnly, vshPeriod); +} + +double MPEG1or2VideoStreamFramer::getCurrentPTS() const { + return fPresentationTime.tv_sec + fPresentationTime.tv_usec/1000000.0; +} + +Boolean MPEG1or2VideoStreamFramer::isMPEG1or2VideoStreamFramer() const { + return True; +} + +////////// MPEG1or2VideoStreamParser implementation ////////// + +MPEG1or2VideoStreamParser +::MPEG1or2VideoStreamParser(MPEG1or2VideoStreamFramer* usingSource, + FramedSource* inputSource, + Boolean iFramesOnly, double vshPeriod) + : MPEGVideoStreamParser(usingSource, inputSource), + fCurrentParseState(PARSING_VIDEO_SEQUENCE_HEADER), + fVSHPeriod(vshPeriod), fIFramesOnly(iFramesOnly) { + reset(); +} + +MPEG1or2VideoStreamParser::~MPEG1or2VideoStreamParser() { +} + +void MPEG1or2VideoStreamParser::setParseState(MPEGParseState parseState) { + fCurrentParseState = parseState; + MPEGVideoStreamParser::setParseState(); +} + +void MPEG1or2VideoStreamParser::reset() { + fPicturesSinceLastGOP = 0; + fCurPicTemporalReference = 0; + fCurrentSliceNumber = 0; + fSavedVSHSize = 0; + fSkippingCurrentPicture = False; +} + +void MPEG1or2VideoStreamParser::flushInput() { + reset(); + StreamParser::flushInput(); + if (fCurrentParseState != PARSING_VIDEO_SEQUENCE_HEADER) { + setParseState(PARSING_GOP_HEADER); // start from the next GOP + } +} + +unsigned MPEG1or2VideoStreamParser::parse() { + try { + switch (fCurrentParseState) { + case PARSING_VIDEO_SEQUENCE_HEADER: { + return parseVideoSequenceHeader(False); + } + case PARSING_VIDEO_SEQUENCE_HEADER_SEEN_CODE: { + return parseVideoSequenceHeader(True); + } + case PARSING_GOP_HEADER: { + return parseGOPHeader(False); + } + case PARSING_GOP_HEADER_SEEN_CODE: { + return parseGOPHeader(True); + } + case PARSING_PICTURE_HEADER: { + return parsePictureHeader(); + } + case PARSING_SLICE: { + return parseSlice(); + } + default: { + return 0; // shouldn't happen + } + } + } catch (int /*e*/) { +#ifdef DEBUG + 
fprintf(stderr, "MPEG1or2VideoStreamParser::parse() EXCEPTION (This is normal behavior - *not* an error)\n"); +#endif + return 0; // the parsing got interrupted + } +} + +void MPEG1or2VideoStreamParser::saveCurrentVSH() { + unsigned frameSize = curFrameSize(); + if (frameSize > sizeof fSavedVSHBuffer) return; // too big to save + + memmove(fSavedVSHBuffer, fStartOfFrame, frameSize); + fSavedVSHSize = frameSize; + fSavedVSHTimestamp = usingSource()->getCurrentPTS(); +} + +Boolean MPEG1or2VideoStreamParser::needToUseSavedVSH() { + return usingSource()->getCurrentPTS() > fSavedVSHTimestamp+fVSHPeriod + && fSavedVSHSize > 0; +} + +unsigned MPEG1or2VideoStreamParser::useSavedVSH() { + unsigned bytesToUse = fSavedVSHSize; + unsigned maxBytesToUse = fLimit - fStartOfFrame; + if (bytesToUse > maxBytesToUse) bytesToUse = maxBytesToUse; + + memmove(fStartOfFrame, fSavedVSHBuffer, bytesToUse); + + // Also reset the saved timestamp: + fSavedVSHTimestamp = usingSource()->getCurrentPTS(); + +#ifdef DEBUG + fprintf(stderr, "used saved video_sequence_header (%d bytes)\n", bytesToUse); +#endif + return bytesToUse; +} + +#define VIDEO_SEQUENCE_HEADER_START_CODE 0x000001B3 +#define GROUP_START_CODE 0x000001B8 +#define PICTURE_START_CODE 0x00000100 +#define SEQUENCE_END_CODE 0x000001B7 + +static double const frameRateFromCode[] = { + 0.0, // forbidden + 24000/1001.0, // approx 23.976 + 24.0, + 25.0, + 30000/1001.0, // approx 29.97 + 30.0, + 50.0, + 60000/1001.0, // approx 59.94 + 60.0, + 0.0, // reserved + 0.0, // reserved + 0.0, // reserved + 0.0, // reserved + 0.0, // reserved + 0.0, // reserved + 0.0 // reserved +}; + +unsigned MPEG1or2VideoStreamParser +::parseVideoSequenceHeader(Boolean haveSeenStartCode) { +#ifdef DEBUG + fprintf(stderr, "parsing video sequence header\n"); +#endif + unsigned first4Bytes; + if (!haveSeenStartCode) { + while ((first4Bytes = test4Bytes()) != VIDEO_SEQUENCE_HEADER_START_CODE) { +#ifdef DEBUG + fprintf(stderr, "ignoring non video sequence header: 
0x%08x\n", first4Bytes); +#endif + get1Byte(); setParseState(PARSING_VIDEO_SEQUENCE_HEADER); + // ensures we progress over bad data + } + first4Bytes = get4Bytes(); + } else { + // We've already seen the start code + first4Bytes = VIDEO_SEQUENCE_HEADER_START_CODE; + } + save4Bytes(first4Bytes); + + // Next, extract the size and rate parameters from the next 8 bytes + unsigned paramWord1 = get4Bytes(); + save4Bytes(paramWord1); + unsigned next4Bytes = get4Bytes(); +#ifdef DEBUG + unsigned short horizontal_size_value = (paramWord1&0xFFF00000)>>(32-12); + unsigned short vertical_size_value = (paramWord1&0x000FFF00)>>8; + unsigned char aspect_ratio_information = (paramWord1&0x000000F0)>>4; +#endif + unsigned char frame_rate_code = (paramWord1&0x0000000F); + usingSource()->fFrameRate = frameRateFromCode[frame_rate_code]; +#ifdef DEBUG + unsigned bit_rate_value = (next4Bytes&0xFFFFC000)>>(32-18); + unsigned vbv_buffer_size_value = (next4Bytes&0x00001FF8)>>3; + fprintf(stderr, "horizontal_size_value: %d, vertical_size_value: %d, aspect_ratio_information: %d, frame_rate_code: %d (=>%f fps), bit_rate_value: %d (=>%d bps), vbv_buffer_size_value: %d\n", horizontal_size_value, vertical_size_value, aspect_ratio_information, frame_rate_code, usingSource()->fFrameRate, bit_rate_value, bit_rate_value*400, vbv_buffer_size_value); +#endif + + // Now, copy all bytes that we see, up until we reach a GROUP_START_CODE + // or a PICTURE_START_CODE: + do { + saveToNextCode(next4Bytes); + } while (next4Bytes != GROUP_START_CODE && next4Bytes != PICTURE_START_CODE); + + setParseState((next4Bytes == GROUP_START_CODE) + ? 
PARSING_GOP_HEADER_SEEN_CODE : PARSING_PICTURE_HEADER); + + // Compute this frame's timestamp by noting how many pictures we've seen + // since the last GOP header: + usingSource()->computePresentationTime(fPicturesSinceLastGOP); + + // Save this video_sequence_header, in case we need to insert a copy + // into the stream later: + saveCurrentVSH(); + + return curFrameSize(); +} + +unsigned MPEG1or2VideoStreamParser::parseGOPHeader(Boolean haveSeenStartCode) { + // First check whether we should insert a previously-saved + // 'video_sequence_header' here: + if (needToUseSavedVSH()) return useSavedVSH(); + +#ifdef DEBUG + fprintf(stderr, "parsing GOP header\n"); +#endif + unsigned first4Bytes; + if (!haveSeenStartCode) { + while ((first4Bytes = test4Bytes()) != GROUP_START_CODE) { +#ifdef DEBUG + fprintf(stderr, "ignoring non GOP start code: 0x%08x\n", first4Bytes); +#endif + get1Byte(); setParseState(PARSING_GOP_HEADER); + // ensures we progress over bad data + } + first4Bytes = get4Bytes(); + } else { + // We've already seen the GROUP_START_CODE + first4Bytes = GROUP_START_CODE; + } + save4Bytes(first4Bytes); + + // Next, extract the (25-bit) time code from the next 4 bytes: + unsigned next4Bytes = get4Bytes(); + unsigned time_code = (next4Bytes&0xFFFFFF80)>>(32-25); +#if defined(DEBUG) || defined(DEBUG_TIMESTAMPS) + Boolean drop_frame_flag = (time_code&0x01000000) != 0; +#endif + unsigned time_code_hours = (time_code&0x00F80000)>>19; + unsigned time_code_minutes = (time_code&0x0007E000)>>13; + unsigned time_code_seconds = (time_code&0x00000FC0)>>6; + unsigned time_code_pictures = (time_code&0x0000003F); +#if defined(DEBUG) || defined(DEBUG_TIMESTAMPS) + fprintf(stderr, "time_code: 0x%07x, drop_frame %d, hours %d, minutes %d, seconds %d, pictures %d\n", time_code, drop_frame_flag, time_code_hours, time_code_minutes, time_code_seconds, time_code_pictures); +#endif +#ifdef DEBUG + Boolean closed_gop = (next4Bytes&0x00000040) != 0; + Boolean broken_link = 
(next4Bytes&0x00000020) != 0; + fprintf(stderr, "closed_gop: %d, broken_link: %d\n", closed_gop, broken_link); +#endif + + // Now, copy all bytes that we see, up until we reach a PICTURE_START_CODE: + do { + saveToNextCode(next4Bytes); + } while (next4Bytes != PICTURE_START_CODE); + + // Record the time code: + usingSource()->setTimeCode(time_code_hours, time_code_minutes, + time_code_seconds, time_code_pictures, + fPicturesSinceLastGOP); + + fPicturesSinceLastGOP = 0; + + // Compute this frame's timestamp: + usingSource()->computePresentationTime(0); + + setParseState(PARSING_PICTURE_HEADER); + + return curFrameSize(); +} + +inline Boolean isSliceStartCode(unsigned fourBytes) { + if ((fourBytes&0xFFFFFF00) != 0x00000100) return False; + + unsigned char lastByte = fourBytes&0xFF; + return lastByte <= 0xAF && lastByte >= 1; +} + +unsigned MPEG1or2VideoStreamParser::parsePictureHeader() { +#ifdef DEBUG + fprintf(stderr, "parsing picture header\n"); +#endif + // Note that we've already read the PICTURE_START_CODE + // Next, extract the temporal reference from the next 4 bytes: + unsigned next4Bytes = get4Bytes(); + unsigned short temporal_reference = (next4Bytes&0xFFC00000)>>(32-10); + unsigned char picture_coding_type = (next4Bytes&0x00380000)>>19; +#ifdef DEBUG + unsigned short vbv_delay = (next4Bytes&0x0007FFF8)>>3; + fprintf(stderr, "temporal_reference: %d, picture_coding_type: %d, vbv_delay: %d\n", temporal_reference, picture_coding_type, vbv_delay); +#endif + + fSkippingCurrentPicture = fIFramesOnly && picture_coding_type != 1; + if (fSkippingCurrentPicture) { + // Skip all bytes that we see, up until we reach a slice_start_code: + do { + skipToNextCode(next4Bytes); + } while (!isSliceStartCode(next4Bytes)); + } else { + // Save the PICTURE_START_CODE that we've already read: + save4Bytes(PICTURE_START_CODE); + + // Copy all bytes that we see, up until we reach a slice_start_code: + do { + saveToNextCode(next4Bytes); + } while (!isSliceStartCode(next4Bytes)); + 
} + + setParseState(PARSING_SLICE); + + fCurrentSliceNumber = next4Bytes&0xFF; + + // Record the temporal reference: + fCurPicTemporalReference = temporal_reference; + + // Compute this frame's timestamp: + usingSource()->computePresentationTime(fCurPicTemporalReference); + + if (fSkippingCurrentPicture) { + return parse(); // try again, until we get a non-skipped frame + } else { + return curFrameSize(); + } +} + +unsigned MPEG1or2VideoStreamParser::parseSlice() { + // Note that we've already read the slice_start_code: + unsigned next4Bytes = PICTURE_START_CODE|fCurrentSliceNumber; +#ifdef DEBUG_SLICE + fprintf(stderr, "parsing slice: 0x%08x\n", next4Bytes); +#endif + + if (fSkippingCurrentPicture) { + // Skip all bytes that we see, up until we reach a code of some sort: + skipToNextCode(next4Bytes); + } else { + // Copy all bytes that we see, up until we reach a code of some sort: + saveToNextCode(next4Bytes); + } + + // The next thing to parse depends on the code that we just saw: + if (isSliceStartCode(next4Bytes)) { // common case + setParseState(PARSING_SLICE); + fCurrentSliceNumber = next4Bytes&0xFF; + } else { + // Because we don't see any more slices, we are assumed to have ended + // the current picture: + ++fPicturesSinceLastGOP; + ++usingSource()->fPictureCount; + usingSource()->fPictureEndMarker = True; // HACK ##### + + switch (next4Bytes) { + case SEQUENCE_END_CODE: { + setParseState(PARSING_VIDEO_SEQUENCE_HEADER); + break; + } + case VIDEO_SEQUENCE_HEADER_START_CODE: { + setParseState(PARSING_VIDEO_SEQUENCE_HEADER_SEEN_CODE); + break; + } + case GROUP_START_CODE: { + setParseState(PARSING_GOP_HEADER_SEEN_CODE); + break; + } + case PICTURE_START_CODE: { + setParseState(PARSING_PICTURE_HEADER); + break; + } + default: { + usingSource()->envir() << "MPEG1or2VideoStreamParser::parseSlice(): Saw unexpected code " + << (void*)next4Bytes << "\n"; + setParseState(PARSING_SLICE); // the safest way to recover... 
+ break; + } + } + } + + // Compute this frame's timestamp: + usingSource()->computePresentationTime(fCurPicTemporalReference); + + if (fSkippingCurrentPicture) { + return parse(); // try again, until we get a non-skipped frame + } else { + return curFrameSize(); + } +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG2IndexFromTransportStream.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG2IndexFromTransportStream.cpp new file mode 100644 index 0000000..920fb5d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG2IndexFromTransportStream.cpp @@ -0,0 +1,683 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A filter that produces a sequence of I-frame indices from a MPEG-2 Transport Stream +// Implementation + +#include "MPEG2IndexFromTransportStream.hh" + +////////// IndexRecord definition ////////// + +enum RecordType { + RECORD_UNPARSED = 0, + RECORD_VSH = 1, // a MPEG Video Sequence Header + RECORD_GOP = 2, + RECORD_PIC_NON_IFRAME = 3, // includes slices + RECORD_PIC_IFRAME = 4, // includes slices + RECORD_NAL_H264_SPS = 5, // H.264 + RECORD_NAL_H264_PPS = 6, // H.264 + RECORD_NAL_H264_SEI = 7, // H.264 + RECORD_NAL_H264_NON_IFRAME = 8, // H.264 + RECORD_NAL_H264_IFRAME = 9, // H.264 + RECORD_NAL_H264_OTHER = 10, // H.264 + RECORD_NAL_H265_VPS = 11, // H.265 + RECORD_NAL_H265_SPS = 12, // H.265 + RECORD_NAL_H265_PPS = 13, // H.265 + RECORD_NAL_H265_NON_IFRAME = 14, // H.265 + RECORD_NAL_H265_IFRAME = 15, // H.265 + RECORD_NAL_H265_OTHER = 16, // H.265 + RECORD_JUNK +}; + +class IndexRecord { +public: + IndexRecord(u_int8_t startOffset, u_int8_t size, + unsigned long transportPacketNumber, float pcr); + virtual ~IndexRecord(); + + RecordType& recordType() { return fRecordType; } + void setFirstFlag() { fRecordType = (RecordType)(((u_int8_t)fRecordType) | 0x80); } + u_int8_t startOffset() const { return fStartOffset; } + u_int8_t& size() { return fSize; } + float pcr() const { return fPCR; } + unsigned long transportPacketNumber() const { return fTransportPacketNumber; } + + IndexRecord* next() const { return fNext; } + void addAfter(IndexRecord* prev); + void unlink(); + +private: + // Index records are maintained in a doubly-linked list: + IndexRecord* fNext; + IndexRecord* fPrev; + + RecordType fRecordType; + u_int8_t fStartOffset; // within the Transport Stream packet + u_int8_t fSize; // in bytes, following "fStartOffset". 
+ // Note: fStartOffset + fSize <= TRANSPORT_PACKET_SIZE + float fPCR; + unsigned long fTransportPacketNumber; +}; + +#ifdef DEBUG +static char const* recordTypeStr[] = { + "UNPARSED", + "VSH", + "GOP", + "PIC(non-I-frame)", + "PIC(I-frame)", + "SPS (H.264)", + "PPS (H.264)", + "SEI (H.264)", + "H.264 non-I-frame", + "H.264 I-frame", + "other NAL unit (H.264)", + "VPS (H.265)", + "SPS (H.265)", + "PPS (H.265)", + "H.265 non-I-frame", + "H.265 I-frame", + "other NAL unit (H.265)", + "JUNK" +}; + +UsageEnvironment& operator<<(UsageEnvironment& env, IndexRecord& r) { + return env << "[" << ((r.recordType()&0x80) != 0 ? "1" : "") + << recordTypeStr[r.recordType()&0x7F] << ":" + << (unsigned)r.transportPacketNumber() << ":" << r.startOffset() + << "(" << r.size() << ")@" << r.pcr() << "]"; +} +#endif + + +////////// MPEG2IFrameIndexFromTransportStream implementation ////////// + +MPEG2IFrameIndexFromTransportStream* +MPEG2IFrameIndexFromTransportStream::createNew(UsageEnvironment& env, + FramedSource* inputSource) { + return new MPEG2IFrameIndexFromTransportStream(env, inputSource); +} + +// The largest expected frame size (in bytes): +#define MAX_FRAME_SIZE 400000 + +// Make our parse buffer twice as large as this, to ensure that at least one +// complete frame will fit inside it: +#define PARSE_BUFFER_SIZE (2*MAX_FRAME_SIZE) + +// The PID used for the PAT (as defined in the MPEG Transport Stream standard): +#define PAT_PID 0 + +MPEG2IFrameIndexFromTransportStream +::MPEG2IFrameIndexFromTransportStream(UsageEnvironment& env, + FramedSource* inputSource) + : FramedFilter(env, inputSource), + fIsH264(False), fIsH265(False), + fInputTransportPacketCounter((unsigned)-1), fClosureNumber(0), fLastContinuityCounter(~0), + fFirstPCR(0.0), fLastPCR(0.0), fHaveSeenFirstPCR(False), + fPMT_PID(0x10), fVideo_PID(0xE0), // default values + fParseBufferSize(PARSE_BUFFER_SIZE), + fParseBufferFrameStart(0), fParseBufferParseEnd(4), fParseBufferDataEnd(0), + fHeadIndexRecord(NULL), 
fTailIndexRecord(NULL) { + fParseBuffer = new unsigned char[fParseBufferSize]; +} + +MPEG2IFrameIndexFromTransportStream::~MPEG2IFrameIndexFromTransportStream() { + delete fHeadIndexRecord; + delete[] fParseBuffer; +} + +void MPEG2IFrameIndexFromTransportStream::doGetNextFrame() { + // Begin by trying to deliver an index record (for an already-parsed frame) + // to the client: + if (deliverIndexRecord()) return; + + // No more index records are left to deliver, so try to parse a new frame: + if (parseFrame()) { // success - try again + doGetNextFrame(); + return; + } + + // We need to read some more Transport Stream packets. Check whether we have room: + if (fParseBufferSize - fParseBufferDataEnd < TRANSPORT_PACKET_SIZE) { + // There's no room left. Compact the buffer, and check again: + compactParseBuffer(); + if (fParseBufferSize - fParseBufferDataEnd < TRANSPORT_PACKET_SIZE) { + envir() << "ERROR: parse buffer full; increase MAX_FRAME_SIZE\n"; + // Treat this as if the input source ended: + handleInputClosure1(); + return; + } + } + + // Arrange to read a new Transport Stream packet: + fInputSource->getNextFrame(fInputBuffer, sizeof fInputBuffer, + afterGettingFrame, this, + handleInputClosure, this); +} + +void MPEG2IFrameIndexFromTransportStream +::afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + MPEG2IFrameIndexFromTransportStream* source + = (MPEG2IFrameIndexFromTransportStream*)clientData; + source->afterGettingFrame1(frameSize, numTruncatedBytes, + presentationTime, durationInMicroseconds); +} + +#define TRANSPORT_SYNC_BYTE 0x47 + +void MPEG2IFrameIndexFromTransportStream +::afterGettingFrame1(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + if (frameSize < TRANSPORT_PACKET_SIZE || fInputBuffer[0] != TRANSPORT_SYNC_BYTE) { + if (fInputBuffer[0] != TRANSPORT_SYNC_BYTE) { + 
envir() << "Bad TS sync byte: 0x" << fInputBuffer[0] << "\n"; + } + // Handle this as if the source ended: + handleInputClosure1(); + return; + } + + ++fInputTransportPacketCounter; + + // Figure out how much of this Transport Packet contains PES data: + u_int8_t adaptation_field_control = (fInputBuffer[3]&0x30)>>4; + u_int8_t totalHeaderSize + = adaptation_field_control <= 1 ? 4 : 5 + fInputBuffer[4]; + if (adaptation_field_control == 2 && totalHeaderSize != TRANSPORT_PACKET_SIZE || + adaptation_field_control == 3 && totalHeaderSize >= TRANSPORT_PACKET_SIZE) { + envir() << "Bad \"adaptation_field_length\": " << fInputBuffer[4] << "\n"; + doGetNextFrame(); + return; + } + + // Check for a PCR: + if (totalHeaderSize > 5 && (fInputBuffer[5]&0x10) != 0) { + // There's a PCR: + u_int32_t pcrBaseHigh + = (fInputBuffer[6]<<24)|(fInputBuffer[7]<<16) + |(fInputBuffer[8]<<8)|fInputBuffer[9]; + float pcr = pcrBaseHigh/45000.0f; + if ((fInputBuffer[10]&0x80) != 0) pcr += 1/90000.0f; // add in low-bit (if set) + unsigned short pcrExt = ((fInputBuffer[10]&0x01)<<8) | fInputBuffer[11]; + pcr += pcrExt/27000000.0f; + + if (!fHaveSeenFirstPCR) { + fFirstPCR = pcr; + fHaveSeenFirstPCR = True; + } else if (pcr < fLastPCR) { + // The PCR timestamp has gone backwards. Display a warning about this + // (because it indicates buggy Transport Stream data), and compensate for it. 
+ envir() << "\nWarning: At about " << fLastPCR-fFirstPCR + << " seconds into the file, the PCR timestamp decreased - from " + << fLastPCR << " to " << pcr << "\n"; + fFirstPCR -= (fLastPCR - pcr); + } + fLastPCR = pcr; + } + + // Get the PID from the packet, and check for special tables: the PAT and PMT: + u_int16_t PID = ((fInputBuffer[1]&0x1F)<<8) | fInputBuffer[2]; + if (PID == PAT_PID) { + analyzePAT(&fInputBuffer[totalHeaderSize], TRANSPORT_PACKET_SIZE-totalHeaderSize); + } else if (PID == fPMT_PID) { + analyzePMT(&fInputBuffer[totalHeaderSize], TRANSPORT_PACKET_SIZE-totalHeaderSize); + } + + // Ignore transport packets for non-video programs, + // or packets with no data, or packets that duplicate the previous packet: + u_int8_t continuity_counter = fInputBuffer[3]&0x0F; + if ((PID != fVideo_PID) || + !(adaptation_field_control == 1 || adaptation_field_control == 3) || + continuity_counter == fLastContinuityCounter) { + doGetNextFrame(); + return; + } + fLastContinuityCounter = continuity_counter; + + // Also, if this is the start of a PES packet, then skip over the PES header: + Boolean payload_unit_start_indicator = (fInputBuffer[1]&0x40) != 0; + if (payload_unit_start_indicator && totalHeaderSize < TRANSPORT_PACKET_SIZE - 8 + && fInputBuffer[totalHeaderSize] == 0x00 && fInputBuffer[totalHeaderSize+1] == 0x00 + && fInputBuffer[totalHeaderSize+2] == 0x01) { + u_int8_t PES_header_data_length = fInputBuffer[totalHeaderSize+8]; + totalHeaderSize += 9 + PES_header_data_length; + if (totalHeaderSize >= TRANSPORT_PACKET_SIZE) { + envir() << "Unexpectedly large PES header size: " << PES_header_data_length << "\n"; + // Handle this as if the source ended: + handleInputClosure1(); + return; + } + } + + // The remaining data is Video Elementary Stream data. 
Add it to our parse buffer: + unsigned vesSize = TRANSPORT_PACKET_SIZE - totalHeaderSize; + memmove(&fParseBuffer[fParseBufferDataEnd], &fInputBuffer[totalHeaderSize], vesSize); + fParseBufferDataEnd += vesSize; + + // And add a new index record noting where it came from: + addToTail(new IndexRecord(totalHeaderSize, vesSize, fInputTransportPacketCounter, + fLastPCR - fFirstPCR)); + + // Try again: + doGetNextFrame(); +} + +void MPEG2IFrameIndexFromTransportStream::handleInputClosure(void* clientData) { + MPEG2IFrameIndexFromTransportStream* source + = (MPEG2IFrameIndexFromTransportStream*)clientData; + source->handleInputClosure1(); +} + +#define VIDEO_SEQUENCE_START_CODE 0xB3 // MPEG-1 or 2 +#define VISUAL_OBJECT_SEQUENCE_START_CODE 0xB0 // MPEG-4 +#define GROUP_START_CODE 0xB8 // MPEG-1 or 2 +#define GROUP_VOP_START_CODE 0xB3 // MPEG-4 +#define PICTURE_START_CODE 0x00 // MPEG-1 or 2 +#define VOP_START_CODE 0xB6 // MPEG-4 + +void MPEG2IFrameIndexFromTransportStream::handleInputClosure1() { + if (++fClosureNumber == 1 && fParseBufferDataEnd > fParseBufferFrameStart + && fParseBufferDataEnd <= fParseBufferSize - 4) { + // This is the first time we saw EOF, and there's still data remaining to be + // parsed. Hack: Append a Picture Header code to the end of the unparsed + // data, and try again. This should use up all of the unparsed data. 
+ fParseBuffer[fParseBufferDataEnd++] = 0; + fParseBuffer[fParseBufferDataEnd++] = 0; + fParseBuffer[fParseBufferDataEnd++] = 1; + fParseBuffer[fParseBufferDataEnd++] = PICTURE_START_CODE; + + // Try again: + doGetNextFrame(); + } else { + // Handle closure in the regular way: + handleClosure(); + } +} + +void MPEG2IFrameIndexFromTransportStream +::analyzePAT(unsigned char* pkt, unsigned size) { + // Get the PMT_PID: + while (size >= 17) { // The table is large enough + u_int16_t program_number = (pkt[9]<<8) | pkt[10]; + if (program_number != 0) { + fPMT_PID = ((pkt[11]&0x1F)<<8) | pkt[12]; + return; + } + + pkt += 4; size -= 4; + } +} + +void MPEG2IFrameIndexFromTransportStream +::analyzePMT(unsigned char* pkt, unsigned size) { + // Scan the "elementary_PID"s in the map, until we see the first video stream. + + // First, get the "section_length", to get the table's size: + u_int16_t section_length = ((pkt[2]&0x0F)<<8) | pkt[3]; + if ((unsigned)(4+section_length) < size) size = (4+section_length); + + // Then, skip any descriptors following the "program_info_length": + if (size < 22) return; // not enough data + unsigned program_info_length = ((pkt[11]&0x0F)<<8) | pkt[12]; + pkt += 13; size -= 13; + if (size < program_info_length) return; // not enough data + pkt += program_info_length; size -= program_info_length; + + // Look at each ("stream_type","elementary_PID") pair, looking for a video stream: + while (size >= 9) { + u_int8_t stream_type = pkt[0]; + u_int16_t elementary_PID = ((pkt[1]&0x1F)<<8) | pkt[2]; + if (stream_type == 1 || stream_type == 2 || + stream_type == 0x1B/*H.264 video*/ || stream_type == 0x24/*H.265 video*/) { + if (stream_type == 0x1B) fIsH264 = True; + else if (stream_type == 0x24) fIsH265 = True; + fVideo_PID = elementary_PID; + return; + } + + u_int16_t ES_info_length = ((pkt[3]&0x0F)<<8) | pkt[4]; + pkt += 5; size -= 5; + if (size < ES_info_length) return; // not enough data + pkt += ES_info_length; size -= ES_info_length; + } +} + 
+Boolean MPEG2IFrameIndexFromTransportStream::deliverIndexRecord() { + IndexRecord* head = fHeadIndexRecord; + if (head == NULL) return False; + + // Check whether the head record has been parsed yet: + if (head->recordType() == RECORD_UNPARSED) return False; + + // Remove the head record (the one whose data we'll be delivering): + IndexRecord* next = head->next(); + head->unlink(); + if (next == head) { + fHeadIndexRecord = fTailIndexRecord = NULL; + } else { + fHeadIndexRecord = next; + } + + if (head->recordType() == RECORD_JUNK) { + // Don't actually deliver the data to the client: + delete head; + // Try to deliver the next record instead: + return deliverIndexRecord(); + } + + // Deliver data from the head record: +#ifdef DEBUG + envir() << "delivering: " << *head << "\n"; +#endif + if (fMaxSize < 11) { + fFrameSize = 0; + } else { + fTo[0] = (u_int8_t)(head->recordType()); + fTo[1] = head->startOffset(); + fTo[2] = head->size(); + // Deliver the PCR, as 24 bits (integer part; little endian) + 8 bits (fractional part) + float pcr = head->pcr(); + unsigned pcr_int = (unsigned)pcr; + u_int8_t pcr_frac = (u_int8_t)(256*(pcr-pcr_int)); + fTo[3] = (unsigned char)(pcr_int); + fTo[4] = (unsigned char)(pcr_int>>8); + fTo[5] = (unsigned char)(pcr_int>>16); + fTo[6] = (unsigned char)(pcr_frac); + // Deliver the transport packet number (in little-endian order): + unsigned long tpn = head->transportPacketNumber(); + fTo[7] = (unsigned char)(tpn); + fTo[8] = (unsigned char)(tpn>>8); + fTo[9] = (unsigned char)(tpn>>16); + fTo[10] = (unsigned char)(tpn>>24); + fFrameSize = 11; + } + + // Free the (former) head record (as we're now done with it): + delete head; + + // Complete delivery to the client: + afterGetting(this); + return True; +} + +Boolean MPEG2IFrameIndexFromTransportStream::parseFrame() { + // At this point, we have a queue of >=0 (unparsed) index records, representing + // the data in the parse buffer from "fParseBufferFrameStart" + // to "fParseBufferDataEnd". 
We now parse through this data, looking for + // a complete 'frame', where a 'frame', in this case, means: + // for MPEG video: a Video Sequence Header, GOP Header, Picture Header, or Slice + // for H.264 or H.265 video: a NAL unit + + // Inspect the frame's initial 4-byte code, to make sure it starts with a system code: + if (fParseBufferDataEnd-fParseBufferFrameStart < 4) return False; // not enough data + unsigned numInitialBadBytes = 0; + unsigned char const* p = &fParseBuffer[fParseBufferFrameStart]; + if (!(p[0] == 0 && p[1] == 0 && p[2] == 1)) { + // There's no system code at the beginning. Parse until we find one: + if (fParseBufferParseEnd == fParseBufferFrameStart + 4) { + // Start parsing from the beginning of the frame data: + fParseBufferParseEnd = fParseBufferFrameStart; + } + unsigned char nextCode; + if (!parseToNextCode(nextCode)) return False; + + numInitialBadBytes = fParseBufferParseEnd - fParseBufferFrameStart; + fParseBufferFrameStart = fParseBufferParseEnd; + fParseBufferParseEnd += 4; // skip over the code that we just saw + p = &fParseBuffer[fParseBufferFrameStart]; + } + + unsigned char curCode = p[3]; + if (fIsH264) curCode &= 0x1F; // nal_unit_type + else if (fIsH265) curCode = (curCode&0x7E)>>1; + + RecordType curRecordType; + unsigned char nextCode; + if (fIsH264) { + switch (curCode) { + case 1: // Coded slice of a non-IDR picture + curRecordType = RECORD_NAL_H264_NON_IFRAME; + if (!parseToNextCode(nextCode)) return False; + break; + case 5: // Coded slice of an IDR picture + curRecordType = RECORD_NAL_H264_IFRAME; + if (!parseToNextCode(nextCode)) return False; + break; + case 6: // Supplemental enhancement information (SEI) + curRecordType = RECORD_NAL_H264_SEI; + if (!parseToNextCode(nextCode)) return False; + break; + case 7: // Sequence parameter set (SPS) + curRecordType = RECORD_NAL_H264_SPS; + if (!parseToNextCode(nextCode)) return False; + break; + case 8: // Picture parameter set (PPS) + curRecordType = RECORD_NAL_H264_PPS; 
+ if (!parseToNextCode(nextCode)) return False; + break; + default: + curRecordType = RECORD_NAL_H264_OTHER; + if (!parseToNextCode(nextCode)) return False; + break; + } + } else if (fIsH265) { + switch (curCode) { + case 19: // Coded slice segment of an IDR picture + case 20: // Coded slice segment of an IDR picture + curRecordType = RECORD_NAL_H265_IFRAME; + if (!parseToNextCode(nextCode)) return False; + break; + case 32: // Video parameter set (VPS) + curRecordType = RECORD_NAL_H265_VPS; + if (!parseToNextCode(nextCode)) return False; + break; + case 33: // Sequence parameter set (SPS) + curRecordType = RECORD_NAL_H265_SPS; + if (!parseToNextCode(nextCode)) return False; + break; + case 34: // Picture parameter set (PPS) + curRecordType = RECORD_NAL_H265_PPS; + if (!parseToNextCode(nextCode)) return False; + break; + default: + curRecordType = (curCode <= 31) ? RECORD_NAL_H265_NON_IFRAME : RECORD_NAL_H265_OTHER; + if (!parseToNextCode(nextCode)) return False; + break; + } + } else { // MPEG-1, 2, or 4 + switch (curCode) { + case VIDEO_SEQUENCE_START_CODE: + case VISUAL_OBJECT_SEQUENCE_START_CODE: + curRecordType = RECORD_VSH; + while (1) { + if (!parseToNextCode(nextCode)) return False; + if (nextCode == GROUP_START_CODE || + nextCode == PICTURE_START_CODE || nextCode == VOP_START_CODE) break; + fParseBufferParseEnd += 4; // skip over the code that we just saw + } + break; + case GROUP_START_CODE: + curRecordType = RECORD_GOP; + while (1) { + if (!parseToNextCode(nextCode)) return False; + if (nextCode == PICTURE_START_CODE || nextCode == VOP_START_CODE) break; + fParseBufferParseEnd += 4; // skip over the code that we just saw + } + break; + default: // picture + curRecordType = RECORD_PIC_NON_IFRAME; // may get changed to IFRAME later + while (1) { + if (!parseToNextCode(nextCode)) return False; + if (nextCode == VIDEO_SEQUENCE_START_CODE || + nextCode == VISUAL_OBJECT_SEQUENCE_START_CODE || + nextCode == GROUP_START_CODE || nextCode == GROUP_VOP_START_CODE 
|| + nextCode == PICTURE_START_CODE || nextCode == VOP_START_CODE) break; + fParseBufferParseEnd += 4; // skip over the code that we just saw + } + break; + } + } + + if (curRecordType == RECORD_PIC_NON_IFRAME) { + if (curCode == VOP_START_CODE) { // MPEG-4 + if ((fParseBuffer[fParseBufferFrameStart+4]&0xC0) == 0) { + // This is actually an I-frame. Note it as such: + curRecordType = RECORD_PIC_IFRAME; + } + } else { // MPEG-1 or 2 + if ((fParseBuffer[fParseBufferFrameStart+5]&0x38) == 0x08) { + // This is actually an I-frame. Note it as such: + curRecordType = RECORD_PIC_IFRAME; + } + } + } + + // There is now a parsed 'frame', from "fParseBufferFrameStart" + // to "fParseBufferParseEnd". Tag the corresponding index records to note this: + unsigned frameSize = fParseBufferParseEnd - fParseBufferFrameStart + numInitialBadBytes; +#ifdef DEBUG + envir() << "parsed " << recordTypeStr[curRecordType] << "; length " + << frameSize << "\n"; +#endif + for (IndexRecord* r = fHeadIndexRecord; ; r = r->next()) { + if (numInitialBadBytes >= r->size()) { + r->recordType() = RECORD_JUNK; + numInitialBadBytes -= r->size(); + } else { + r->recordType() = curRecordType; + } + if (r == fHeadIndexRecord) r->setFirstFlag(); + // indicates that this is the first record for this frame + + if (r->size() > frameSize) { + // This record contains extra data that's not part of the frame. 
+ // Shorten this record, and move the extra data to a new record + // that comes afterwards: + u_int8_t newOffset = r->startOffset() + frameSize; + u_int8_t newSize = r->size() - frameSize; + r->size() = frameSize; +#ifdef DEBUG + envir() << "tagged record (modified): " << *r << "\n"; +#endif + + IndexRecord* newRecord + = new IndexRecord(newOffset, newSize, r->transportPacketNumber(), r->pcr()); + newRecord->addAfter(r); + if (fTailIndexRecord == r) fTailIndexRecord = newRecord; +#ifdef DEBUG + envir() << "added extra record: " << *newRecord << "\n"; +#endif + } else { +#ifdef DEBUG + envir() << "tagged record: " << *r << "\n"; +#endif + } + frameSize -= r->size(); + if (frameSize == 0) break; + if (r == fTailIndexRecord) { // this shouldn't happen + envir() << "!!!!!Internal consistency error!!!!!\n"; + return False; + } + } + + // Finally, update our parse state (to skip over the now-parsed data): + fParseBufferFrameStart = fParseBufferParseEnd; + fParseBufferParseEnd += 4; // to skip over the next code (that we found) + + return True; +} + +Boolean MPEG2IFrameIndexFromTransportStream +::parseToNextCode(unsigned char& nextCode) { + unsigned char const* p = &fParseBuffer[fParseBufferParseEnd]; + unsigned char const* end = &fParseBuffer[fParseBufferDataEnd]; + while (p <= end-4) { + if (p[2] > 1) p += 3; // common case (optimized) + else if (p[2] == 0) ++p; + else if (p[0] == 0 && p[1] == 0) { // && p[2] == 1 + // We found a code here: + nextCode = p[3]; + fParseBufferParseEnd = p - &fParseBuffer[0]; // where we've gotten to + return True; + } else p += 3; + } + + fParseBufferParseEnd = p - &fParseBuffer[0]; // where we've gotten to + return False; // no luck this time +} + +void MPEG2IFrameIndexFromTransportStream::compactParseBuffer() { +#ifdef DEBUG + envir() << "Compacting parse buffer: [" << fParseBufferFrameStart + << "," << fParseBufferParseEnd << "," << fParseBufferDataEnd << "]"; +#endif + memmove(&fParseBuffer[0], &fParseBuffer[fParseBufferFrameStart], 
+ fParseBufferDataEnd - fParseBufferFrameStart); + fParseBufferDataEnd -= fParseBufferFrameStart; + fParseBufferParseEnd -= fParseBufferFrameStart; + fParseBufferFrameStart = 0; +#ifdef DEBUG + envir() << "-> [" << fParseBufferFrameStart + << "," << fParseBufferParseEnd << "," << fParseBufferDataEnd << "]\n"; +#endif +} + +void MPEG2IFrameIndexFromTransportStream::addToTail(IndexRecord* newIndexRecord) { +#ifdef DEBUG + envir() << "adding new: " << *newIndexRecord << "\n"; +#endif + if (fTailIndexRecord == NULL) { + fHeadIndexRecord = fTailIndexRecord = newIndexRecord; + } else { + newIndexRecord->addAfter(fTailIndexRecord); + fTailIndexRecord = newIndexRecord; + } +} + +////////// IndexRecord implementation ////////// + +IndexRecord::IndexRecord(u_int8_t startOffset, u_int8_t size, + unsigned long transportPacketNumber, float pcr) + : fNext(this), fPrev(this), fRecordType(RECORD_UNPARSED), + fStartOffset(startOffset), fSize(size), + fPCR(pcr), fTransportPacketNumber(transportPacketNumber) { +} + +IndexRecord::~IndexRecord() { + IndexRecord* nextRecord = next(); + unlink(); + if (nextRecord != this) delete nextRecord; +} + +void IndexRecord::addAfter(IndexRecord* prev) { + fNext = prev->fNext; + fPrev = prev; + prev->fNext->fPrev = this; + prev->fNext = this; +} + +void IndexRecord::unlink() { + fNext->fPrev = fPrev; + fPrev->fNext = fNext; + fNext = fPrev = this; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG2TransportFileServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG2TransportFileServerMediaSubsession.cpp new file mode 100644 index 0000000..b113aaf --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG2TransportFileServerMediaSubsession.cpp @@ -0,0 +1,352 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a MPEG-2 Transport Stream file. +// Implementation + +#include "MPEG2TransportFileServerMediaSubsession.hh" +#include "SimpleRTPSink.hh" + +MPEG2TransportFileServerMediaSubsession* +MPEG2TransportFileServerMediaSubsession::createNew(UsageEnvironment& env, + char const* fileName, + char const* indexFileName, + Boolean reuseFirstSource) { + MPEG2TransportStreamIndexFile* indexFile; + if (indexFileName != NULL && reuseFirstSource) { + // It makes no sense to support trick play if all clients use the same source. 
Fix this: + env << "MPEG2TransportFileServerMediaSubsession::createNew(): ignoring the index file name, because \"reuseFirstSource\" is set\n"; + indexFile = NULL; + } else { + indexFile = MPEG2TransportStreamIndexFile::createNew(env, indexFileName); + } + return new MPEG2TransportFileServerMediaSubsession(env, fileName, indexFile, + reuseFirstSource); +} + +MPEG2TransportFileServerMediaSubsession +::MPEG2TransportFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, + MPEG2TransportStreamIndexFile* indexFile, + Boolean reuseFirstSource) + : FileServerMediaSubsession(env, fileName, reuseFirstSource), + fIndexFile(indexFile), fDuration(0.0), fClientSessionHashTable(NULL) { + if (fIndexFile != NULL) { // we support 'trick play' + fDuration = fIndexFile->getPlayingDuration(); + fClientSessionHashTable = HashTable::create(ONE_WORD_HASH_KEYS); + } +} + +MPEG2TransportFileServerMediaSubsession +::~MPEG2TransportFileServerMediaSubsession() { + if (fIndexFile != NULL) { // we support 'trick play' + Medium::close(fIndexFile); + + // Clean out the client session hash table: + while (1) { + ClientTrickPlayState* client + = (ClientTrickPlayState*)(fClientSessionHashTable->RemoveNext()); + if (client == NULL) break; + delete client; + } + delete fClientSessionHashTable; + } +} + +#define TRANSPORT_PACKET_SIZE 188 +#define TRANSPORT_PACKETS_PER_NETWORK_PACKET 7 +// The product of these two numbers must be enough to fit within a network packet + +void MPEG2TransportFileServerMediaSubsession +::startStream(unsigned clientSessionId, void* streamToken, TaskFunc* rtcpRRHandler, + void* rtcpRRHandlerClientData, unsigned short& rtpSeqNum, + unsigned& rtpTimestamp, + ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler, + void* serverRequestAlternativeByteHandlerClientData) { + if (fIndexFile != NULL) { // we support 'trick play' + ClientTrickPlayState* client = lookupClient(clientSessionId); + if (client != NULL && client->areChangingScale()) { + 
// First, handle this like a "PAUSE", except that we back up to the previous VSH + client->updateStateOnPlayChange(True); + OnDemandServerMediaSubsession::pauseStream(clientSessionId, streamToken); + + // Then, adjust for the change of scale: + client->updateStateOnScaleChange(); + } + } + + // Call the original, default version of this routine: + OnDemandServerMediaSubsession::startStream(clientSessionId, streamToken, + rtcpRRHandler, rtcpRRHandlerClientData, + rtpSeqNum, rtpTimestamp, + serverRequestAlternativeByteHandler, serverRequestAlternativeByteHandlerClientData); +} + +void MPEG2TransportFileServerMediaSubsession +::pauseStream(unsigned clientSessionId, void* streamToken) { + if (fIndexFile != NULL) { // we support 'trick play' + ClientTrickPlayState* client = lookupClient(clientSessionId); + if (client != NULL) { + client->updateStateOnPlayChange(False); + } + } + + // Call the original, default version of this routine: + OnDemandServerMediaSubsession::pauseStream(clientSessionId, streamToken); +} + +void MPEG2TransportFileServerMediaSubsession +::seekStream(unsigned clientSessionId, void* streamToken, double& seekNPT, double streamDuration, u_int64_t& numBytes) { + // Begin by calling the original, default version of this routine: + OnDemandServerMediaSubsession::seekStream(clientSessionId, streamToken, seekNPT, streamDuration, numBytes); + + // Then, special handling specific to indexed Transport Stream files: + if (fIndexFile != NULL) { // we support 'trick play' + ClientTrickPlayState* client = lookupClient(clientSessionId); + if (client != NULL) { + unsigned long numTSPacketsToStream = client->updateStateFromNPT(seekNPT, streamDuration); + numBytes = numTSPacketsToStream*TRANSPORT_PACKET_SIZE; + } + } +} + +void MPEG2TransportFileServerMediaSubsession +::setStreamScale(unsigned clientSessionId, void* streamToken, float scale) { + if (fIndexFile != NULL) { // we support 'trick play' + ClientTrickPlayState* client = lookupClient(clientSessionId); + if 
(client != NULL) { + client->setNextScale(scale); // scale won't take effect until the next "PLAY" + } + } + + // Call the original, default version of this routine: + OnDemandServerMediaSubsession::setStreamScale(clientSessionId, streamToken, scale); +} + +void MPEG2TransportFileServerMediaSubsession +::deleteStream(unsigned clientSessionId, void*& streamToken) { + if (fIndexFile != NULL) { // we support 'trick play' + ClientTrickPlayState* client = lookupClient(clientSessionId); + if (client != NULL) { + client->updateStateOnPlayChange(False); + } + } + + // Call the original, default version of this routine: + OnDemandServerMediaSubsession::deleteStream(clientSessionId, streamToken); +} + +ClientTrickPlayState* MPEG2TransportFileServerMediaSubsession::newClientTrickPlayState() { + return new ClientTrickPlayState(fIndexFile); +} + +FramedSource* MPEG2TransportFileServerMediaSubsession +::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate) { + // Create the video source: + unsigned const inputDataChunkSize + = TRANSPORT_PACKETS_PER_NETWORK_PACKET*TRANSPORT_PACKET_SIZE; + ByteStreamFileSource* fileSource + = ByteStreamFileSource::createNew(envir(), fFileName, inputDataChunkSize); + if (fileSource == NULL) return NULL; + fFileSize = fileSource->fileSize(); + + // Use the file size and the duration to estimate the stream's bitrate: + if (fFileSize > 0 && fDuration > 0.0) { + estBitrate = (unsigned)((int64_t)fFileSize/(125*fDuration) + 0.5); // kbps, rounded + } else { + estBitrate = 5000; // kbps, estimate + } + + + // Create a framer for the Transport Stream: + MPEG2TransportStreamFramer* framer + = MPEG2TransportStreamFramer::createNew(envir(), fileSource); + + if (fIndexFile != NULL) { // we support 'trick play' + // Keep state for this client (if we don't already have it): + ClientTrickPlayState* client = lookupClient(clientSessionId); + if (client == NULL) { + client = newClientTrickPlayState(); + fClientSessionHashTable->Add((char 
const*)clientSessionId, client); + } + client->setSource(framer); + } + + return framer; +} + +RTPSink* MPEG2TransportFileServerMediaSubsession +::createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char /*rtpPayloadTypeIfDynamic*/, + FramedSource* /*inputSource*/) { + return SimpleRTPSink::createNew(envir(), rtpGroupsock, + 33, 90000, "video", "MP2T", + 1, True, False /*no 'M' bit*/); +} + +void MPEG2TransportFileServerMediaSubsession::testScaleFactor(float& scale) { + if (fIndexFile != NULL && fDuration > 0.0) { + // We support any integral scale, other than 0 + int iScale = scale < 0.0 ? (int)(scale - 0.5f) : (int)(scale + 0.5f); // round + if (iScale == 0) iScale = 1; + scale = (float)iScale; + } else { + scale = 1.0f; + } +} + +float MPEG2TransportFileServerMediaSubsession::duration() const { + return fDuration; +} + +ClientTrickPlayState* MPEG2TransportFileServerMediaSubsession +::lookupClient(unsigned clientSessionId) { + return (ClientTrickPlayState*)(fClientSessionHashTable->Lookup((char const*)clientSessionId)); +} + + +////////// ClientTrickPlayState implementation ////////// + +ClientTrickPlayState::ClientTrickPlayState(MPEG2TransportStreamIndexFile* indexFile) + : fIndexFile(indexFile), + fOriginalTransportStreamSource(NULL), + fTrickModeFilter(NULL), fTrickPlaySource(NULL), + fFramer(NULL), + fScale(1.0f), fNextScale(1.0f), fNPT(0.0f), + fTSRecordNum(0), fIxRecordNum(0) { +} + +unsigned long ClientTrickPlayState::updateStateFromNPT(double npt, double streamDuration) { + fNPT = (float)npt; + // Map "fNPT" to the corresponding Transport Stream and Index record numbers: + unsigned long tsRecordNum, ixRecordNum; + fIndexFile->lookupTSPacketNumFromNPT(fNPT, tsRecordNum, ixRecordNum); + + updateTSRecordNum(); + if (tsRecordNum != fTSRecordNum) { + fTSRecordNum = tsRecordNum; + fIxRecordNum = ixRecordNum; + + // Seek the source to the new record number: + reseekOriginalTransportStreamSource(); + // Note: We assume that we're asked to seek only in normal + 
// (i.e., non trick play) mode, so we don't seek within the trick + // play source (if any). + + fFramer->clearPIDStatusTable(); + } + + unsigned long numTSRecordsToStream = 0; + float pcrLimit = 0.0; + if (streamDuration > 0.0) { + // fNPT might have changed when we looked it up in the index file. Adjust "streamDuration" accordingly: + streamDuration += npt - (double)fNPT; + + if (streamDuration > 0.0) { + // Specify that we want to stream no more data than this. + + if (fNextScale == 1.0f) { + // We'll be streaming from the original file. + // Use the index file to figure out how many Transport Packets we get to stream: + unsigned long toTSRecordNum, toIxRecordNum; + float toNPT = (float)(fNPT + streamDuration); + fIndexFile->lookupTSPacketNumFromNPT(toNPT, toTSRecordNum, toIxRecordNum); + if (toTSRecordNum > tsRecordNum) { // sanity check + numTSRecordsToStream = toTSRecordNum - tsRecordNum; + } + } else { + // We'll be streaming from the trick play stream. + // It'd be difficult to figure out how many Transport Packets we need to stream, so instead set a PCR + // limit in the trick play stream. (We rely upon the fact that PCRs in the trick play stream start at 0.0) + int direction = fNextScale < 0.0 ? 
-1 : 1; + pcrLimit = (float)(streamDuration/(fNextScale*direction)); + } + } + } + fFramer->setNumTSPacketsToStream(numTSRecordsToStream); + fFramer->setPCRLimit(pcrLimit); + + return numTSRecordsToStream; +} + +void ClientTrickPlayState::updateStateOnScaleChange() { + fScale = fNextScale; + + // Change our source objects to reflect the change in scale: + // First, close the existing trick play source (if any): + if (fTrickPlaySource != NULL) { + fTrickModeFilter->forgetInputSource(); + // so that the underlying Transport Stream source doesn't get deleted by: + Medium::close(fTrickPlaySource); + fTrickPlaySource = NULL; + fTrickModeFilter = NULL; + } + if (fNextScale != 1.0f) { + // Create a new trick play filter from the original Transport Stream source: + UsageEnvironment& env = fIndexFile->envir(); // alias + fTrickModeFilter = MPEG2TransportStreamTrickModeFilter + ::createNew(env, fOriginalTransportStreamSource, fIndexFile, int(fNextScale)); + fTrickModeFilter->seekTo(fTSRecordNum, fIxRecordNum); + + // And generate a Transport Stream from this: + fTrickPlaySource = MPEG2TransportStreamFromESSource::createNew(env); + fTrickPlaySource->addNewVideoSource(fTrickModeFilter, fIndexFile->mpegVersion()); + + fFramer->changeInputSource(fTrickPlaySource); + } else { + // Switch back to the original Transport Stream source: + reseekOriginalTransportStreamSource(); + fFramer->changeInputSource(fOriginalTransportStreamSource); + } +} + +void ClientTrickPlayState::updateStateOnPlayChange(Boolean reverseToPreviousVSH) { + updateTSRecordNum(); + if (fTrickPlaySource == NULL) { + // We were in regular (1x) play. Use the index file to look up the + // index record number and npt from the current transport number: + fIndexFile->lookupPCRFromTSPacketNum(fTSRecordNum, reverseToPreviousVSH, fNPT, fIxRecordNum); + } else { + // We were in trick mode, and so already have the index record number. 
+ // Get the transport record number and npt from this: + fIxRecordNum = fTrickModeFilter->nextIndexRecordNum(); + if ((long)fIxRecordNum < 0) fIxRecordNum = 0; // we were at the start of the file + unsigned long transportRecordNum; + float pcr; + u_int8_t offset, size, recordType; // all dummy + if (fIndexFile->readIndexRecordValues(fIxRecordNum, transportRecordNum, + offset, size, pcr, recordType)) { + fTSRecordNum = transportRecordNum; + fNPT = pcr; + } + } +} + +void ClientTrickPlayState::setSource(MPEG2TransportStreamFramer* framer) { + fFramer = framer; + fOriginalTransportStreamSource = (ByteStreamFileSource*)(framer->inputSource()); +} + +void ClientTrickPlayState::updateTSRecordNum(){ + if (fFramer != NULL) fTSRecordNum += (unsigned long)(fFramer->tsPacketCount()); +} + +void ClientTrickPlayState::reseekOriginalTransportStreamSource() { + u_int64_t tsRecordNum64 = (u_int64_t)fTSRecordNum; + fOriginalTransportStreamSource->seekToByteAbsolute(tsRecordNum64*TRANSPORT_PACKET_SIZE); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamFramer.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamFramer.cpp new file mode 100644 index 0000000..1caf28c --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamFramer.cpp @@ -0,0 +1,290 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that passes through (unchanged) chunks that contain an integral number +// of MPEG-2 Transport Stream packets, but returning (in "fDurationInMicroseconds") +// an updated estimate of the time gap between chunks. +// Implementation + +#include "MPEG2TransportStreamFramer.hh" +#include // for "gettimeofday()" + +#define TRANSPORT_PACKET_SIZE 188 + +////////// Definitions of constants that control the behavior of this code ///////// + +#if !defined(NEW_DURATION_WEIGHT) +#define NEW_DURATION_WEIGHT 0.5 + // How much weight to give to the latest duration measurement (must be <= 1) +#endif + +#if !defined(TIME_ADJUSTMENT_FACTOR) +#define TIME_ADJUSTMENT_FACTOR 0.8 + // A factor by which to adjust the duration estimate to ensure that the overall + // packet transmission times remains matched with the PCR times (which will be the + // times that we expect receivers to play the incoming packets). 
+ // (must be <= 1) +#endif + +#if !defined(MAX_PLAYOUT_BUFFER_DURATION) +#define MAX_PLAYOUT_BUFFER_DURATION 0.1 // (seconds) +#endif + +#if !defined(PCR_PERIOD_VARIATION_RATIO) +#define PCR_PERIOD_VARIATION_RATIO 0.5 +#endif + +////////// PIDStatus ////////// + +class PIDStatus { +public: + PIDStatus(double _firstClock, double _firstRealTime) + : firstClock(_firstClock), lastClock(_firstClock), + firstRealTime(_firstRealTime), lastRealTime(_firstRealTime), + lastPacketNum(0) { + } + + double firstClock, lastClock, firstRealTime, lastRealTime; + u_int64_t lastPacketNum; +}; + + +////////// MPEG2TransportStreamFramer ////////// + +MPEG2TransportStreamFramer* MPEG2TransportStreamFramer +::createNew(UsageEnvironment& env, FramedSource* inputSource) { + return new MPEG2TransportStreamFramer(env, inputSource); +} + +MPEG2TransportStreamFramer +::MPEG2TransportStreamFramer(UsageEnvironment& env, FramedSource* inputSource) + : FramedFilter(env, inputSource), + fTSPacketCount(0), fTSPacketDurationEstimate(0.0), fTSPCRCount(0), + fLimitNumTSPacketsToStream(False), fNumTSPacketsToStream(0), + fLimitTSPacketsToStreamByPCR(False), fPCRLimit(0.0) { + fPIDStatusTable = HashTable::create(ONE_WORD_HASH_KEYS); +} + +MPEG2TransportStreamFramer::~MPEG2TransportStreamFramer() { + clearPIDStatusTable(); + delete fPIDStatusTable; +} + +void MPEG2TransportStreamFramer::clearPIDStatusTable() { + PIDStatus* pidStatus; + while ((pidStatus = (PIDStatus*)fPIDStatusTable->RemoveNext()) != NULL) { + delete pidStatus; + } +} + +void MPEG2TransportStreamFramer::setNumTSPacketsToStream(unsigned long numTSRecordsToStream) { + fNumTSPacketsToStream = numTSRecordsToStream; + fLimitNumTSPacketsToStream = numTSRecordsToStream > 0; +} + +void MPEG2TransportStreamFramer::setPCRLimit(float pcrLimit) { + fPCRLimit = pcrLimit; + fLimitTSPacketsToStreamByPCR = pcrLimit != 0.0; +} + +void MPEG2TransportStreamFramer::doGetNextFrame() { + if (fLimitNumTSPacketsToStream) { + if (fNumTSPacketsToStream == 0) { + 
handleClosure(); + return; + } + if (fNumTSPacketsToStream*TRANSPORT_PACKET_SIZE < fMaxSize) { + fMaxSize = fNumTSPacketsToStream*TRANSPORT_PACKET_SIZE; + } + } + + // Read directly from our input source into our client's buffer: + fFrameSize = 0; + fInputSource->getNextFrame(fTo, fMaxSize, + afterGettingFrame, this, + FramedSource::handleClosure, this); +} + +void MPEG2TransportStreamFramer::doStopGettingFrames() { + FramedFilter::doStopGettingFrames(); + fTSPacketCount = 0; + fTSPCRCount = 0; + + clearPIDStatusTable(); +} + +void MPEG2TransportStreamFramer +::afterGettingFrame(void* clientData, unsigned frameSize, + unsigned /*numTruncatedBytes*/, + struct timeval presentationTime, + unsigned /*durationInMicroseconds*/) { + MPEG2TransportStreamFramer* framer = (MPEG2TransportStreamFramer*)clientData; + framer->afterGettingFrame1(frameSize, presentationTime); +} + +#define TRANSPORT_SYNC_BYTE 0x47 + +void MPEG2TransportStreamFramer::afterGettingFrame1(unsigned frameSize, + struct timeval presentationTime) { + fFrameSize += frameSize; + unsigned const numTSPackets = fFrameSize/TRANSPORT_PACKET_SIZE; + fNumTSPacketsToStream -= numTSPackets; + fFrameSize = numTSPackets*TRANSPORT_PACKET_SIZE; // an integral # of TS packets + if (fFrameSize == 0) { + // We didn't read a complete TS packet; assume that the input source has closed. + handleClosure(); + return; + } + + // Make sure the data begins with a sync byte: + unsigned syncBytePosition; + for (syncBytePosition = 0; syncBytePosition < fFrameSize; ++syncBytePosition) { + if (fTo[syncBytePosition] == TRANSPORT_SYNC_BYTE) break; + } + if (syncBytePosition == fFrameSize) { + envir() << "No Transport Stream sync byte in data."; + handleClosure(); + return; + } else if (syncBytePosition > 0) { + // There's a sync byte, but not at the start of the data. 
Move the good data + // to the start of the buffer, then read more to fill it up again: + memmove(fTo, &fTo[syncBytePosition], fFrameSize - syncBytePosition); + fFrameSize -= syncBytePosition; + fInputSource->getNextFrame(&fTo[fFrameSize], syncBytePosition, + afterGettingFrame, this, + FramedSource::handleClosure, this); + return; + } // else normal case: the data begins with a sync byte + + fPresentationTime = presentationTime; + + // Scan through the TS packets that we read, and update our estimate of + // the duration of each packet: + struct timeval tvNow; + gettimeofday(&tvNow, NULL); + double timeNow = tvNow.tv_sec + tvNow.tv_usec/1000000.0; + for (unsigned i = 0; i < numTSPackets; ++i) { + if (!updateTSPacketDurationEstimate(&fTo[i*TRANSPORT_PACKET_SIZE], timeNow)) { + // We hit a preset limit (based on PCR) within the stream. Handle this as if the input source has closed: + handleClosure(); + return; + } + } + + fDurationInMicroseconds + = numTSPackets * (unsigned)(fTSPacketDurationEstimate*1000000); + + // Complete the delivery to our client: + afterGetting(this); +} + +Boolean MPEG2TransportStreamFramer::updateTSPacketDurationEstimate(unsigned char* pkt, double timeNow) { + // Sanity check: Make sure we start with the sync byte: + if (pkt[0] != TRANSPORT_SYNC_BYTE) { + envir() << "Missing sync byte!\n"; + return True; + } + + ++fTSPacketCount; + + // If this packet doesn't contain a PCR, then we're not interested in it: + u_int8_t const adaptation_field_control = (pkt[3]&0x30)>>4; + if (adaptation_field_control != 2 && adaptation_field_control != 3) return True; + // there's no adaptation_field + + u_int8_t const adaptation_field_length = pkt[4]; + if (adaptation_field_length == 0) return True; + + u_int8_t const discontinuity_indicator = pkt[5]&0x80; + u_int8_t const pcrFlag = pkt[5]&0x10; + if (pcrFlag == 0) return True; // no PCR + + // There's a PCR. 
Get it, and the PID: + ++fTSPCRCount; + u_int32_t pcrBaseHigh = (pkt[6]<<24)|(pkt[7]<<16)|(pkt[8]<<8)|pkt[9]; + double clock = pcrBaseHigh/45000.0; + if ((pkt[10]&0x80) != 0) clock += 1/90000.0; // add in low-bit (if set) + unsigned short pcrExt = ((pkt[10]&0x01)<<8) | pkt[11]; + clock += pcrExt/27000000.0; + if (fLimitTSPacketsToStreamByPCR) { + if (clock > fPCRLimit) { + // We've hit a preset limit within the stream: + return False; + } + } + + unsigned pid = ((pkt[1]&0x1F)<<8) | pkt[2]; + + // Check whether we already have a record of a PCR for this PID: + PIDStatus* pidStatus = (PIDStatus*)(fPIDStatusTable->Lookup((char*)pid)); + + if (pidStatus == NULL) { + // We're seeing this PID's PCR for the first time: + pidStatus = new PIDStatus(clock, timeNow); + fPIDStatusTable->Add((char*)pid, pidStatus); +#ifdef DEBUG_PCR + fprintf(stderr, "PID 0x%x, FIRST PCR 0x%08x+%d:%03x == %f @ %f, pkt #%lu\n", pid, pcrBaseHigh, pkt[10]>>7, pcrExt, clock, timeNow, fTSPacketCount); +#endif + } else { + // We've seen this PID's PCR before; update our per-packet duration estimate: + int64_t packetsSinceLast = (int64_t)(fTSPacketCount - pidStatus->lastPacketNum); + // it's "int64_t" because some compilers can't convert "u_int64_t" -> "double" + double durationPerPacket = (clock - pidStatus->lastClock)/packetsSinceLast; + + // Hack (suggested by "Romain"): Don't update our estimate if this PCR appeared unusually quickly. + // (This can produce more accurate estimates for wildly VBR streams.) 
+ double meanPCRPeriod = 0.0; + if (fTSPCRCount > 0) { + double tsPacketCount = (double)(int64_t)fTSPacketCount; + double tsPCRCount = (double)(int64_t)fTSPCRCount; + meanPCRPeriod = tsPacketCount/tsPCRCount; + if (packetsSinceLast < meanPCRPeriod*PCR_PERIOD_VARIATION_RATIO) return True; + } + + if (fTSPacketDurationEstimate == 0.0) { // we've just started + fTSPacketDurationEstimate = durationPerPacket; + } else if (discontinuity_indicator == 0 && durationPerPacket >= 0.0) { + fTSPacketDurationEstimate + = durationPerPacket*NEW_DURATION_WEIGHT + + fTSPacketDurationEstimate*(1-NEW_DURATION_WEIGHT); + + // Also adjust the duration estimate to try to ensure that the transmission + // rate matches the playout rate: + double transmitDuration = timeNow - pidStatus->firstRealTime; + double playoutDuration = clock - pidStatus->firstClock; + if (transmitDuration > playoutDuration) { + fTSPacketDurationEstimate *= TIME_ADJUSTMENT_FACTOR; // reduce estimate + } else if (transmitDuration + MAX_PLAYOUT_BUFFER_DURATION < playoutDuration) { + fTSPacketDurationEstimate /= TIME_ADJUSTMENT_FACTOR; // increase estimate + } + } else { + // the PCR has a discontinuity from its previous value; don't use it now, + // but reset our PCR and real-time values to compensate: + pidStatus->firstClock = clock; + pidStatus->firstRealTime = timeNow; + } +#ifdef DEBUG_PCR + fprintf(stderr, "PID 0x%x, PCR 0x%08x+%d:%03x == %f @ %f (diffs %f @ %f), pkt #%lu, discon %d => this duration %f, new estimate %f, mean PCR period=%f\n", pid, pcrBaseHigh, pkt[10]>>7, pcrExt, clock, timeNow, clock - pidStatus->firstClock, timeNow - pidStatus->firstRealTime, fTSPacketCount, discontinuity_indicator != 0, durationPerPacket, fTSPacketDurationEstimate, meanPCRPeriod ); +#endif + } + + pidStatus->lastClock = clock; + pidStatus->lastRealTime = timeNow; + pidStatus->lastPacketNum = fTSPacketCount; + + return True; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamFromESSource.cpp 
b/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamFromESSource.cpp new file mode 100644 index 0000000..45ecf4c --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamFromESSource.cpp @@ -0,0 +1,260 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A filter for converting one or more MPEG Elementary Streams +// to a MPEG-2 Transport Stream +// Implementation + +#include "MPEG2TransportStreamFromESSource.hh" + +#define MAX_INPUT_ES_FRAME_SIZE 100000 +#define SIMPLE_PES_HEADER_SIZE 14 +#define LOW_WATER_MARK 1000 // <= MAX_INPUT_ES_FRAME_SIZE +#define INPUT_BUFFER_SIZE (SIMPLE_PES_HEADER_SIZE + 2*MAX_INPUT_ES_FRAME_SIZE) + +////////// InputESSourceRecord definition ////////// + +class InputESSourceRecord { +public: + InputESSourceRecord(MPEG2TransportStreamFromESSource& parent, + FramedSource* inputSource, + u_int8_t streamId, int mpegVersion, + InputESSourceRecord* next, int16_t PID = -1); + virtual ~InputESSourceRecord(); + + InputESSourceRecord* next() const { return fNext; } + FramedSource* inputSource() const { return fInputSource; } + + void askForNewData(); + Boolean deliverBufferToClient(); + + unsigned char* buffer() const { return fInputBuffer; } + void reset() { + // Reset the buffer for future use: + fInputBufferBytesAvailable = 0; + fInputBufferInUse = False; + } + +private: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime); + +private: + InputESSourceRecord* fNext; + MPEG2TransportStreamFromESSource& fParent; + FramedSource* fInputSource; + u_int8_t fStreamId; + int fMPEGVersion; + unsigned char* fInputBuffer; + unsigned fInputBufferBytesAvailable; + Boolean fInputBufferInUse; + MPEG1or2Demux::SCR fSCR; + int16_t fPID; +}; + + +////////// MPEG2TransportStreamFromESSource implementation ////////// + +MPEG2TransportStreamFromESSource* MPEG2TransportStreamFromESSource +::createNew(UsageEnvironment& env) { + return new MPEG2TransportStreamFromESSource(env); +} + +void MPEG2TransportStreamFromESSource +::addNewVideoSource(FramedSource* inputSource, int mpegVersion, 
int16_t PID) { + u_int8_t streamId = 0xE0 | (fVideoSourceCounter++&0x0F); + addNewInputSource(inputSource, streamId, mpegVersion, PID); + fHaveVideoStreams = True; +} + +void MPEG2TransportStreamFromESSource +::addNewAudioSource(FramedSource* inputSource, int mpegVersion, int16_t PID) { + u_int8_t streamId = 0xC0 | (fAudioSourceCounter++&0x0F); + addNewInputSource(inputSource, streamId, mpegVersion, PID); +} + +MPEG2TransportStreamFromESSource +::MPEG2TransportStreamFromESSource(UsageEnvironment& env) + : MPEG2TransportStreamMultiplexor(env), + fInputSources(NULL), fVideoSourceCounter(0), fAudioSourceCounter(0) { + fHaveVideoStreams = False; // unless we add a video source +} + +MPEG2TransportStreamFromESSource::~MPEG2TransportStreamFromESSource() { + doStopGettingFrames(); + delete fInputSources; +} + +void MPEG2TransportStreamFromESSource::doStopGettingFrames() { + // Stop each input source: + for (InputESSourceRecord* sourceRec = fInputSources; sourceRec != NULL; + sourceRec = sourceRec->next()) { + sourceRec->inputSource()->stopGettingFrames(); + } +} + +void MPEG2TransportStreamFromESSource +::awaitNewBuffer(unsigned char* oldBuffer) { + InputESSourceRecord* sourceRec; + // Begin by resetting the old buffer: + if (oldBuffer != NULL) { + for (sourceRec = fInputSources; sourceRec != NULL; + sourceRec = sourceRec->next()) { + if (sourceRec->buffer() == oldBuffer) { + sourceRec->reset(); + break; + } + } + } + + if (isCurrentlyAwaitingData()) { + // Try to deliver one filled-in buffer to the client: + for (sourceRec = fInputSources; sourceRec != NULL; + sourceRec = sourceRec->next()) { + if (sourceRec->deliverBufferToClient()) break; + } + } + + // No filled-in buffers are available. 
Ask each of our inputs for data: + for (sourceRec = fInputSources; sourceRec != NULL; + sourceRec = sourceRec->next()) { + sourceRec->askForNewData(); + } + +} + +void MPEG2TransportStreamFromESSource +::addNewInputSource(FramedSource* inputSource, + u_int8_t streamId, int mpegVersion, int16_t PID) { + if (inputSource == NULL) return; + fInputSources = new InputESSourceRecord(*this, inputSource, streamId, + mpegVersion, fInputSources, PID); +} + + +////////// InputESSourceRecord implementation ////////// + +InputESSourceRecord +::InputESSourceRecord(MPEG2TransportStreamFromESSource& parent, + FramedSource* inputSource, + u_int8_t streamId, int mpegVersion, + InputESSourceRecord* next, int16_t PID) + : fNext(next), fParent(parent), fInputSource(inputSource), + fStreamId(streamId), fMPEGVersion(mpegVersion), fPID(PID) { + fInputBuffer = new unsigned char[INPUT_BUFFER_SIZE]; + reset(); +} + +InputESSourceRecord::~InputESSourceRecord() { + Medium::close(fInputSource); + delete[] fInputBuffer; + delete fNext; +} + +void InputESSourceRecord::askForNewData() { + if (fInputBufferInUse) return; + + if (fInputBufferBytesAvailable == 0) { + // Reset our buffer, by adding a simple PES header at the start: + fInputBuffer[0] = 0; fInputBuffer[1] = 0; fInputBuffer[2] = 1; + fInputBuffer[3] = fStreamId; + fInputBuffer[4] = 0; fInputBuffer[5] = 0; // fill in later with the length + fInputBuffer[6] = 0x80; + fInputBuffer[7] = 0x80; // include a PTS + fInputBuffer[8] = 5; // PES_header_data_length (enough for a PTS) + // fInputBuffer[9..13] will be the PTS; fill this in later + fInputBufferBytesAvailable = SIMPLE_PES_HEADER_SIZE; + } + if (fInputBufferBytesAvailable < LOW_WATER_MARK && + !fInputSource->isCurrentlyAwaitingData()) { + // We don't yet have enough data in our buffer. 
Arrange to read more: + fInputSource->getNextFrame(&fInputBuffer[fInputBufferBytesAvailable], + INPUT_BUFFER_SIZE-fInputBufferBytesAvailable, + afterGettingFrame, this, + FramedSource::handleClosure, &fParent); + } +} + +Boolean InputESSourceRecord::deliverBufferToClient() { + if (fInputBufferInUse || fInputBufferBytesAvailable < LOW_WATER_MARK) return False; + + // Fill in the PES_packet_length field that we left unset before: + unsigned PES_packet_length = fInputBufferBytesAvailable - 6; + if (PES_packet_length > 0xFFFF) { + // Set the PES_packet_length field to 0. This indicates an unbounded length (see ISO 13818-1, 2.4.3.7) + PES_packet_length = 0; + } + fInputBuffer[4] = PES_packet_length>>8; + fInputBuffer[5] = PES_packet_length; + + // Fill in the PES PTS (from our SCR): + fInputBuffer[9] = 0x20|(fSCR.highBit<<3)|(fSCR.remainingBits>>29)|0x01; + fInputBuffer[10] = fSCR.remainingBits>>22; + fInputBuffer[11] = (fSCR.remainingBits>>14)|0x01; + fInputBuffer[12] = fSCR.remainingBits>>7; + fInputBuffer[13] = (fSCR.remainingBits<<1)|0x01; + + fInputBufferInUse = True; + + // Do the delivery: + fParent.handleNewBuffer(fInputBuffer, fInputBufferBytesAvailable, + fMPEGVersion, fSCR, fPID); + + return True; +} + +void InputESSourceRecord +::afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned /*durationInMicroseconds*/) { + InputESSourceRecord* source = (InputESSourceRecord*)clientData; + source->afterGettingFrame1(frameSize, numTruncatedBytes, presentationTime); +} +void InputESSourceRecord +::afterGettingFrame1(unsigned frameSize, unsigned numTruncatedBytes, + struct timeval presentationTime) { + if (numTruncatedBytes > 0) { + fParent.envir() << "MPEG2TransportStreamFromESSource: input buffer too small; increase \"MAX_INPUT_ES_FRAME_SIZE\" in \"MPEG2TransportStreamFromESSource\" by at least " + << numTruncatedBytes << " bytes!\n"; + } + + if (fInputBufferBytesAvailable == 
SIMPLE_PES_HEADER_SIZE) { + // Use this presentationTime for our SCR: + fSCR.highBit + = ((presentationTime.tv_sec*45000 + (presentationTime.tv_usec*9)/200)& + 0x80000000) != 0; + fSCR.remainingBits + = presentationTime.tv_sec*90000 + (presentationTime.tv_usec*9)/100; + fSCR.extension = (presentationTime.tv_usec*9)%100; +#ifdef DEBUG_SCR + fprintf(stderr, "PES header: stream_id 0x%02x, pts: %u.%06u => SCR 0x%x%08x:%03x\n", fStreamId, (unsigned)presentationTime.tv_sec, (unsigned)presentationTime.tv_usec, fSCR.highBit, fSCR.remainingBits, fSCR.extension); +#endif + } + + fInputBufferBytesAvailable += frameSize; + + fParent.fPresentationTime = presentationTime; + + // Now that we have new input data, check if we can deliver to the client: + fParent.awaitNewBuffer(NULL); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamFromPESSource.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamFromPESSource.cpp new file mode 100644 index 0000000..7e08614 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamFromPESSource.cpp @@ -0,0 +1,74 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A filter for converting a stream of MPEG PES packets to a MPEG-2 Transport Stream +// Implementation + +#include "MPEG2TransportStreamFromPESSource.hh" + +#define MAX_PES_PACKET_SIZE (6+65535) + +MPEG2TransportStreamFromPESSource* MPEG2TransportStreamFromPESSource +::createNew(UsageEnvironment& env, MPEG1or2DemuxedElementaryStream* inputSource) { + return new MPEG2TransportStreamFromPESSource(env, inputSource); +} + +MPEG2TransportStreamFromPESSource +::MPEG2TransportStreamFromPESSource(UsageEnvironment& env, + MPEG1or2DemuxedElementaryStream* inputSource) + : MPEG2TransportStreamMultiplexor(env), + fInputSource(inputSource) { + fInputBuffer = new unsigned char[MAX_PES_PACKET_SIZE]; +} + +MPEG2TransportStreamFromPESSource::~MPEG2TransportStreamFromPESSource() { + Medium::close(fInputSource); + delete[] fInputBuffer; +} + +void MPEG2TransportStreamFromPESSource::doStopGettingFrames() { + fInputSource->stopGettingFrames(); +} + +void MPEG2TransportStreamFromPESSource +::awaitNewBuffer(unsigned char* /*oldBuffer*/) { + fInputSource->getNextFrame(fInputBuffer, MAX_PES_PACKET_SIZE, + afterGettingFrame, this, + FramedSource::handleClosure, this); +} + +void MPEG2TransportStreamFromPESSource +::afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + MPEG2TransportStreamFromPESSource* source + = (MPEG2TransportStreamFromPESSource*)clientData; + source->afterGettingFrame1(frameSize, numTruncatedBytes, + presentationTime, durationInMicroseconds); +} + +void MPEG2TransportStreamFromPESSource +::afterGettingFrame1(unsigned frameSize, + unsigned /*numTruncatedBytes*/, + struct timeval /*presentationTime*/, + unsigned /*durationInMicroseconds*/) { + if (frameSize < 4) return; + + handleNewBuffer(fInputBuffer, frameSize, + fInputSource->mpegVersion(), fInputSource->lastSeenSCR()); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamIndexFile.cpp 
b/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamIndexFile.cpp new file mode 100644 index 0000000..99187e9 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamIndexFile.cpp @@ -0,0 +1,349 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A class that encapsulates MPEG-2 Transport Stream 'index files'/ +// These index files are used to implement 'trick play' operations +// (seek-by-time, fast forward, reverse play) on Transport Stream files. 
+// +// Implementation + +#include "MPEG2TransportStreamIndexFile.hh" +#include "InputFile.hh" + +MPEG2TransportStreamIndexFile +::MPEG2TransportStreamIndexFile(UsageEnvironment& env, char const* indexFileName) + : Medium(env), + fFileName(strDup(indexFileName)), fFid(NULL), fMPEGVersion(0), fCurrentIndexRecordNum(0), + fCachedPCR(0.0f), fCachedTSPacketNumber(0), fNumIndexRecords(0) { + // Get the file size, to determine how many index records it contains: + u_int64_t indexFileSize = GetFileSize(indexFileName, NULL); + if (indexFileSize % INDEX_RECORD_SIZE != 0) { + env << "Warning: Size of the index file \"" << indexFileName + << "\" (" << (unsigned)indexFileSize + << ") is not a multiple of the index record size (" + << INDEX_RECORD_SIZE << ")\n"; + } + fNumIndexRecords = (unsigned long)(indexFileSize/INDEX_RECORD_SIZE); +} + +MPEG2TransportStreamIndexFile* MPEG2TransportStreamIndexFile +::createNew(UsageEnvironment& env, char const* indexFileName) { + if (indexFileName == NULL) return NULL; + MPEG2TransportStreamIndexFile* indexFile + = new MPEG2TransportStreamIndexFile(env, indexFileName); + + // Reject empty or non-existent index files: + if (indexFile->getPlayingDuration() == 0.0f) { + delete indexFile; + indexFile = NULL; + } + + return indexFile; +} + +MPEG2TransportStreamIndexFile::~MPEG2TransportStreamIndexFile() { + closeFid(); + delete[] fFileName; +} + +void MPEG2TransportStreamIndexFile +::lookupTSPacketNumFromNPT(float& npt, unsigned long& tsPacketNumber, + unsigned long& indexRecordNumber) { + if (npt <= 0.0 || fNumIndexRecords == 0) { // Fast-track a common case: + npt = 0.0f; + tsPacketNumber = indexRecordNumber = 0; + return; + } + + // If "npt" is the same as the one that we last looked up, return its cached result: + if (npt == fCachedPCR) { + tsPacketNumber = fCachedTSPacketNumber; + indexRecordNumber = fCachedIndexRecordNumber; + return; + } + + // Search for the pair of neighboring index records whose PCR values span "npt". 
+ // Use the 'regula-falsi' method. + Boolean success = False; + unsigned long ixFound = 0; + do { + unsigned long ixLeft = 0, ixRight = fNumIndexRecords-1; + float pcrLeft = 0.0f, pcrRight; + if (!readIndexRecord(ixRight)) break; + pcrRight = pcrFromBuf(); + if (npt > pcrRight) npt = pcrRight; + // handle "npt" too large by seeking to the last frame of the file + + while (ixRight-ixLeft > 1 && pcrLeft < npt && npt <= pcrRight) { + unsigned long ixNew = ixLeft + + (unsigned long)(((npt-pcrLeft)/(pcrRight-pcrLeft))*(ixRight-ixLeft)); + if (ixNew == ixLeft || ixNew == ixRight) { + // use bisection instead: + ixNew = (ixLeft+ixRight)/2; + } + if (!readIndexRecord(ixNew)) break; + float pcrNew = pcrFromBuf(); + if (pcrNew < npt) { + pcrLeft = pcrNew; + ixLeft = ixNew; + } else { + pcrRight = pcrNew; + ixRight = ixNew; + } + } + if (ixRight-ixLeft > 1 || npt <= pcrLeft || npt > pcrRight) break; // bad PCR values in index file? + + ixFound = ixRight; + // "Rewind' until we reach the start of a Video Sequence or GOP header: + success = rewindToCleanPoint(ixFound); + } while (0); + + if (success && readIndexRecord(ixFound)) { + // Return (and cache) information from record "ixFound": + npt = fCachedPCR = pcrFromBuf(); + tsPacketNumber = fCachedTSPacketNumber = tsPacketNumFromBuf(); + indexRecordNumber = fCachedIndexRecordNumber = ixFound; + } else { + // An error occurred: Return the default values, for npt == 0: + npt = 0.0f; + tsPacketNumber = indexRecordNumber = 0; + } + closeFid(); +} + +void MPEG2TransportStreamIndexFile +::lookupPCRFromTSPacketNum(unsigned long& tsPacketNumber, Boolean reverseToPreviousCleanPoint, + float& pcr, unsigned long& indexRecordNumber) { + if (tsPacketNumber == 0 || fNumIndexRecords == 0) { // Fast-track a common case: + pcr = 0.0f; + indexRecordNumber = 0; + return; + } + + // If "tsPacketNumber" is the same as the one that we last looked up, return its cached result: + if (tsPacketNumber == fCachedTSPacketNumber) { + pcr = fCachedPCR; + 
indexRecordNumber = fCachedIndexRecordNumber; + return; + } + + // Search for the pair of neighboring index records whose TS packet #s span "tsPacketNumber". + // Use the 'regula-falsi' method. + Boolean success = False; + unsigned long ixFound = 0; + do { + unsigned long ixLeft = 0, ixRight = fNumIndexRecords-1; + unsigned long tsLeft = 0, tsRight; + if (!readIndexRecord(ixRight)) break; + tsRight = tsPacketNumFromBuf(); + if (tsPacketNumber > tsRight) tsPacketNumber = tsRight; + // handle "tsPacketNumber" too large by seeking to the last frame of the file + + while (ixRight-ixLeft > 1 && tsLeft < tsPacketNumber && tsPacketNumber <= tsRight) { + unsigned long ixNew = ixLeft + + (unsigned long)(((tsPacketNumber-tsLeft)/(tsRight-tsLeft))*(ixRight-ixLeft)); + if (ixNew == ixLeft || ixNew == ixRight) { + // Use bisection instead: + ixNew = (ixLeft+ixRight)/2; + } + if (!readIndexRecord(ixNew)) break; + unsigned long tsNew = tsPacketNumFromBuf(); + if (tsNew < tsPacketNumber) { + tsLeft = tsNew; + ixLeft = ixNew; + } else { + tsRight = tsNew; + ixRight = ixNew; + } + } + if (ixRight-ixLeft > 1 || tsPacketNumber <= tsLeft || tsPacketNumber > tsRight) break; // bad PCR values in index file? 
+ + ixFound = ixRight; + if (reverseToPreviousCleanPoint) { + // "Rewind' until we reach the start of a Video Sequence or GOP header: + success = rewindToCleanPoint(ixFound); + } else { + success = True; + } + } while (0); + + if (success && readIndexRecord(ixFound)) { + // Return (and cache) information from record "ixFound": + pcr = fCachedPCR = pcrFromBuf(); + fCachedTSPacketNumber = tsPacketNumFromBuf(); + if (reverseToPreviousCleanPoint) tsPacketNumber = fCachedTSPacketNumber; + indexRecordNumber = fCachedIndexRecordNumber = ixFound; + } else { + // An error occurred: Return the default values, for tsPacketNumber == 0: + pcr = 0.0f; + indexRecordNumber = 0; + } + closeFid(); +} + +Boolean MPEG2TransportStreamIndexFile +::readIndexRecordValues(unsigned long indexRecordNum, + unsigned long& transportPacketNum, u_int8_t& offset, + u_int8_t& size, float& pcr, u_int8_t& recordType) { + if (!readIndexRecord(indexRecordNum)) return False; + + transportPacketNum = tsPacketNumFromBuf(); + offset = offsetFromBuf(); + size = sizeFromBuf(); + pcr = pcrFromBuf(); + recordType = recordTypeFromBuf(); + return True; +} + +float MPEG2TransportStreamIndexFile::getPlayingDuration() { + if (fNumIndexRecords == 0 || !readOneIndexRecord(fNumIndexRecords-1)) return 0.0f; + + return pcrFromBuf(); +} + +int MPEG2TransportStreamIndexFile::mpegVersion() { + if (fMPEGVersion != 0) return fMPEGVersion; // we already know it + + // Read the first index record, and figure out the MPEG version from its type: + if (!readOneIndexRecord(0)) return 0; // unknown; perhaps the indecx file is empty? 
+ + setMPEGVersionFromRecordType(recordTypeFromBuf()); + return fMPEGVersion; +} + +Boolean MPEG2TransportStreamIndexFile::openFid() { + if (fFid == NULL && fFileName != NULL) { + if ((fFid = OpenInputFile(envir(), fFileName)) != NULL) { + fCurrentIndexRecordNum = 0; + } + } + + return fFid != NULL; +} + +Boolean MPEG2TransportStreamIndexFile::seekToIndexRecord(unsigned long indexRecordNumber) { + if (!openFid()) return False; + + if (indexRecordNumber == fCurrentIndexRecordNum) return True; // we're already there + + if (SeekFile64(fFid, (int64_t)(indexRecordNumber*INDEX_RECORD_SIZE), SEEK_SET) != 0) return False; + fCurrentIndexRecordNum = indexRecordNumber; + return True; +} + +Boolean MPEG2TransportStreamIndexFile::readIndexRecord(unsigned long indexRecordNum) { + do { + if (!seekToIndexRecord(indexRecordNum)) break; + if (fread(fBuf, INDEX_RECORD_SIZE, 1, fFid) != 1) break; + ++fCurrentIndexRecordNum; + + return True; + } while (0); + + return False; // an error occurred +} + +Boolean MPEG2TransportStreamIndexFile::readOneIndexRecord(unsigned long indexRecordNum) { + Boolean result = readIndexRecord(indexRecordNum); + closeFid(); + + return result; +} + +void MPEG2TransportStreamIndexFile::closeFid() { + if (fFid != NULL) { + CloseInputFile(fFid); + fFid = NULL; + } +} + +float MPEG2TransportStreamIndexFile::pcrFromBuf() { + unsigned pcr_int = (fBuf[5]<<16) | (fBuf[4]<<8) | fBuf[3]; + u_int8_t pcr_frac = fBuf[6]; + return pcr_int + pcr_frac/256.0f; +} + +unsigned long MPEG2TransportStreamIndexFile::tsPacketNumFromBuf() { + return (fBuf[10]<<24) | (fBuf[9]<<16) | (fBuf[8]<<8) | fBuf[7]; +} + +void MPEG2TransportStreamIndexFile::setMPEGVersionFromRecordType(u_int8_t recordType) { + if (fMPEGVersion != 0) return; // we already know it + + u_int8_t const recordTypeWithoutStartBit = recordType&~0x80; + if (recordTypeWithoutStartBit >= 1 && recordTypeWithoutStartBit <= 4) fMPEGVersion = 2; + else if (recordTypeWithoutStartBit >= 5 && recordTypeWithoutStartBit <= 10) 
fMPEGVersion = 5; + // represents H.264 + else if (recordTypeWithoutStartBit >= 11 && recordTypeWithoutStartBit <= 16) fMPEGVersion = 6; + // represents H.265 +} + +Boolean MPEG2TransportStreamIndexFile::rewindToCleanPoint(unsigned long&ixFound) { + Boolean success = False; // until we learn otherwise + + while (ixFound > 0) { + if (!readIndexRecord(ixFound)) break; + + u_int8_t recordType = recordTypeFromBuf(); + setMPEGVersionFromRecordType(recordType); + + // A 'clean point' is the start of a 'frame' from which a decoder can cleanly resume + // handling the stream. For H.264, this is a SPS. For H.265, this is a VPS. + // For MPEG-2, this is a Video Sequence Header, or a GOP. + + if ((recordType&0x80) != 0) { // This is the start of a 'frame' + recordType &=~ 0x80; // remove the 'start of frame' bit + if (fMPEGVersion == 5) { // H.264 + if (recordType == 5/*SPS*/) { + success = True; + break; + } + } else if (fMPEGVersion == 6) { // H.265 + if (recordType == 11/*VPS*/) { + success = True; + break; + } + } else { // MPEG-1, 2, or 4 + if (recordType == 1/*VSH*/) { + success = True; + break; + } else if (recordType == 2/*GOP*/) { + // Hack: If the preceding record is for a Video Sequence Header, then use it instead: + unsigned long newIxFound = ixFound; + + while (--newIxFound > 0) { + if (!readIndexRecord(newIxFound)) break; + recordType = recordTypeFromBuf(); + if ((recordType&0x7F) != 1) break; // not a Video Sequence Header + if ((recordType&0x80) != 0) { // this is the start of the VSH; use it + ixFound = newIxFound; + break; + } + } + } + success = True; + break; + } + } + + // Keep checking, from the previous record: + --ixFound; + } + if (ixFound == 0) success = True; // use record 0 anyway + + return success; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamMultiplexor.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamMultiplexor.cpp new file mode 100644 index 0000000..ce2b5fc --- /dev/null +++ 
b/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamMultiplexor.cpp @@ -0,0 +1,441 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A class for generating MPEG-2 Transport Stream from one or more input +// Elementary Stream data sources +// Implementation + +#include "MPEG2TransportStreamMultiplexor.hh" + +#define TRANSPORT_PACKET_SIZE 188 + +#define PAT_PERIOD 100 // # of packets between Program Association Tables +#define PMT_PERIOD 500 // # of packets between Program Map Tables + +#define PID_TABLE_SIZE 256 + +MPEG2TransportStreamMultiplexor +::MPEG2TransportStreamMultiplexor(UsageEnvironment& env) + : FramedSource(env), + fHaveVideoStreams(True/*by default*/), + fOutgoingPacketCounter(0), fProgramMapVersion(0), + fPreviousInputProgramMapVersion(0xFF), fCurrentInputProgramMapVersion(0xFF), + fPCR_PID(0), fCurrentPID(0), + fInputBuffer(NULL), fInputBufferSize(0), fInputBufferBytesUsed(0), + fIsFirstAdaptationField(True) { + for (unsigned i = 0; i < PID_TABLE_SIZE; ++i) { + fPIDState[i].counter = 0; + fPIDState[i].streamType = 0; + } +} + +MPEG2TransportStreamMultiplexor::~MPEG2TransportStreamMultiplexor() { +} + +void MPEG2TransportStreamMultiplexor::doGetNextFrame() { + if 
(fInputBufferBytesUsed >= fInputBufferSize) { + // No more bytes are available from the current buffer. + // Arrange to read a new one. + awaitNewBuffer(fInputBuffer); + return; + } + + do { + // Periodically return a Program Association Table packet instead: + if (fOutgoingPacketCounter++ % PAT_PERIOD == 0) { + deliverPATPacket(); + break; + } + + // Periodically (or when we see a new PID) return a Program Map Table instead: + Boolean programMapHasChanged = fPIDState[fCurrentPID].counter == 0 + || fCurrentInputProgramMapVersion != fPreviousInputProgramMapVersion; + if (fOutgoingPacketCounter % PMT_PERIOD == 0 || programMapHasChanged) { + if (programMapHasChanged) { // reset values for next time: + fPIDState[fCurrentPID].counter = 1; + fPreviousInputProgramMapVersion = fCurrentInputProgramMapVersion; + } + deliverPMTPacket(programMapHasChanged); + break; + } + + // Normal case: Deliver (or continue delivering) the recently-read data: + deliverDataToClient(fCurrentPID, fInputBuffer, fInputBufferSize, + fInputBufferBytesUsed); + } while (0); + + // NEED TO SET fPresentationTime, durationInMicroseconds ##### + // Complete the delivery to the client: + if ((fOutgoingPacketCounter%10) == 0) { + // To avoid excessive recursion (and stack overflow) caused by excessively large input frames, + // occasionally return to the event loop to do this: + envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)FramedSource::afterGetting, this); + } else { + afterGetting(this); + } +} + +void MPEG2TransportStreamMultiplexor +::handleNewBuffer(unsigned char* buffer, unsigned bufferSize, + int mpegVersion, MPEG1or2Demux::SCR scr, int16_t PID) { + if (bufferSize < 4) return; + fInputBuffer = buffer; + fInputBufferSize = bufferSize; + fInputBufferBytesUsed = 0; + + u_int8_t stream_id = fInputBuffer[3]; + // Use "stream_id" directly as our PID. + // Also, figure out the Program Map 'stream type' from this. 
+ if (stream_id == 0xBE) { // padding_stream; ignore + fInputBufferSize = 0; + } else if (stream_id == 0xBC) { // program_stream_map + setProgramStreamMap(fInputBufferSize); + fInputBufferSize = 0; // then, ignore the buffer + } else { + if (PID == -1) + fCurrentPID = stream_id; + else + fCurrentPID = (u_int8_t)PID; + + // Set the stream's type: + u_int8_t& streamType = fPIDState[fCurrentPID].streamType; // alias + + if (streamType == 0) { + // Instead, set the stream's type to default values, based on whether + // the stream is audio or video, and whether it's MPEG-1 or MPEG-2: + if ((stream_id&0xF0) == 0xE0) { // video + streamType = mpegVersion == 1 ? 1 : mpegVersion == 2 ? 2 : mpegVersion == 4 ? 0x10 : + mpegVersion == 5/*H.264*/ ? 0x1B : 0x24/*assume H.265*/; + } else if ((stream_id&0xE0) == 0xC0) { // audio + streamType = mpegVersion == 1 ? 3 : mpegVersion == 2 ? 4 : 0xF; + } else if (stream_id == 0xBD) { // private_stream1 (usually AC-3) + streamType = 0x06; // for DVB; for ATSC, use 0x81 + } else { // something else, e.g., AC-3 uses private_stream1 (0xBD) + streamType = 0x81; // private + } + } + + if (fPCR_PID == 0) { // set it to this stream, if it's appropriate: + if ((!fHaveVideoStreams && (streamType == 3 || streamType == 4 || streamType == 0xF))/* audio stream */ || + (streamType == 1 || streamType == 2 || streamType == 0x10 || streamType == 0x1B || streamType == 0x24)/* video stream */) { + fPCR_PID = fCurrentPID; // use this stream's SCR for PCR + } + } + if (fCurrentPID == fPCR_PID) { + // Record the input's current SCR timestamp, for use as our PCR: + fPCR = scr; + } + } + + // Now that we have new input data, retry the last delivery to the client: + doGetNextFrame(); +} + +void MPEG2TransportStreamMultiplexor +::deliverDataToClient(u_int8_t pid, unsigned char* buffer, unsigned bufferSize, + unsigned& startPositionInBuffer) { + // Construct a new Transport packet, and deliver it to the client: + if (fMaxSize < TRANSPORT_PACKET_SIZE) { + fFrameSize 
= 0; // the client hasn't given us enough space; deliver nothing + fNumTruncatedBytes = TRANSPORT_PACKET_SIZE; + } else { + fFrameSize = TRANSPORT_PACKET_SIZE; + Boolean willAddPCR = pid == fPCR_PID && startPositionInBuffer == 0 + && !(fPCR.highBit == 0 && fPCR.remainingBits == 0 && fPCR.extension == 0); + unsigned const numBytesAvailable = bufferSize - startPositionInBuffer; + unsigned numHeaderBytes = 4; // by default + unsigned numPCRBytes = 0; // by default + unsigned numPaddingBytes = 0; // by default + unsigned numDataBytes; + u_int8_t adaptation_field_control; + if (willAddPCR) { + adaptation_field_control = 0x30; + numHeaderBytes += 2; // for the "adaptation_field_length" and flags + numPCRBytes = 6; + if (numBytesAvailable >= TRANSPORT_PACKET_SIZE - numHeaderBytes - numPCRBytes) { + numDataBytes = TRANSPORT_PACKET_SIZE - numHeaderBytes - numPCRBytes; + } else { + numDataBytes = numBytesAvailable; + numPaddingBytes + = TRANSPORT_PACKET_SIZE - numHeaderBytes - numPCRBytes - numDataBytes; + } + } else if (numBytesAvailable >= TRANSPORT_PACKET_SIZE - numHeaderBytes) { + // This is the common case + adaptation_field_control = 0x10; + numDataBytes = TRANSPORT_PACKET_SIZE - numHeaderBytes; + } else { + adaptation_field_control = 0x30; + ++numHeaderBytes; // for the "adaptation_field_length" + // ASSERT: numBytesAvailable <= TRANSPORT_PACKET_SIZE - numHeaderBytes + numDataBytes = numBytesAvailable; + if (numDataBytes < TRANSPORT_PACKET_SIZE - numHeaderBytes) { + ++numHeaderBytes; // for the adaptation field flags + numPaddingBytes = TRANSPORT_PACKET_SIZE - numHeaderBytes - numDataBytes; + } + } + // ASSERT: numHeaderBytes+numPCRBytes+numPaddingBytes+numDataBytes + // == TRANSPORT_PACKET_SIZE + + // Fill in the header of the Transport Stream packet: + unsigned char* header = fTo; + *header++ = 0x47; // sync_byte + *header++ = (startPositionInBuffer == 0) ? 
0x40 : 0x00; + // transport_error_indicator, payload_unit_start_indicator, transport_priority, + // first 5 bits of PID + *header++ = pid; + // last 8 bits of PID + unsigned& continuity_counter = fPIDState[pid].counter; // alias + *header++ = adaptation_field_control|(continuity_counter&0x0F); + // transport_scrambling_control, adaptation_field_control, continuity_counter + ++continuity_counter; + if (adaptation_field_control == 0x30) { + // Add an adaptation field: + u_int8_t adaptation_field_length + = (numHeaderBytes == 5) ? 0 : 1 + numPCRBytes + numPaddingBytes; + *header++ = adaptation_field_length; + if (numHeaderBytes > 5) { + u_int8_t flags = willAddPCR ? 0x10 : 0x00; + if (fIsFirstAdaptationField) { + flags |= 0x80; // discontinuity_indicator + fIsFirstAdaptationField = False; + } + *header++ = flags; + if (willAddPCR) { + u_int32_t pcrHigh32Bits = (fPCR.highBit<<31) | (fPCR.remainingBits>>1); + u_int8_t pcrLowBit = fPCR.remainingBits&1; + u_int8_t extHighBit = (fPCR.extension&0x100)>>8; + *header++ = pcrHigh32Bits>>24; + *header++ = pcrHigh32Bits>>16; + *header++ = pcrHigh32Bits>>8; + *header++ = pcrHigh32Bits; + *header++ = (pcrLowBit<<7)|0x7E|extHighBit; + *header++ = (u_int8_t)fPCR.extension; // low 8 bits of extension + } + } + } + + // Add any padding bytes: + for (unsigned i = 0; i < numPaddingBytes; ++i) *header++ = 0xFF; + + // Finally, add the data bytes: + memmove(header, &buffer[startPositionInBuffer], numDataBytes); + startPositionInBuffer += numDataBytes; + } +} + +#define PAT_PID 0 +#ifndef OUR_PROGRAM_NUMBER +#define OUR_PROGRAM_NUMBER 1 +#endif +#define OUR_PROGRAM_MAP_PID 0x30 + +void MPEG2TransportStreamMultiplexor::deliverPATPacket() { + // First, create a new buffer for the PAT packet: + unsigned const patSize = TRANSPORT_PACKET_SIZE - 4; // allow for the 4-byte header + unsigned char* patBuffer = new unsigned char[patSize]; + + // and fill it in: + unsigned char* pat = patBuffer; + *pat++ = 0; // pointer_field + *pat++ = 0; // 
table_id + *pat++ = 0xB0; // section_syntax_indicator; 0; reserved, section_length (high) + *pat++ = 13; // section_length (low) + *pat++ = 0; *pat++ = 1; // transport_stream_id + *pat++ = 0xC3; // reserved; version_number; current_next_indicator + *pat++ = 0; // section_number + *pat++ = 0; // last_section_number + *pat++ = OUR_PROGRAM_NUMBER>>8; *pat++ = OUR_PROGRAM_NUMBER; // program_number + *pat++ = 0xE0|(OUR_PROGRAM_MAP_PID>>8); // reserved; program_map_PID (high) + *pat++ = OUR_PROGRAM_MAP_PID; // program_map_PID (low) + + // Compute the CRC from the bytes we currently have (not including "pointer_field"): + u_int32_t crc = calculateCRC(patBuffer+1, pat - (patBuffer+1)); + *pat++ = crc>>24; *pat++ = crc>>16; *pat++ = crc>>8; *pat++ = crc; + + // Fill in the rest of the packet with padding bytes: + while (pat < &patBuffer[patSize]) *pat++ = 0xFF; + + // Deliver the packet: + unsigned startPosition = 0; + deliverDataToClient(PAT_PID, patBuffer, patSize, startPosition); + + // Finally, remove the new buffer: + delete[] patBuffer; +} + +void MPEG2TransportStreamMultiplexor::deliverPMTPacket(Boolean hasChanged) { + if (hasChanged) ++fProgramMapVersion; + + // First, create a new buffer for the PMT packet: + unsigned const pmtSize = TRANSPORT_PACKET_SIZE - 4; // allow for the 4-byte header + unsigned char* pmtBuffer = new unsigned char[pmtSize]; + + // and fill it in: + unsigned char* pmt = pmtBuffer; + *pmt++ = 0; // pointer_field + *pmt++ = 2; // table_id + *pmt++ = 0xB0; // section_syntax_indicator; 0; reserved, section_length (high) + unsigned char* section_lengthPtr = pmt; // save for later + *pmt++ = 0; // section_length (low) (fill in later) + *pmt++ = OUR_PROGRAM_NUMBER>>8; *pmt++ = OUR_PROGRAM_NUMBER; // program_number + *pmt++ = 0xC1|((fProgramMapVersion&0x1F)<<1); // reserved; version_number; current_next_indicator + *pmt++ = 0; // section_number + *pmt++ = 0; // last_section_number + *pmt++ = 0xE0; // reserved; PCR_PID (high) + *pmt++ = fPCR_PID; // 
PCR_PID (low) + *pmt++ = 0xF0; // reserved; program_info_length (high) + *pmt++ = 0; // program_info_length (low) + for (int pid = 0; pid < PID_TABLE_SIZE; ++pid) { + if (fPIDState[pid].streamType != 0) { + // This PID gets recorded in the table + *pmt++ = fPIDState[pid].streamType; + *pmt++ = 0xE0; // reserved; elementary_pid (high) + *pmt++ = pid; // elementary_pid (low) + *pmt++ = 0xF0; // reserved; ES_info_length (high) + *pmt++ = 0; // ES_info_length (low) + } + } + unsigned section_length = pmt - (section_lengthPtr+1) + 4 /*for CRC*/; + *section_lengthPtr = section_length; + + // Compute the CRC from the bytes we currently have (not including "pointer_field"): + u_int32_t crc = calculateCRC(pmtBuffer+1, pmt - (pmtBuffer+1)); + *pmt++ = crc>>24; *pmt++ = crc>>16; *pmt++ = crc>>8; *pmt++ = crc; + + // Fill in the rest of the packet with padding bytes: + while (pmt < &pmtBuffer[pmtSize]) *pmt++ = 0xFF; + + // Deliver the packet: + unsigned startPosition = 0; + deliverDataToClient(OUR_PROGRAM_MAP_PID, pmtBuffer, pmtSize, startPosition); + + // Finally, remove the new buffer: + delete[] pmtBuffer; +} + +void MPEG2TransportStreamMultiplexor::setProgramStreamMap(unsigned frameSize) { + if (frameSize <= 16) return; // program_stream_map is too small to be useful + if (frameSize > 0xFF) return; // program_stream_map is too large + + u_int16_t program_stream_map_length = (fInputBuffer[4]<<8) | fInputBuffer[5]; + if ((u_int16_t)frameSize > 6+program_stream_map_length) { + frameSize = 6+program_stream_map_length; + } + + u_int8_t versionByte = fInputBuffer[6]; + if ((versionByte&0x80) == 0) return; // "current_next_indicator" is not set + fCurrentInputProgramMapVersion = versionByte&0x1F; + + u_int16_t program_stream_info_length = (fInputBuffer[8]<<8) | fInputBuffer[9]; + unsigned offset = 10 + program_stream_info_length; // skip over 'descriptors' + + u_int16_t elementary_stream_map_length + = (fInputBuffer[offset]<<8) | fInputBuffer[offset+1]; + offset += 2; + 
frameSize -= 4; // sizeof CRC_32 + if (frameSize > offset + elementary_stream_map_length) { + frameSize = offset + elementary_stream_map_length; + } + + while (offset + 4 <= frameSize) { + u_int8_t stream_type = fInputBuffer[offset]; + u_int8_t elementary_stream_id = fInputBuffer[offset+1]; + + fPIDState[elementary_stream_id].streamType = stream_type; + + u_int16_t elementary_stream_info_length + = (fInputBuffer[offset+2]<<8) | fInputBuffer[offset+3]; + offset += 4 + elementary_stream_info_length; + } +} + +static u_int32_t const CRC32[256] = { + 0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9, + 0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005, + 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61, + 0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, + 0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9, + 0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75, + 0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011, + 0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd, + 0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039, + 0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5, + 0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81, + 0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d, + 0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49, + 0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95, + 0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1, + 0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d, + 0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae, + 0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072, + 0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16, + 0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca, + 0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde, + 0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02, + 0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066, + 0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba, + 0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e, + 0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692, + 0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6, + 0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a, + 0xe0b41de7, 0xe4750050, 0xe9362689, 
0xedf73b3e, + 0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2, + 0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686, + 0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a, + 0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637, + 0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb, + 0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f, + 0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53, + 0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47, + 0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b, + 0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff, + 0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623, + 0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7, + 0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b, + 0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f, + 0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3, + 0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7, + 0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b, + 0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f, + 0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3, + 0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640, + 0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c, + 0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8, + 0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24, + 0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30, + 0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec, + 0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088, + 0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654, + 0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0, + 0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c, + 0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18, + 0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4, + 0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0, + 0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c, + 0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668, + 0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4 +}; + +u_int32_t calculateCRC(u_int8_t const* data, unsigned dataLength, u_int32_t initialValue) { + u_int32_t crc = initialValue; + + while (dataLength-- > 0) { + crc = (crc<<8) ^ CRC32[(crc>>24) ^ (u_int32_t)(*data++)]; + } + + return 
crc; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamTrickModeFilter.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamTrickModeFilter.cpp new file mode 100644 index 0000000..f0a718e --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG2TransportStreamTrickModeFilter.cpp @@ -0,0 +1,266 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that converts a MPEG Transport Stream file - with corresponding index file +// - to a corresponding Video Elementary Stream. It also uses a "scale" parameter +// to implement 'trick mode' (fast forward or reverse play, using I-frames) on +// the video stream. +// Implementation + +#include "MPEG2TransportStreamTrickModeFilter.hh" +#include + +// Define the following to be True if we want the output file to have the same frame rate as the original file. +// (Because the output file contains I-frames only, this means that each I-frame will appear in the output file +// several times, and therefore the output file's bitrate will be significantly higher than that of the original.) +// Define the following to be False if we want the output file to include each I-frame no more than once. 
+// (This means that - except for high 'scale' values - both the output frame rate and the output bit rate +// will be less than that of the original.) +#define KEEP_ORIGINAL_FRAME_RATE False + +MPEG2TransportStreamTrickModeFilter* MPEG2TransportStreamTrickModeFilter +::createNew(UsageEnvironment& env, FramedSource* inputSource, + MPEG2TransportStreamIndexFile* indexFile, int scale) { + return new MPEG2TransportStreamTrickModeFilter(env, inputSource, indexFile, scale); +} + +MPEG2TransportStreamTrickModeFilter +::MPEG2TransportStreamTrickModeFilter(UsageEnvironment& env, FramedSource* inputSource, + MPEG2TransportStreamIndexFile* indexFile, int scale) + : FramedFilter(env, inputSource), + fHaveStarted(False), fIndexFile(indexFile), fScale(scale), fDirection(1), + fState(SKIPPING_FRAME), fFrameCount(0), + fNextIndexRecordNum(0), fNextTSPacketNum(0), + fCurrentTSPacketNum((unsigned long)(-1)), fUseSavedFrameNextTime(False) { + if (fScale < 0) { // reverse play + fScale = -fScale; + fDirection = -1; + } +} + +MPEG2TransportStreamTrickModeFilter::~MPEG2TransportStreamTrickModeFilter() { +} + +Boolean MPEG2TransportStreamTrickModeFilter::seekTo(unsigned long tsPacketNumber, + unsigned long indexRecordNumber) { + seekToTransportPacket(tsPacketNumber); + fNextIndexRecordNum = indexRecordNumber; + return True; +} + +#define isIFrameStart(type) ((type) == 0x81/*actually, a VSH*/ || (type) == 0x85/*actually, a SPS, for H.264*/ || (type) == 0x8B/*actually, a VPS, for H.265*/) + // This relies upon I-frames always being preceded by a VSH+GOP (for MPEG-2 data), + // by a SPS (for H.264 data), or by a VPS (for H.265 data) +#define isNonIFrameStart(type) ((type) == 0x83 || (type) == 0x88/*for H.264*/ || (type) == 0x8E/*for H.265*/) + +void MPEG2TransportStreamTrickModeFilter::doGetNextFrame() { + // fprintf(stderr, "#####DGNF1\n"); + // If our client's buffer size is too small, then deliver + // a 0-byte 'frame', to tell it to process all of the data that it has + // already 
read, before asking for more data from us: + if (fMaxSize < TRANSPORT_PACKET_SIZE) { + fFrameSize = 0; + afterGetting(this); + return; + } + + while (1) { + // Get the next record from our index file. + // This tells us the type of frame this data is, which Transport Stream packet + // (from the input source) the data comes from, and where in the Transport Stream + // packet it comes from: + u_int8_t recordType; + float recordPCR; + Boolean endOfIndexFile = False; + if (!fIndexFile->readIndexRecordValues(fNextIndexRecordNum, + fDesiredTSPacketNum, fDesiredDataOffset, + fDesiredDataSize, recordPCR, + recordType)) { + // We ran off the end of the index file. If we're not delivering a + // pre-saved frame, then handle this the same way as if the + // input Transport Stream source ended. + if (fState != DELIVERING_SAVED_FRAME) { + onSourceClosure1(); + return; + } + endOfIndexFile = True; + } else if (!fHaveStarted) { + fFirstPCR = recordPCR; + fHaveStarted = True; + } + // fprintf(stderr, "#####read index record %ld: ts %ld: %c, PCR %f\n", fNextIndexRecordNum, fDesiredTSPacketNum, isIFrameStart(recordType) ? 'I' : isNonIFrameStart(recordType) ? 'j' : 'x', recordPCR); + fNextIndexRecordNum + += (fState == DELIVERING_SAVED_FRAME) ? 1 : fDirection; + + // Handle this index record, depending on the record type and our current state: + switch (fState) { + case SKIPPING_FRAME: + case SAVING_AND_DELIVERING_FRAME: { + // if (fState == SKIPPING_FRAME) fprintf(stderr, "\tSKIPPING_FRAME\n"); else fprintf(stderr, "\tSAVING_AND_DELIVERING_FRAME\n");//##### + if (isIFrameStart(recordType)) { + // Save a record of this frame: + fSavedFrameIndexRecordStart = fNextIndexRecordNum - fDirection; + fUseSavedFrameNextTime = True; + // fprintf(stderr, "\trecording\n");//##### + if ((fFrameCount++)%fScale == 0 && fUseSavedFrameNextTime) { + // A frame is due now. 
+ fFrameCount = 1; // reset to avoid overflow + if (fDirection > 0) { + // Begin delivering this frame, as we're scanning it: + fState = SAVING_AND_DELIVERING_FRAME; + // fprintf(stderr, "\tdelivering\n");//##### + fDesiredDataPCR = recordPCR; // use this frame's PCR + attemptDeliveryToClient(); + return; + } else { + // Deliver this frame, then resume normal scanning: + // (This relies on the index records having begun with an I-frame.) + fState = DELIVERING_SAVED_FRAME; + fSavedSequentialIndexRecordNum = fNextIndexRecordNum; + fDesiredDataPCR = recordPCR; + // use this frame's (not the saved frame's) PCR + fNextIndexRecordNum = fSavedFrameIndexRecordStart; + // fprintf(stderr, "\tbeginning delivery of saved frame\n");//##### + } + } else { + // No frame is needed now: + fState = SKIPPING_FRAME; + } + } else if (isNonIFrameStart(recordType)) { + if ((fFrameCount++)%fScale == 0 && fUseSavedFrameNextTime) { + // A frame is due now, so begin delivering the one that we had saved: + // (This relies on the index records having begun with an I-frame.) 
+ fFrameCount = 1; // reset to avoid overflow + fState = DELIVERING_SAVED_FRAME; + fSavedSequentialIndexRecordNum = fNextIndexRecordNum; + fDesiredDataPCR = recordPCR; + // use this frame's (not the saved frame's) PCR + fNextIndexRecordNum = fSavedFrameIndexRecordStart; + // fprintf(stderr, "\tbeginning delivery of saved frame\n");//##### + } else { + // No frame is needed now: + fState = SKIPPING_FRAME; + } + } else { + // Not the start of a frame, but deliver it, if it's needed: + if (fState == SAVING_AND_DELIVERING_FRAME) { + // fprintf(stderr, "\tdelivering\n");//##### + fDesiredDataPCR = recordPCR; // use this frame's PCR + attemptDeliveryToClient(); + return; + } + } + break; + } + case DELIVERING_SAVED_FRAME: { + // fprintf(stderr, "\tDELIVERING_SAVED_FRAME\n");//##### + if (endOfIndexFile + || (isIFrameStart(recordType) + && fNextIndexRecordNum-1 != fSavedFrameIndexRecordStart) + || isNonIFrameStart(recordType)) { + // fprintf(stderr, "\tended delivery of saved frame\n");//##### + // We've reached the end of the saved frame, so revert to the + // original sequence of index records: + fNextIndexRecordNum = fSavedSequentialIndexRecordNum; + fUseSavedFrameNextTime = KEEP_ORIGINAL_FRAME_RATE; + fState = SKIPPING_FRAME; + } else { + // Continue delivering: + // fprintf(stderr, "\tdelivering\n");//##### + attemptDeliveryToClient(); + return; + } + break; + } + } + } +} + +void MPEG2TransportStreamTrickModeFilter::doStopGettingFrames() { + FramedFilter::doStopGettingFrames(); + fIndexFile->stopReading(); +} + +void MPEG2TransportStreamTrickModeFilter::attemptDeliveryToClient() { + if (fCurrentTSPacketNum == fDesiredTSPacketNum) { + // fprintf(stderr, "\t\tdelivering ts %d:%d, %d bytes, PCR %f\n", fCurrentTSPacketNum, fDesiredDataOffset, fDesiredDataSize, fDesiredDataPCR);//##### + // We already have the Transport Packet that we want. 
Deliver its data: + memmove(fTo, &fInputBuffer[fDesiredDataOffset], fDesiredDataSize); + fFrameSize = fDesiredDataSize; + float deliveryPCR = fDirection*(fDesiredDataPCR - fFirstPCR)/fScale; + if (deliveryPCR < 0.0) deliveryPCR = 0.0; + fPresentationTime.tv_sec = (unsigned long)deliveryPCR; + fPresentationTime.tv_usec + = (unsigned long)((deliveryPCR - fPresentationTime.tv_sec)*1000000.0f); + // fprintf(stderr, "#####DGNF9\n"); + + afterGetting(this); + } else { + // Arrange to read the Transport Packet that we want: + readTransportPacket(fDesiredTSPacketNum); + } +} + +void MPEG2TransportStreamTrickModeFilter::seekToTransportPacket(unsigned long tsPacketNum) { + if (tsPacketNum == fNextTSPacketNum) return; // we're already there + + ByteStreamFileSource* tsFile = (ByteStreamFileSource*)fInputSource; + u_int64_t tsPacketNum64 = (u_int64_t)tsPacketNum; + tsFile->seekToByteAbsolute(tsPacketNum64*TRANSPORT_PACKET_SIZE); + + fNextTSPacketNum = tsPacketNum; +} + +void MPEG2TransportStreamTrickModeFilter::readTransportPacket(unsigned long tsPacketNum) { + seekToTransportPacket(tsPacketNum); + fInputSource->getNextFrame(fInputBuffer, TRANSPORT_PACKET_SIZE, + afterGettingFrame, this, + onSourceClosure, this); +} + +void MPEG2TransportStreamTrickModeFilter +::afterGettingFrame(void* clientData, unsigned frameSize, + unsigned /*numTruncatedBytes*/, + struct timeval presentationTime, + unsigned /*durationInMicroseconds*/) { + MPEG2TransportStreamTrickModeFilter* filter = (MPEG2TransportStreamTrickModeFilter*)clientData; + filter->afterGettingFrame1(frameSize); +} + +void MPEG2TransportStreamTrickModeFilter::afterGettingFrame1(unsigned frameSize) { + if (frameSize != TRANSPORT_PACKET_SIZE) { + // Treat this as if the input source ended: + onSourceClosure1(); + return; + } + + fCurrentTSPacketNum = fNextTSPacketNum; // i.e., the one that we just read + ++fNextTSPacketNum; + + // Attempt deliver again: + attemptDeliveryToClient(); +} + +void 
MPEG2TransportStreamTrickModeFilter::onSourceClosure(void* clientData) { + MPEG2TransportStreamTrickModeFilter* filter = (MPEG2TransportStreamTrickModeFilter*)clientData; + filter->onSourceClosure1(); +} + +void MPEG2TransportStreamTrickModeFilter::onSourceClosure1() { + fIndexFile->stopReading(); + handleClosure(); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG2TransportUDPServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG2TransportUDPServerMediaSubsession.cpp new file mode 100644 index 0000000..5c82b54 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG2TransportUDPServerMediaSubsession.cpp @@ -0,0 +1,75 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from an incoming UDP (or RTP/UDP) MPEG-2 Transport Stream +// Implementation + +#include "MPEG2TransportUDPServerMediaSubsession.hh" +#include "BasicUDPSource.hh" +#include "SimpleRTPSource.hh" +#include "MPEG2TransportStreamFramer.hh" +#include "SimpleRTPSink.hh" +#include "GroupsockHelper.hh" + + +MPEG2TransportUDPServerMediaSubsession* +MPEG2TransportUDPServerMediaSubsession::createNew(UsageEnvironment& env, + char const* inputAddressStr, Port const& inputPort, Boolean inputStreamIsRawUDP) { + return new MPEG2TransportUDPServerMediaSubsession(env, inputAddressStr, inputPort, inputStreamIsRawUDP); +} + +MPEG2TransportUDPServerMediaSubsession +::MPEG2TransportUDPServerMediaSubsession(UsageEnvironment& env, + char const* inputAddressStr, Port const& inputPort, Boolean inputStreamIsRawUDP) + : OnDemandServerMediaSubsession(env, True/*reuseFirstSource*/), + fInputPort(inputPort), fInputGroupsock(NULL), fInputStreamIsRawUDP(inputStreamIsRawUDP) { + fInputAddressStr = strDup(inputAddressStr); +} + +MPEG2TransportUDPServerMediaSubsession:: +~MPEG2TransportUDPServerMediaSubsession() { + delete fInputGroupsock; + delete[] (char*)fInputAddressStr; +} + +FramedSource* MPEG2TransportUDPServerMediaSubsession +::createNewStreamSource(unsigned/* clientSessionId*/, unsigned& estBitrate) { + estBitrate = 5000; // kbps, estimate + + if (fInputGroupsock == NULL) { + // Create a 'groupsock' object for receiving the input stream: + struct in_addr inputAddress; + inputAddress.s_addr = fInputAddressStr == NULL ? 
0 : our_inet_addr(fInputAddressStr); + fInputGroupsock = new Groupsock(envir(), inputAddress, fInputPort, 255); + } + + FramedSource* transportStreamSource; + if (fInputStreamIsRawUDP) { + transportStreamSource = BasicUDPSource::createNew(envir(), fInputGroupsock); + } else { + transportStreamSource = SimpleRTPSource::createNew(envir(), fInputGroupsock, 33, 90000, "video/MP2T", 0, False /*no 'M' bit*/); + } + return MPEG2TransportStreamFramer::createNew(envir(), transportStreamSource); +} + +RTPSink* MPEG2TransportUDPServerMediaSubsession +::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char /*rtpPayloadTypeIfDynamic*/, FramedSource* /*inputSource*/) { + return SimpleRTPSink::createNew(envir(), rtpGroupsock, + 33, 90000, "video", "MP2T", + 1, True, False /*no 'M' bit*/); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG4ESVideoRTPSink.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG4ESVideoRTPSink.cpp new file mode 100644 index 0000000..9f49863 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG4ESVideoRTPSink.cpp @@ -0,0 +1,142 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP sink for MPEG-4 Elementary Stream video (RFC 3016) +// Implementation + +#include "MPEG4ESVideoRTPSink.hh" +#include "MPEG4VideoStreamFramer.hh" +#include "MPEG4LATMAudioRTPSource.hh" // for "parseGeneralConfigStr()" + +MPEG4ESVideoRTPSink +::MPEG4ESVideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, u_int32_t rtpTimestampFrequency, + u_int8_t profileAndLevelIndication, char const* configStr) + : VideoRTPSink(env, RTPgs, rtpPayloadFormat, rtpTimestampFrequency, "MP4V-ES"), + fVOPIsPresent(False), fProfileAndLevelIndication(profileAndLevelIndication), fFmtpSDPLine(NULL) { + fConfigBytes = parseGeneralConfigStr(configStr, fNumConfigBytes); +} + +MPEG4ESVideoRTPSink::~MPEG4ESVideoRTPSink() { + delete[] fFmtpSDPLine; + delete[] fConfigBytes; +} + +MPEG4ESVideoRTPSink* +MPEG4ESVideoRTPSink::createNew(UsageEnvironment& env, + Groupsock* RTPgs, unsigned char rtpPayloadFormat, + u_int32_t rtpTimestampFrequency) { + return new MPEG4ESVideoRTPSink(env, RTPgs, rtpPayloadFormat, rtpTimestampFrequency); +} + +MPEG4ESVideoRTPSink* +MPEG4ESVideoRTPSink::createNew(UsageEnvironment& env, + Groupsock* RTPgs, unsigned char rtpPayloadFormat, u_int32_t rtpTimestampFrequency, + u_int8_t profileAndLevelIndication, char const* configStr) { + return new MPEG4ESVideoRTPSink(env, RTPgs, rtpPayloadFormat, rtpTimestampFrequency, profileAndLevelIndication, configStr); +} + +Boolean MPEG4ESVideoRTPSink::sourceIsCompatibleWithUs(MediaSource& source) { + // Our source must be an appropriate framer: + return source.isMPEG4VideoStreamFramer(); +} + +#define VOP_START_CODE 0x000001B6 + +void MPEG4ESVideoRTPSink +::doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes) { + if (fragmentationOffset == 0) { + // Begin by inspecting the 4-byte code at the start of the frame: + if (numBytesInFrame < 4) return; // shouldn't happen + unsigned 
startCode = (frameStart[0]<<24) | (frameStart[1]<<16) + | (frameStart[2]<<8) | frameStart[3]; + + fVOPIsPresent = startCode == VOP_START_CODE; + } + + // Set the RTP 'M' (marker) bit iff this frame ends a VOP + // (and there are no fragments remaining). + // This relies on the source being a "MPEG4VideoStreamFramer". + MPEG4VideoStreamFramer* framerSource = (MPEG4VideoStreamFramer*)fSource; + if (framerSource != NULL && framerSource->pictureEndMarker() + && numRemainingBytes == 0) { + setMarkerBit(); + framerSource->pictureEndMarker() = False; + } + + // Also set the RTP timestamp. (We do this for each frame + // in the packet, to ensure that the timestamp of the VOP (if present) + // gets used.) + setTimestamp(framePresentationTime); +} + +Boolean MPEG4ESVideoRTPSink::allowFragmentationAfterStart() const { + return True; +} + +Boolean MPEG4ESVideoRTPSink +::frameCanAppearAfterPacketStart(unsigned char const* /*frameStart*/, + unsigned /*numBytesInFrame*/) const { + // Once we've packed a VOP into the packet, then no other + // frame can be packed into it: + return !fVOPIsPresent; +} + +char const* MPEG4ESVideoRTPSink::auxSDPLine() { + // Generate a new "a=fmtp:" line each time, using our own 'configuration' information (if we have it), + // otherwise parameters from our framer source (in case they've changed since the last time that + // we were called): + unsigned configLength = fNumConfigBytes; + unsigned char* config = fConfigBytes; + if (fProfileAndLevelIndication == 0 || config == NULL) { + // We need to get this information from our framer source: + MPEG4VideoStreamFramer* framerSource = (MPEG4VideoStreamFramer*)fSource; + if (framerSource == NULL) return NULL; // we don't yet have a source + + fProfileAndLevelIndication = framerSource->profile_and_level_indication(); + if (fProfileAndLevelIndication == 0) return NULL; // our source isn't ready + + config = framerSource->getConfigBytes(configLength); + if (config == NULL) return NULL; // our source isn't 
ready + } + + char const* fmtpFmt = + "a=fmtp:%d " + "profile-level-id=%d;" + "config="; + unsigned fmtpFmtSize = strlen(fmtpFmt) + + 3 /* max char len */ + + 3 /* max char len */ + + 2*configLength /* 2*, because each byte prints as 2 chars */ + + 2 /* trailing \r\n */; + char* fmtp = new char[fmtpFmtSize]; + sprintf(fmtp, fmtpFmt, rtpPayloadType(), fProfileAndLevelIndication); + char* endPtr = &fmtp[strlen(fmtp)]; + for (unsigned i = 0; i < configLength; ++i) { + sprintf(endPtr, "%02X", config[i]); + endPtr += 2; + } + sprintf(endPtr, "\r\n"); + + delete[] fFmtpSDPLine; + fFmtpSDPLine = strDup(fmtp); + delete[] fmtp; + return fFmtpSDPLine; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG4ESVideoRTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG4ESVideoRTPSource.cpp new file mode 100644 index 0000000..07d6915 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG4ESVideoRTPSource.cpp @@ -0,0 +1,65 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// MP4V-ES video RTP stream sources +// Implementation + +#include "MPEG4ESVideoRTPSource.hh" + +///////// MPEG4ESVideoRTPSource implementation //////// + +//##### NOTE: INCOMPLETE!!! 
##### + +MPEG4ESVideoRTPSource* +MPEG4ESVideoRTPSource::createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) { + return new MPEG4ESVideoRTPSource(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency); +} + +MPEG4ESVideoRTPSource +::MPEG4ESVideoRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) + : MultiFramedRTPSource(env, RTPgs, + rtpPayloadFormat, rtpTimestampFrequency) { +} + +MPEG4ESVideoRTPSource::~MPEG4ESVideoRTPSource() { +} + +Boolean MPEG4ESVideoRTPSource +::processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize) { + // The packet begins a frame iff its data begins with a system code + // (i.e., 0x000001??) + fCurrentPacketBeginsFrame + = packet->dataSize() >= 4 && (packet->data())[0] == 0 + && (packet->data())[1] == 0 && (packet->data())[2] == 1; + + // The RTP "M" (marker) bit indicates the last fragment of a frame: + fCurrentPacketCompletesFrame = packet->rtpMarkerBit(); + + // There is no special header + resultSpecialHeaderSize = 0; + return True; +} + +char const* MPEG4ESVideoRTPSource::MIMEtype() const { + return "video/MP4V-ES"; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG4GenericRTPSink.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG4GenericRTPSink.cpp new file mode 100644 index 0000000..f1de0f3 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG4GenericRTPSink.cpp @@ -0,0 +1,142 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// MPEG4-GENERIC ("audio", "video", or "application") RTP stream sinks +// Implementation + +#include "MPEG4GenericRTPSink.hh" +#include "Locale.hh" +#include // needed on some systems to define "tolower()" + +MPEG4GenericRTPSink +::MPEG4GenericRTPSink(UsageEnvironment& env, Groupsock* RTPgs, + u_int8_t rtpPayloadFormat, + u_int32_t rtpTimestampFrequency, + char const* sdpMediaTypeString, + char const* mpeg4Mode, char const* configString, + unsigned numChannels) + : MultiFramedRTPSink(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency, "MPEG4-GENERIC", numChannels), + fSDPMediaTypeString(strDup(sdpMediaTypeString)), + fMPEG4Mode(strDup(mpeg4Mode)), fConfigString(strDup(configString)) { + // Check whether "mpeg4Mode" is one that we handle: + if (mpeg4Mode == NULL) { + env << "MPEG4GenericRTPSink error: NULL \"mpeg4Mode\" parameter\n"; + } else { + // To ease comparison, convert "mpeg4Mode" to lower case: + size_t const len = strlen(mpeg4Mode) + 1; + char* m = new char[len]; + + Locale l("POSIX"); + for (size_t i = 0; i < len; ++i) m[i] = tolower(mpeg4Mode[i]); + + if (strcmp(m, "aac-hbr") != 0) { + env << "MPEG4GenericRTPSink error: Unknown \"mpeg4Mode\" parameter: \"" << mpeg4Mode << "\"\n"; + } + delete[] m; + } + + // Set up the "a=fmtp:" SDP line for this stream: + char const* fmtpFmt = + "a=fmtp:%d " + "streamtype=%d;profile-level-id=1;" + "mode=%s;sizelength=13;indexlength=3;indexdeltalength=3;" + "config=%s\r\n"; + unsigned fmtpFmtSize = strlen(fmtpFmt) + + 3 /* max char len */ + + 3 /* max char len */ + + strlen(fMPEG4Mode) + + strlen(fConfigString); + char* fmtp = new char[fmtpFmtSize]; + sprintf(fmtp, fmtpFmt, + rtpPayloadType(), + 
strcmp(fSDPMediaTypeString, "video") == 0 ? 4 : 5, + fMPEG4Mode, + fConfigString); + fFmtpSDPLine = strDup(fmtp); + delete[] fmtp; +} + +MPEG4GenericRTPSink::~MPEG4GenericRTPSink() { + delete[] fFmtpSDPLine; + delete[] (char*)fConfigString; + delete[] (char*)fMPEG4Mode; + delete[] (char*)fSDPMediaTypeString; +} + +MPEG4GenericRTPSink* +MPEG4GenericRTPSink::createNew(UsageEnvironment& env, Groupsock* RTPgs, + u_int8_t rtpPayloadFormat, + u_int32_t rtpTimestampFrequency, + char const* sdpMediaTypeString, + char const* mpeg4Mode, + char const* configString, unsigned numChannels) { + return new MPEG4GenericRTPSink(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency, + sdpMediaTypeString, mpeg4Mode, + configString, numChannels); +} + +Boolean MPEG4GenericRTPSink +::frameCanAppearAfterPacketStart(unsigned char const* /*frameStart*/, + unsigned /*numBytesInFrame*/) const { + // (For now) allow at most 1 frame in a single packet: + return False; +} + +void MPEG4GenericRTPSink +::doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes) { + // Set the "AU Header Section". This is 4 bytes: 2 bytes for the + // initial "AU-headers-length" field, and 2 bytes for the first + // (and only) "AU Header": + unsigned fullFrameSize + = fragmentationOffset + numBytesInFrame + numRemainingBytes; + unsigned char headers[4]; + headers[0] = 0; headers[1] = 16 /* bits */; // AU-headers-length + headers[2] = fullFrameSize >> 5; headers[3] = (fullFrameSize&0x1F)<<3; + + setSpecialHeaderBytes(headers, sizeof headers); + + if (numRemainingBytes == 0) { + // This packet contains the last (or only) fragment of the frame. 
+ // Set the RTP 'M' ('marker') bit: + setMarkerBit(); + } + + // Important: Also call our base class's doSpecialFrameHandling(), + // to set the packet's timestamp: + MultiFramedRTPSink::doSpecialFrameHandling(fragmentationOffset, + frameStart, numBytesInFrame, + framePresentationTime, + numRemainingBytes); +} + +unsigned MPEG4GenericRTPSink::specialHeaderSize() const { + return 2 + 2; +} + +char const* MPEG4GenericRTPSink::sdpMediaType() const { + return fSDPMediaTypeString; +} + +char const* MPEG4GenericRTPSink::auxSDPLine() { + return fFmtpSDPLine; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG4GenericRTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG4GenericRTPSource.cpp new file mode 100644 index 0000000..55e19aa --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG4GenericRTPSource.cpp @@ -0,0 +1,234 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// MPEG4-GENERIC ("audio", "video", or "application") RTP stream sources +// Implementation + +#include "MPEG4GenericRTPSource.hh" +#include "BitVector.hh" +#include "MPEG4LATMAudioRTPSource.hh" // for parseGeneralConfigStr() + +////////// MPEG4GenericBufferedPacket and MPEG4GenericBufferedPacketFactory + +class MPEG4GenericBufferedPacket: public BufferedPacket { +public: + MPEG4GenericBufferedPacket(MPEG4GenericRTPSource* ourSource); + virtual ~MPEG4GenericBufferedPacket(); + +private: // redefined virtual functions + virtual unsigned nextEnclosedFrameSize(unsigned char*& framePtr, + unsigned dataSize); +private: + MPEG4GenericRTPSource* fOurSource; +}; + +class MPEG4GenericBufferedPacketFactory: public BufferedPacketFactory { +private: // redefined virtual functions + virtual BufferedPacket* createNewPacket(MultiFramedRTPSource* ourSource); +}; + + +////////// AUHeader ////////// +struct AUHeader { + unsigned size; + unsigned index; // indexDelta for the 2nd & subsequent headers +}; + + +///////// MPEG4GenericRTPSource implementation //////// + +//##### NOTE: INCOMPLETE!!! 
Support more modes, and interleaving ##### + +MPEG4GenericRTPSource* +MPEG4GenericRTPSource::createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency, + char const* mediumName, + char const* mode, + unsigned sizeLength, unsigned indexLength, + unsigned indexDeltaLength + ) { + return new MPEG4GenericRTPSource(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency, mediumName, + mode, sizeLength, indexLength, + indexDeltaLength + ); +} + +MPEG4GenericRTPSource +::MPEG4GenericRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency, + char const* mediumName, + char const* mode, + unsigned sizeLength, unsigned indexLength, + unsigned indexDeltaLength + ) + : MultiFramedRTPSource(env, RTPgs, + rtpPayloadFormat, rtpTimestampFrequency, + new MPEG4GenericBufferedPacketFactory), + fSizeLength(sizeLength), fIndexLength(indexLength), + fIndexDeltaLength(indexDeltaLength), + fNumAUHeaders(0), fNextAUHeader(0), fAUHeaders(NULL) { + unsigned mimeTypeLength = + strlen(mediumName) + 14 /* strlen("/MPEG4-GENERIC") */ + 1; + fMIMEType = new char[mimeTypeLength]; + if (fMIMEType != NULL) { + sprintf(fMIMEType, "%s/MPEG4-GENERIC", mediumName); + } + + fMode = strDup(mode); + // Check for a "mode" that we don't yet support: //##### + if (mode == NULL || + (strcmp(mode, "aac-hbr") != 0 && strcmp(mode, "generic") != 0)) { + envir() << "MPEG4GenericRTPSource Warning: Unknown or unsupported \"mode\": " + << mode << "\n"; + } +} + +MPEG4GenericRTPSource::~MPEG4GenericRTPSource() { + delete[] fAUHeaders; + delete[] fMode; + delete[] fMIMEType; +} + +Boolean MPEG4GenericRTPSource +::processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize) { + unsigned char* headerStart = packet->data(); + unsigned packetSize = packet->dataSize(); + + fCurrentPacketBeginsFrame = fCurrentPacketCompletesFrame; + // whether the *previous* packet ended a frame + + // 
The RTP "M" (marker) bit indicates the last fragment of a frame: + fCurrentPacketCompletesFrame = packet->rtpMarkerBit(); + + // default values: + resultSpecialHeaderSize = 0; + fNumAUHeaders = 0; + fNextAUHeader = 0; + delete[] fAUHeaders; fAUHeaders = NULL; + + if (fSizeLength > 0) { + // The packet begins with a "AU Header Section". Parse it, to + // determine the "AU-header"s for each frame present in this packet: + resultSpecialHeaderSize += 2; + if (packetSize < resultSpecialHeaderSize) return False; + + unsigned AU_headers_length = (headerStart[0]<<8)|headerStart[1]; + unsigned AU_headers_length_bytes = (AU_headers_length+7)/8; + if (packetSize + < resultSpecialHeaderSize + AU_headers_length_bytes) return False; + resultSpecialHeaderSize += AU_headers_length_bytes; + + // Figure out how many AU-headers are present in the packet: + int bitsAvail = AU_headers_length - (fSizeLength + fIndexLength); + if (bitsAvail >= 0 && (fSizeLength + fIndexDeltaLength) > 0) { + fNumAUHeaders = 1 + bitsAvail/(fSizeLength + fIndexDeltaLength); + } + if (fNumAUHeaders > 0) { + fAUHeaders = new AUHeader[fNumAUHeaders]; + // Fill in each header: + BitVector bv(&headerStart[2], 0, AU_headers_length); + fAUHeaders[0].size = bv.getBits(fSizeLength); + fAUHeaders[0].index = bv.getBits(fIndexLength); + + for (unsigned i = 1; i < fNumAUHeaders; ++i) { + fAUHeaders[i].size = bv.getBits(fSizeLength); + fAUHeaders[i].index = bv.getBits(fIndexDeltaLength); + } + } + + } + + return True; +} + +char const* MPEG4GenericRTPSource::MIMEtype() const { + return fMIMEType; +} + + +////////// MPEG4GenericBufferedPacket +////////// and MPEG4GenericBufferedPacketFactory implementation + +MPEG4GenericBufferedPacket +::MPEG4GenericBufferedPacket(MPEG4GenericRTPSource* ourSource) + : fOurSource(ourSource) { +} + +MPEG4GenericBufferedPacket::~MPEG4GenericBufferedPacket() { +} + +unsigned MPEG4GenericBufferedPacket +::nextEnclosedFrameSize(unsigned char*& /*framePtr*/, unsigned dataSize) { + // WE 
CURRENTLY DON'T IMPLEMENT INTERLEAVING. FIX THIS! ##### + AUHeader* auHeader = fOurSource->fAUHeaders; + if (auHeader == NULL) return dataSize; + unsigned numAUHeaders = fOurSource->fNumAUHeaders; + + if (fOurSource->fNextAUHeader >= numAUHeaders) { + fOurSource->envir() << "MPEG4GenericBufferedPacket::nextEnclosedFrameSize(" + << dataSize << "): data error (" + << auHeader << "," << fOurSource->fNextAUHeader + << "," << numAUHeaders << ")!\n"; + return dataSize; + } + + auHeader = &auHeader[fOurSource->fNextAUHeader++]; + return auHeader->size <= dataSize ? auHeader->size : dataSize; +} + +BufferedPacket* MPEG4GenericBufferedPacketFactory +::createNewPacket(MultiFramedRTPSource* ourSource) { + return new MPEG4GenericBufferedPacket((MPEG4GenericRTPSource*)ourSource); +} + + +////////// samplingFrequencyFromAudioSpecificConfig() implementation ////////// + +static unsigned const samplingFrequencyFromIndex[16] = { + 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, + 16000, 12000, 11025, 8000, 7350, 0, 0, 0 +}; + +unsigned samplingFrequencyFromAudioSpecificConfig(char const* configStr) { + unsigned char* config = NULL; + unsigned result = 0; // if returned, indicates an error + + do { + // Begin by parsing the config string: + unsigned configSize; + config = parseGeneralConfigStr(configStr, configSize); + if (config == NULL) break; + + if (configSize < 2) break; + unsigned char samplingFrequencyIndex = ((config[0]&0x07)<<1) | (config[1]>>7); + if (samplingFrequencyIndex < 15) { + result = samplingFrequencyFromIndex[samplingFrequencyIndex]; + break; + } + + // Index == 15 means that the actual frequency is next (24 bits): + if (configSize < 5) break; + result = ((config[1]&0x7F)<<17) | (config[2]<<9) | (config[3]<<1) | (config[4]>>7); + } while (0); + + delete[] config; + return result; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG4LATMAudioRTPSink.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG4LATMAudioRTPSink.cpp new file mode 100644 index 0000000..efb8a24 --- 
/dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG4LATMAudioRTPSink.cpp @@ -0,0 +1,95 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for MPEG-4 audio, using LATM multiplexing (RFC 3016) +// Implementation + +#include "MPEG4LATMAudioRTPSink.hh" + +MPEG4LATMAudioRTPSink +::MPEG4LATMAudioRTPSink(UsageEnvironment& env, Groupsock* RTPgs, + u_int8_t rtpPayloadFormat, + u_int32_t rtpTimestampFrequency, + char const* streamMuxConfigString, + unsigned numChannels, + Boolean allowMultipleFramesPerPacket) + : AudioRTPSink(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency, "MP4A-LATM", numChannels), + fStreamMuxConfigString(strDup(streamMuxConfigString)), + fAllowMultipleFramesPerPacket(allowMultipleFramesPerPacket) { + // Set up the "a=fmtp:" SDP line for this stream: + char const* fmtpFmt = + "a=fmtp:%d " + "cpresent=0;config=%s\r\n"; + unsigned fmtpFmtSize = strlen(fmtpFmt) + + 3 /* max char len */ + + strlen(fStreamMuxConfigString); + char* fmtp = new char[fmtpFmtSize]; + sprintf(fmtp, fmtpFmt, + rtpPayloadType(), + fStreamMuxConfigString); + fFmtpSDPLine = strDup(fmtp); + delete[] fmtp; +} + +MPEG4LATMAudioRTPSink::~MPEG4LATMAudioRTPSink() { + delete[] fFmtpSDPLine; + delete[] 
(char*)fStreamMuxConfigString; +} + +MPEG4LATMAudioRTPSink* +MPEG4LATMAudioRTPSink::createNew(UsageEnvironment& env, Groupsock* RTPgs, + u_int8_t rtpPayloadFormat, + u_int32_t rtpTimestampFrequency, + char const* streamMuxConfigString, + unsigned numChannels, + Boolean allowMultipleFramesPerPacket) { + return new MPEG4LATMAudioRTPSink(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency, streamMuxConfigString, + numChannels, + allowMultipleFramesPerPacket); +} + +Boolean MPEG4LATMAudioRTPSink +::frameCanAppearAfterPacketStart(unsigned char const* /*frameStart*/, + unsigned /*numBytesInFrame*/) const { + return fAllowMultipleFramesPerPacket; +} + +void MPEG4LATMAudioRTPSink +::doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes) { + if (numRemainingBytes == 0) { + // This packet contains the last (or only) fragment of the frame. + // Set the RTP 'M' ('marker') bit: + setMarkerBit(); + } + + // Important: Also call our base class's doSpecialFrameHandling(), + // to set the packet's timestamp: + MultiFramedRTPSink::doSpecialFrameHandling(fragmentationOffset, + frameStart, numBytesInFrame, + framePresentationTime, + numRemainingBytes); +} + +char const* MPEG4LATMAudioRTPSink::auxSDPLine() { + return fFmtpSDPLine; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG4LATMAudioRTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG4LATMAudioRTPSource.cpp new file mode 100644 index 0000000..af4fdc3 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG4LATMAudioRTPSource.cpp @@ -0,0 +1,264 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// MPEG-4 audio, using LATM multiplexing +// Implementation + +#include "MPEG4LATMAudioRTPSource.hh" + +////////// LATMBufferedPacket and LATMBufferedPacketFactory ////////// + +class LATMBufferedPacket: public BufferedPacket { +public: + LATMBufferedPacket(Boolean includeLATMDataLengthField); + virtual ~LATMBufferedPacket(); + +private: // redefined virtual functions + virtual unsigned nextEnclosedFrameSize(unsigned char*& framePtr, + unsigned dataSize); + +private: + Boolean fIncludeLATMDataLengthField; +}; + +class LATMBufferedPacketFactory: public BufferedPacketFactory { +private: // redefined virtual functions + virtual BufferedPacket* createNewPacket(MultiFramedRTPSource* ourSource); +}; + +///////// MPEG4LATMAudioRTPSource implementation //////// + +MPEG4LATMAudioRTPSource* +MPEG4LATMAudioRTPSource::createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) { + return new MPEG4LATMAudioRTPSource(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency); +} + +MPEG4LATMAudioRTPSource +::MPEG4LATMAudioRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) + : MultiFramedRTPSource(env, RTPgs, + rtpPayloadFormat, rtpTimestampFrequency, + new LATMBufferedPacketFactory), + fIncludeLATMDataLengthField(True) { +} + +MPEG4LATMAudioRTPSource::~MPEG4LATMAudioRTPSource() { +} + +void 
MPEG4LATMAudioRTPSource::omitLATMDataLengthField() { + fIncludeLATMDataLengthField = False; +} + +Boolean MPEG4LATMAudioRTPSource +::processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize) { + fCurrentPacketBeginsFrame = fCurrentPacketCompletesFrame; + // whether the *previous* packet ended a frame + + // The RTP "M" (marker) bit indicates the last fragment of a frame: + fCurrentPacketCompletesFrame = packet->rtpMarkerBit(); + + // There is no special header + resultSpecialHeaderSize = 0; + return True; +} + +char const* MPEG4LATMAudioRTPSource::MIMEtype() const { + return "audio/MP4A-LATM"; +} + + +////////// LATMBufferedPacket and LATMBufferedPacketFactory implementation + +LATMBufferedPacket::LATMBufferedPacket(Boolean includeLATMDataLengthField) + : fIncludeLATMDataLengthField(includeLATMDataLengthField) { +} + +LATMBufferedPacket::~LATMBufferedPacket() { +} + +unsigned LATMBufferedPacket +::nextEnclosedFrameSize(unsigned char*& framePtr, unsigned dataSize) { + // Look at the LATM data length byte(s), to determine the size + // of the LATM payload. + unsigned resultFrameSize = 0; + unsigned i; + for (i = 0; i < dataSize; ++i) { + resultFrameSize += framePtr[i]; + if (framePtr[i] != 0xFF) break; + } + ++i; + if (fIncludeLATMDataLengthField) { + resultFrameSize += i; + } else { + framePtr += i; + dataSize -= i; + } + + return (resultFrameSize <= dataSize) ? 
resultFrameSize : dataSize; +} + +BufferedPacket* LATMBufferedPacketFactory +::createNewPacket(MultiFramedRTPSource* ourSource) { + MPEG4LATMAudioRTPSource* source = (MPEG4LATMAudioRTPSource*)ourSource; + return new LATMBufferedPacket(source->returnedFrameIncludesLATMDataLengthField()); +} + + +////////// parseStreamMuxConfigStr() implementation ////////// + +static Boolean getNibble(char const*& configStr, + unsigned char& resultNibble) { + char c = configStr[0]; + if (c == '\0') return False; // we've reached the end + + if (c >= '0' && c <= '9') { + resultNibble = c - '0'; + } else if (c >= 'A' && c <= 'F') { + resultNibble = 10 + c - 'A'; + } else if (c >= 'a' && c <= 'f') { + resultNibble = 10 + c - 'a'; + } else { + return False; + } + + ++configStr; // move to the next nibble + return True; +} + +static Boolean getByte(char const*& configStr, unsigned char& resultByte) { + resultByte = 0; // by default, in case parsing fails + + unsigned char firstNibble; + if (!getNibble(configStr, firstNibble)) return False; + resultByte = firstNibble<<4; + + unsigned char secondNibble = 0; + if (!getNibble(configStr, secondNibble) && configStr[0] != '\0') { + // There's a second nibble, but it's malformed + return False; + } + resultByte |= secondNibble; + + return True; +} + +Boolean +parseStreamMuxConfigStr(char const* configStr, + // result parameters: + Boolean& audioMuxVersion, + Boolean& allStreamsSameTimeFraming, + unsigned char& numSubFrames, + unsigned char& numProgram, + unsigned char& numLayer, + unsigned char*& audioSpecificConfig, + unsigned& audioSpecificConfigSize) { + // Set default versions of the result parameters: + audioMuxVersion = False; + allStreamsSameTimeFraming = True; + numSubFrames = numProgram = numLayer = 0; + audioSpecificConfig = NULL; + audioSpecificConfigSize = 0; + + do { + if (configStr == NULL) break; + + unsigned char nextByte; + + if (!getByte(configStr, nextByte)) break; + audioMuxVersion = (nextByte&0x80) != 0; + if 
(audioMuxVersion) break; + + allStreamsSameTimeFraming = ((nextByte&0x40)>>6) != 0; + numSubFrames = (nextByte&0x3F); + + if (!getByte(configStr, nextByte)) break; + numProgram = (nextByte&0xF0)>>4; + + numLayer = (nextByte&0x0E)>>1; + + // The one remaining bit, and the rest of the string, + // are used for "audioSpecificConfig": + unsigned char remainingBit = nextByte&1; + + unsigned ascSize = (strlen(configStr)+1)/2 + 1; + audioSpecificConfig = new unsigned char[ascSize]; + + Boolean parseSuccess; + unsigned i = 0; + do { + nextByte = 0; + parseSuccess = getByte(configStr, nextByte); + audioSpecificConfig[i++] = (remainingBit<<7)|((nextByte&0xFE)>>1); + remainingBit = nextByte&1; + } while (parseSuccess); + if (i != ascSize) break; // part of the remaining string was bad + + audioSpecificConfigSize = ascSize; + return True; // parsing succeeded + } while (0); + + delete[] audioSpecificConfig; + return False; // parsing failed +} + +unsigned char* parseStreamMuxConfigStr(char const* configStr, + // result parameter: + unsigned& audioSpecificConfigSize) { + Boolean audioMuxVersion, allStreamsSameTimeFraming; + unsigned char numSubFrames, numProgram, numLayer; + unsigned char* audioSpecificConfig; + + if (!parseStreamMuxConfigStr(configStr, + audioMuxVersion, allStreamsSameTimeFraming, + numSubFrames, numProgram, numLayer, + audioSpecificConfig, audioSpecificConfigSize)) { + audioSpecificConfigSize = 0; + return NULL; + } + + return audioSpecificConfig; +} + +unsigned char* parseGeneralConfigStr(char const* configStr, + // result parameter: + unsigned& configSize) { + unsigned char* config = NULL; + do { + if (configStr == NULL) break; + configSize = (strlen(configStr)+1)/2; + + config = new unsigned char[configSize]; + if (config == NULL) break; + + unsigned i; + for (i = 0; i < configSize; ++i) { + if (!getByte(configStr, config[i])) break; + } + if (i != configSize) break; // part of the string was bad + + return config; + } while (0); + + configSize = 0; + 
delete[] config; + return NULL; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG4VideoFileServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG4VideoFileServerMediaSubsession.cpp new file mode 100644 index 0000000..bb9edcf --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG4VideoFileServerMediaSubsession.cpp @@ -0,0 +1,125 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a MPEG-4 video file. 
+// Implementation + +#include "MPEG4VideoFileServerMediaSubsession.hh" +#include "MPEG4ESVideoRTPSink.hh" +#include "ByteStreamFileSource.hh" +#include "MPEG4VideoStreamFramer.hh" + +MPEG4VideoFileServerMediaSubsession* +MPEG4VideoFileServerMediaSubsession::createNew(UsageEnvironment& env, + char const* fileName, + Boolean reuseFirstSource) { + return new MPEG4VideoFileServerMediaSubsession(env, fileName, reuseFirstSource); +} + +MPEG4VideoFileServerMediaSubsession +::MPEG4VideoFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, Boolean reuseFirstSource) + : FileServerMediaSubsession(env, fileName, reuseFirstSource), + fAuxSDPLine(NULL), fDoneFlag(0), fDummyRTPSink(NULL) { +} + +MPEG4VideoFileServerMediaSubsession::~MPEG4VideoFileServerMediaSubsession() { + delete[] fAuxSDPLine; +} + +static void afterPlayingDummy(void* clientData) { + MPEG4VideoFileServerMediaSubsession* subsess + = (MPEG4VideoFileServerMediaSubsession*)clientData; + subsess->afterPlayingDummy1(); +} + +void MPEG4VideoFileServerMediaSubsession::afterPlayingDummy1() { + // Unschedule any pending 'checking' task: + envir().taskScheduler().unscheduleDelayedTask(nextTask()); + // Signal the event loop that we're done: + setDoneFlag(); +} + +static void checkForAuxSDPLine(void* clientData) { + MPEG4VideoFileServerMediaSubsession* subsess + = (MPEG4VideoFileServerMediaSubsession*)clientData; + subsess->checkForAuxSDPLine1(); +} + +void MPEG4VideoFileServerMediaSubsession::checkForAuxSDPLine1() { + char const* dasl; + + if (fAuxSDPLine != NULL) { + // Signal the event loop that we're done: + setDoneFlag(); + } else if (fDummyRTPSink != NULL && (dasl = fDummyRTPSink->auxSDPLine()) != NULL) { + fAuxSDPLine= strDup(dasl); + fDummyRTPSink = NULL; + + // Signal the event loop that we're done: + setDoneFlag(); + } else if (!fDoneFlag) { + // try again after a brief delay: + int uSecsToDelay = 100000; // 100 ms + nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay, + 
(TaskFunc*)checkForAuxSDPLine, this); + } +} + +char const* MPEG4VideoFileServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) { + if (fAuxSDPLine != NULL) return fAuxSDPLine; // it's already been set up (for a previous client) + + if (fDummyRTPSink == NULL) { // we're not already setting it up for another, concurrent stream + // Note: For MPEG-4 video files, the 'config' information isn't known + // until we start reading the file. This means that "rtpSink"s + // "auxSDPLine()" will be NULL initially, and we need to start reading data from our file until this changes. + fDummyRTPSink = rtpSink; + + // Start reading the file: + fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this); + + // Check whether the sink's 'auxSDPLine()' is ready: + checkForAuxSDPLine(this); + } + + envir().taskScheduler().doEventLoop(&fDoneFlag); + + return fAuxSDPLine; +} + +FramedSource* MPEG4VideoFileServerMediaSubsession +::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) { + estBitrate = 500; // kbps, estimate + + // Create the video source: + ByteStreamFileSource* fileSource + = ByteStreamFileSource::createNew(envir(), fFileName); + if (fileSource == NULL) return NULL; + fFileSize = fileSource->fileSize(); + + // Create a framer for the Video Elementary Stream: + return MPEG4VideoStreamFramer::createNew(envir(), fileSource); +} + +RTPSink* MPEG4VideoFileServerMediaSubsession +::createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* /*inputSource*/) { + return MPEG4ESVideoRTPSink::createNew(envir(), rtpGroupsock, + rtpPayloadTypeIfDynamic); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG4VideoStreamDiscreteFramer.cpp b/AnyCore/lib_rtsp/liveMedia/MPEG4VideoStreamDiscreteFramer.cpp new file mode 100644 index 0000000..ebb89c3 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG4VideoStreamDiscreteFramer.cpp @@ -0,0 +1,252 @@ +/********** +This library is free software; you can 
redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A simplified version of "MPEG4VideoStreamFramer" that takes only complete, +// discrete frames (rather than an arbitrary byte stream) as input. +// This avoids the parsing and data copying overhead of the full +// "MPEG4VideoStreamFramer". +// Implementation + +#include "MPEG4VideoStreamDiscreteFramer.hh" + +MPEG4VideoStreamDiscreteFramer* +MPEG4VideoStreamDiscreteFramer::createNew(UsageEnvironment& env, + FramedSource* inputSource, Boolean leavePresentationTimesUnmodified) { + // Need to add source type checking here??? 
##### + return new MPEG4VideoStreamDiscreteFramer(env, inputSource, leavePresentationTimesUnmodified); +} + +MPEG4VideoStreamDiscreteFramer +::MPEG4VideoStreamDiscreteFramer(UsageEnvironment& env, + FramedSource* inputSource, Boolean leavePresentationTimesUnmodified) + : MPEG4VideoStreamFramer(env, inputSource, False/*don't create a parser*/), + fLeavePresentationTimesUnmodified(leavePresentationTimesUnmodified), vop_time_increment_resolution(0), fNumVTIRBits(0), + fLastNonBFrameVop_time_increment(0) { + fLastNonBFramePresentationTime.tv_sec = 0; + fLastNonBFramePresentationTime.tv_usec = 0; +} + +MPEG4VideoStreamDiscreteFramer::~MPEG4VideoStreamDiscreteFramer() { +} + +void MPEG4VideoStreamDiscreteFramer::doGetNextFrame() { + // Arrange to read data (which should be a complete MPEG-4 video frame) + // from our data source, directly into the client's input buffer. + // After reading this, we'll do some parsing on the frame. + fInputSource->getNextFrame(fTo, fMaxSize, + afterGettingFrame, this, + FramedSource::handleClosure, this); +} + +void MPEG4VideoStreamDiscreteFramer +::afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + MPEG4VideoStreamDiscreteFramer* source = (MPEG4VideoStreamDiscreteFramer*)clientData; + source->afterGettingFrame1(frameSize, numTruncatedBytes, + presentationTime, durationInMicroseconds); +} + +void MPEG4VideoStreamDiscreteFramer +::afterGettingFrame1(unsigned frameSize, unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + // Check that the first 4 bytes are a system code: + if (frameSize >= 4 && fTo[0] == 0 && fTo[1] == 0 && fTo[2] == 1) { + fPictureEndMarker = True; // Assume that we have a complete 'picture' here + unsigned i = 3; + if (fTo[i] == 0xB0) { // VISUAL_OBJECT_SEQUENCE_START_CODE + // The next byte is the "profile_and_level_indication": + if (frameSize >= 5) 
fProfileAndLevelIndication = fTo[4]; + + // The start of this frame - up to the first GROUP_VOP_START_CODE + // or VOP_START_CODE - is stream configuration information. Save this: + for (i = 7; i < frameSize; ++i) { + if ((fTo[i] == 0xB3 /*GROUP_VOP_START_CODE*/ || + fTo[i] == 0xB6 /*VOP_START_CODE*/) + && fTo[i-1] == 1 && fTo[i-2] == 0 && fTo[i-3] == 0) { + break; // The configuration information ends here + } + } + fNumConfigBytes = i < frameSize ? i-3 : frameSize; + delete[] fConfigBytes; fConfigBytes = new unsigned char[fNumConfigBytes]; + for (unsigned j = 0; j < fNumConfigBytes; ++j) fConfigBytes[j] = fTo[j]; + + // This information (should) also contain a VOL header, which we need + // to analyze, to get "vop_time_increment_resolution" (which we need + // - along with "vop_time_increment" - in order to generate accurate + // presentation times for "B" frames). + analyzeVOLHeader(); + } + + if (i < frameSize) { + u_int8_t nextCode = fTo[i]; + + if (nextCode == 0xB3 /*GROUP_VOP_START_CODE*/) { + // Skip to the following VOP_START_CODE (if any): + for (i += 4; i < frameSize; ++i) { + if (fTo[i] == 0xB6 /*VOP_START_CODE*/ + && fTo[i-1] == 1 && fTo[i-2] == 0 && fTo[i-3] == 0) { + nextCode = fTo[i]; + break; + } + } + } + + if (nextCode == 0xB6 /*VOP_START_CODE*/ && i+5 < frameSize) { + ++i; + + // Get the "vop_coding_type" from the next byte: + u_int8_t nextByte = fTo[i++]; + u_int8_t vop_coding_type = nextByte>>6; + + // Next, get the "modulo_time_base" by counting the '1' bits that + // follow. We look at the next 32-bits only. + // This should be enough in most cases. + u_int32_t next4Bytes + = (fTo[i]<<24)|(fTo[i+1]<<16)|(fTo[i+2]<<8)|fTo[i+3]; + i += 4; + u_int32_t timeInfo = (nextByte<<(32-6))|(next4Bytes>>6); + unsigned modulo_time_base = 0; + u_int32_t mask = 0x80000000; + while ((timeInfo&mask) != 0) { + ++modulo_time_base; + mask >>= 1; + } + mask >>= 2; + + // Then, get the "vop_time_increment". 
+ unsigned vop_time_increment = 0; + // First, make sure we have enough bits left for this: + if ((mask>>(fNumVTIRBits-1)) != 0) { + for (unsigned i = 0; i < fNumVTIRBits; ++i) { + vop_time_increment |= timeInfo&mask; + mask >>= 1; + } + while (mask != 0) { + vop_time_increment >>= 1; + mask >>= 1; + } + } + + // If this is a "B" frame, then we have to tweak "presentationTime": + if (!fLeavePresentationTimesUnmodified && vop_coding_type == 2/*B*/ + && (fLastNonBFramePresentationTime.tv_usec > 0 || + fLastNonBFramePresentationTime.tv_sec > 0)) { + int timeIncrement + = fLastNonBFrameVop_time_increment - vop_time_increment; + if (timeIncrement<0) timeIncrement += vop_time_increment_resolution; + unsigned const MILLION = 1000000; + double usIncrement = vop_time_increment_resolution == 0 ? 0.0 + : ((double)timeIncrement*MILLION)/vop_time_increment_resolution; + unsigned secondsToSubtract = (unsigned)(usIncrement/MILLION); + unsigned uSecondsToSubtract = ((unsigned)usIncrement)%MILLION; + + presentationTime = fLastNonBFramePresentationTime; + if ((unsigned)presentationTime.tv_usec < uSecondsToSubtract) { + presentationTime.tv_usec += MILLION; + if (presentationTime.tv_sec > 0) --presentationTime.tv_sec; + } + presentationTime.tv_usec -= uSecondsToSubtract; + if ((unsigned)presentationTime.tv_sec > secondsToSubtract) { + presentationTime.tv_sec -= secondsToSubtract; + } else { + presentationTime.tv_sec = presentationTime.tv_usec = 0; + } + } else { + fLastNonBFramePresentationTime = presentationTime; + fLastNonBFrameVop_time_increment = vop_time_increment; + } + } + } + } + + // Complete delivery to the client: + fFrameSize = frameSize; + fNumTruncatedBytes = numTruncatedBytes; + fPresentationTime = presentationTime; + fDurationInMicroseconds = durationInMicroseconds; + afterGetting(this); +} + +Boolean MPEG4VideoStreamDiscreteFramer::getNextFrameBit(u_int8_t& result) { + if (fNumBitsSeenSoFar/8 >= fNumConfigBytes) return False; + + u_int8_t nextByte = 
fConfigBytes[fNumBitsSeenSoFar/8]; + result = (nextByte>>(7-fNumBitsSeenSoFar%8))&1; + ++fNumBitsSeenSoFar; + return True; +} + +Boolean MPEG4VideoStreamDiscreteFramer::getNextFrameBits(unsigned numBits, + u_int32_t& result) { + result = 0; + for (unsigned i = 0; i < numBits; ++i) { + u_int8_t nextBit; + if (!getNextFrameBit(nextBit)) return False; + result = (result<<1)|nextBit; + } + return True; +} + +void MPEG4VideoStreamDiscreteFramer::analyzeVOLHeader() { + // Begin by moving to the VOL header: + unsigned i; + for (i = 3; i < fNumConfigBytes; ++i) { + if (fConfigBytes[i] >= 0x20 && fConfigBytes[i] <= 0x2F + && fConfigBytes[i-1] == 1 + && fConfigBytes[i-2] == 0 && fConfigBytes[i-3] == 0) { + ++i; + break; + } + } + + fNumBitsSeenSoFar = 8*i + 9; + do { + u_int8_t is_object_layer_identifier; + if (!getNextFrameBit(is_object_layer_identifier)) break; + if (is_object_layer_identifier) fNumBitsSeenSoFar += 7; + + u_int32_t aspect_ratio_info; + if (!getNextFrameBits(4, aspect_ratio_info)) break; + if (aspect_ratio_info == 15 /*extended_PAR*/) fNumBitsSeenSoFar += 16; + + u_int8_t vol_control_parameters; + if (!getNextFrameBit(vol_control_parameters)) break; + if (vol_control_parameters) { + fNumBitsSeenSoFar += 3; // chroma_format; low_delay + u_int8_t vbw_parameters; + if (!getNextFrameBit(vbw_parameters)) break; + if (vbw_parameters) fNumBitsSeenSoFar += 79; + } + + fNumBitsSeenSoFar += 2; // video_object_layer_shape + u_int8_t marker_bit; + if (!getNextFrameBit(marker_bit)) break; + if (marker_bit != 1) break; // sanity check + + if (!getNextFrameBits(16, vop_time_increment_resolution)) break; + if (vop_time_increment_resolution == 0) break; // shouldn't happen + + // Compute how many bits are necessary to represent this: + fNumVTIRBits = 0; + for (unsigned test = vop_time_increment_resolution; test>0; test /= 2) { + ++fNumVTIRBits; + } + } while (0); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEG4VideoStreamFramer.cpp 
b/AnyCore/lib_rtsp/liveMedia/MPEG4VideoStreamFramer.cpp new file mode 100644 index 0000000..700f64c --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEG4VideoStreamFramer.cpp @@ -0,0 +1,681 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A filter that breaks up an MPEG-4 video elementary stream into +// frames for: +// - Visual Object Sequence (VS) Header + Visual Object (VO) Header +// + Video Object Layer (VOL) Header +// - Group of VOP (GOV) Header +// - VOP frame +// Implementation + +#include "MPEG4VideoStreamFramer.hh" +#include "MPEGVideoStreamParser.hh" +#include "MPEG4LATMAudioRTPSource.hh" // for "parseGeneralConfigStr()" +#include + +////////// MPEG4VideoStreamParser definition ////////// + +// An enum representing the current state of the parser: +enum MPEGParseState { + PARSING_VISUAL_OBJECT_SEQUENCE, + PARSING_VISUAL_OBJECT_SEQUENCE_SEEN_CODE, + PARSING_VISUAL_OBJECT, + PARSING_VIDEO_OBJECT_LAYER, + PARSING_GROUP_OF_VIDEO_OBJECT_PLANE, + PARSING_VIDEO_OBJECT_PLANE, + PARSING_VISUAL_OBJECT_SEQUENCE_END_CODE +}; + +class MPEG4VideoStreamParser: public MPEGVideoStreamParser { +public: + MPEG4VideoStreamParser(MPEG4VideoStreamFramer* usingSource, + FramedSource* inputSource); + virtual ~MPEG4VideoStreamParser(); + +private: // redefined virtual functions: + virtual void flushInput(); + virtual unsigned parse(); + +private: + MPEG4VideoStreamFramer* usingSource() { + return (MPEG4VideoStreamFramer*)fUsingSource; + } + void setParseState(MPEGParseState parseState); + + unsigned parseVisualObjectSequence(Boolean haveSeenStartCode = False); + unsigned parseVisualObject(); + unsigned parseVideoObjectLayer(); + unsigned parseGroupOfVideoObjectPlane(); + unsigned parseVideoObjectPlane(); + unsigned parseVisualObjectSequenceEndCode(); + + // These are used for parsing within an already-read frame: + Boolean getNextFrameBit(u_int8_t& result); + Boolean getNextFrameBits(unsigned numBits, u_int32_t& result); + + // Which are used by: + void analyzeVOLHeader(); + +private: + MPEGParseState fCurrentParseState; + unsigned fNumBitsSeenSoFar; // used by the getNextFrameBit*() routines + u_int32_t vop_time_increment_resolution; + unsigned fNumVTIRBits; + // # of bits needed to count to 
"vop_time_increment_resolution" + u_int8_t fixed_vop_rate; + unsigned fixed_vop_time_increment; // used if 'fixed_vop_rate' is set + unsigned fSecondsSinceLastTimeCode, fTotalTicksSinceLastTimeCode, fPrevNewTotalTicks; + unsigned fPrevPictureCountDelta; + Boolean fJustSawTimeCode; +}; + + +////////// MPEG4VideoStreamFramer implementation ////////// + +MPEG4VideoStreamFramer* +MPEG4VideoStreamFramer::createNew(UsageEnvironment& env, + FramedSource* inputSource) { + // Need to add source type checking here??? ##### + return new MPEG4VideoStreamFramer(env, inputSource); +} + +unsigned char* MPEG4VideoStreamFramer +::getConfigBytes(unsigned& numBytes) const { + numBytes = fNumConfigBytes; + return fConfigBytes; +} + +void MPEG4VideoStreamFramer +::setConfigInfo(u_int8_t profileAndLevelIndication, char const* configStr) { + fProfileAndLevelIndication = profileAndLevelIndication; + + delete[] fConfigBytes; + fConfigBytes = parseGeneralConfigStr(configStr, fNumConfigBytes); +} + +MPEG4VideoStreamFramer::MPEG4VideoStreamFramer(UsageEnvironment& env, + FramedSource* inputSource, + Boolean createParser) + : MPEGVideoStreamFramer(env, inputSource), + fProfileAndLevelIndication(0), + fConfigBytes(NULL), fNumConfigBytes(0), + fNewConfigBytes(NULL), fNumNewConfigBytes(0) { + fParser = createParser + ? 
new MPEG4VideoStreamParser(this, inputSource) + : NULL; +} + +MPEG4VideoStreamFramer::~MPEG4VideoStreamFramer() { + delete[] fConfigBytes; delete[] fNewConfigBytes; +} + +void MPEG4VideoStreamFramer::startNewConfig() { + delete[] fNewConfigBytes; fNewConfigBytes = NULL; + fNumNewConfigBytes = 0; +} + +void MPEG4VideoStreamFramer +::appendToNewConfig(unsigned char* newConfigBytes, unsigned numNewBytes) { + // Allocate a new block of memory for the new config bytes: + unsigned char* configNew + = new unsigned char[fNumNewConfigBytes + numNewBytes]; + + // Copy the old, then the new, config bytes there: + memmove(configNew, fNewConfigBytes, fNumNewConfigBytes); + memmove(&configNew[fNumNewConfigBytes], newConfigBytes, numNewBytes); + + delete[] fNewConfigBytes; fNewConfigBytes = configNew; + fNumNewConfigBytes += numNewBytes; +} + +void MPEG4VideoStreamFramer::completeNewConfig() { + delete[] fConfigBytes; fConfigBytes = fNewConfigBytes; + fNewConfigBytes = NULL; + fNumConfigBytes = fNumNewConfigBytes; + fNumNewConfigBytes = 0; +} + +Boolean MPEG4VideoStreamFramer::isMPEG4VideoStreamFramer() const { + return True; +} + +////////// MPEG4VideoStreamParser implementation ////////// + +MPEG4VideoStreamParser +::MPEG4VideoStreamParser(MPEG4VideoStreamFramer* usingSource, + FramedSource* inputSource) + : MPEGVideoStreamParser(usingSource, inputSource), + fCurrentParseState(PARSING_VISUAL_OBJECT_SEQUENCE), + vop_time_increment_resolution(0), fNumVTIRBits(0), + fixed_vop_rate(0), fixed_vop_time_increment(0), + fSecondsSinceLastTimeCode(0), fTotalTicksSinceLastTimeCode(0), + fPrevNewTotalTicks(0), fPrevPictureCountDelta(1), fJustSawTimeCode(False) { +} + +MPEG4VideoStreamParser::~MPEG4VideoStreamParser() { +} + +void MPEG4VideoStreamParser::setParseState(MPEGParseState parseState) { + fCurrentParseState = parseState; + MPEGVideoStreamParser::setParseState(); +} + +void MPEG4VideoStreamParser::flushInput() { + fSecondsSinceLastTimeCode = 0; + fTotalTicksSinceLastTimeCode = 0; + 
fPrevNewTotalTicks = 0; + fPrevPictureCountDelta = 1; + + StreamParser::flushInput(); + if (fCurrentParseState != PARSING_VISUAL_OBJECT_SEQUENCE) { + setParseState(PARSING_VISUAL_OBJECT_SEQUENCE); // later, change to GOV or VOP? ##### + } +} + + +unsigned MPEG4VideoStreamParser::parse() { + try { + switch (fCurrentParseState) { + case PARSING_VISUAL_OBJECT_SEQUENCE: { + return parseVisualObjectSequence(); + } + case PARSING_VISUAL_OBJECT_SEQUENCE_SEEN_CODE: { + return parseVisualObjectSequence(True); + } + case PARSING_VISUAL_OBJECT: { + return parseVisualObject(); + } + case PARSING_VIDEO_OBJECT_LAYER: { + return parseVideoObjectLayer(); + } + case PARSING_GROUP_OF_VIDEO_OBJECT_PLANE: { + return parseGroupOfVideoObjectPlane(); + } + case PARSING_VIDEO_OBJECT_PLANE: { + return parseVideoObjectPlane(); + } + case PARSING_VISUAL_OBJECT_SEQUENCE_END_CODE: { + return parseVisualObjectSequenceEndCode(); + } + default: { + return 0; // shouldn't happen + } + } + } catch (int /*e*/) { +#ifdef DEBUG + fprintf(stderr, "MPEG4VideoStreamParser::parse() EXCEPTION (This is normal behavior - *not* an error)\n"); +#endif + return 0; // the parsing got interrupted + } +} + +#define VISUAL_OBJECT_SEQUENCE_START_CODE 0x000001B0 +#define VISUAL_OBJECT_SEQUENCE_END_CODE 0x000001B1 +#define GROUP_VOP_START_CODE 0x000001B3 +#define VISUAL_OBJECT_START_CODE 0x000001B5 +#define VOP_START_CODE 0x000001B6 + +unsigned MPEG4VideoStreamParser +::parseVisualObjectSequence(Boolean haveSeenStartCode) { +#ifdef DEBUG + fprintf(stderr, "parsing VisualObjectSequence\n"); +#endif + usingSource()->startNewConfig(); + u_int32_t first4Bytes; + if (!haveSeenStartCode) { + while ((first4Bytes = test4Bytes()) != VISUAL_OBJECT_SEQUENCE_START_CODE) { +#ifdef DEBUG + fprintf(stderr, "ignoring non VS header: 0x%08x\n", first4Bytes); +#endif + get1Byte(); setParseState(PARSING_VISUAL_OBJECT_SEQUENCE); + // ensures we progress over bad data + } + first4Bytes = get4Bytes(); + } else { + // We've already seen the 
start code + first4Bytes = VISUAL_OBJECT_SEQUENCE_START_CODE; + } + save4Bytes(first4Bytes); + + // The next byte is the "profile_and_level_indication": + u_int8_t pali = get1Byte(); +#ifdef DEBUG + fprintf(stderr, "profile_and_level_indication: %02x\n", pali); +#endif + saveByte(pali); + usingSource()->fProfileAndLevelIndication = pali; + + // Now, copy all bytes that we see, up until we reach + // a VISUAL_OBJECT_START_CODE: + u_int32_t next4Bytes = get4Bytes(); + while (next4Bytes != VISUAL_OBJECT_START_CODE) { + saveToNextCode(next4Bytes); + } + + setParseState(PARSING_VISUAL_OBJECT); + + // Compute this frame's presentation time: + usingSource()->computePresentationTime(fTotalTicksSinceLastTimeCode); + + // This header forms part of the 'configuration' information: + usingSource()->appendToNewConfig(fStartOfFrame, curFrameSize()); + + return curFrameSize(); +} + +static inline Boolean isVideoObjectStartCode(u_int32_t code) { + return code >= 0x00000100 && code <= 0x0000011F; +} + +unsigned MPEG4VideoStreamParser::parseVisualObject() { +#ifdef DEBUG + fprintf(stderr, "parsing VisualObject\n"); +#endif + // Note that we've already read the VISUAL_OBJECT_START_CODE + save4Bytes(VISUAL_OBJECT_START_CODE); + + // Next, extract the "visual_object_type" from the next 1 or 2 bytes: + u_int8_t nextByte = get1Byte(); saveByte(nextByte); + Boolean is_visual_object_identifier = (nextByte&0x80) != 0; + u_int8_t visual_object_type; + if (is_visual_object_identifier) { +#ifdef DEBUG + fprintf(stderr, "visual_object_verid: 0x%x; visual_object_priority: 0x%x\n", (nextByte&0x78)>>3, (nextByte&0x07)); +#endif + nextByte = get1Byte(); saveByte(nextByte); + visual_object_type = (nextByte&0xF0)>>4; + } else { + visual_object_type = (nextByte&0x78)>>3; + } +#ifdef DEBUG + fprintf(stderr, "visual_object_type: 0x%x\n", visual_object_type); +#endif + // At present, we support only the "Video ID" "visual_object_type" (1) + if (visual_object_type != 1) { + usingSource()->envir() << 
"MPEG4VideoStreamParser::parseVisualObject(): Warning: We don't handle visual_object_type " << visual_object_type << "\n"; + } + + // Now, copy all bytes that we see, up until we reach + // a video_object_start_code + u_int32_t next4Bytes = get4Bytes(); + while (!isVideoObjectStartCode(next4Bytes)) { + saveToNextCode(next4Bytes); + } + save4Bytes(next4Bytes); +#ifdef DEBUG + fprintf(stderr, "saw a video_object_start_code: 0x%08x\n", next4Bytes); +#endif + + setParseState(PARSING_VIDEO_OBJECT_LAYER); + + // Compute this frame's presentation time: + usingSource()->computePresentationTime(fTotalTicksSinceLastTimeCode); + + // This header forms part of the 'configuration' information: + usingSource()->appendToNewConfig(fStartOfFrame, curFrameSize()); + + return curFrameSize(); +} + +static inline Boolean isVideoObjectLayerStartCode(u_int32_t code) { + return code >= 0x00000120 && code <= 0x0000012F; +} + +Boolean MPEG4VideoStreamParser::getNextFrameBit(u_int8_t& result) { + if (fNumBitsSeenSoFar/8 >= curFrameSize()) return False; + + u_int8_t nextByte = fStartOfFrame[fNumBitsSeenSoFar/8]; + result = (nextByte>>(7-fNumBitsSeenSoFar%8))&1; + ++fNumBitsSeenSoFar; + return True; +} + +Boolean MPEG4VideoStreamParser::getNextFrameBits(unsigned numBits, + u_int32_t& result) { + result = 0; + for (unsigned i = 0; i < numBits; ++i) { + u_int8_t nextBit; + if (!getNextFrameBit(nextBit)) return False; + result = (result<<1)|nextBit; + } + return True; +} + +void MPEG4VideoStreamParser::analyzeVOLHeader() { + // Extract timing information (in particular, + // "vop_time_increment_resolution") from the VOL Header: + fNumBitsSeenSoFar = 41; + do { + u_int8_t is_object_layer_identifier; + if (!getNextFrameBit(is_object_layer_identifier)) break; + if (is_object_layer_identifier) fNumBitsSeenSoFar += 7; + + u_int32_t aspect_ratio_info; + if (!getNextFrameBits(4, aspect_ratio_info)) break; + if (aspect_ratio_info == 15 /*extended_PAR*/) fNumBitsSeenSoFar += 16; + + u_int8_t 
vol_control_parameters; + if (!getNextFrameBit(vol_control_parameters)) break; + if (vol_control_parameters) { + fNumBitsSeenSoFar += 3; // chroma_format; low_delay + u_int8_t vbw_parameters; + if (!getNextFrameBit(vbw_parameters)) break; + if (vbw_parameters) fNumBitsSeenSoFar += 79; + } + + fNumBitsSeenSoFar += 2; // video_object_layer_shape + u_int8_t marker_bit; + if (!getNextFrameBit(marker_bit)) break; + if (marker_bit != 1) { // sanity check + usingSource()->envir() << "MPEG4VideoStreamParser::analyzeVOLHeader(): marker_bit 1 not set!\n"; + break; + } + + if (!getNextFrameBits(16, vop_time_increment_resolution)) break; +#ifdef DEBUG + fprintf(stderr, "vop_time_increment_resolution: %d\n", vop_time_increment_resolution); +#endif + if (vop_time_increment_resolution == 0) { + usingSource()->envir() << "MPEG4VideoStreamParser::analyzeVOLHeader(): vop_time_increment_resolution is zero!\n"; + break; + } + // Compute how many bits are necessary to represent this: + fNumVTIRBits = 0; + for (unsigned test = vop_time_increment_resolution; test>0; test /= 2) { + ++fNumVTIRBits; + } + + if (!getNextFrameBit(marker_bit)) break; + if (marker_bit != 1) { // sanity check + usingSource()->envir() << "MPEG4VideoStreamParser::analyzeVOLHeader(): marker_bit 2 not set!\n"; + break; + } + + if (!getNextFrameBit(fixed_vop_rate)) break; + if (fixed_vop_rate) { + // Get the following "fixed_vop_time_increment": + if (!getNextFrameBits(fNumVTIRBits, fixed_vop_time_increment)) break; +#ifdef DEBUG + fprintf(stderr, "fixed_vop_time_increment: %d\n", fixed_vop_time_increment); + if (fixed_vop_time_increment == 0) { + usingSource()->envir() << "MPEG4VideoStreamParser::analyzeVOLHeader(): fixed_vop_time_increment is zero!\n"; + } +#endif + } + // Use "vop_time_increment_resolution" as the 'frame rate' + // (really, 'tick rate'): + usingSource()->fFrameRate = (double)vop_time_increment_resolution; +#ifdef DEBUG + fprintf(stderr, "fixed_vop_rate: %d; 'frame' (really tick) rate: %f\n", 
fixed_vop_rate, usingSource()->fFrameRate); +#endif + + return; + } while (0); + + if (fNumBitsSeenSoFar/8 >= curFrameSize()) { + char errMsg[200]; + sprintf(errMsg, "Not enough bits in VOL header: %d/8 >= %d\n", fNumBitsSeenSoFar, curFrameSize()); + usingSource()->envir() << errMsg; + } +} + +unsigned MPEG4VideoStreamParser::parseVideoObjectLayer() { +#ifdef DEBUG + fprintf(stderr, "parsing VideoObjectLayer\n"); +#endif + // The first 4 bytes must be a "video_object_layer_start_code". + // If not, this is a 'short video header', which we currently + // don't support: + u_int32_t next4Bytes = get4Bytes(); + if (!isVideoObjectLayerStartCode(next4Bytes)) { + usingSource()->envir() << "MPEG4VideoStreamParser::parseVideoObjectLayer(): This appears to be a 'short video header', which we current don't support\n"; + } + + // Now, copy all bytes that we see, up until we reach + // a GROUP_VOP_START_CODE or a VOP_START_CODE: + do { + saveToNextCode(next4Bytes); + } while (next4Bytes != GROUP_VOP_START_CODE + && next4Bytes != VOP_START_CODE); + + analyzeVOLHeader(); + + setParseState((next4Bytes == GROUP_VOP_START_CODE) + ? 
PARSING_GROUP_OF_VIDEO_OBJECT_PLANE + : PARSING_VIDEO_OBJECT_PLANE); + + // Compute this frame's presentation time: + usingSource()->computePresentationTime(fTotalTicksSinceLastTimeCode); + + // This header ends the 'configuration' information: + usingSource()->appendToNewConfig(fStartOfFrame, curFrameSize()); + usingSource()->completeNewConfig(); + + return curFrameSize(); +} + +unsigned MPEG4VideoStreamParser::parseGroupOfVideoObjectPlane() { +#ifdef DEBUG + fprintf(stderr, "parsing GroupOfVideoObjectPlane\n"); +#endif + // Note that we've already read the GROUP_VOP_START_CODE + save4Bytes(GROUP_VOP_START_CODE); + + // Next, extract the (18-bit) time code from the next 3 bytes: + u_int8_t next3Bytes[3]; + getBytes(next3Bytes, 3); + saveByte(next3Bytes[0]);saveByte(next3Bytes[1]);saveByte(next3Bytes[2]); + unsigned time_code + = (next3Bytes[0]<<10)|(next3Bytes[1]<<2)|(next3Bytes[2]>>6); + unsigned time_code_hours = (time_code&0x0003E000)>>13; + unsigned time_code_minutes = (time_code&0x00001F80)>>7; +#if defined(DEBUG) || defined(DEBUG_TIMESTAMPS) + Boolean marker_bit = (time_code&0x00000040) != 0; +#endif + unsigned time_code_seconds = (time_code&0x0000003F); +#if defined(DEBUG) || defined(DEBUG_TIMESTAMPS) + fprintf(stderr, "time_code: 0x%05x, hours %d, minutes %d, marker_bit %d, seconds %d\n", time_code, time_code_hours, time_code_minutes, marker_bit, time_code_seconds); +#endif + fJustSawTimeCode = True; + + // Now, copy all bytes that we see, up until we reach a VOP_START_CODE: + u_int32_t next4Bytes = get4Bytes(); + while (next4Bytes != VOP_START_CODE) { + saveToNextCode(next4Bytes); + } + + // Compute this frame's presentation time: + usingSource()->computePresentationTime(fTotalTicksSinceLastTimeCode); + + // Record the time code: + usingSource()->setTimeCode(time_code_hours, time_code_minutes, + time_code_seconds, 0, 0); + // Note: Because the GOV header can appear anywhere (not just at a 1s point), we + // don't pass "fTotalTicksSinceLastTimeCode" as the 
"picturesSinceLastGOP" parameter. + fSecondsSinceLastTimeCode = 0; + if (fixed_vop_rate) fTotalTicksSinceLastTimeCode = 0; + + setParseState(PARSING_VIDEO_OBJECT_PLANE); + + return curFrameSize(); +} + +unsigned MPEG4VideoStreamParser::parseVideoObjectPlane() { +#ifdef DEBUG + fprintf(stderr, "#parsing VideoObjectPlane\n"); +#endif + // Note that we've already read the VOP_START_CODE + save4Bytes(VOP_START_CODE); + + // Get the "vop_coding_type" from the next byte: + u_int8_t nextByte = get1Byte(); saveByte(nextByte); + u_int8_t vop_coding_type = nextByte>>6; + + // Next, get the "modulo_time_base" by counting the '1' bits that follow. + // We look at the next 32-bits only. This should be enough in most cases. + u_int32_t next4Bytes = get4Bytes(); + u_int32_t timeInfo = (nextByte<<(32-6))|(next4Bytes>>6); + unsigned modulo_time_base = 0; + u_int32_t mask = 0x80000000; + while ((timeInfo&mask) != 0) { + ++modulo_time_base; + mask >>= 1; + } + mask >>= 1; + + // Check the following marker bit: + if ((timeInfo&mask) == 0) { + usingSource()->envir() << "MPEG4VideoStreamParser::parseVideoObjectPlane(): marker bit not set!\n"; + } + mask >>= 1; + + // Then, get the "vop_time_increment". 
+ // First, make sure we have enough bits left for this: + if ((mask>>(fNumVTIRBits-1)) == 0) { + usingSource()->envir() << "MPEG4VideoStreamParser::parseVideoObjectPlane(): 32-bits are not enough to get \"vop_time_increment\"!\n"; + } + unsigned vop_time_increment = 0; + for (unsigned i = 0; i < fNumVTIRBits; ++i) { + vop_time_increment |= timeInfo&mask; + mask >>= 1; + } + while (mask != 0) { + vop_time_increment >>= 1; + mask >>= 1; + } +#ifdef DEBUG + fprintf(stderr, "vop_coding_type: %d(%c), modulo_time_base: %d, vop_time_increment: %d\n", vop_coding_type, "IPBS"[vop_coding_type], modulo_time_base, vop_time_increment); +#endif + + // Now, copy all bytes that we see, up until we reach a code of some sort: + saveToNextCode(next4Bytes); + + // Update our counters based on the frame timing information that we saw: + if (fixed_vop_time_increment > 0) { + // This is a 'fixed_vop_rate' stream. Use 'fixed_vop_time_increment': + usingSource()->fPictureCount += fixed_vop_time_increment; + if (vop_time_increment > 0 || modulo_time_base > 0) { + fTotalTicksSinceLastTimeCode += fixed_vop_time_increment; + // Note: "fSecondsSinceLastTimeCode" and "fPrevNewTotalTicks" are not used. + } + } else { + // Use 'vop_time_increment': + unsigned newTotalTicks + = (fSecondsSinceLastTimeCode + modulo_time_base)*vop_time_increment_resolution + + vop_time_increment; + if (newTotalTicks == fPrevNewTotalTicks && fPrevNewTotalTicks > 0) { + // This is apparently a buggy MPEG-4 video stream, because + // "vop_time_increment" did not change. Overcome this error, + // by pretending that it did change. +#ifdef DEBUG + fprintf(stderr, "Buggy MPEG-4 video stream: \"vop_time_increment\" did not change!\n"); +#endif + // The following assumes that we don't have 'B' frames. If we do, then TARFU! 
+ usingSource()->fPictureCount += vop_time_increment; + fTotalTicksSinceLastTimeCode += vop_time_increment; + fSecondsSinceLastTimeCode += modulo_time_base; + } else { + if (newTotalTicks < fPrevNewTotalTicks && vop_coding_type != 2/*B*/ + && modulo_time_base == 0 && vop_time_increment == 0 && !fJustSawTimeCode) { + // This is another kind of buggy MPEG-4 video stream, in which + // "vop_time_increment" wraps around, but without + // "modulo_time_base" changing (or just having had a new time code). + // Overcome this by pretending that "vop_time_increment" *did* wrap around: +#ifdef DEBUG + fprintf(stderr, "Buggy MPEG-4 video stream: \"vop_time_increment\" wrapped around, but without \"modulo_time_base\" changing!\n"); +#endif + ++fSecondsSinceLastTimeCode; + newTotalTicks += vop_time_increment_resolution; + } + fPrevNewTotalTicks = newTotalTicks; + if (vop_coding_type != 2/*B*/) { + int pictureCountDelta = newTotalTicks - fTotalTicksSinceLastTimeCode; + if (pictureCountDelta <= 0) pictureCountDelta = fPrevPictureCountDelta; + // ensures that the picture count is always increasing + usingSource()->fPictureCount += pictureCountDelta; + fPrevPictureCountDelta = pictureCountDelta; + fTotalTicksSinceLastTimeCode = newTotalTicks; + fSecondsSinceLastTimeCode += modulo_time_base; + } + } + } + fJustSawTimeCode = False; // for next time + + // The next thing to parse depends on the code that we just saw, + // but we are assumed to have ended the current picture: + usingSource()->fPictureEndMarker = True; // HACK ##### + switch (next4Bytes) { + case VISUAL_OBJECT_SEQUENCE_END_CODE: { + setParseState(PARSING_VISUAL_OBJECT_SEQUENCE_END_CODE); + break; + } + case VISUAL_OBJECT_SEQUENCE_START_CODE: { + setParseState(PARSING_VISUAL_OBJECT_SEQUENCE_SEEN_CODE); + break; + } + case VISUAL_OBJECT_START_CODE: { + setParseState(PARSING_VISUAL_OBJECT); + break; + } + case GROUP_VOP_START_CODE: { + setParseState(PARSING_GROUP_OF_VIDEO_OBJECT_PLANE); + break; + } + case VOP_START_CODE: { 
+ setParseState(PARSING_VIDEO_OBJECT_PLANE); + break; + } + default: { + if (isVideoObjectStartCode(next4Bytes)) { + setParseState(PARSING_VIDEO_OBJECT_LAYER); + } else if (isVideoObjectLayerStartCode(next4Bytes)){ + // copy all bytes that we see, up until we reach a VOP_START_CODE: + u_int32_t next4Bytes = get4Bytes(); + while (next4Bytes != VOP_START_CODE) { + saveToNextCode(next4Bytes); + } + setParseState(PARSING_VIDEO_OBJECT_PLANE); + } else { + usingSource()->envir() << "MPEG4VideoStreamParser::parseVideoObjectPlane(): Saw unexpected code " + << (void*)next4Bytes << "\n"; + setParseState(PARSING_VIDEO_OBJECT_PLANE); // the safest way to recover... + } + break; + } + } + + // Compute this frame's presentation time: + usingSource()->computePresentationTime(fTotalTicksSinceLastTimeCode); + + return curFrameSize(); +} + +unsigned MPEG4VideoStreamParser::parseVisualObjectSequenceEndCode() { +#ifdef DEBUG + fprintf(stderr, "parsing VISUAL_OBJECT_SEQUENCE_END_CODE\n"); +#endif + // Note that we've already read the VISUAL_OBJECT_SEQUENCE_END_CODE + save4Bytes(VISUAL_OBJECT_SEQUENCE_END_CODE); + + setParseState(PARSING_VISUAL_OBJECT_SEQUENCE); + + // Treat this as if we had ended a picture: + usingSource()->fPictureEndMarker = True; // HACK ##### + + return curFrameSize(); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEGVideoStreamFramer.cpp b/AnyCore/lib_rtsp/liveMedia/MPEGVideoStreamFramer.cpp new file mode 100644 index 0000000..58f5d4d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEGVideoStreamFramer.cpp @@ -0,0 +1,180 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that breaks up an MPEG video elementary stream into +// headers and frames +// Implementation + +#include "MPEGVideoStreamParser.hh" +#include + +////////// TimeCode implementation ////////// + +TimeCode::TimeCode() + : days(0), hours(0), minutes(0), seconds(0), pictures(0) { +} + +TimeCode::~TimeCode() { +} + +int TimeCode::operator==(TimeCode const& arg2) { + return pictures == arg2.pictures && seconds == arg2.seconds + && minutes == arg2.minutes && hours == arg2.hours && days == arg2.days; +} + +////////// MPEGVideoStreamFramer implementation ////////// + +MPEGVideoStreamFramer::MPEGVideoStreamFramer(UsageEnvironment& env, + FramedSource* inputSource) + : FramedFilter(env, inputSource), + fFrameRate(0.0) /* until we learn otherwise */, + fParser(NULL) { + reset(); +} + +MPEGVideoStreamFramer::~MPEGVideoStreamFramer() { + delete fParser; +} + +void MPEGVideoStreamFramer::flushInput() { + reset(); + if (fParser != NULL) fParser->flushInput(); +} + +void MPEGVideoStreamFramer::reset() { + fPictureCount = 0; + fPictureEndMarker = False; + fPicturesAdjustment = 0; + fPictureTimeBase = 0.0; + fTcSecsBase = 0; + fHaveSeenFirstTimeCode = False; + + // Use the current wallclock time as the base 'presentation time': + gettimeofday(&fPresentationTimeBase, NULL); +} + +#ifdef DEBUG +static struct timeval firstPT; +#endif +void MPEGVideoStreamFramer +::computePresentationTime(unsigned numAdditionalPictures) { + // 
Computes "fPresentationTime" from the most recent GOP's + // time_code, along with the "numAdditionalPictures" parameter: + TimeCode& tc = fCurGOPTimeCode; + + unsigned tcSecs + = (((tc.days*24)+tc.hours)*60+tc.minutes)*60+tc.seconds - fTcSecsBase; + double pictureTime = fFrameRate == 0.0 ? 0.0 + : (tc.pictures + fPicturesAdjustment + numAdditionalPictures)/fFrameRate; + while (pictureTime < fPictureTimeBase) { // "if" should be enough, but just in case + if (tcSecs > 0) tcSecs -= 1; + pictureTime += 1.0; + } + pictureTime -= fPictureTimeBase; + if (pictureTime < 0.0) pictureTime = 0.0; // sanity check + unsigned pictureSeconds = (unsigned)pictureTime; + double pictureFractionOfSecond = pictureTime - (double)pictureSeconds; + + fPresentationTime = fPresentationTimeBase; + fPresentationTime.tv_sec += tcSecs + pictureSeconds; + fPresentationTime.tv_usec += (long)(pictureFractionOfSecond*1000000.0); + if (fPresentationTime.tv_usec >= 1000000) { + fPresentationTime.tv_usec -= 1000000; + ++fPresentationTime.tv_sec; + } +#ifdef DEBUG + if (firstPT.tv_sec == 0 && firstPT.tv_usec == 0) firstPT = fPresentationTime; + struct timeval diffPT; + diffPT.tv_sec = fPresentationTime.tv_sec - firstPT.tv_sec; + diffPT.tv_usec = fPresentationTime.tv_usec - firstPT.tv_usec; + if (fPresentationTime.tv_usec < firstPT.tv_usec) { + --diffPT.tv_sec; + diffPT.tv_usec += 1000000; + } + fprintf(stderr, "MPEGVideoStreamFramer::computePresentationTime(%d) -> %lu.%06ld [%lu.%06ld]\n", numAdditionalPictures, fPresentationTime.tv_sec, fPresentationTime.tv_usec, diffPT.tv_sec, diffPT.tv_usec); +#endif +} + +void MPEGVideoStreamFramer +::setTimeCode(unsigned hours, unsigned minutes, unsigned seconds, + unsigned pictures, unsigned picturesSinceLastGOP) { + TimeCode& tc = fCurGOPTimeCode; // abbrev + unsigned days = tc.days; + if (hours < tc.hours) { + // Assume that the 'day' has wrapped around: + ++days; + } + tc.days = days; + tc.hours = hours; + tc.minutes = minutes; + tc.seconds = seconds; + 
tc.pictures = pictures; + if (!fHaveSeenFirstTimeCode) { + fPictureTimeBase = fFrameRate == 0.0 ? 0.0 : tc.pictures/fFrameRate; + fTcSecsBase = (((tc.days*24)+tc.hours)*60+tc.minutes)*60+tc.seconds; + fHaveSeenFirstTimeCode = True; + } else if (fCurGOPTimeCode == fPrevGOPTimeCode) { + // The time code has not changed since last time. Adjust for this: + fPicturesAdjustment += picturesSinceLastGOP; + } else { + // Normal case: The time code changed since last time. + fPrevGOPTimeCode = tc; + fPicturesAdjustment = 0; + } +} + +void MPEGVideoStreamFramer::doGetNextFrame() { + fParser->registerReadInterest(fTo, fMaxSize); + continueReadProcessing(); +} + +void MPEGVideoStreamFramer +::continueReadProcessing(void* clientData, + unsigned char* /*ptr*/, unsigned /*size*/, + struct timeval /*presentationTime*/) { + MPEGVideoStreamFramer* framer = (MPEGVideoStreamFramer*)clientData; + framer->continueReadProcessing(); +} + +void MPEGVideoStreamFramer::continueReadProcessing() { + unsigned acquiredFrameSize = fParser->parse(); + if (acquiredFrameSize > 0) { + // We were able to acquire a frame from the input. + // It has already been copied to the reader's space. + fFrameSize = acquiredFrameSize; + fNumTruncatedBytes = fParser->numTruncatedBytes(); + + // "fPresentationTime" should have already been computed. + + // Compute "fDurationInMicroseconds" now: + fDurationInMicroseconds + = (fFrameRate == 0.0 || ((int)fPictureCount) < 0) ? 0 + : (unsigned)((fPictureCount*1000000)/fFrameRate); +#ifdef DEBUG + fprintf(stderr, "%d bytes @%u.%06d, fDurationInMicroseconds: %d ((%d*1000000)/%f)\n", acquiredFrameSize, fPresentationTime.tv_sec, fPresentationTime.tv_usec, fDurationInMicroseconds, fPictureCount, fFrameRate); +#endif + fPictureCount = 0; + + // Call our own 'after getting' function. Because we're not a 'leaf' + // source, we can call this directly, without risking infinite recursion. 
+ afterGetting(this); + } else { + // We were unable to parse a complete frame from the input, because: + // - we had to read more data from the source stream, or + // - the source stream has ended. + } +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEGVideoStreamParser.cpp b/AnyCore/lib_rtsp/liveMedia/MPEGVideoStreamParser.cpp new file mode 100644 index 0000000..e0eda9f --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEGVideoStreamParser.cpp @@ -0,0 +1,45 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// An abstract parser for MPEG video streams +// Implementation + +#include "MPEGVideoStreamParser.hh" + +MPEGVideoStreamParser +::MPEGVideoStreamParser(MPEGVideoStreamFramer* usingSource, + FramedSource* inputSource) + : StreamParser(inputSource, FramedSource::handleClosure, usingSource, + &MPEGVideoStreamFramer::continueReadProcessing, usingSource), + fUsingSource(usingSource) { +} + +MPEGVideoStreamParser::~MPEGVideoStreamParser() { +} + +void MPEGVideoStreamParser::restoreSavedParserState() { + StreamParser::restoreSavedParserState(); + fTo = fSavedTo; + fNumTruncatedBytes = fSavedNumTruncatedBytes; +} + +void MPEGVideoStreamParser::registerReadInterest(unsigned char* to, + unsigned maxSize) { + fStartOfFrame = fTo = fSavedTo = to; + fLimit = to + maxSize; + fNumTruncatedBytes = fSavedNumTruncatedBytes = 0; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MPEGVideoStreamParser.hh b/AnyCore/lib_rtsp/liveMedia/MPEGVideoStreamParser.hh new file mode 100644 index 0000000..7e09098 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MPEGVideoStreamParser.hh @@ -0,0 +1,122 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// An abstract parser for MPEG video streams +// C++ header + +#ifndef _MPEG_VIDEO_STREAM_PARSER_HH +#define _MPEG_VIDEO_STREAM_PARSER_HH + +#ifndef _STREAM_PARSER_HH +#include "StreamParser.hh" +#endif +#ifndef _MPEG_VIDEO_STREAM_FRAMER_HH +#include "MPEGVideoStreamFramer.hh" +#endif + +////////// MPEGVideoStreamParser definition ////////// + +class MPEGVideoStreamParser: public StreamParser { +public: + MPEGVideoStreamParser(MPEGVideoStreamFramer* usingSource, + FramedSource* inputSource); + virtual ~MPEGVideoStreamParser(); + +public: + void registerReadInterest(unsigned char* to, unsigned maxSize); + + virtual unsigned parse() = 0; + // returns the size of the frame that was acquired, or 0 if none was + // The number of truncated bytes (if any) is given by: + unsigned numTruncatedBytes() const { return fNumTruncatedBytes; } + +protected: + void setParseState() { + fSavedTo = fTo; + fSavedNumTruncatedBytes = fNumTruncatedBytes; + saveParserState(); + } + + // Record "byte" in the current output frame: + void saveByte(u_int8_t byte) { + if (fTo >= fLimit) { // there's no space left + ++fNumTruncatedBytes; + return; + } + + *fTo++ = byte; + } + + void save4Bytes(u_int32_t word) { + if (fTo+4 > fLimit) { // there's no space left + fNumTruncatedBytes += 4; + return; + } + + *fTo++ = word>>24; *fTo++ = word>>16; *fTo++ = word>>8; *fTo++ = word; + } + + // Save data until we see a sync word (0x000001xx): + void saveToNextCode(u_int32_t& curWord) { + saveByte(curWord>>24); + curWord = (curWord<<8)|get1Byte(); + while ((curWord&0xFFFFFF00) != 0x00000100) { + if ((unsigned)(curWord&0xFF) > 1) { + // a sync word definitely doesn't begin anywhere in "curWord" + save4Bytes(curWord); + curWord = get4Bytes(); + } else { + // a sync word might begin in "curWord", although not at its start + saveByte(curWord>>24); + unsigned char newByte = get1Byte(); + curWord = (curWord<<8)|newByte; + } + } + } + + // Skip data until we see a sync word (0x000001xx): + void 
skipToNextCode(u_int32_t& curWord) { + curWord = (curWord<<8)|get1Byte(); + while ((curWord&0xFFFFFF00) != 0x00000100) { + if ((unsigned)(curWord&0xFF) > 1) { + // a sync word definitely doesn't begin anywhere in "curWord" + curWord = get4Bytes(); + } else { + // a sync word might begin in "curWord", although not at its start + unsigned char newByte = get1Byte(); + curWord = (curWord<<8)|newByte; + } + } + } + +protected: + MPEGVideoStreamFramer* fUsingSource; + + // state of the frame that's currently being read: + unsigned char* fStartOfFrame; + unsigned char* fTo; + unsigned char* fLimit; + unsigned fNumTruncatedBytes; + unsigned curFrameSize() { return fTo - fStartOfFrame; } + unsigned char* fSavedTo; + unsigned fSavedNumTruncatedBytes; + +private: // redefined virtual functions + virtual void restoreSavedParserState(); +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/Makefile b/AnyCore/lib_rtsp/liveMedia/Makefile new file mode 100644 index 0000000..a698985 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/Makefile @@ -0,0 +1,433 @@ +INCLUDES = -Iinclude -I../UsageEnvironment/include -I../groupsock/include +PREFIX = /usr/local +LIBDIR = $(PREFIX)/lib +##### Change the following for your environment: +#------------------------------------------------------------------------------------------------- +# +# Create BY zhang zhiwei 2014.07.03 +# sunfrank2012@gmail.com +# +#------------------------------------------------------------------------------------------------- +# NDK ROOT +ANDROID_NDK_ROOT=/cygdrive/c/Android/NDK/android-ndk-r8e/ +# For sysroot | arm-linux-androideabi-gcc can't find cygwin path +NDK_ROOT=c:/Android/NDK/android-ndk-r8e/ +# For x86 +PREBUILT=$(ANDROID_NDK_ROOT)/toolchains/arm-linux-androideabi-4.6/prebuilt/windows +# Fro x86_64 +#PREBUILT=$(ANDROID_NDK_ROOT)/toolchains/arm-linux-androideabi-4.6/prebuilt/windows-x86_64 +PLATFORM=android-14 + +CROSS_COMPILE= $(PREBUILT)/bin/arm-linux-androideabi- +COMPILE_OPTS = $(INCLUDES) -fPIC -DANDROID 
-std=c99 -mfpu=neon -mfloat-abi=softfp -I. -O2 -DXLOCALE_NOT_USED -DANDROID -DSOCKLEN_T=socklen_t -DNO_SSTREAM=1 -D_LARGEFILE_SOURCE=1 -D_FILE_OFFSET_BITS=64 -DNULL=0 --sysroot=${NDK_ROOT}/platforms/${PLATFORM}/arch-arm/ -I${ANDROID_NDK_ROOT}/platforms/${PLATFORM}/arch-arm/usr/include +C = c +C_COMPILER = $(CROSS_COMPILE)gcc +C_FLAGS = $(COMPILE_OPTS) +CPP = cpp +CPLUSPLUS_COMPILER = $(CROSS_COMPILE)g++ +CPLUSPLUS_FLAGS = $(COMPILE_OPTS) -Wall -DBSD=1 -fexceptions +OBJ = o +LINK = $(CROSS_COMPILE)g++ -o +LINK_OPTS = +CONSOLE_LINK_OPTS = $(LINK_OPTS) +LIBRARY_LINK = $(CROSS_COMPILE)ar cr +LIBRARY_LINK_OPTS = $(LINK_OPTS) +LIB_SUFFIX = a +LIBS_FOR_CONSOLE_APPLICATION =-lc -lm -lz -L./ -lsupc++ -fexceptions -L${ANDROID_NDK_ROOT}/platforms/${PLATFORM}/arch-arm/usr/lib -lstdc++ +LIBS_FOR_GUI_APPLICATION = +EXE = +##### End of variables to change + +NAME = libliveMedia +LIVEMEDIA_LIB = $(NAME).$(LIB_SUFFIX) +ALL = $(LIVEMEDIA_LIB) +all: $(ALL) + +.$(C).$(OBJ): + $(C_COMPILER) -c $(C_FLAGS) $< +.$(CPP).$(OBJ): + $(CPLUSPLUS_COMPILER) -c $(CPLUSPLUS_FLAGS) $< + +MP3_SOURCE_OBJS = MP3FileSource.$(OBJ) MP3Transcoder.$(OBJ) MP3ADU.$(OBJ) MP3ADUdescriptor.$(OBJ) MP3ADUinterleaving.$(OBJ) MP3ADUTranscoder.$(OBJ) MP3StreamState.$(OBJ) MP3Internals.$(OBJ) MP3InternalsHuffman.$(OBJ) MP3InternalsHuffmanTable.$(OBJ) MP3ADURTPSource.$(OBJ) +MPEG_SOURCE_OBJS = MPEG1or2Demux.$(OBJ) MPEG1or2DemuxedElementaryStream.$(OBJ) MPEGVideoStreamFramer.$(OBJ) MPEG1or2VideoStreamFramer.$(OBJ) MPEG1or2VideoStreamDiscreteFramer.$(OBJ) MPEG4VideoStreamFramer.$(OBJ) MPEG4VideoStreamDiscreteFramer.$(OBJ) H264or5VideoStreamFramer.$(OBJ) H264or5VideoStreamDiscreteFramer.$(OBJ) H264VideoStreamFramer.$(OBJ) H264VideoStreamDiscreteFramer.$(OBJ) H265VideoStreamFramer.$(OBJ) H265VideoStreamDiscreteFramer.$(OBJ) MPEGVideoStreamParser.$(OBJ) MPEG1or2AudioStreamFramer.$(OBJ) MPEG1or2AudioRTPSource.$(OBJ) MPEG4LATMAudioRTPSource.$(OBJ) MPEG4ESVideoRTPSource.$(OBJ) MPEG4GenericRTPSource.$(OBJ) $(MP3_SOURCE_OBJS) 
MPEG1or2VideoRTPSource.$(OBJ) MPEG2TransportStreamMultiplexor.$(OBJ) MPEG2TransportStreamFromPESSource.$(OBJ) MPEG2TransportStreamFromESSource.$(OBJ) MPEG2TransportStreamFramer.$(OBJ) ADTSAudioFileSource.$(OBJ) +H263_SOURCE_OBJS = H263plusVideoRTPSource.$(OBJ) H263plusVideoStreamFramer.$(OBJ) H263plusVideoStreamParser.$(OBJ) +AC3_SOURCE_OBJS = AC3AudioStreamFramer.$(OBJ) AC3AudioRTPSource.$(OBJ) +DV_SOURCE_OBJS = DVVideoStreamFramer.$(OBJ) DVVideoRTPSource.$(OBJ) +MP3_SINK_OBJS = MP3ADURTPSink.$(OBJ) +MPEG_SINK_OBJS = MPEG1or2AudioRTPSink.$(OBJ) $(MP3_SINK_OBJS) MPEG1or2VideoRTPSink.$(OBJ) MPEG4LATMAudioRTPSink.$(OBJ) MPEG4GenericRTPSink.$(OBJ) MPEG4ESVideoRTPSink.$(OBJ) +H263_SINK_OBJS = H263plusVideoRTPSink.$(OBJ) +H264_OR_5_SINK_OBJS = H264or5VideoRTPSink.$(OBJ) H264VideoRTPSink.$(OBJ) H265VideoRTPSink.$(OBJ) +DV_SINK_OBJS = DVVideoRTPSink.$(OBJ) +AC3_SINK_OBJS = AC3AudioRTPSink.$(OBJ) + +MISC_SOURCE_OBJS = MediaSource.$(OBJ) FramedSource.$(OBJ) FramedFileSource.$(OBJ) FramedFilter.$(OBJ) ByteStreamFileSource.$(OBJ) ByteStreamMultiFileSource.$(OBJ) ByteStreamMemoryBufferSource.$(OBJ) BasicUDPSource.$(OBJ) DeviceSource.$(OBJ) AudioInputDevice.$(OBJ) WAVAudioFileSource.$(OBJ) $(MPEG_SOURCE_OBJS) $(H263_SOURCE_OBJS) $(AC3_SOURCE_OBJS) $(DV_SOURCE_OBJS) JPEGVideoSource.$(OBJ) AMRAudioSource.$(OBJ) AMRAudioFileSource.$(OBJ) InputFile.$(OBJ) StreamReplicator.$(OBJ) +MISC_SINK_OBJS = MediaSink.$(OBJ) FileSink.$(OBJ) BasicUDPSink.$(OBJ) AMRAudioFileSink.$(OBJ) H264or5VideoFileSink.$(OBJ) H264VideoFileSink.$(OBJ) H265VideoFileSink.$(OBJ) OggFileSink.$(OBJ) $(MPEG_SINK_OBJS) $(H263_SINK_OBJS) $(H264_OR_5_SINK_OBJS) $(DV_SINK_OBJS) $(AC3_SINK_OBJS) VorbisAudioRTPSink.$(OBJ) TheoraVideoRTPSink.$(OBJ) VP8VideoRTPSink.$(OBJ) GSMAudioRTPSink.$(OBJ) JPEGVideoRTPSink.$(OBJ) SimpleRTPSink.$(OBJ) AMRAudioRTPSink.$(OBJ) T140TextRTPSink.$(OBJ) TCPStreamSink.$(OBJ) OutputFile.$(OBJ) +MISC_FILTER_OBJS = uLawAudioFilter.$(OBJ) +TRANSPORT_STREAM_TRICK_PLAY_OBJS = 
MPEG2IndexFromTransportStream.$(OBJ) MPEG2TransportStreamIndexFile.$(OBJ) MPEG2TransportStreamTrickModeFilter.$(OBJ) + +RTP_SOURCE_OBJS = RTPSource.$(OBJ) MultiFramedRTPSource.$(OBJ) SimpleRTPSource.$(OBJ) H261VideoRTPSource.$(OBJ) H264VideoRTPSource.$(OBJ) H265VideoRTPSource.$(OBJ) QCELPAudioRTPSource.$(OBJ) AMRAudioRTPSource.$(OBJ) JPEGVideoRTPSource.$(OBJ) VorbisAudioRTPSource.$(OBJ) TheoraVideoRTPSource.$(OBJ) VP8VideoRTPSource.$(OBJ) +RTP_SINK_OBJS = RTPSink.$(OBJ) MultiFramedRTPSink.$(OBJ) AudioRTPSink.$(OBJ) VideoRTPSink.$(OBJ) TextRTPSink.$(OBJ) +RTP_INTERFACE_OBJS = RTPInterface.$(OBJ) +RTP_OBJS = $(RTP_SOURCE_OBJS) $(RTP_SINK_OBJS) $(RTP_INTERFACE_OBJS) + +RTCP_OBJS = RTCP.$(OBJ) rtcp_from_spec.$(OBJ) +RTSP_OBJS = RTSPServer.$(OBJ) RTSPClient.$(OBJ) RTSPCommon.$(OBJ) RTSPServerSupportingHTTPStreaming.$(OBJ) RTSPRegisterSender.$(OBJ) +SIP_OBJS = SIPClient.$(OBJ) + +SESSION_OBJS = MediaSession.$(OBJ) ServerMediaSession.$(OBJ) PassiveServerMediaSubsession.$(OBJ) OnDemandServerMediaSubsession.$(OBJ) FileServerMediaSubsession.$(OBJ) MPEG4VideoFileServerMediaSubsession.$(OBJ) H264VideoFileServerMediaSubsession.$(OBJ) H265VideoFileServerMediaSubsession.$(OBJ) H263plusVideoFileServerMediaSubsession.$(OBJ) WAVAudioFileServerMediaSubsession.$(OBJ) AMRAudioFileServerMediaSubsession.$(OBJ) MP3AudioFileServerMediaSubsession.$(OBJ) MPEG1or2VideoFileServerMediaSubsession.$(OBJ) MPEG1or2FileServerDemux.$(OBJ) MPEG1or2DemuxedServerMediaSubsession.$(OBJ) MPEG2TransportFileServerMediaSubsession.$(OBJ) ADTSAudioFileServerMediaSubsession.$(OBJ) DVVideoFileServerMediaSubsession.$(OBJ) AC3AudioFileServerMediaSubsession.$(OBJ) MPEG2TransportUDPServerMediaSubsession.$(OBJ) ProxyServerMediaSession.$(OBJ) + +QUICKTIME_OBJS = QuickTimeFileSink.$(OBJ) QuickTimeGenericRTPSource.$(OBJ) +AVI_OBJS = AVIFileSink.$(OBJ) + +MATROSKA_FILE_OBJS = MatroskaFile.$(OBJ) MatroskaFileParser.$(OBJ) EBMLNumber.$(OBJ) MatroskaDemuxedTrack.$(OBJ) +MATROSKA_SERVER_MEDIA_SUBSESSION_OBJS = 
MatroskaFileServerMediaSubsession.$(OBJ) MP3AudioMatroskaFileServerMediaSubsession.$(OBJ) +MATROSKA_RTSP_SERVER_OBJS = MatroskaFileServerDemux.$(OBJ) $(MATROSKA_SERVER_MEDIA_SUBSESSION_OBJS) +MATROSKA_OBJS = $(MATROSKA_FILE_OBJS) $(MATROSKA_RTSP_SERVER_OBJS) + +OGG_FILE_OBJS = OggFile.$(OBJ) OggFileParser.$(OBJ) OggDemuxedTrack.$(OBJ) +OGG_SERVER_MEDIA_SUBSESSION_OBJS = OggFileServerMediaSubsession.$(OBJ) +OGG_RTSP_SERVER_OBJS = OggFileServerDemux.$(OBJ) $(OGG_SERVER_MEDIA_SUBSESSION_OBJS) +OGG_OBJS = $(OGG_FILE_OBJS) $(OGG_RTSP_SERVER_OBJS) + +MISC_OBJS = DarwinInjector.$(OBJ) BitVector.$(OBJ) StreamParser.$(OBJ) DigestAuthentication.$(OBJ) ourMD5.$(OBJ) Base64.$(OBJ) Locale.$(OBJ) + +LIVEMEDIA_LIB_OBJS = Media.$(OBJ) $(MISC_SOURCE_OBJS) $(MISC_SINK_OBJS) $(MISC_FILTER_OBJS) $(RTP_OBJS) $(RTCP_OBJS) $(RTSP_OBJS) $(SIP_OBJS) $(SESSION_OBJS) $(QUICKTIME_OBJS) $(AVI_OBJS) $(TRANSPORT_STREAM_TRICK_PLAY_OBJS) $(MATROSKA_OBJS) $(OGG_OBJS) $(MISC_OBJS) + +$(LIVEMEDIA_LIB): $(LIVEMEDIA_LIB_OBJS) \ + $(PLATFORM_SPECIFIC_LIB_OBJS) + $(LIBRARY_LINK)$@ $(LIBRARY_LINK_OPTS) \ + $(LIVEMEDIA_LIB_OBJS) + +Media.$(CPP): include/Media.hh +include/Media.hh: include/liveMedia_version.hh +MediaSource.$(CPP): include/MediaSource.hh +include/MediaSource.hh: include/Media.hh +FramedSource.$(CPP): include/FramedSource.hh +include/FramedSource.hh: include/MediaSource.hh +FramedFileSource.$(CPP): include/FramedFileSource.hh +include/FramedFileSource.hh: include/FramedSource.hh +FramedFilter.$(CPP): include/FramedFilter.hh +include/FramedFilter.hh: include/FramedSource.hh +RTPSource.$(CPP): include/RTPSource.hh +include/RTPSource.hh: include/FramedSource.hh include/RTPInterface.hh +include/RTPInterface.hh: include/Media.hh +MultiFramedRTPSource.$(CPP): include/MultiFramedRTPSource.hh include/RTCP.hh +include/MultiFramedRTPSource.hh: include/RTPSource.hh +SimpleRTPSource.$(CPP): include/SimpleRTPSource.hh +include/SimpleRTPSource.hh: include/MultiFramedRTPSource.hh +H261VideoRTPSource.$(CPP): 
include/H261VideoRTPSource.hh +include/H261VideoRTPSource.hh: include/MultiFramedRTPSource.hh +H264VideoRTPSource.$(CPP): include/H264VideoRTPSource.hh include/Base64.hh +include/H264VideoRTPSource.hh: include/MultiFramedRTPSource.hh +H265VideoRTPSource.$(CPP): include/H265VideoRTPSource.hh +include/H265VideoRTPSource.hh: include/MultiFramedRTPSource.hh +QCELPAudioRTPSource.$(CPP): include/QCELPAudioRTPSource.hh include/MultiFramedRTPSource.hh include/FramedFilter.hh +include/QCELPAudioRTPSource.hh: include/RTPSource.hh +AMRAudioRTPSource.$(CPP): include/AMRAudioRTPSource.hh include/MultiFramedRTPSource.hh +include/AMRAudioRTPSource.hh: include/RTPSource.hh include/AMRAudioSource.hh +JPEGVideoRTPSource.$(CPP): include/JPEGVideoRTPSource.hh +include/JPEGVideoRTPSource.hh: include/MultiFramedRTPSource.hh +VorbisAudioRTPSource.$(CPP): include/VorbisAudioRTPSource.hh include/Base64.hh +include/VorbisAudioRTPSource.hh: include/MultiFramedRTPSource.hh +TheoraVideoRTPSource.$(CPP): include/TheoraVideoRTPSource.hh +include/TheoraVideoRTPSource.hh: include/MultiFramedRTPSource.hh +VP8VideoRTPSource.$(CPP): include/VP8VideoRTPSource.hh +include/VP8VideoRTPSource.hh: include/MultiFramedRTPSource.hh +ByteStreamFileSource.$(CPP): include/ByteStreamFileSource.hh include/InputFile.hh +include/ByteStreamFileSource.hh: include/FramedFileSource.hh +ByteStreamMultiFileSource.$(CPP): include/ByteStreamMultiFileSource.hh +include/ByteStreamMultiFileSource.hh: include/ByteStreamFileSource.hh +ByteStreamMemoryBufferSource.$(CPP): include/ByteStreamMemoryBufferSource.hh +include/ByteStreamMemoryBufferSource.hh: include/FramedSource.hh +BasicUDPSource.$(CPP): include/BasicUDPSource.hh +include/BasicUDPSource.hh: include/FramedSource.hh +DeviceSource.$(CPP): include/DeviceSource.hh +include/DeviceSource.hh: include/FramedSource.hh +AudioInputDevice.$(CPP): include/AudioInputDevice.hh +include/AudioInputDevice.hh: include/FramedSource.hh +WAVAudioFileSource.$(CPP): 
include/WAVAudioFileSource.hh include/InputFile.hh +include/WAVAudioFileSource.hh: include/AudioInputDevice.hh +MPEG1or2Demux.$(CPP): include/MPEG1or2Demux.hh include/MPEG1or2DemuxedElementaryStream.hh StreamParser.hh +include/MPEG1or2Demux.hh: include/FramedSource.hh +include/MPEG1or2DemuxedElementaryStream.hh: include/MPEG1or2Demux.hh +StreamParser.hh: include/FramedSource.hh +MPEG1or2DemuxedElementaryStream.$(CPP): include/MPEG1or2DemuxedElementaryStream.hh +MPEGVideoStreamFramer.$(CPP): MPEGVideoStreamParser.hh +MPEGVideoStreamParser.hh: StreamParser.hh include/MPEGVideoStreamFramer.hh +include/MPEGVideoStreamFramer.hh: include/FramedFilter.hh +MPEG1or2VideoStreamFramer.$(CPP): include/MPEG1or2VideoStreamFramer.hh MPEGVideoStreamParser.hh +include/MPEG1or2VideoStreamFramer.hh: include/MPEGVideoStreamFramer.hh +MPEG1or2VideoStreamDiscreteFramer.$(CPP): include/MPEG1or2VideoStreamDiscreteFramer.hh +include/MPEG1or2VideoStreamDiscreteFramer.hh: include/MPEG1or2VideoStreamFramer.hh +MPEG4VideoStreamFramer.$(CPP): include/MPEG4VideoStreamFramer.hh MPEGVideoStreamParser.hh include/MPEG4LATMAudioRTPSource.hh +include/MPEG4VideoStreamFramer.hh: include/MPEGVideoStreamFramer.hh +MPEG4VideoStreamDiscreteFramer.$(CPP): include/MPEG4VideoStreamDiscreteFramer.hh +include/MPEG4VideoStreamDiscreteFramer.hh: include/MPEG4VideoStreamFramer.hh +H264or5VideoStreamFramer.$(CPP): include/H264or5VideoStreamFramer.hh MPEGVideoStreamParser.hh include/BitVector.hh +include/H264or5VideoStreamFramer.hh: include/MPEGVideoStreamFramer.hh +H264or5VideoStreamDiscreteFramer.$(CPP): include/H264or5VideoStreamDiscreteFramer.hh +include/H264or5VideoStreamDiscreteFramer.hh: include/H264or5VideoStreamFramer.hh +H264VideoStreamFramer.$(CPP): include/H264VideoStreamFramer.hh +include/H264VideoStreamFramer.hh: include/H264or5VideoStreamFramer.hh +H264VideoStreamDiscreteFramer.$(CPP): include/H264VideoStreamDiscreteFramer.hh +include/H264VideoStreamDiscreteFramer.hh: include/H264VideoStreamFramer.hh 
+H265VideoStreamFramer.$(CPP): include/H265VideoStreamFramer.hh +include/H265VideoStreamFramer.hh: include/H264or5VideoStreamFramer.hh +H265VideoStreamDiscreteFramer.$(CPP): include/H265VideoStreamDiscreteFramer.hh +include/H265VideoStreamDiscreteFramer.hh: include/H265VideoStreamFramer.hh +MPEGVideoStreamParser.$(CPP): MPEGVideoStreamParser.hh +MPEG1or2AudioStreamFramer.$(CPP): include/MPEG1or2AudioStreamFramer.hh StreamParser.hh MP3Internals.hh +include/MPEG1or2AudioStreamFramer.hh: include/FramedFilter.hh +MPEG1or2AudioRTPSource.$(CPP): include/MPEG1or2AudioRTPSource.hh +include/MPEG1or2AudioRTPSource.hh: include/MultiFramedRTPSource.hh +MPEG4LATMAudioRTPSource.$(CPP): include/MPEG4LATMAudioRTPSource.hh +include/MPEG4LATMAudioRTPSource.hh: include/MultiFramedRTPSource.hh +MPEG4ESVideoRTPSource.$(CPP): include/MPEG4ESVideoRTPSource.hh +include/MPEG4ESVideoRTPSource.hh: include/MultiFramedRTPSource.hh +MPEG4GenericRTPSource.$(CPP): include/MPEG4GenericRTPSource.hh include/BitVector.hh include/MPEG4LATMAudioRTPSource.hh +include/MPEG4GenericRTPSource.hh: include/MultiFramedRTPSource.hh +MP3FileSource.$(CPP): include/MP3FileSource.hh MP3StreamState.hh include/InputFile.hh +include/MP3FileSource.hh: include/FramedFileSource.hh +MP3StreamState.hh: MP3Internals.hh +MP3Internals.hh: include/BitVector.hh +MP3Transcoder.$(CPP): include/MP3ADU.hh include/MP3Transcoder.hh +include/MP3ADU.hh: include/FramedFilter.hh +include/MP3Transcoder.hh: include/MP3ADU.hh include/MP3ADUTranscoder.hh +include/MP3ADUTranscoder.hh: include/FramedFilter.hh +MP3ADU.$(CPP): include/MP3ADU.hh MP3ADUdescriptor.hh MP3Internals.hh +MP3ADUdescriptor.$(CPP): MP3ADUdescriptor.hh +MP3ADUinterleaving.$(CPP): include/MP3ADUinterleaving.hh MP3ADUdescriptor.hh +include/MP3ADUinterleaving.hh: include/FramedFilter.hh +MP3ADUTranscoder.$(CPP): include/MP3ADUTranscoder.hh MP3Internals.hh +MP3StreamState.$(CPP): MP3StreamState.hh include/InputFile.hh +MP3Internals.$(CPP): MP3InternalsHuffman.hh 
+MP3InternalsHuffman.hh: MP3Internals.hh +MP3InternalsHuffman.$(CPP): MP3InternalsHuffman.hh +MP3InternalsHuffmanTable.$(CPP): MP3InternalsHuffman.hh +MP3ADURTPSource.$(CPP): include/MP3ADURTPSource.hh MP3ADUdescriptor.hh +include/MP3ADURTPSource.hh: include/MultiFramedRTPSource.hh +MPEG1or2VideoRTPSource.$(CPP): include/MPEG1or2VideoRTPSource.hh +include/MPEG1or2VideoRTPSource.hh: include/MultiFramedRTPSource.hh +MPEG2TransportStreamMultiplexor.$(CPP): include/MPEG2TransportStreamMultiplexor.hh +include/MPEG2TransportStreamMultiplexor.hh: include/FramedSource.hh include/MPEG1or2Demux.hh +MPEG2TransportStreamFromPESSource.$(CPP): include/MPEG2TransportStreamFromPESSource.hh +include/MPEG2TransportStreamFromPESSource.hh: include/MPEG2TransportStreamMultiplexor.hh include/MPEG1or2DemuxedElementaryStream.hh +MPEG2TransportStreamFromESSource.$(CPP): include/MPEG2TransportStreamFromESSource.hh +include/MPEG2TransportStreamFromESSource.hh: include/MPEG2TransportStreamMultiplexor.hh +MPEG2TransportStreamFramer.$(CPP): include/MPEG2TransportStreamFramer.hh +include/MPEG2TransportStreamFramer.hh: include/FramedFilter.hh include/MPEG2TransportStreamIndexFile.hh +ADTSAudioFileSource.$(CPP): include/ADTSAudioFileSource.hh include/InputFile.hh +include/ADTSAudioFileSource.hh: include/FramedFileSource.hh +H263plusVideoRTPSource.$(CPP): include/H263plusVideoRTPSource.hh +include/H263plusVideoRTPSource.hh: include/MultiFramedRTPSource.hh +H263plusVideoStreamFramer.$(CPP): include/H263plusVideoStreamFramer.hh H263plusVideoStreamParser.hh +include/H263plusVideoStreamFramer.hh: include/FramedFilter.hh +H263plusVideoStreamParser.hh: StreamParser.hh +H263plusVideoStreamParser.$(CPP): H263plusVideoStreamParser.hh include/H263plusVideoStreamFramer.hh +AC3AudioStreamFramer.$(CPP): include/AC3AudioStreamFramer.hh StreamParser.hh +include/AC3AudioStreamFramer.hh: include/FramedFilter.hh +AC3AudioRTPSource.$(CPP): include/AC3AudioRTPSource.hh +include/AC3AudioRTPSource.hh: 
include/MultiFramedRTPSource.hh +DVVideoRTPSource.$(CPP): include/DVVideoRTPSource.hh +include/DVVideoRTPSource.hh: include/MultiFramedRTPSource.hh +JPEGVideoSource.$(CPP): include/JPEGVideoSource.hh +include/JPEGVideoSource.hh: include/FramedSource.hh +AMRAudioSource.$(CPP): include/AMRAudioSource.hh +include/AMRAudioSource.hh: include/FramedSource.hh +AMRAudioFileSource.$(CPP): include/AMRAudioFileSource.hh include/InputFile.hh +include/AMRAudioFileSource.hh: include/AMRAudioSource.hh +InputFile.$(CPP): include/InputFile.hh +StreamReplicator.$(CPP): include/StreamReplicator.hh +include/StreamReplicator.hh: include/FramedSource.hh +MediaSink.$(CPP): include/MediaSink.hh +include/MediaSink.hh: include/FramedSource.hh +FileSink.$(CPP): include/FileSink.hh include/OutputFile.hh +include/FileSink.hh: include/MediaSink.hh +BasicUDPSink.$(CPP): include/BasicUDPSink.hh +include/BasicUDPSink.hh: include/MediaSink.hh +AMRAudioFileSink.$(CPP): include/AMRAudioFileSink.hh include/AMRAudioSource.hh include/OutputFile.hh +include/AMRAudioFileSink.hh: include/FileSink.hh +H264or5VideoFileSink.$(CPP): include/H264or5VideoFileSink.hh include/H264VideoRTPSource.hh +include/H264or5VideoFileSink.hh: include/FileSink.hh +H264VideoFileSink.$(CPP): include/H264VideoFileSink.hh include/OutputFile.hh +include/H264VideoFileSink.hh: include/H264or5VideoFileSink.hh +H265VideoFileSink.$(CPP): include/H265VideoFileSink.hh include/OutputFile.hh +include/H265VideoFileSink.hh: include/H264or5VideoFileSink.hh +OggFileSink.$(CPP): include/OggFileSink.hh include/OutputFile.hh include/VorbisAudioRTPSource.hh include/MPEG2TransportStreamMultiplexor.hh include/FramedSource.hh +include/OggFileSink.hh: include/FileSink.hh +RTPSink.$(CPP): include/RTPSink.hh +include/RTPSink.hh: include/MediaSink.hh include/RTPInterface.hh +MultiFramedRTPSink.$(CPP): include/MultiFramedRTPSink.hh +include/MultiFramedRTPSink.hh: include/RTPSink.hh +AudioRTPSink.$(CPP): include/AudioRTPSink.hh +include/AudioRTPSink.hh: 
include/MultiFramedRTPSink.hh +VideoRTPSink.$(CPP): include/VideoRTPSink.hh +include/VideoRTPSink.hh: include/MultiFramedRTPSink.hh +TextRTPSink.$(CPP): include/TextRTPSink.hh +include/TextRTPSink.hh: include/MultiFramedRTPSink.hh +RTPInterface.$(CPP): include/RTPInterface.hh +MPEG1or2AudioRTPSink.$(CPP): include/MPEG1or2AudioRTPSink.hh +include/MPEG1or2AudioRTPSink.hh: include/AudioRTPSink.hh +MP3ADURTPSink.$(CPP): include/MP3ADURTPSink.hh +include/MP3ADURTPSink.hh: include/AudioRTPSink.hh +MPEG1or2VideoRTPSink.$(CPP): include/MPEG1or2VideoRTPSink.hh include/MPEG1or2VideoStreamFramer.hh +include/MPEG1or2VideoRTPSink.hh: include/VideoRTPSink.hh +MPEG4LATMAudioRTPSink.$(CPP): include/MPEG4LATMAudioRTPSink.hh +include/MPEG4LATMAudioRTPSink.hh: include/AudioRTPSink.hh +MPEG4GenericRTPSink.$(CPP): include/MPEG4GenericRTPSink.hh include/Locale.hh +include/MPEG4GenericRTPSink.hh: include/MultiFramedRTPSink.hh +MPEG4ESVideoRTPSink.$(CPP): include/MPEG4ESVideoRTPSink.hh include/MPEG4VideoStreamFramer.hh include/MPEG4LATMAudioRTPSource.hh +include/MPEG4ESVideoRTPSink.hh: include/VideoRTPSink.hh +H263plusVideoRTPSink.$(CPP): include/H263plusVideoRTPSink.hh +include/H263plusVideoRTPSink.hh: include/VideoRTPSink.hh +H264or5VideoRTPSink.$(CPP): include/H264or5VideoRTPSink.hh include/H264or5VideoStreamFramer.hh +include/H264or5VideoRTPSink.hh: include/VideoRTPSink.hh include/FramedFilter.hh +H264VideoRTPSink.$(CPP): include/H264VideoRTPSink.hh include/H264VideoStreamFramer.hh include/Base64.hh include/H264VideoRTPSource.hh +include/H264VideoRTPSink.hh: include/H264or5VideoRTPSink.hh +H265VideoRTPSink.$(CPP): include/H265VideoRTPSink.hh include/H265VideoStreamFramer.hh include/Base64.hh include/BitVector.hh include/H264VideoRTPSource.hh +include/H265VideoRTPSink.hh: include/H264or5VideoRTPSink.hh +DVVideoRTPSink.$(CPP): include/DVVideoRTPSink.hh +include/DVVideoRTPSink.hh: include/VideoRTPSink.hh include/DVVideoStreamFramer.hh +include/DVVideoStreamFramer.hh: 
include/FramedFilter.hh +AC3AudioRTPSink.$(CPP): include/AC3AudioRTPSink.hh +include/AC3AudioRTPSink.hh: include/AudioRTPSink.hh +VorbisAudioRTPSink.$(CPP): include/VorbisAudioRTPSink.hh include/Base64.hh include/VorbisAudioRTPSource.hh +include/VorbisAudioRTPSink.hh: include/AudioRTPSink.hh +TheoraVideoRTPSink.$(CPP): include/TheoraVideoRTPSink.hh include/Base64.hh include/VorbisAudioRTPSource.hh include/VorbisAudioRTPSink.hh +include/TheoraVideoRTPSink.hh: include/VideoRTPSink.hh +VP8VideoRTPSink.$(CPP): include/VP8VideoRTPSink.hh +include/VP8VideoRTPSink.hh: include/VideoRTPSink.hh +GSMAudioRTPSink.$(CPP): include/GSMAudioRTPSink.hh +include/GSMAudioRTPSink.hh: include/AudioRTPSink.hh +JPEGVideoRTPSink.$(CPP): include/JPEGVideoRTPSink.hh include/JPEGVideoSource.hh +include/JPEGVideoRTPSink.hh: include/VideoRTPSink.hh +SimpleRTPSink.$(CPP): include/SimpleRTPSink.hh +include/SimpleRTPSink.hh: include/MultiFramedRTPSink.hh +AMRAudioRTPSink.$(CPP): include/AMRAudioRTPSink.hh include/AMRAudioSource.hh +include/AMRAudioRTPSink.hh: include/AudioRTPSink.hh +T140TextRTPSink.$(CPP): include/T140TextRTPSink.hh +include/T140TextRTPSink.hh: include/TextRTPSink.hh include/FramedFilter.hh +TCPStreamSink.$(CPP): include/TCPStreamSink.hh include/RTSPCommon.hh +include/TCPStreamSink.hh: include/MediaSink.hh +OutputFile.$(CPP): include/OutputFile.hh +uLawAudioFilter.$(CPP): include/uLawAudioFilter.hh +include/uLawAudioFilter.hh: include/FramedFilter.hh +MPEG2IndexFromTransportStream.$(CPP): include/MPEG2IndexFromTransportStream.hh +include/MPEG2IndexFromTransportStream.hh: include/FramedFilter.hh +MPEG2TransportStreamIndexFile.$(CPP): include/MPEG2TransportStreamIndexFile.hh include/InputFile.hh +include/MPEG2TransportStreamIndexFile.hh: include/Media.hh +MPEG2TransportStreamTrickModeFilter.$(CPP): include/MPEG2TransportStreamTrickModeFilter.hh include/ByteStreamFileSource.hh +include/MPEG2TransportStreamTrickModeFilter.hh: include/FramedFilter.hh 
include/MPEG2TransportStreamIndexFile.hh +RTCP.$(CPP): include/RTCP.hh rtcp_from_spec.h +include/RTCP.hh: include/RTPSink.hh include/RTPSource.hh +rtcp_from_spec.$(C): rtcp_from_spec.h +RTSPServer.$(CPP): include/RTSPServer.hh include/RTSPCommon.hh include/RTSPRegisterSender.hh include/ProxyServerMediaSession.hh include/Base64.hh +include/RTSPServer.hh: include/ServerMediaSession.hh include/DigestAuthentication.hh include/RTSPCommon.hh +include/ServerMediaSession.hh: include/Media.hh include/FramedSource.hh include/RTPInterface.hh +RTSPClient.$(CPP): include/RTSPClient.hh include/RTSPCommon.hh include/Base64.hh include/Locale.hh include/ourMD5.hh +include/RTSPClient.hh: include/MediaSession.hh include/DigestAuthentication.hh +RTSPCommon.$(CPP): include/RTSPCommon.hh include/Locale.hh +RTSPServerSupportingHTTPStreaming.$(CPP): include/RTSPServerSupportingHTTPStreaming.hh include/RTSPCommon.hh +include/RTSPServerSupportingHTTPStreaming.hh: include/RTSPServer.hh include/ByteStreamMemoryBufferSource.hh include/TCPStreamSink.hh +RTSPRegisterSender.$(CPP): include/RTSPRegisterSender.hh +include/RTSPRegisterSender.hh: include/RTSPClient.hh +SIPClient.$(CPP): include/SIPClient.hh +include/SIPClient.hh: include/MediaSession.hh include/DigestAuthentication.hh +MediaSession.$(CPP): include/liveMedia.hh include/Locale.hh +include/MediaSession.hh: include/RTCP.hh include/FramedFilter.hh +ServerMediaSession.$(CPP): include/ServerMediaSession.hh +PassiveServerMediaSubsession.$(CPP): include/PassiveServerMediaSubsession.hh +include/PassiveServerMediaSubsession.hh: include/ServerMediaSession.hh include/RTPSink.hh include/RTCP.hh +OnDemandServerMediaSubsession.$(CPP): include/OnDemandServerMediaSubsession.hh +include/OnDemandServerMediaSubsession.hh: include/ServerMediaSession.hh include/RTPSink.hh include/BasicUDPSink.hh include/RTCP.hh +FileServerMediaSubsession.$(CPP): include/FileServerMediaSubsession.hh +include/FileServerMediaSubsession.hh: 
include/OnDemandServerMediaSubsession.hh +MPEG4VideoFileServerMediaSubsession.$(CPP): include/MPEG4VideoFileServerMediaSubsession.hh include/MPEG4ESVideoRTPSink.hh include/ByteStreamFileSource.hh include/MPEG4VideoStreamFramer.hh +include/MPEG4VideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +H264VideoFileServerMediaSubsession.$(CPP): include/H264VideoFileServerMediaSubsession.hh include/H264VideoRTPSink.hh include/ByteStreamFileSource.hh include/H264VideoStreamFramer.hh +include/H264VideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +H265VideoFileServerMediaSubsession.$(CPP): include/H265VideoFileServerMediaSubsession.hh include/H265VideoRTPSink.hh include/ByteStreamFileSource.hh include/H265VideoStreamFramer.hh +include/H265VideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +H263plusVideoFileServerMediaSubsession.$(CPP): include/H263plusVideoFileServerMediaSubsession.hh include/H263plusVideoRTPSink.hh include/ByteStreamFileSource.hh include/H263plusVideoStreamFramer.hh +include/H263plusVideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +WAVAudioFileServerMediaSubsession.$(CPP): include/WAVAudioFileServerMediaSubsession.hh include/WAVAudioFileSource.hh include/uLawAudioFilter.hh include/SimpleRTPSink.hh +include/WAVAudioFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +AMRAudioFileServerMediaSubsession.$(CPP): include/AMRAudioFileServerMediaSubsession.hh include/AMRAudioRTPSink.hh include/AMRAudioFileSource.hh +include/AMRAudioFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +MP3AudioFileServerMediaSubsession.$(CPP): include/MP3AudioFileServerMediaSubsession.hh include/MPEG1or2AudioRTPSink.hh include/MP3ADURTPSink.hh include/MP3FileSource.hh include/MP3ADU.hh +include/MP3AudioFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh include/MP3ADUinterleaving.hh +MPEG1or2VideoFileServerMediaSubsession.$(CPP): 
include/MPEG1or2VideoFileServerMediaSubsession.hh include/MPEG1or2VideoRTPSink.hh include/ByteStreamFileSource.hh include/MPEG1or2VideoStreamFramer.hh +include/MPEG1or2VideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +MPEG1or2FileServerDemux.$(CPP): include/MPEG1or2FileServerDemux.hh include/MPEG1or2DemuxedServerMediaSubsession.hh include/ByteStreamFileSource.hh +include/MPEG1or2FileServerDemux.hh: include/ServerMediaSession.hh include/MPEG1or2DemuxedElementaryStream.hh +MPEG1or2DemuxedServerMediaSubsession.$(CPP): include/MPEG1or2DemuxedServerMediaSubsession.hh include/MPEG1or2AudioStreamFramer.hh include/MPEG1or2AudioRTPSink.hh include/MPEG1or2VideoStreamFramer.hh include/MPEG1or2VideoRTPSink.hh include/AC3AudioStreamFramer.hh include/AC3AudioRTPSink.hh include/ByteStreamFileSource.hh +include/MPEG1or2DemuxedServerMediaSubsession.hh: include/OnDemandServerMediaSubsession.hh include/MPEG1or2FileServerDemux.hh +MPEG2TransportFileServerMediaSubsession.$(CPP): include/MPEG2TransportFileServerMediaSubsession.hh include/SimpleRTPSink.hh +include/MPEG2TransportFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh include/MPEG2TransportStreamFramer.hh include/ByteStreamFileSource.hh include/MPEG2TransportStreamTrickModeFilter.hh include/MPEG2TransportStreamFromESSource.hh +ADTSAudioFileServerMediaSubsession.$(CPP): include/ADTSAudioFileServerMediaSubsession.hh include/ADTSAudioFileSource.hh include/MPEG4GenericRTPSink.hh +include/ADTSAudioFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +DVVideoFileServerMediaSubsession.$(CPP): include/DVVideoFileServerMediaSubsession.hh include/DVVideoRTPSink.hh include/ByteStreamFileSource.hh include/DVVideoStreamFramer.hh +include/DVVideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +AC3AudioFileServerMediaSubsession.$(CPP): include/AC3AudioFileServerMediaSubsession.hh include/AC3AudioRTPSink.hh include/ByteStreamFileSource.hh include/AC3AudioStreamFramer.hh 
+include/AC3AudioFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +MPEG2TransportUDPServerMediaSubsession.$(CPP): include/MPEG2TransportUDPServerMediaSubsession.hh include/BasicUDPSource.hh include/SimpleRTPSource.hh include/MPEG2TransportStreamFramer.hh include/SimpleRTPSink.hh +include/MPEG2TransportUDPServerMediaSubsession.hh: include/OnDemandServerMediaSubsession.hh +ProxyServerMediaSession.$(CPP): include/liveMedia.hh include/RTSPCommon.hh +include/ProxyServerMediaSession.hh: include/ServerMediaSession.hh include/MediaSession.hh include/RTSPClient.hh +QuickTimeFileSink.$(CPP): include/QuickTimeFileSink.hh include/InputFile.hh include/OutputFile.hh include/QuickTimeGenericRTPSource.hh include/H263plusVideoRTPSource.hh include/MPEG4GenericRTPSource.hh include/MPEG4LATMAudioRTPSource.hh +include/QuickTimeFileSink.hh: include/MediaSession.hh +QuickTimeGenericRTPSource.$(CPP): include/QuickTimeGenericRTPSource.hh +include/QuickTimeGenericRTPSource.hh: include/MultiFramedRTPSource.hh +AVIFileSink.$(CPP): include/AVIFileSink.hh include/InputFile.hh include/OutputFile.hh +include/AVIFileSink.hh: include/MediaSession.hh +MatroskaFile.$(CPP): MatroskaFileParser.hh MatroskaDemuxedTrack.hh include/ByteStreamFileSource.hh include/H264VideoStreamDiscreteFramer.hh include/H265VideoStreamDiscreteFramer.hh include/MPEG1or2AudioRTPSink.hh include/MPEG4GenericRTPSink.hh include/AC3AudioRTPSink.hh include/SimpleRTPSink.hh include/VorbisAudioRTPSink.hh include/H264VideoRTPSink.hh include/H265VideoRTPSink.hh include/VP8VideoRTPSink.hh include/T140TextRTPSink.hh +MatroskaFileParser.hh: StreamParser.hh include/MatroskaFile.hh EBMLNumber.hh +include/MatroskaFile.hh: include/RTPSink.hh +MatroskaDemuxedTrack.hh: include/FramedSource.hh +MatroskaFileParser.$(CPP): MatroskaFileParser.hh MatroskaDemuxedTrack.hh include/ByteStreamFileSource.hh +EBMLNumber.$(CPP): EBMLNumber.hh +MatroskaDemuxedTrack.$(CPP): MatroskaDemuxedTrack.hh include/MatroskaFile.hh 
+MatroskaFileServerMediaSubsession.$(CPP): MatroskaFileServerMediaSubsession.hh MatroskaDemuxedTrack.hh include/FramedFilter.hh +MatroskaFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh include/MatroskaFileServerDemux.hh +MP3AudioMatroskaFileServerMediaSubsession.$(CPP): MP3AudioMatroskaFileServerMediaSubsession.hh MatroskaDemuxedTrack.hh +MP3AudioMatroskaFileServerMediaSubsession.hh: include/MP3AudioFileServerMediaSubsession.hh include/MatroskaFileServerDemux.hh +MatroskaFileServerDemux.$(CPP): include/MatroskaFileServerDemux.hh MP3AudioMatroskaFileServerMediaSubsession.hh MatroskaFileServerMediaSubsession.hh +include/MatroskaFileServerDemux.hh: include/ServerMediaSession.hh include/MatroskaFile.hh +OggFile.$(CPP): OggFileParser.hh OggDemuxedTrack.hh include/ByteStreamFileSource.hh include/VorbisAudioRTPSink.hh include/SimpleRTPSink.hh include/TheoraVideoRTPSink.hh +OggFileParser.hh: StreamParser.hh include/OggFile.hh +include/OggFile.hh: include/RTPSink.hh +OggDemuxedTrack.hh: include/FramedSource.hh +OggFileParser.$(CPP): OggFileParser.hh OggDemuxedTrack.hh +OggDemuxedTrack.$(CPP): OggDemuxedTrack.hh include/OggFile.hh +OggFileServerMediaSubsession.$(CPP): OggFileServerMediaSubsession.hh OggDemuxedTrack.hh include/FramedFilter.hh +OggFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh include/OggFileServerDemux.hh +OggFileServerDemux.$(CPP): include/OggFileServerDemux.hh OggFileServerMediaSubsession.hh +include/OggFileServerDemux.hh: include/ServerMediaSession.hh include/OggFile.hh +DarwinInjector.$(CPP): include/DarwinInjector.hh +include/DarwinInjector.hh: include/RTSPClient.hh include/RTCP.hh +BitVector.$(CPP): include/BitVector.hh +StreamParser.$(CPP): StreamParser.hh +DigestAuthentication.$(CPP): include/DigestAuthentication.hh include/ourMD5.hh +ourMD5.$(CPP): include/ourMD5.hh +Base64.$(CPP): include/Base64.hh +Locale.$(CPP): include/Locale.hh + +include/liveMedia.hh:: include/MPEG1or2AudioRTPSink.hh 
include/MP3ADURTPSink.hh include/MPEG1or2VideoRTPSink.hh include/MPEG4ESVideoRTPSink.hh include/BasicUDPSink.hh include/AMRAudioFileSink.hh include/H264VideoFileSink.hh include/H265VideoFileSink.hh include/OggFileSink.hh include/GSMAudioRTPSink.hh include/H263plusVideoRTPSink.hh include/H264VideoRTPSink.hh include/H265VideoRTPSink.hh include/DVVideoRTPSource.hh include/DVVideoRTPSink.hh include/DVVideoStreamFramer.hh include/H264VideoStreamFramer.hh include/H265VideoStreamFramer.hh include/H264VideoStreamDiscreteFramer.hh include/H265VideoStreamDiscreteFramer.hh include/JPEGVideoRTPSink.hh include/SimpleRTPSink.hh include/uLawAudioFilter.hh include/MPEG2IndexFromTransportStream.hh include/MPEG2TransportStreamTrickModeFilter.hh include/ByteStreamMultiFileSource.hh include/ByteStreamMemoryBufferSource.hh include/BasicUDPSource.hh include/SimpleRTPSource.hh include/MPEG1or2AudioRTPSource.hh include/MPEG4LATMAudioRTPSource.hh include/MPEG4LATMAudioRTPSink.hh include/MPEG4ESVideoRTPSource.hh include/MPEG4GenericRTPSource.hh include/MP3ADURTPSource.hh include/QCELPAudioRTPSource.hh include/AMRAudioRTPSource.hh include/JPEGVideoRTPSource.hh include/JPEGVideoSource.hh include/MPEG1or2VideoRTPSource.hh include/VorbisAudioRTPSource.hh include/TheoraVideoRTPSource.hh include/VP8VideoRTPSource.hh + +include/liveMedia.hh:: include/MPEG2TransportStreamFromPESSource.hh include/MPEG2TransportStreamFromESSource.hh include/MPEG2TransportStreamFramer.hh include/ADTSAudioFileSource.hh include/H261VideoRTPSource.hh include/H263plusVideoRTPSource.hh include/H264VideoRTPSource.hh include/H265VideoRTPSource.hh include/MP3FileSource.hh include/MP3ADU.hh include/MP3ADUinterleaving.hh include/MP3Transcoder.hh include/MPEG1or2DemuxedElementaryStream.hh include/MPEG1or2AudioStreamFramer.hh include/MPEG1or2VideoStreamDiscreteFramer.hh include/MPEG4VideoStreamDiscreteFramer.hh include/H263plusVideoStreamFramer.hh include/AC3AudioStreamFramer.hh include/AC3AudioRTPSource.hh 
include/AC3AudioRTPSink.hh include/VorbisAudioRTPSink.hh include/TheoraVideoRTPSink.hh include/VP8VideoRTPSink.hh include/MPEG4GenericRTPSink.hh include/DeviceSource.hh include/AudioInputDevice.hh include/WAVAudioFileSource.hh include/StreamReplicator.hh include/RTSPRegisterSender.hh + +include/liveMedia.hh:: include/RTSPServerSupportingHTTPStreaming.hh include/RTSPClient.hh include/SIPClient.hh include/QuickTimeFileSink.hh include/QuickTimeGenericRTPSource.hh include/AVIFileSink.hh include/PassiveServerMediaSubsession.hh include/MPEG4VideoFileServerMediaSubsession.hh include/H264VideoFileServerMediaSubsession.hh include/H265VideoFileServerMediaSubsession.hh include/WAVAudioFileServerMediaSubsession.hh include/AMRAudioFileServerMediaSubsession.hh include/AMRAudioFileSource.hh include/AMRAudioRTPSink.hh include/T140TextRTPSink.hh include/TCPStreamSink.hh include/MP3AudioFileServerMediaSubsession.hh include/MPEG1or2VideoFileServerMediaSubsession.hh include/MPEG1or2FileServerDemux.hh include/MPEG2TransportFileServerMediaSubsession.hh include/H263plusVideoFileServerMediaSubsession.hh include/ADTSAudioFileServerMediaSubsession.hh include/DVVideoFileServerMediaSubsession.hh include/AC3AudioFileServerMediaSubsession.hh include/MPEG2TransportUDPServerMediaSubsession.hh include/MatroskaFileServerDemux.hh include/OggFileServerDemux.hh include/ProxyServerMediaSession.hh include/DarwinInjector.hh + +clean: + -rm -rf *.$(OBJ) $(ALL) core *.core *~ include/*~ + +install: install1 $(INSTALL2) +install1: $(LIVEMEDIA_LIB) + install -d $(DESTDIR)$(PREFIX)/include/liveMedia $(DESTDIR)$(LIBDIR) + install -m 644 include/*.hh $(DESTDIR)$(PREFIX)/include/liveMedia + install -m 644 $(LIVEMEDIA_LIB) $(DESTDIR)$(LIBDIR) +install_shared_libraries: $(LIVEMEDIA_LIB) + ln -s $(NAME).$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/$(NAME).$(SHORT_LIB_SUFFIX) + ln -s $(NAME).$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/$(NAME).so + +##### Any additional, platform-specific rules come here: diff --git 
a/AnyCore/lib_rtsp/liveMedia/Makefile.head b/AnyCore/lib_rtsp/liveMedia/Makefile.head new file mode 100644 index 0000000..f4e4414 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/Makefile.head @@ -0,0 +1,4 @@ +INCLUDES = -Iinclude -I../UsageEnvironment/include -I../groupsock/include +PREFIX = /usr/local +LIBDIR = $(PREFIX)/lib +##### Change the following for your environment: diff --git a/AnyCore/lib_rtsp/liveMedia/Makefile.tail b/AnyCore/lib_rtsp/liveMedia/Makefile.tail new file mode 100644 index 0000000..c9067ea --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/Makefile.tail @@ -0,0 +1,395 @@ +##### End of variables to change + +NAME = libliveMedia +LIVEMEDIA_LIB = $(NAME).$(LIB_SUFFIX) +ALL = $(LIVEMEDIA_LIB) +all: $(ALL) + +.$(C).$(OBJ): + $(C_COMPILER) -c $(C_FLAGS) $< +.$(CPP).$(OBJ): + $(CPLUSPLUS_COMPILER) -c $(CPLUSPLUS_FLAGS) $< + +MP3_SOURCE_OBJS = MP3FileSource.$(OBJ) MP3Transcoder.$(OBJ) MP3ADU.$(OBJ) MP3ADUdescriptor.$(OBJ) MP3ADUinterleaving.$(OBJ) MP3ADUTranscoder.$(OBJ) MP3StreamState.$(OBJ) MP3Internals.$(OBJ) MP3InternalsHuffman.$(OBJ) MP3InternalsHuffmanTable.$(OBJ) MP3ADURTPSource.$(OBJ) +MPEG_SOURCE_OBJS = MPEG1or2Demux.$(OBJ) MPEG1or2DemuxedElementaryStream.$(OBJ) MPEGVideoStreamFramer.$(OBJ) MPEG1or2VideoStreamFramer.$(OBJ) MPEG1or2VideoStreamDiscreteFramer.$(OBJ) MPEG4VideoStreamFramer.$(OBJ) MPEG4VideoStreamDiscreteFramer.$(OBJ) H264or5VideoStreamFramer.$(OBJ) H264or5VideoStreamDiscreteFramer.$(OBJ) H264VideoStreamFramer.$(OBJ) H264VideoStreamDiscreteFramer.$(OBJ) H265VideoStreamFramer.$(OBJ) H265VideoStreamDiscreteFramer.$(OBJ) MPEGVideoStreamParser.$(OBJ) MPEG1or2AudioStreamFramer.$(OBJ) MPEG1or2AudioRTPSource.$(OBJ) MPEG4LATMAudioRTPSource.$(OBJ) MPEG4ESVideoRTPSource.$(OBJ) MPEG4GenericRTPSource.$(OBJ) $(MP3_SOURCE_OBJS) MPEG1or2VideoRTPSource.$(OBJ) MPEG2TransportStreamMultiplexor.$(OBJ) MPEG2TransportStreamFromPESSource.$(OBJ) MPEG2TransportStreamFromESSource.$(OBJ) MPEG2TransportStreamFramer.$(OBJ) ADTSAudioFileSource.$(OBJ) 
+H263_SOURCE_OBJS = H263plusVideoRTPSource.$(OBJ) H263plusVideoStreamFramer.$(OBJ) H263plusVideoStreamParser.$(OBJ) +AC3_SOURCE_OBJS = AC3AudioStreamFramer.$(OBJ) AC3AudioRTPSource.$(OBJ) +DV_SOURCE_OBJS = DVVideoStreamFramer.$(OBJ) DVVideoRTPSource.$(OBJ) +MP3_SINK_OBJS = MP3ADURTPSink.$(OBJ) +MPEG_SINK_OBJS = MPEG1or2AudioRTPSink.$(OBJ) $(MP3_SINK_OBJS) MPEG1or2VideoRTPSink.$(OBJ) MPEG4LATMAudioRTPSink.$(OBJ) MPEG4GenericRTPSink.$(OBJ) MPEG4ESVideoRTPSink.$(OBJ) +H263_SINK_OBJS = H263plusVideoRTPSink.$(OBJ) +H264_OR_5_SINK_OBJS = H264or5VideoRTPSink.$(OBJ) H264VideoRTPSink.$(OBJ) H265VideoRTPSink.$(OBJ) +DV_SINK_OBJS = DVVideoRTPSink.$(OBJ) +AC3_SINK_OBJS = AC3AudioRTPSink.$(OBJ) + +MISC_SOURCE_OBJS = MediaSource.$(OBJ) FramedSource.$(OBJ) FramedFileSource.$(OBJ) FramedFilter.$(OBJ) ByteStreamFileSource.$(OBJ) ByteStreamMultiFileSource.$(OBJ) ByteStreamMemoryBufferSource.$(OBJ) BasicUDPSource.$(OBJ) DeviceSource.$(OBJ) AudioInputDevice.$(OBJ) WAVAudioFileSource.$(OBJ) $(MPEG_SOURCE_OBJS) $(H263_SOURCE_OBJS) $(AC3_SOURCE_OBJS) $(DV_SOURCE_OBJS) JPEGVideoSource.$(OBJ) AMRAudioSource.$(OBJ) AMRAudioFileSource.$(OBJ) InputFile.$(OBJ) StreamReplicator.$(OBJ) +MISC_SINK_OBJS = MediaSink.$(OBJ) FileSink.$(OBJ) BasicUDPSink.$(OBJ) AMRAudioFileSink.$(OBJ) H264or5VideoFileSink.$(OBJ) H264VideoFileSink.$(OBJ) H265VideoFileSink.$(OBJ) OggFileSink.$(OBJ) $(MPEG_SINK_OBJS) $(H263_SINK_OBJS) $(H264_OR_5_SINK_OBJS) $(DV_SINK_OBJS) $(AC3_SINK_OBJS) VorbisAudioRTPSink.$(OBJ) TheoraVideoRTPSink.$(OBJ) VP8VideoRTPSink.$(OBJ) GSMAudioRTPSink.$(OBJ) JPEGVideoRTPSink.$(OBJ) SimpleRTPSink.$(OBJ) AMRAudioRTPSink.$(OBJ) T140TextRTPSink.$(OBJ) TCPStreamSink.$(OBJ) OutputFile.$(OBJ) +MISC_FILTER_OBJS = uLawAudioFilter.$(OBJ) +TRANSPORT_STREAM_TRICK_PLAY_OBJS = MPEG2IndexFromTransportStream.$(OBJ) MPEG2TransportStreamIndexFile.$(OBJ) MPEG2TransportStreamTrickModeFilter.$(OBJ) + +RTP_SOURCE_OBJS = RTPSource.$(OBJ) MultiFramedRTPSource.$(OBJ) SimpleRTPSource.$(OBJ) H261VideoRTPSource.$(OBJ) 
H264VideoRTPSource.$(OBJ) H265VideoRTPSource.$(OBJ) QCELPAudioRTPSource.$(OBJ) AMRAudioRTPSource.$(OBJ) JPEGVideoRTPSource.$(OBJ) VorbisAudioRTPSource.$(OBJ) TheoraVideoRTPSource.$(OBJ) VP8VideoRTPSource.$(OBJ) +RTP_SINK_OBJS = RTPSink.$(OBJ) MultiFramedRTPSink.$(OBJ) AudioRTPSink.$(OBJ) VideoRTPSink.$(OBJ) TextRTPSink.$(OBJ) +RTP_INTERFACE_OBJS = RTPInterface.$(OBJ) +RTP_OBJS = $(RTP_SOURCE_OBJS) $(RTP_SINK_OBJS) $(RTP_INTERFACE_OBJS) + +RTCP_OBJS = RTCP.$(OBJ) rtcp_from_spec.$(OBJ) +RTSP_OBJS = RTSPServer.$(OBJ) RTSPClient.$(OBJ) RTSPCommon.$(OBJ) RTSPServerSupportingHTTPStreaming.$(OBJ) RTSPRegisterSender.$(OBJ) +SIP_OBJS = SIPClient.$(OBJ) + +SESSION_OBJS = MediaSession.$(OBJ) ServerMediaSession.$(OBJ) PassiveServerMediaSubsession.$(OBJ) OnDemandServerMediaSubsession.$(OBJ) FileServerMediaSubsession.$(OBJ) MPEG4VideoFileServerMediaSubsession.$(OBJ) H264VideoFileServerMediaSubsession.$(OBJ) H265VideoFileServerMediaSubsession.$(OBJ) H263plusVideoFileServerMediaSubsession.$(OBJ) WAVAudioFileServerMediaSubsession.$(OBJ) AMRAudioFileServerMediaSubsession.$(OBJ) MP3AudioFileServerMediaSubsession.$(OBJ) MPEG1or2VideoFileServerMediaSubsession.$(OBJ) MPEG1or2FileServerDemux.$(OBJ) MPEG1or2DemuxedServerMediaSubsession.$(OBJ) MPEG2TransportFileServerMediaSubsession.$(OBJ) ADTSAudioFileServerMediaSubsession.$(OBJ) DVVideoFileServerMediaSubsession.$(OBJ) AC3AudioFileServerMediaSubsession.$(OBJ) MPEG2TransportUDPServerMediaSubsession.$(OBJ) ProxyServerMediaSession.$(OBJ) + +QUICKTIME_OBJS = QuickTimeFileSink.$(OBJ) QuickTimeGenericRTPSource.$(OBJ) +AVI_OBJS = AVIFileSink.$(OBJ) + +MATROSKA_FILE_OBJS = MatroskaFile.$(OBJ) MatroskaFileParser.$(OBJ) EBMLNumber.$(OBJ) MatroskaDemuxedTrack.$(OBJ) +MATROSKA_SERVER_MEDIA_SUBSESSION_OBJS = MatroskaFileServerMediaSubsession.$(OBJ) MP3AudioMatroskaFileServerMediaSubsession.$(OBJ) +MATROSKA_RTSP_SERVER_OBJS = MatroskaFileServerDemux.$(OBJ) $(MATROSKA_SERVER_MEDIA_SUBSESSION_OBJS) +MATROSKA_OBJS = $(MATROSKA_FILE_OBJS) 
$(MATROSKA_RTSP_SERVER_OBJS) + +OGG_FILE_OBJS = OggFile.$(OBJ) OggFileParser.$(OBJ) OggDemuxedTrack.$(OBJ) +OGG_SERVER_MEDIA_SUBSESSION_OBJS = OggFileServerMediaSubsession.$(OBJ) +OGG_RTSP_SERVER_OBJS = OggFileServerDemux.$(OBJ) $(OGG_SERVER_MEDIA_SUBSESSION_OBJS) +OGG_OBJS = $(OGG_FILE_OBJS) $(OGG_RTSP_SERVER_OBJS) + +MISC_OBJS = DarwinInjector.$(OBJ) BitVector.$(OBJ) StreamParser.$(OBJ) DigestAuthentication.$(OBJ) ourMD5.$(OBJ) Base64.$(OBJ) Locale.$(OBJ) + +LIVEMEDIA_LIB_OBJS = Media.$(OBJ) $(MISC_SOURCE_OBJS) $(MISC_SINK_OBJS) $(MISC_FILTER_OBJS) $(RTP_OBJS) $(RTCP_OBJS) $(RTSP_OBJS) $(SIP_OBJS) $(SESSION_OBJS) $(QUICKTIME_OBJS) $(AVI_OBJS) $(TRANSPORT_STREAM_TRICK_PLAY_OBJS) $(MATROSKA_OBJS) $(OGG_OBJS) $(MISC_OBJS) + +$(LIVEMEDIA_LIB): $(LIVEMEDIA_LIB_OBJS) \ + $(PLATFORM_SPECIFIC_LIB_OBJS) + $(LIBRARY_LINK)$@ $(LIBRARY_LINK_OPTS) \ + $(LIVEMEDIA_LIB_OBJS) + +Media.$(CPP): include/Media.hh +include/Media.hh: include/liveMedia_version.hh +MediaSource.$(CPP): include/MediaSource.hh +include/MediaSource.hh: include/Media.hh +FramedSource.$(CPP): include/FramedSource.hh +include/FramedSource.hh: include/MediaSource.hh +FramedFileSource.$(CPP): include/FramedFileSource.hh +include/FramedFileSource.hh: include/FramedSource.hh +FramedFilter.$(CPP): include/FramedFilter.hh +include/FramedFilter.hh: include/FramedSource.hh +RTPSource.$(CPP): include/RTPSource.hh +include/RTPSource.hh: include/FramedSource.hh include/RTPInterface.hh +include/RTPInterface.hh: include/Media.hh +MultiFramedRTPSource.$(CPP): include/MultiFramedRTPSource.hh include/RTCP.hh +include/MultiFramedRTPSource.hh: include/RTPSource.hh +SimpleRTPSource.$(CPP): include/SimpleRTPSource.hh +include/SimpleRTPSource.hh: include/MultiFramedRTPSource.hh +H261VideoRTPSource.$(CPP): include/H261VideoRTPSource.hh +include/H261VideoRTPSource.hh: include/MultiFramedRTPSource.hh +H264VideoRTPSource.$(CPP): include/H264VideoRTPSource.hh include/Base64.hh +include/H264VideoRTPSource.hh: 
include/MultiFramedRTPSource.hh +H265VideoRTPSource.$(CPP): include/H265VideoRTPSource.hh +include/H265VideoRTPSource.hh: include/MultiFramedRTPSource.hh +QCELPAudioRTPSource.$(CPP): include/QCELPAudioRTPSource.hh include/MultiFramedRTPSource.hh include/FramedFilter.hh +include/QCELPAudioRTPSource.hh: include/RTPSource.hh +AMRAudioRTPSource.$(CPP): include/AMRAudioRTPSource.hh include/MultiFramedRTPSource.hh +include/AMRAudioRTPSource.hh: include/RTPSource.hh include/AMRAudioSource.hh +JPEGVideoRTPSource.$(CPP): include/JPEGVideoRTPSource.hh +include/JPEGVideoRTPSource.hh: include/MultiFramedRTPSource.hh +VorbisAudioRTPSource.$(CPP): include/VorbisAudioRTPSource.hh include/Base64.hh +include/VorbisAudioRTPSource.hh: include/MultiFramedRTPSource.hh +TheoraVideoRTPSource.$(CPP): include/TheoraVideoRTPSource.hh +include/TheoraVideoRTPSource.hh: include/MultiFramedRTPSource.hh +VP8VideoRTPSource.$(CPP): include/VP8VideoRTPSource.hh +include/VP8VideoRTPSource.hh: include/MultiFramedRTPSource.hh +ByteStreamFileSource.$(CPP): include/ByteStreamFileSource.hh include/InputFile.hh +include/ByteStreamFileSource.hh: include/FramedFileSource.hh +ByteStreamMultiFileSource.$(CPP): include/ByteStreamMultiFileSource.hh +include/ByteStreamMultiFileSource.hh: include/ByteStreamFileSource.hh +ByteStreamMemoryBufferSource.$(CPP): include/ByteStreamMemoryBufferSource.hh +include/ByteStreamMemoryBufferSource.hh: include/FramedSource.hh +BasicUDPSource.$(CPP): include/BasicUDPSource.hh +include/BasicUDPSource.hh: include/FramedSource.hh +DeviceSource.$(CPP): include/DeviceSource.hh +include/DeviceSource.hh: include/FramedSource.hh +AudioInputDevice.$(CPP): include/AudioInputDevice.hh +include/AudioInputDevice.hh: include/FramedSource.hh +WAVAudioFileSource.$(CPP): include/WAVAudioFileSource.hh include/InputFile.hh +include/WAVAudioFileSource.hh: include/AudioInputDevice.hh +MPEG1or2Demux.$(CPP): include/MPEG1or2Demux.hh include/MPEG1or2DemuxedElementaryStream.hh StreamParser.hh 
+include/MPEG1or2Demux.hh: include/FramedSource.hh +include/MPEG1or2DemuxedElementaryStream.hh: include/MPEG1or2Demux.hh +StreamParser.hh: include/FramedSource.hh +MPEG1or2DemuxedElementaryStream.$(CPP): include/MPEG1or2DemuxedElementaryStream.hh +MPEGVideoStreamFramer.$(CPP): MPEGVideoStreamParser.hh +MPEGVideoStreamParser.hh: StreamParser.hh include/MPEGVideoStreamFramer.hh +include/MPEGVideoStreamFramer.hh: include/FramedFilter.hh +MPEG1or2VideoStreamFramer.$(CPP): include/MPEG1or2VideoStreamFramer.hh MPEGVideoStreamParser.hh +include/MPEG1or2VideoStreamFramer.hh: include/MPEGVideoStreamFramer.hh +MPEG1or2VideoStreamDiscreteFramer.$(CPP): include/MPEG1or2VideoStreamDiscreteFramer.hh +include/MPEG1or2VideoStreamDiscreteFramer.hh: include/MPEG1or2VideoStreamFramer.hh +MPEG4VideoStreamFramer.$(CPP): include/MPEG4VideoStreamFramer.hh MPEGVideoStreamParser.hh include/MPEG4LATMAudioRTPSource.hh +include/MPEG4VideoStreamFramer.hh: include/MPEGVideoStreamFramer.hh +MPEG4VideoStreamDiscreteFramer.$(CPP): include/MPEG4VideoStreamDiscreteFramer.hh +include/MPEG4VideoStreamDiscreteFramer.hh: include/MPEG4VideoStreamFramer.hh +H264or5VideoStreamFramer.$(CPP): include/H264or5VideoStreamFramer.hh MPEGVideoStreamParser.hh include/BitVector.hh +include/H264or5VideoStreamFramer.hh: include/MPEGVideoStreamFramer.hh +H264or5VideoStreamDiscreteFramer.$(CPP): include/H264or5VideoStreamDiscreteFramer.hh +include/H264or5VideoStreamDiscreteFramer.hh: include/H264or5VideoStreamFramer.hh +H264VideoStreamFramer.$(CPP): include/H264VideoStreamFramer.hh +include/H264VideoStreamFramer.hh: include/H264or5VideoStreamFramer.hh +H264VideoStreamDiscreteFramer.$(CPP): include/H264VideoStreamDiscreteFramer.hh +include/H264VideoStreamDiscreteFramer.hh: include/H264VideoStreamFramer.hh +H265VideoStreamFramer.$(CPP): include/H265VideoStreamFramer.hh +include/H265VideoStreamFramer.hh: include/H264or5VideoStreamFramer.hh +H265VideoStreamDiscreteFramer.$(CPP): include/H265VideoStreamDiscreteFramer.hh 
+include/H265VideoStreamDiscreteFramer.hh: include/H265VideoStreamFramer.hh +MPEGVideoStreamParser.$(CPP): MPEGVideoStreamParser.hh +MPEG1or2AudioStreamFramer.$(CPP): include/MPEG1or2AudioStreamFramer.hh StreamParser.hh MP3Internals.hh +include/MPEG1or2AudioStreamFramer.hh: include/FramedFilter.hh +MPEG1or2AudioRTPSource.$(CPP): include/MPEG1or2AudioRTPSource.hh +include/MPEG1or2AudioRTPSource.hh: include/MultiFramedRTPSource.hh +MPEG4LATMAudioRTPSource.$(CPP): include/MPEG4LATMAudioRTPSource.hh +include/MPEG4LATMAudioRTPSource.hh: include/MultiFramedRTPSource.hh +MPEG4ESVideoRTPSource.$(CPP): include/MPEG4ESVideoRTPSource.hh +include/MPEG4ESVideoRTPSource.hh: include/MultiFramedRTPSource.hh +MPEG4GenericRTPSource.$(CPP): include/MPEG4GenericRTPSource.hh include/BitVector.hh include/MPEG4LATMAudioRTPSource.hh +include/MPEG4GenericRTPSource.hh: include/MultiFramedRTPSource.hh +MP3FileSource.$(CPP): include/MP3FileSource.hh MP3StreamState.hh include/InputFile.hh +include/MP3FileSource.hh: include/FramedFileSource.hh +MP3StreamState.hh: MP3Internals.hh +MP3Internals.hh: include/BitVector.hh +MP3Transcoder.$(CPP): include/MP3ADU.hh include/MP3Transcoder.hh +include/MP3ADU.hh: include/FramedFilter.hh +include/MP3Transcoder.hh: include/MP3ADU.hh include/MP3ADUTranscoder.hh +include/MP3ADUTranscoder.hh: include/FramedFilter.hh +MP3ADU.$(CPP): include/MP3ADU.hh MP3ADUdescriptor.hh MP3Internals.hh +MP3ADUdescriptor.$(CPP): MP3ADUdescriptor.hh +MP3ADUinterleaving.$(CPP): include/MP3ADUinterleaving.hh MP3ADUdescriptor.hh +include/MP3ADUinterleaving.hh: include/FramedFilter.hh +MP3ADUTranscoder.$(CPP): include/MP3ADUTranscoder.hh MP3Internals.hh +MP3StreamState.$(CPP): MP3StreamState.hh include/InputFile.hh +MP3Internals.$(CPP): MP3InternalsHuffman.hh +MP3InternalsHuffman.hh: MP3Internals.hh +MP3InternalsHuffman.$(CPP): MP3InternalsHuffman.hh +MP3InternalsHuffmanTable.$(CPP): MP3InternalsHuffman.hh +MP3ADURTPSource.$(CPP): include/MP3ADURTPSource.hh MP3ADUdescriptor.hh 
+include/MP3ADURTPSource.hh: include/MultiFramedRTPSource.hh +MPEG1or2VideoRTPSource.$(CPP): include/MPEG1or2VideoRTPSource.hh +include/MPEG1or2VideoRTPSource.hh: include/MultiFramedRTPSource.hh +MPEG2TransportStreamMultiplexor.$(CPP): include/MPEG2TransportStreamMultiplexor.hh +include/MPEG2TransportStreamMultiplexor.hh: include/FramedSource.hh include/MPEG1or2Demux.hh +MPEG2TransportStreamFromPESSource.$(CPP): include/MPEG2TransportStreamFromPESSource.hh +include/MPEG2TransportStreamFromPESSource.hh: include/MPEG2TransportStreamMultiplexor.hh include/MPEG1or2DemuxedElementaryStream.hh +MPEG2TransportStreamFromESSource.$(CPP): include/MPEG2TransportStreamFromESSource.hh +include/MPEG2TransportStreamFromESSource.hh: include/MPEG2TransportStreamMultiplexor.hh +MPEG2TransportStreamFramer.$(CPP): include/MPEG2TransportStreamFramer.hh +include/MPEG2TransportStreamFramer.hh: include/FramedFilter.hh include/MPEG2TransportStreamIndexFile.hh +ADTSAudioFileSource.$(CPP): include/ADTSAudioFileSource.hh include/InputFile.hh +include/ADTSAudioFileSource.hh: include/FramedFileSource.hh +H263plusVideoRTPSource.$(CPP): include/H263plusVideoRTPSource.hh +include/H263plusVideoRTPSource.hh: include/MultiFramedRTPSource.hh +H263plusVideoStreamFramer.$(CPP): include/H263plusVideoStreamFramer.hh H263plusVideoStreamParser.hh +include/H263plusVideoStreamFramer.hh: include/FramedFilter.hh +H263plusVideoStreamParser.hh: StreamParser.hh +H263plusVideoStreamParser.$(CPP): H263plusVideoStreamParser.hh include/H263plusVideoStreamFramer.hh +AC3AudioStreamFramer.$(CPP): include/AC3AudioStreamFramer.hh StreamParser.hh +include/AC3AudioStreamFramer.hh: include/FramedFilter.hh +AC3AudioRTPSource.$(CPP): include/AC3AudioRTPSource.hh +include/AC3AudioRTPSource.hh: include/MultiFramedRTPSource.hh +DVVideoRTPSource.$(CPP): include/DVVideoRTPSource.hh +include/DVVideoRTPSource.hh: include/MultiFramedRTPSource.hh +JPEGVideoSource.$(CPP): include/JPEGVideoSource.hh +include/JPEGVideoSource.hh: 
include/FramedSource.hh +AMRAudioSource.$(CPP): include/AMRAudioSource.hh +include/AMRAudioSource.hh: include/FramedSource.hh +AMRAudioFileSource.$(CPP): include/AMRAudioFileSource.hh include/InputFile.hh +include/AMRAudioFileSource.hh: include/AMRAudioSource.hh +InputFile.$(CPP): include/InputFile.hh +StreamReplicator.$(CPP): include/StreamReplicator.hh +include/StreamReplicator.hh: include/FramedSource.hh +MediaSink.$(CPP): include/MediaSink.hh +include/MediaSink.hh: include/FramedSource.hh +FileSink.$(CPP): include/FileSink.hh include/OutputFile.hh +include/FileSink.hh: include/MediaSink.hh +BasicUDPSink.$(CPP): include/BasicUDPSink.hh +include/BasicUDPSink.hh: include/MediaSink.hh +AMRAudioFileSink.$(CPP): include/AMRAudioFileSink.hh include/AMRAudioSource.hh include/OutputFile.hh +include/AMRAudioFileSink.hh: include/FileSink.hh +H264or5VideoFileSink.$(CPP): include/H264or5VideoFileSink.hh include/H264VideoRTPSource.hh +include/H264or5VideoFileSink.hh: include/FileSink.hh +H264VideoFileSink.$(CPP): include/H264VideoFileSink.hh include/OutputFile.hh +include/H264VideoFileSink.hh: include/H264or5VideoFileSink.hh +H265VideoFileSink.$(CPP): include/H265VideoFileSink.hh include/OutputFile.hh +include/H265VideoFileSink.hh: include/H264or5VideoFileSink.hh +OggFileSink.$(CPP): include/OggFileSink.hh include/OutputFile.hh include/VorbisAudioRTPSource.hh include/MPEG2TransportStreamMultiplexor.hh include/FramedSource.hh +include/OggFileSink.hh: include/FileSink.hh +RTPSink.$(CPP): include/RTPSink.hh +include/RTPSink.hh: include/MediaSink.hh include/RTPInterface.hh +MultiFramedRTPSink.$(CPP): include/MultiFramedRTPSink.hh +include/MultiFramedRTPSink.hh: include/RTPSink.hh +AudioRTPSink.$(CPP): include/AudioRTPSink.hh +include/AudioRTPSink.hh: include/MultiFramedRTPSink.hh +VideoRTPSink.$(CPP): include/VideoRTPSink.hh +include/VideoRTPSink.hh: include/MultiFramedRTPSink.hh +TextRTPSink.$(CPP): include/TextRTPSink.hh +include/TextRTPSink.hh: include/MultiFramedRTPSink.hh 
+RTPInterface.$(CPP): include/RTPInterface.hh +MPEG1or2AudioRTPSink.$(CPP): include/MPEG1or2AudioRTPSink.hh +include/MPEG1or2AudioRTPSink.hh: include/AudioRTPSink.hh +MP3ADURTPSink.$(CPP): include/MP3ADURTPSink.hh +include/MP3ADURTPSink.hh: include/AudioRTPSink.hh +MPEG1or2VideoRTPSink.$(CPP): include/MPEG1or2VideoRTPSink.hh include/MPEG1or2VideoStreamFramer.hh +include/MPEG1or2VideoRTPSink.hh: include/VideoRTPSink.hh +MPEG4LATMAudioRTPSink.$(CPP): include/MPEG4LATMAudioRTPSink.hh +include/MPEG4LATMAudioRTPSink.hh: include/AudioRTPSink.hh +MPEG4GenericRTPSink.$(CPP): include/MPEG4GenericRTPSink.hh include/Locale.hh +include/MPEG4GenericRTPSink.hh: include/MultiFramedRTPSink.hh +MPEG4ESVideoRTPSink.$(CPP): include/MPEG4ESVideoRTPSink.hh include/MPEG4VideoStreamFramer.hh include/MPEG4LATMAudioRTPSource.hh +include/MPEG4ESVideoRTPSink.hh: include/VideoRTPSink.hh +H263plusVideoRTPSink.$(CPP): include/H263plusVideoRTPSink.hh +include/H263plusVideoRTPSink.hh: include/VideoRTPSink.hh +H264or5VideoRTPSink.$(CPP): include/H264or5VideoRTPSink.hh include/H264or5VideoStreamFramer.hh +include/H264or5VideoRTPSink.hh: include/VideoRTPSink.hh include/FramedFilter.hh +H264VideoRTPSink.$(CPP): include/H264VideoRTPSink.hh include/H264VideoStreamFramer.hh include/Base64.hh include/H264VideoRTPSource.hh +include/H264VideoRTPSink.hh: include/H264or5VideoRTPSink.hh +H265VideoRTPSink.$(CPP): include/H265VideoRTPSink.hh include/H265VideoStreamFramer.hh include/Base64.hh include/BitVector.hh include/H264VideoRTPSource.hh +include/H265VideoRTPSink.hh: include/H264or5VideoRTPSink.hh +DVVideoRTPSink.$(CPP): include/DVVideoRTPSink.hh +include/DVVideoRTPSink.hh: include/VideoRTPSink.hh include/DVVideoStreamFramer.hh +include/DVVideoStreamFramer.hh: include/FramedFilter.hh +AC3AudioRTPSink.$(CPP): include/AC3AudioRTPSink.hh +include/AC3AudioRTPSink.hh: include/AudioRTPSink.hh +VorbisAudioRTPSink.$(CPP): include/VorbisAudioRTPSink.hh include/Base64.hh include/VorbisAudioRTPSource.hh 
+include/VorbisAudioRTPSink.hh: include/AudioRTPSink.hh +TheoraVideoRTPSink.$(CPP): include/TheoraVideoRTPSink.hh include/Base64.hh include/VorbisAudioRTPSource.hh include/VorbisAudioRTPSink.hh +include/TheoraVideoRTPSink.hh: include/VideoRTPSink.hh +VP8VideoRTPSink.$(CPP): include/VP8VideoRTPSink.hh +include/VP8VideoRTPSink.hh: include/VideoRTPSink.hh +GSMAudioRTPSink.$(CPP): include/GSMAudioRTPSink.hh +include/GSMAudioRTPSink.hh: include/AudioRTPSink.hh +JPEGVideoRTPSink.$(CPP): include/JPEGVideoRTPSink.hh include/JPEGVideoSource.hh +include/JPEGVideoRTPSink.hh: include/VideoRTPSink.hh +SimpleRTPSink.$(CPP): include/SimpleRTPSink.hh +include/SimpleRTPSink.hh: include/MultiFramedRTPSink.hh +AMRAudioRTPSink.$(CPP): include/AMRAudioRTPSink.hh include/AMRAudioSource.hh +include/AMRAudioRTPSink.hh: include/AudioRTPSink.hh +T140TextRTPSink.$(CPP): include/T140TextRTPSink.hh +include/T140TextRTPSink.hh: include/TextRTPSink.hh include/FramedFilter.hh +TCPStreamSink.$(CPP): include/TCPStreamSink.hh include/RTSPCommon.hh +include/TCPStreamSink.hh: include/MediaSink.hh +OutputFile.$(CPP): include/OutputFile.hh +uLawAudioFilter.$(CPP): include/uLawAudioFilter.hh +include/uLawAudioFilter.hh: include/FramedFilter.hh +MPEG2IndexFromTransportStream.$(CPP): include/MPEG2IndexFromTransportStream.hh +include/MPEG2IndexFromTransportStream.hh: include/FramedFilter.hh +MPEG2TransportStreamIndexFile.$(CPP): include/MPEG2TransportStreamIndexFile.hh include/InputFile.hh +include/MPEG2TransportStreamIndexFile.hh: include/Media.hh +MPEG2TransportStreamTrickModeFilter.$(CPP): include/MPEG2TransportStreamTrickModeFilter.hh include/ByteStreamFileSource.hh +include/MPEG2TransportStreamTrickModeFilter.hh: include/FramedFilter.hh include/MPEG2TransportStreamIndexFile.hh +RTCP.$(CPP): include/RTCP.hh rtcp_from_spec.h +include/RTCP.hh: include/RTPSink.hh include/RTPSource.hh +rtcp_from_spec.$(C): rtcp_from_spec.h +RTSPServer.$(CPP): include/RTSPServer.hh include/RTSPCommon.hh 
include/RTSPRegisterSender.hh include/ProxyServerMediaSession.hh include/Base64.hh +include/RTSPServer.hh: include/ServerMediaSession.hh include/DigestAuthentication.hh include/RTSPCommon.hh +include/ServerMediaSession.hh: include/Media.hh include/FramedSource.hh include/RTPInterface.hh +RTSPClient.$(CPP): include/RTSPClient.hh include/RTSPCommon.hh include/Base64.hh include/Locale.hh include/ourMD5.hh +include/RTSPClient.hh: include/MediaSession.hh include/DigestAuthentication.hh +RTSPCommon.$(CPP): include/RTSPCommon.hh include/Locale.hh +RTSPServerSupportingHTTPStreaming.$(CPP): include/RTSPServerSupportingHTTPStreaming.hh include/RTSPCommon.hh +include/RTSPServerSupportingHTTPStreaming.hh: include/RTSPServer.hh include/ByteStreamMemoryBufferSource.hh include/TCPStreamSink.hh +RTSPRegisterSender.$(CPP): include/RTSPRegisterSender.hh +include/RTSPRegisterSender.hh: include/RTSPClient.hh +SIPClient.$(CPP): include/SIPClient.hh +include/SIPClient.hh: include/MediaSession.hh include/DigestAuthentication.hh +MediaSession.$(CPP): include/liveMedia.hh include/Locale.hh +include/MediaSession.hh: include/RTCP.hh include/FramedFilter.hh +ServerMediaSession.$(CPP): include/ServerMediaSession.hh +PassiveServerMediaSubsession.$(CPP): include/PassiveServerMediaSubsession.hh +include/PassiveServerMediaSubsession.hh: include/ServerMediaSession.hh include/RTPSink.hh include/RTCP.hh +OnDemandServerMediaSubsession.$(CPP): include/OnDemandServerMediaSubsession.hh +include/OnDemandServerMediaSubsession.hh: include/ServerMediaSession.hh include/RTPSink.hh include/BasicUDPSink.hh include/RTCP.hh +FileServerMediaSubsession.$(CPP): include/FileServerMediaSubsession.hh +include/FileServerMediaSubsession.hh: include/OnDemandServerMediaSubsession.hh +MPEG4VideoFileServerMediaSubsession.$(CPP): include/MPEG4VideoFileServerMediaSubsession.hh include/MPEG4ESVideoRTPSink.hh include/ByteStreamFileSource.hh include/MPEG4VideoStreamFramer.hh +include/MPEG4VideoFileServerMediaSubsession.hh: 
include/FileServerMediaSubsession.hh +H264VideoFileServerMediaSubsession.$(CPP): include/H264VideoFileServerMediaSubsession.hh include/H264VideoRTPSink.hh include/ByteStreamFileSource.hh include/H264VideoStreamFramer.hh +include/H264VideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +H265VideoFileServerMediaSubsession.$(CPP): include/H265VideoFileServerMediaSubsession.hh include/H265VideoRTPSink.hh include/ByteStreamFileSource.hh include/H265VideoStreamFramer.hh +include/H265VideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +H263plusVideoFileServerMediaSubsession.$(CPP): include/H263plusVideoFileServerMediaSubsession.hh include/H263plusVideoRTPSink.hh include/ByteStreamFileSource.hh include/H263plusVideoStreamFramer.hh +include/H263plusVideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +WAVAudioFileServerMediaSubsession.$(CPP): include/WAVAudioFileServerMediaSubsession.hh include/WAVAudioFileSource.hh include/uLawAudioFilter.hh include/SimpleRTPSink.hh +include/WAVAudioFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +AMRAudioFileServerMediaSubsession.$(CPP): include/AMRAudioFileServerMediaSubsession.hh include/AMRAudioRTPSink.hh include/AMRAudioFileSource.hh +include/AMRAudioFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +MP3AudioFileServerMediaSubsession.$(CPP): include/MP3AudioFileServerMediaSubsession.hh include/MPEG1or2AudioRTPSink.hh include/MP3ADURTPSink.hh include/MP3FileSource.hh include/MP3ADU.hh +include/MP3AudioFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh include/MP3ADUinterleaving.hh +MPEG1or2VideoFileServerMediaSubsession.$(CPP): include/MPEG1or2VideoFileServerMediaSubsession.hh include/MPEG1or2VideoRTPSink.hh include/ByteStreamFileSource.hh include/MPEG1or2VideoStreamFramer.hh +include/MPEG1or2VideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +MPEG1or2FileServerDemux.$(CPP): 
include/MPEG1or2FileServerDemux.hh include/MPEG1or2DemuxedServerMediaSubsession.hh include/ByteStreamFileSource.hh +include/MPEG1or2FileServerDemux.hh: include/ServerMediaSession.hh include/MPEG1or2DemuxedElementaryStream.hh +MPEG1or2DemuxedServerMediaSubsession.$(CPP): include/MPEG1or2DemuxedServerMediaSubsession.hh include/MPEG1or2AudioStreamFramer.hh include/MPEG1or2AudioRTPSink.hh include/MPEG1or2VideoStreamFramer.hh include/MPEG1or2VideoRTPSink.hh include/AC3AudioStreamFramer.hh include/AC3AudioRTPSink.hh include/ByteStreamFileSource.hh +include/MPEG1or2DemuxedServerMediaSubsession.hh: include/OnDemandServerMediaSubsession.hh include/MPEG1or2FileServerDemux.hh +MPEG2TransportFileServerMediaSubsession.$(CPP): include/MPEG2TransportFileServerMediaSubsession.hh include/SimpleRTPSink.hh +include/MPEG2TransportFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh include/MPEG2TransportStreamFramer.hh include/ByteStreamFileSource.hh include/MPEG2TransportStreamTrickModeFilter.hh include/MPEG2TransportStreamFromESSource.hh +ADTSAudioFileServerMediaSubsession.$(CPP): include/ADTSAudioFileServerMediaSubsession.hh include/ADTSAudioFileSource.hh include/MPEG4GenericRTPSink.hh +include/ADTSAudioFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +DVVideoFileServerMediaSubsession.$(CPP): include/DVVideoFileServerMediaSubsession.hh include/DVVideoRTPSink.hh include/ByteStreamFileSource.hh include/DVVideoStreamFramer.hh +include/DVVideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +AC3AudioFileServerMediaSubsession.$(CPP): include/AC3AudioFileServerMediaSubsession.hh include/AC3AudioRTPSink.hh include/ByteStreamFileSource.hh include/AC3AudioStreamFramer.hh +include/AC3AudioFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +MPEG2TransportUDPServerMediaSubsession.$(CPP): include/MPEG2TransportUDPServerMediaSubsession.hh include/BasicUDPSource.hh include/SimpleRTPSource.hh include/MPEG2TransportStreamFramer.hh 
include/SimpleRTPSink.hh +include/MPEG2TransportUDPServerMediaSubsession.hh: include/OnDemandServerMediaSubsession.hh +ProxyServerMediaSession.$(CPP): include/liveMedia.hh include/RTSPCommon.hh +include/ProxyServerMediaSession.hh: include/ServerMediaSession.hh include/MediaSession.hh include/RTSPClient.hh +QuickTimeFileSink.$(CPP): include/QuickTimeFileSink.hh include/InputFile.hh include/OutputFile.hh include/QuickTimeGenericRTPSource.hh include/H263plusVideoRTPSource.hh include/MPEG4GenericRTPSource.hh include/MPEG4LATMAudioRTPSource.hh +include/QuickTimeFileSink.hh: include/MediaSession.hh +QuickTimeGenericRTPSource.$(CPP): include/QuickTimeGenericRTPSource.hh +include/QuickTimeGenericRTPSource.hh: include/MultiFramedRTPSource.hh +AVIFileSink.$(CPP): include/AVIFileSink.hh include/InputFile.hh include/OutputFile.hh +include/AVIFileSink.hh: include/MediaSession.hh +MatroskaFile.$(CPP): MatroskaFileParser.hh MatroskaDemuxedTrack.hh include/ByteStreamFileSource.hh include/H264VideoStreamDiscreteFramer.hh include/H265VideoStreamDiscreteFramer.hh include/MPEG1or2AudioRTPSink.hh include/MPEG4GenericRTPSink.hh include/AC3AudioRTPSink.hh include/SimpleRTPSink.hh include/VorbisAudioRTPSink.hh include/H264VideoRTPSink.hh include/H265VideoRTPSink.hh include/VP8VideoRTPSink.hh include/T140TextRTPSink.hh +MatroskaFileParser.hh: StreamParser.hh include/MatroskaFile.hh EBMLNumber.hh +include/MatroskaFile.hh: include/RTPSink.hh +MatroskaDemuxedTrack.hh: include/FramedSource.hh +MatroskaFileParser.$(CPP): MatroskaFileParser.hh MatroskaDemuxedTrack.hh include/ByteStreamFileSource.hh +EBMLNumber.$(CPP): EBMLNumber.hh +MatroskaDemuxedTrack.$(CPP): MatroskaDemuxedTrack.hh include/MatroskaFile.hh +MatroskaFileServerMediaSubsession.$(CPP): MatroskaFileServerMediaSubsession.hh MatroskaDemuxedTrack.hh include/FramedFilter.hh +MatroskaFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh include/MatroskaFileServerDemux.hh +MP3AudioMatroskaFileServerMediaSubsession.$(CPP): 
MP3AudioMatroskaFileServerMediaSubsession.hh MatroskaDemuxedTrack.hh +MP3AudioMatroskaFileServerMediaSubsession.hh: include/MP3AudioFileServerMediaSubsession.hh include/MatroskaFileServerDemux.hh +MatroskaFileServerDemux.$(CPP): include/MatroskaFileServerDemux.hh MP3AudioMatroskaFileServerMediaSubsession.hh MatroskaFileServerMediaSubsession.hh +include/MatroskaFileServerDemux.hh: include/ServerMediaSession.hh include/MatroskaFile.hh +OggFile.$(CPP): OggFileParser.hh OggDemuxedTrack.hh include/ByteStreamFileSource.hh include/VorbisAudioRTPSink.hh include/SimpleRTPSink.hh include/TheoraVideoRTPSink.hh +OggFileParser.hh: StreamParser.hh include/OggFile.hh +include/OggFile.hh: include/RTPSink.hh +OggDemuxedTrack.hh: include/FramedSource.hh +OggFileParser.$(CPP): OggFileParser.hh OggDemuxedTrack.hh +OggDemuxedTrack.$(CPP): OggDemuxedTrack.hh include/OggFile.hh +OggFileServerMediaSubsession.$(CPP): OggFileServerMediaSubsession.hh OggDemuxedTrack.hh include/FramedFilter.hh +OggFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh include/OggFileServerDemux.hh +OggFileServerDemux.$(CPP): include/OggFileServerDemux.hh OggFileServerMediaSubsession.hh +include/OggFileServerDemux.hh: include/ServerMediaSession.hh include/OggFile.hh +DarwinInjector.$(CPP): include/DarwinInjector.hh +include/DarwinInjector.hh: include/RTSPClient.hh include/RTCP.hh +BitVector.$(CPP): include/BitVector.hh +StreamParser.$(CPP): StreamParser.hh +DigestAuthentication.$(CPP): include/DigestAuthentication.hh include/ourMD5.hh +ourMD5.$(CPP): include/ourMD5.hh +Base64.$(CPP): include/Base64.hh +Locale.$(CPP): include/Locale.hh + +include/liveMedia.hh:: include/MPEG1or2AudioRTPSink.hh include/MP3ADURTPSink.hh include/MPEG1or2VideoRTPSink.hh include/MPEG4ESVideoRTPSink.hh include/BasicUDPSink.hh include/AMRAudioFileSink.hh include/H264VideoFileSink.hh include/H265VideoFileSink.hh include/OggFileSink.hh include/GSMAudioRTPSink.hh include/H263plusVideoRTPSink.hh include/H264VideoRTPSink.hh 
include/H265VideoRTPSink.hh include/DVVideoRTPSource.hh include/DVVideoRTPSink.hh include/DVVideoStreamFramer.hh include/H264VideoStreamFramer.hh include/H265VideoStreamFramer.hh include/H264VideoStreamDiscreteFramer.hh include/H265VideoStreamDiscreteFramer.hh include/JPEGVideoRTPSink.hh include/SimpleRTPSink.hh include/uLawAudioFilter.hh include/MPEG2IndexFromTransportStream.hh include/MPEG2TransportStreamTrickModeFilter.hh include/ByteStreamMultiFileSource.hh include/ByteStreamMemoryBufferSource.hh include/BasicUDPSource.hh include/SimpleRTPSource.hh include/MPEG1or2AudioRTPSource.hh include/MPEG4LATMAudioRTPSource.hh include/MPEG4LATMAudioRTPSink.hh include/MPEG4ESVideoRTPSource.hh include/MPEG4GenericRTPSource.hh include/MP3ADURTPSource.hh include/QCELPAudioRTPSource.hh include/AMRAudioRTPSource.hh include/JPEGVideoRTPSource.hh include/JPEGVideoSource.hh include/MPEG1or2VideoRTPSource.hh include/VorbisAudioRTPSource.hh include/TheoraVideoRTPSource.hh include/VP8VideoRTPSource.hh + +include/liveMedia.hh:: include/MPEG2TransportStreamFromPESSource.hh include/MPEG2TransportStreamFromESSource.hh include/MPEG2TransportStreamFramer.hh include/ADTSAudioFileSource.hh include/H261VideoRTPSource.hh include/H263plusVideoRTPSource.hh include/H264VideoRTPSource.hh include/H265VideoRTPSource.hh include/MP3FileSource.hh include/MP3ADU.hh include/MP3ADUinterleaving.hh include/MP3Transcoder.hh include/MPEG1or2DemuxedElementaryStream.hh include/MPEG1or2AudioStreamFramer.hh include/MPEG1or2VideoStreamDiscreteFramer.hh include/MPEG4VideoStreamDiscreteFramer.hh include/H263plusVideoStreamFramer.hh include/AC3AudioStreamFramer.hh include/AC3AudioRTPSource.hh include/AC3AudioRTPSink.hh include/VorbisAudioRTPSink.hh include/TheoraVideoRTPSink.hh include/VP8VideoRTPSink.hh include/MPEG4GenericRTPSink.hh include/DeviceSource.hh include/AudioInputDevice.hh include/WAVAudioFileSource.hh include/StreamReplicator.hh include/RTSPRegisterSender.hh + +include/liveMedia.hh:: 
include/RTSPServerSupportingHTTPStreaming.hh include/RTSPClient.hh include/SIPClient.hh include/QuickTimeFileSink.hh include/QuickTimeGenericRTPSource.hh include/AVIFileSink.hh include/PassiveServerMediaSubsession.hh include/MPEG4VideoFileServerMediaSubsession.hh include/H264VideoFileServerMediaSubsession.hh include/H265VideoFileServerMediaSubsession.hh include/WAVAudioFileServerMediaSubsession.hh include/AMRAudioFileServerMediaSubsession.hh include/AMRAudioFileSource.hh include/AMRAudioRTPSink.hh include/T140TextRTPSink.hh include/TCPStreamSink.hh include/MP3AudioFileServerMediaSubsession.hh include/MPEG1or2VideoFileServerMediaSubsession.hh include/MPEG1or2FileServerDemux.hh include/MPEG2TransportFileServerMediaSubsession.hh include/H263plusVideoFileServerMediaSubsession.hh include/ADTSAudioFileServerMediaSubsession.hh include/DVVideoFileServerMediaSubsession.hh include/AC3AudioFileServerMediaSubsession.hh include/MPEG2TransportUDPServerMediaSubsession.hh include/MatroskaFileServerDemux.hh include/OggFileServerDemux.hh include/ProxyServerMediaSession.hh include/DarwinInjector.hh + +clean: + -rm -rf *.$(OBJ) $(ALL) core *.core *~ include/*~ + +install: install1 $(INSTALL2) +install1: $(LIVEMEDIA_LIB) + install -d $(DESTDIR)$(PREFIX)/include/liveMedia $(DESTDIR)$(LIBDIR) + install -m 644 include/*.hh $(DESTDIR)$(PREFIX)/include/liveMedia + install -m 644 $(LIVEMEDIA_LIB) $(DESTDIR)$(LIBDIR) +install_shared_libraries: $(LIVEMEDIA_LIB) + ln -s $(NAME).$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/$(NAME).$(SHORT_LIB_SUFFIX) + ln -s $(NAME).$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/$(NAME).so + +##### Any additional, platform-specific rules come here: diff --git a/AnyCore/lib_rtsp/liveMedia/MatroskaDemuxedTrack.cpp b/AnyCore/lib_rtsp/liveMedia/MatroskaDemuxedTrack.cpp new file mode 100644 index 0000000..896e6e8 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MatroskaDemuxedTrack.cpp @@ -0,0 +1,47 @@ +/********** +This library is free software; you can redistribute it and/or modify it under 
+the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A media track, demultiplexed from a Matroska file +// Implementation + +#include "MatroskaDemuxedTrack.hh" +#include "MatroskaFile.hh" + +void MatroskaDemuxedTrack::seekToTime(double& seekNPT) { + fOurSourceDemux.seekToTime(seekNPT); +} + +MatroskaDemuxedTrack::MatroskaDemuxedTrack(UsageEnvironment& env, unsigned trackNumber, MatroskaDemux& sourceDemux) + : FramedSource(env), + fOurTrackNumber(trackNumber), fOurSourceDemux(sourceDemux), fDurationImbalance(0), + fOpusTrackNumber(0) { + fPrevPresentationTime.tv_sec = 0; fPrevPresentationTime.tv_usec = 0; +} + +MatroskaDemuxedTrack::~MatroskaDemuxedTrack() { + fOurSourceDemux.removeTrack(fOurTrackNumber); +} + +void MatroskaDemuxedTrack::doGetNextFrame() { + fOurSourceDemux.continueReading(); +} + +char const* MatroskaDemuxedTrack::MIMEtype() const { + MatroskaTrack* track = fOurSourceDemux.fOurFile.lookup(fOurTrackNumber); + if (track == NULL) return "(unknown)"; // shouldn't happen + return track->mimeType; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MatroskaDemuxedTrack.hh b/AnyCore/lib_rtsp/liveMedia/MatroskaDemuxedTrack.hh new file mode 100644 index 0000000..9440236 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MatroskaDemuxedTrack.hh @@ -0,0 +1,64 @@ +/********** +This library is 
free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A media track, demultiplexed from a Matroska file +// C++ header + +#ifndef _MATROSKA_DEMUXED_TRACK_HH +#define _MATROSKA_DEMUXED_TRACK_HH + +#ifndef _FRAMED_SOURCE_HH +#include "FramedSource.hh" +#endif + +class MatroskaDemux; // forward + +class MatroskaDemuxedTrack: public FramedSource { +public: + void seekToTime(double& seekNPT); + +private: // We are created only by a MatroskaDemux (a friend) + friend class MatroskaDemux; + MatroskaDemuxedTrack(UsageEnvironment& env, unsigned trackNumber, MatroskaDemux& sourceDemux); + virtual ~MatroskaDemuxedTrack(); + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + virtual char const* MIMEtype() const; + +private: // We are accessed only by MatroskaDemux and by MatroskaFileParser (a friend) + friend class MatroskaFileParser; + unsigned char* to() { return fTo; } + unsigned maxSize() { return fMaxSize; } + unsigned& frameSize() { return fFrameSize; } + unsigned& numTruncatedBytes() { return fNumTruncatedBytes; } + struct timeval& presentationTime() { return fPresentationTime; } + unsigned& durationInMicroseconds() { return fDurationInMicroseconds; } + + struct timeval& prevPresentationTime() { return 
fPrevPresentationTime; } + int& durationImbalance() { return fDurationImbalance; } + +private: + unsigned fOurTrackNumber; + MatroskaDemux& fOurSourceDemux; + struct timeval fPrevPresentationTime; + int fDurationImbalance; + unsigned fOpusTrackNumber; // hack for Opus audio +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/MatroskaFile.cpp b/AnyCore/lib_rtsp/liveMedia/MatroskaFile.cpp new file mode 100644 index 0000000..8e1045f --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MatroskaFile.cpp @@ -0,0 +1,879 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A class that encapsulates a Matroska file. 
+// Implementation + +#include "MatroskaFileParser.hh" +#include "MatroskaDemuxedTrack.hh" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +////////// CuePoint definition ////////// + +class CuePoint { +public: + CuePoint(double cueTime, u_int64_t clusterOffsetInFile, unsigned blockNumWithinCluster/* 1-based */); + virtual ~CuePoint(); + + static void addCuePoint(CuePoint*& root, double cueTime, u_int64_t clusterOffsetInFile, unsigned blockNumWithinCluster/* 1-based */, + Boolean& needToReviseBalanceOfParent); + // If "cueTime" == "root.fCueTime", replace the existing data, otherwise add to the left or right subtree. + // (Note that this is a static member function because - as a result of tree rotation - "root" might change.) + + Boolean lookup(double& cueTime, u_int64_t& resultClusterOffsetInFile, unsigned& resultBlockNumWithinCluster); + + static void fprintf(FILE* fid, CuePoint* cuePoint); // used for debugging; it's static to allow for "cuePoint == NULL" + +private: + // The "CuePoint" tree is implemented as an AVL Tree, to keep it balanced (for efficient lookup). 
+ CuePoint* fSubTree[2]; // 0 => left; 1 => right + CuePoint* left() const { return fSubTree[0]; } + CuePoint* right() const { return fSubTree[1]; } + char fBalance; // height of right subtree - height of left subtree + + static void rotate(unsigned direction/*0 => left; 1 => right*/, CuePoint*& root); // used to keep the tree in balance + + double fCueTime; + u_int64_t fClusterOffsetInFile; + unsigned fBlockNumWithinCluster; // 0-based +}; + +UsageEnvironment& operator<<(UsageEnvironment& env, const CuePoint* cuePoint); // used for debugging + + +////////// MatroskaTrackTable definition ///////// + +// For looking up and iterating over the file's tracks: +class MatroskaTrackTable { +public: + MatroskaTrackTable(); + virtual ~MatroskaTrackTable(); + + void add(MatroskaTrack* newTrack, unsigned trackNumber); + MatroskaTrack* lookup(unsigned trackNumber); + + unsigned numTracks() const; + + class Iterator { + public: + Iterator(MatroskaTrackTable& ourTable); + virtual ~Iterator(); + MatroskaTrack* next(); + private: + HashTable::Iterator* fIter; + }; + +private: + friend class Iterator; + HashTable* fTable; +}; + + + +////////// MatroskaFile implementation ////////// + +void MatroskaFile +::createNew(UsageEnvironment& env, char const* fileName, onCreationFunc* onCreation, void* onCreationClientData, + char const* preferredLanguage) { + new MatroskaFile(env, fileName, onCreation, onCreationClientData, preferredLanguage); +} + +MatroskaFile::MatroskaFile(UsageEnvironment& env, char const* fileName, onCreationFunc* onCreation, void* onCreationClientData, + char const* preferredLanguage) + : Medium(env), + fFileName(strDup(fileName)), fOnCreation(onCreation), fOnCreationClientData(onCreationClientData), + fPreferredLanguage(strDup(preferredLanguage)), + fTimecodeScale(1000000), fSegmentDuration(0.0), fSegmentDataOffset(0), fClusterOffset(0), fCuesOffset(0), fCuePoints(NULL), + fChosenVideoTrackNumber(0), fChosenAudioTrackNumber(0), fChosenSubtitleTrackNumber(0) { + 
fTrackTable = new MatroskaTrackTable; + fDemuxesTable = HashTable::create(ONE_WORD_HASH_KEYS); + + FramedSource* inputSource = ByteStreamFileSource::createNew(envir(), fileName); + if (inputSource == NULL) { + // The specified input file does not exist! + fParserForInitialization = NULL; + handleEndOfTrackHeaderParsing(); // we have no file, and thus no tracks, but we still need to signal this + } else { + // Initialize ourselves by parsing the file's 'Track' headers: + fParserForInitialization = new MatroskaFileParser(*this, inputSource, handleEndOfTrackHeaderParsing, this, NULL); + } +} + +MatroskaFile::~MatroskaFile() { + delete fParserForInitialization; + delete fCuePoints; + + // Delete any outstanding "MatroskaDemux"s, and the table for them: + MatroskaDemux* demux; + while ((demux = (MatroskaDemux*)fDemuxesTable->RemoveNext()) != NULL) { + delete demux; + } + delete fDemuxesTable; + delete fTrackTable; + + delete[] (char*)fPreferredLanguage; + delete[] (char*)fFileName; +} + +void MatroskaFile::handleEndOfTrackHeaderParsing(void* clientData) { + ((MatroskaFile*)clientData)->handleEndOfTrackHeaderParsing(); +} + +class TrackChoiceRecord { +public: + unsigned trackNumber; + u_int8_t trackType; + unsigned choiceFlags; +}; + +void MatroskaFile::handleEndOfTrackHeaderParsing() { + // Having parsed all of our track headers, iterate through the tracks to figure out which ones should be played. + // The Matroska 'specification' is rather imprecise about this (as usual). However, we use the following algorithm: + // - Use one (but no more) enabled track of each type (video, audio, subtitle). (Ignore all tracks that are not 'enabled'.) + // - For each track type, choose the one that's 'forced'. + // - If more than one is 'forced', choose the first one that matches our preferred language, or the first if none matches. + // - If none is 'forced', choose the one that's 'default'. 
+ // - If more than one is 'default', choose the first one that matches our preferred language, or the first if none matches. + // - If none is 'default', choose the first one that matches our preferred language, or the first if none matches. + unsigned numTracks = fTrackTable->numTracks(); + if (numTracks > 0) { + TrackChoiceRecord* trackChoice = new TrackChoiceRecord[numTracks]; + unsigned numEnabledTracks = 0; + MatroskaTrackTable::Iterator iter(*fTrackTable); + MatroskaTrack* track; + while ((track = iter.next()) != NULL) { + if (!track->isEnabled || track->trackType == 0 || track->mimeType[0] == '\0') continue; // track not enabled, or not fully-defined + + trackChoice[numEnabledTracks].trackNumber = track->trackNumber; + trackChoice[numEnabledTracks].trackType = track->trackType; + + // Assign flags for this track so that, when sorted, the largest value becomes our choice: + unsigned choiceFlags = 0; + if (fPreferredLanguage != NULL && track->language != NULL && strcmp(fPreferredLanguage, track->language) == 0) { + // This track matches our preferred language: + choiceFlags |= 1; + } + if (track->isForced) { + choiceFlags |= 4; + } else if (track->isDefault) { + choiceFlags |= 2; + } + trackChoice[numEnabledTracks].choiceFlags = choiceFlags; + + ++numEnabledTracks; + } + + // Choose the desired track for each track type: + for (u_int8_t trackType = 0x01; trackType != MATROSKA_TRACK_TYPE_OTHER; trackType <<= 1) { + int bestNum = -1; + int bestChoiceFlags = -1; + for (unsigned i = 0; i < numEnabledTracks; ++i) { + if (trackChoice[i].trackType == trackType && (int)trackChoice[i].choiceFlags > bestChoiceFlags) { + bestNum = i; + bestChoiceFlags = (int)trackChoice[i].choiceFlags; + } + } + if (bestChoiceFlags >= 0) { // There is a track for this track type + if (trackType == MATROSKA_TRACK_TYPE_VIDEO) fChosenVideoTrackNumber = trackChoice[bestNum].trackNumber; + else if (trackType == MATROSKA_TRACK_TYPE_AUDIO) fChosenAudioTrackNumber = 
trackChoice[bestNum].trackNumber; + else fChosenSubtitleTrackNumber = trackChoice[bestNum].trackNumber; + } + } + + delete[] trackChoice; + } + +#ifdef DEBUG + if (fChosenVideoTrackNumber > 0) fprintf(stderr, "Chosen video track: #%d\n", fChosenVideoTrackNumber); else fprintf(stderr, "No chosen video track\n"); + if (fChosenAudioTrackNumber > 0) fprintf(stderr, "Chosen audio track: #%d\n", fChosenAudioTrackNumber); else fprintf(stderr, "No chosen audio track\n"); + if (fChosenSubtitleTrackNumber > 0) fprintf(stderr, "Chosen subtitle track: #%d\n", fChosenSubtitleTrackNumber); else fprintf(stderr, "No chosen subtitle track\n"); +#endif + + // Delete our parser, because it's done its job now: + delete fParserForInitialization; fParserForInitialization = NULL; + + // Finally, signal our caller that we've been created and initialized: + if (fOnCreation != NULL) (*fOnCreation)(this, fOnCreationClientData); +} + +MatroskaTrack* MatroskaFile::lookup(unsigned trackNumber) const { + return fTrackTable->lookup(trackNumber); +} + +MatroskaDemux* MatroskaFile::newDemux() { + MatroskaDemux* demux = new MatroskaDemux(*this); + fDemuxesTable->Add((char const*)demux, demux); + + return demux; +} + +void MatroskaFile::removeDemux(MatroskaDemux* demux) { + fDemuxesTable->Remove((char const*)demux); +} + +float MatroskaFile::fileDuration() { + if (fCuePoints == NULL) return 0.0; // Hack, because the RTSP server code assumes that duration > 0 => seekable. (fix this) ##### + + return segmentDuration()*(timecodeScale()/1000000000.0f); +} + +FramedSource* MatroskaFile +::createSourceForStreaming(FramedSource* baseSource, unsigned trackNumber, + unsigned& estBitrate, unsigned& numFiltersInFrontOfTrack) { + if (baseSource == NULL) return NULL; + + FramedSource* result = baseSource; // by default + estBitrate = 100; // by default + numFiltersInFrontOfTrack = 0; // by default + + // Look at the track's MIME type to set its estimated bitrate (for use by RTCP). 
+ // (Later, try to be smarter about figuring out the bitrate.) ##### + // Some MIME types also require adding a special 'framer' in front of the source. + MatroskaTrack* track = lookup(trackNumber); + if (track != NULL) { // should always be true + if (strcmp(track->mimeType, "audio/MPEG") == 0) { + estBitrate = 128; + } else if (strcmp(track->mimeType, "audio/AAC") == 0) { + estBitrate = 96; + } else if (strcmp(track->mimeType, "audio/AC3") == 0) { + estBitrate = 48; + } else if (strcmp(track->mimeType, "audio/VORBIS") == 0) { + estBitrate = 96; + } else if (strcmp(track->mimeType, "video/H264") == 0) { + estBitrate = 500; + // Allow for the possibility of very large NAL units being fed to the sink object: + OutPacketBuffer::increaseMaxSizeTo(300000); // bytes + + // Add a framer in front of the source: + result = H264VideoStreamDiscreteFramer::createNew(envir(), result); + ++numFiltersInFrontOfTrack; + } else if (strcmp(track->mimeType, "video/H265") == 0) { + estBitrate = 500; + // Allow for the possibility of very large NAL units being fed to the sink object: + OutPacketBuffer::increaseMaxSizeTo(300000); // bytes + + // Add a framer in front of the source: + result = H265VideoStreamDiscreteFramer::createNew(envir(), result); + ++numFiltersInFrontOfTrack; + } else if (strcmp(track->mimeType, "video/VP8") == 0) { + estBitrate = 500; + } else if (strcmp(track->mimeType, "video/THEORA") == 0) { + estBitrate = 500; + } else if (strcmp(track->mimeType, "text/T140") == 0) { + estBitrate = 48; + } + } + + return result; +} + +#define getPrivByte(b) if (n == 0) break; else do {b = *p++; --n;} while (0) /* Vorbis/Theora configuration header parsing */ +#define CHECK_PTR if (ptr >= limit) break /* H.264/H.265 parsing */ +#define NUM_BYTES_REMAINING (unsigned)(limit - ptr) /* H.264/H.265 parsing */ + +RTPSink* MatroskaFile +::createRTPSinkForTrackNumber(unsigned trackNumber, Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic) { + RTPSink* result = NULL; // 
default value, if an error occurs + + do { + MatroskaTrack* track = lookup(trackNumber); + if (track == NULL) break; + + if (strcmp(track->mimeType, "audio/MPEG") == 0) { + result = MPEG1or2AudioRTPSink::createNew(envir(), rtpGroupsock); + } else if (strcmp(track->mimeType, "audio/AAC") == 0) { + // The Matroska file's 'Codec Private' data is assumed to be the AAC configuration + // information. Use this to generate a hexadecimal 'config' string for the new RTP sink: + char* configStr = new char[2*track->codecPrivateSize + 1]; if (configStr == NULL) break; + // 2 hex digits per byte, plus the trailing '\0' + for (unsigned i = 0; i < track->codecPrivateSize; ++i) { + sprintf(&configStr[2*i], "%02X", track->codecPrivate[i]); + } + + result = MPEG4GenericRTPSink::createNew(envir(), rtpGroupsock, + rtpPayloadTypeIfDynamic, + track->samplingFrequency, + "audio", "AAC-hbr", configStr, + track->numChannels); + delete[] configStr; + } else if (strcmp(track->mimeType, "audio/AC3") == 0) { + result = AC3AudioRTPSink + ::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, track->samplingFrequency); + } else if (strcmp(track->mimeType, "audio/OPUS") == 0) { + result = SimpleRTPSink + ::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + 48000, "audio", "OPUS", 2, False/*only 1 Opus 'packet' in each RTP packet*/); + } else if (strcmp(track->mimeType, "audio/VORBIS") == 0 || strcmp(track->mimeType, "video/THEORA") == 0) { + // The Matroska file's 'Codec Private' data is assumed to be the codec configuration + // information, containing the "Identification", "Comment", and "Setup" headers. 
+ // Extract these headers now: + u_int8_t* identificationHeader = NULL; unsigned identificationHeaderSize = 0; + u_int8_t* commentHeader = NULL; unsigned commentHeaderSize = 0; + u_int8_t* setupHeader = NULL; unsigned setupHeaderSize = 0; + Boolean isTheora = strcmp(track->mimeType, "video/THEORA") == 0; // otherwise, Vorbis + + do { + u_int8_t* p = track->codecPrivate; + unsigned n = track->codecPrivateSize; + if (n == 0 || p == NULL) break; // we have no 'Codec Private' data + + u_int8_t numHeaders; + getPrivByte(numHeaders); + unsigned headerSize[3]; // we don't handle any more than 2+1 headers + + // Extract the sizes of each of these headers: + unsigned sizesSum = 0; + Boolean success = True; + unsigned i; + for (i = 0; i < numHeaders && i < 3; ++i) { + unsigned len = 0; + u_int8_t c; + + do { + success = False; + getPrivByte(c); + success = True; + + len += c; + } while (c == 255); + if (!success || len == 0) break; + + headerSize[i] = len; + sizesSum += len; + } + if (!success) break; + + // Compute the implicit size of the final header: + if (numHeaders < 3) { + int finalHeaderSize = n - sizesSum; + if (finalHeaderSize <= 0) break; // error in data; give up + + headerSize[numHeaders] = (unsigned)finalHeaderSize; + ++numHeaders; // include the final header now + } else { + numHeaders = 3; // The maximum number of headers that we handle + } + + // Then, extract and classify each header: + for (i = 0; i < numHeaders; ++i) { + success = False; + unsigned newHeaderSize = headerSize[i]; + u_int8_t* newHeader = new u_int8_t[newHeaderSize]; + if (newHeader == NULL) break; + + u_int8_t* hdr = newHeader; + while (newHeaderSize-- > 0) { + success = False; + getPrivByte(*hdr++); + success = True; + } + if (!success) { + delete[] newHeader; + break; + } + + u_int8_t headerType = newHeader[0]; + if (headerType == 1 || (isTheora && headerType == 0x80)) { // "identification" header + delete[] identificationHeader; identificationHeader = newHeader; + 
identificationHeaderSize = headerSize[i]; + } else if (headerType == 3 || (isTheora && headerType == 0x81)) { // "comment" header + delete[] commentHeader; commentHeader = newHeader; + commentHeaderSize = headerSize[i]; + } else if (headerType == 5 || (isTheora && headerType == 0x82)) { // "setup" header + delete[] setupHeader; setupHeader = newHeader; + setupHeaderSize = headerSize[i]; + } else { + delete[] newHeader; // because it was a header type that we don't understand + } + } + if (!success) break; + + if (isTheora) { + result = TheoraVideoRTPSink + ::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + identificationHeader, identificationHeaderSize, + commentHeader, commentHeaderSize, + setupHeader, setupHeaderSize); + } else { // Vorbis + result = VorbisAudioRTPSink + ::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + track->samplingFrequency, track->numChannels, + identificationHeader, identificationHeaderSize, + commentHeader, commentHeaderSize, + setupHeader, setupHeaderSize); + } + } while (0); + + delete[] identificationHeader; delete[] commentHeader; delete[] setupHeader; + } else if (strcmp(track->mimeType, "video/H264") == 0) { + // Use our track's 'Codec Private' data: Bytes 5 and beyond contain SPS and PPSs: + u_int8_t* SPS = NULL; unsigned SPSSize = NULL; + u_int8_t* PPS = NULL; unsigned PPSSize = NULL; + u_int8_t* SPSandPPSBytes = NULL; unsigned numSPSandPPSBytes = 0; + + do { + if (track->codecPrivateSize < 6) break; + + numSPSandPPSBytes = track->codecPrivateSize - 5; + SPSandPPSBytes = &track->codecPrivate[5]; + + // Extract, from "SPSandPPSBytes", one SPS NAL unit, and one PPS NAL unit. + // (I hope one is all we need of each.) 
+ unsigned i; + u_int8_t* ptr = SPSandPPSBytes; + u_int8_t* limit = &SPSandPPSBytes[numSPSandPPSBytes]; + + unsigned numSPSs = (*ptr++)&0x1F; CHECK_PTR; + for (i = 0; i < numSPSs; ++i) { + unsigned spsSize = (*ptr++)<<8; CHECK_PTR; + spsSize |= *ptr++; CHECK_PTR; + + if (spsSize > NUM_BYTES_REMAINING) break; + u_int8_t nal_unit_type = ptr[0]&0x1F; + if (SPS == NULL && nal_unit_type == 7/*sanity check*/) { // save the first one + SPSSize = spsSize; + SPS = new u_int8_t[spsSize]; + memmove(SPS, ptr, spsSize); + } + ptr += spsSize; + } + + unsigned numPPSs = (*ptr++)&0x1F; CHECK_PTR; + for (i = 0; i < numPPSs; ++i) { + unsigned ppsSize = (*ptr++)<<8; CHECK_PTR; + ppsSize |= *ptr++; CHECK_PTR; + + if (ppsSize > NUM_BYTES_REMAINING) break; + u_int8_t nal_unit_type = ptr[0]&0x1F; + if (PPS == NULL && nal_unit_type == 8/*sanity check*/) { // save the first one + PPSSize = ppsSize; + PPS = new u_int8_t[ppsSize]; + memmove(PPS, ptr, ppsSize); + } + ptr += ppsSize; + } + } while (0); + + result = H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + SPS, SPSSize, PPS, PPSSize); + + delete[] SPS; delete[] PPS; + } else if (strcmp(track->mimeType, "video/H265") == 0) { + u_int8_t* VPS = NULL; unsigned VPSSize = NULL; + u_int8_t* SPS = NULL; unsigned SPSSize = NULL; + u_int8_t* PPS = NULL; unsigned PPSSize = NULL; + u_int8_t* VPS_SPS_PPSBytes = NULL; unsigned numVPS_SPS_PPSBytes = 0; + unsigned i; + + do { + if (track->codecPrivateUsesH264FormatForH265) { + // The data uses the H.264-style format (but including VPS NAL unit(s)). + // The VPS,SPS,PPS NAL unit information starts at byte #5: + if (track->codecPrivateSize >= 6) { + numVPS_SPS_PPSBytes = track->codecPrivateSize - 5; + VPS_SPS_PPSBytes = &track->codecPrivate[5]; + } + } else { + // The data uses the proper H.265-style format. 
+ // The VPS,SPS,PPS NAL unit information starts at byte #22: + if (track->codecPrivateSize >= 23) { + numVPS_SPS_PPSBytes = track->codecPrivateSize - 22; + VPS_SPS_PPSBytes = &track->codecPrivate[22]; + } + } + + // Extract, from "VPS_SPS_PPSBytes", one VPS NAL unit, one SPS NAL unit, and one PPS NAL unit. + // (I hope one is all we need of each.) + if (numVPS_SPS_PPSBytes == 0 || VPS_SPS_PPSBytes == NULL) break; // sanity check + u_int8_t* ptr = VPS_SPS_PPSBytes; + u_int8_t* limit = &VPS_SPS_PPSBytes[numVPS_SPS_PPSBytes]; + + if (track->codecPrivateUsesH264FormatForH265) { + // The data uses the H.264-style format (but including VPS NAL unit(s)). + while (NUM_BYTES_REMAINING > 0) { + unsigned numNALUnits = (*ptr++)&0x1F; CHECK_PTR; + for (i = 0; i < numNALUnits; ++i) { + unsigned nalUnitLength = (*ptr++)<<8; CHECK_PTR; + nalUnitLength |= *ptr++; CHECK_PTR; + + if (nalUnitLength > NUM_BYTES_REMAINING) break; + u_int8_t nal_unit_type = (ptr[0]&0x7E)>>1; + if (nal_unit_type == 32) { // VPS + VPSSize = nalUnitLength; + delete[] VPS; VPS = new u_int8_t[nalUnitLength]; + memmove(VPS, ptr, nalUnitLength); + } else if (nal_unit_type == 33) { // SPS + SPSSize = nalUnitLength; + delete[] SPS; SPS = new u_int8_t[nalUnitLength]; + memmove(SPS, ptr, nalUnitLength); + } else if (nal_unit_type == 34) { // PPS + PPSSize = nalUnitLength; + delete[] PPS; PPS = new u_int8_t[nalUnitLength]; + memmove(PPS, ptr, nalUnitLength); + } + ptr += nalUnitLength; + } + } + } else { + // The data uses the proper H.265-style format. 
+ unsigned numOfArrays = *ptr++; CHECK_PTR; + for (unsigned j = 0; j < numOfArrays; ++j) { + ++ptr; CHECK_PTR; // skip the 'array_completeness'|'reserved'|'NAL_unit_type' byte + + unsigned numNalus = (*ptr++)<<8; CHECK_PTR; + numNalus |= *ptr++; CHECK_PTR; + + for (i = 0; i < numNalus; ++i) { + unsigned nalUnitLength = (*ptr++)<<8; CHECK_PTR; + nalUnitLength |= *ptr++; CHECK_PTR; + + if (nalUnitLength > NUM_BYTES_REMAINING) break; + u_int8_t nal_unit_type = (ptr[0]&0x7E)>>1; + if (nal_unit_type == 32) { // VPS + VPSSize = nalUnitLength; + delete[] VPS; VPS = new u_int8_t[nalUnitLength]; + memmove(VPS, ptr, nalUnitLength); + } else if (nal_unit_type == 33) { // SPS + SPSSize = nalUnitLength; + delete[] SPS; SPS = new u_int8_t[nalUnitLength]; + memmove(SPS, ptr, nalUnitLength); + } else if (nal_unit_type == 34) { // PPS + PPSSize = nalUnitLength; + delete[] PPS; PPS = new u_int8_t[nalUnitLength]; + memmove(PPS, ptr, nalUnitLength); + } + ptr += nalUnitLength; + } + } + } + } while (0); + + result = H265VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + VPS, VPSSize, SPS, SPSSize, PPS, PPSSize); + delete[] VPS; delete[] SPS; delete[] PPS; + } else if (strcmp(track->mimeType, "video/VP8") == 0) { + result = VP8VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic); + } else if (strcmp(track->mimeType, "text/T140") == 0) { + result = T140TextRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic); + } + } while (0); + + return result; +} + +void MatroskaFile::addTrack(MatroskaTrack* newTrack, unsigned trackNumber) { + fTrackTable->add(newTrack, trackNumber); +} + +void MatroskaFile::addCuePoint(double cueTime, u_int64_t clusterOffsetInFile, unsigned blockNumWithinCluster) { + Boolean dummy = False; // not used + CuePoint::addCuePoint(fCuePoints, cueTime, clusterOffsetInFile, blockNumWithinCluster, dummy); +} + +Boolean MatroskaFile::lookupCuePoint(double& cueTime, u_int64_t& resultClusterOffsetInFile, unsigned& 
resultBlockNumWithinCluster) { + if (fCuePoints == NULL) return False; + + (void)fCuePoints->lookup(cueTime, resultClusterOffsetInFile, resultBlockNumWithinCluster); + return True; +} + +void MatroskaFile::printCuePoints(FILE* fid) { + CuePoint::fprintf(fid, fCuePoints); +} + + +////////// MatroskaTrackTable implementation ////////// + +MatroskaTrackTable::MatroskaTrackTable() + : fTable(HashTable::create(ONE_WORD_HASH_KEYS)) { +} + +MatroskaTrackTable::~MatroskaTrackTable() { + // Remove and delete all of our "MatroskaTrack" descriptors, and the hash table itself: + MatroskaTrack* track; + while ((track = (MatroskaTrack*)fTable->RemoveNext()) != NULL) { + delete track; + } + delete fTable; +} + +void MatroskaTrackTable::add(MatroskaTrack* newTrack, unsigned trackNumber) { + if (newTrack != NULL && newTrack->trackNumber != 0) fTable->Remove((char const*)newTrack->trackNumber); + MatroskaTrack* existingTrack = (MatroskaTrack*)fTable->Add((char const*)trackNumber, newTrack); + delete existingTrack; // in case it wasn't NULL +} + +MatroskaTrack* MatroskaTrackTable::lookup(unsigned trackNumber) { + return (MatroskaTrack*)fTable->Lookup((char const*)trackNumber); +} + +unsigned MatroskaTrackTable::numTracks() const { return fTable->numEntries(); } + +MatroskaTrackTable::Iterator::Iterator(MatroskaTrackTable& ourTable) { + fIter = HashTable::Iterator::create(*(ourTable.fTable)); +} + +MatroskaTrackTable::Iterator::~Iterator() { + delete fIter; +} + +MatroskaTrack* MatroskaTrackTable::Iterator::next() { + char const* key; + return (MatroskaTrack*)fIter->next(key); +} + + +////////// MatroskaTrack implementation ////////// + +MatroskaTrack::MatroskaTrack() + : trackNumber(0/*not set*/), trackType(0/*unknown*/), + isEnabled(True), isDefault(True), isForced(False), + defaultDuration(0), + name(NULL), language(NULL), codecID(NULL), + samplingFrequency(0), numChannels(2), mimeType(""), + codecPrivateSize(0), codecPrivate(NULL), + codecPrivateUsesH264FormatForH265(False), 
codecIsOpus(False), + headerStrippedBytesSize(0), headerStrippedBytes(NULL), + subframeSizeSize(0) { +} + +MatroskaTrack::~MatroskaTrack() { + delete[] name; delete[] language; delete[] codecID; + delete[] codecPrivate; + delete[] headerStrippedBytes; +} + + +////////// MatroskaDemux implementation ////////// + +MatroskaDemux::MatroskaDemux(MatroskaFile& ourFile) + : Medium(ourFile.envir()), + fOurFile(ourFile), fDemuxedTracksTable(HashTable::create(ONE_WORD_HASH_KEYS)), + fNextTrackTypeToCheck(0x1) { + fOurParser = new MatroskaFileParser(ourFile, ByteStreamFileSource::createNew(envir(), ourFile.fileName()), + handleEndOfFile, this, this); +} + +MatroskaDemux::~MatroskaDemux() { + // Begin by acting as if we've reached the end of the source file. This should cause all of our demuxed tracks to get closed. + handleEndOfFile(); + + // Then delete our table of "MatroskaDemuxedTrack"s + // - but not the "MatroskaDemuxedTrack"s themselves; that should have already happened: + delete fDemuxedTracksTable; + + delete fOurParser; + fOurFile.removeDemux(this); +} + +FramedSource* MatroskaDemux::newDemuxedTrack() { + unsigned dummyResultTrackNumber; + return newDemuxedTrack(dummyResultTrackNumber); +} + +FramedSource* MatroskaDemux::newDemuxedTrack(unsigned& resultTrackNumber) { + FramedSource* result; + resultTrackNumber = 0; + + for (result = NULL; result == NULL && fNextTrackTypeToCheck != MATROSKA_TRACK_TYPE_OTHER; + fNextTrackTypeToCheck <<= 1) { + if (fNextTrackTypeToCheck == MATROSKA_TRACK_TYPE_VIDEO) resultTrackNumber = fOurFile.chosenVideoTrackNumber(); + else if (fNextTrackTypeToCheck == MATROSKA_TRACK_TYPE_AUDIO) resultTrackNumber = fOurFile.chosenAudioTrackNumber(); + else if (fNextTrackTypeToCheck == MATROSKA_TRACK_TYPE_SUBTITLE) resultTrackNumber = fOurFile.chosenSubtitleTrackNumber(); + + result = newDemuxedTrackByTrackNumber(resultTrackNumber); + } + + return result; +} + +FramedSource* MatroskaDemux::newDemuxedTrackByTrackNumber(unsigned trackNumber) { + if 
(trackNumber == 0) return NULL; + + FramedSource* trackSource = new MatroskaDemuxedTrack(envir(), trackNumber, *this); + fDemuxedTracksTable->Add((char const*)trackNumber, trackSource); + return trackSource; +} + +MatroskaDemuxedTrack* MatroskaDemux::lookupDemuxedTrack(unsigned trackNumber) { + return (MatroskaDemuxedTrack*)fDemuxedTracksTable->Lookup((char const*)trackNumber); +} + +void MatroskaDemux::removeTrack(unsigned trackNumber) { + fDemuxedTracksTable->Remove((char const*)trackNumber); + if (fDemuxedTracksTable->numEntries() == 0) { + // We no longer have any demuxed tracks, so delete ourselves now: + Medium::close(this); + } +} + +void MatroskaDemux::continueReading() { + fOurParser->continueParsing(); +} + +void MatroskaDemux::seekToTime(double& seekNPT) { + if (fOurParser != NULL) fOurParser->seekToTime(seekNPT); +} + +void MatroskaDemux::handleEndOfFile(void* clientData) { + ((MatroskaDemux*)clientData)->handleEndOfFile(); +} + +void MatroskaDemux::handleEndOfFile() { + // Iterate through all of our 'demuxed tracks', handling 'end of input' on each one. + // Hack: Because this can cause the hash table to get modified underneath us, we don't call the handlers until after we've + // first iterated through all of the tracks. 
+ unsigned numTracks = fDemuxedTracksTable->numEntries(); + if (numTracks == 0) return; + MatroskaDemuxedTrack** tracks = new MatroskaDemuxedTrack*[numTracks]; + + HashTable::Iterator* iter = HashTable::Iterator::create(*fDemuxedTracksTable); + unsigned i; + char const* trackNumber; + + for (i = 0; i < numTracks; ++i) { + tracks[i] = (MatroskaDemuxedTrack*)iter->next(trackNumber); + } + delete iter; + + for (i = 0; i < numTracks; ++i) { + if (tracks[i] == NULL) continue; // sanity check; shouldn't happen + tracks[i]->handleClosure(); + } + + delete[] tracks; +} + + +////////// CuePoint implementation ////////// + +CuePoint::CuePoint(double cueTime, u_int64_t clusterOffsetInFile, unsigned blockNumWithinCluster) + : fBalance(0), + fCueTime(cueTime), fClusterOffsetInFile(clusterOffsetInFile), fBlockNumWithinCluster(blockNumWithinCluster - 1) { + fSubTree[0] = fSubTree[1] = NULL; +} + +CuePoint::~CuePoint() { + delete fSubTree[0]; delete fSubTree[1]; +} + +void CuePoint::addCuePoint(CuePoint*& root, double cueTime, u_int64_t clusterOffsetInFile, unsigned blockNumWithinCluster, + Boolean& needToReviseBalanceOfParent) { + needToReviseBalanceOfParent = False; // by default; may get changed below + + if (root == NULL) { + root = new CuePoint(cueTime, clusterOffsetInFile, blockNumWithinCluster); + needToReviseBalanceOfParent = True; + } else if (cueTime == root->fCueTime) { + // Replace existing data: + root->fClusterOffsetInFile = clusterOffsetInFile; + root->fBlockNumWithinCluster = blockNumWithinCluster - 1; + } else { + // Add to our left or right subtree: + int direction = cueTime > root->fCueTime; // 0 (left) or 1 (right) + Boolean needToReviseOurBalance = False; + addCuePoint(root->fSubTree[direction], cueTime, clusterOffsetInFile, blockNumWithinCluster, needToReviseOurBalance); + + if (needToReviseOurBalance) { + // We need to change our 'balance' number, perhaps while also performing a rotation to bring ourself back into balance: + if (root->fBalance == 0) { + // 
We were balanced before, but now we're unbalanced (by 1) on the "direction" side: + root->fBalance = -1 + 2*direction; // -1 for "direction" 0; 1 for "direction" 1 + needToReviseBalanceOfParent = True; + } else if (root->fBalance == 1 - 2*direction) { // 1 for "direction" 0; -1 for "direction" 1 + // We were unbalanced (by 1) on the side opposite to where we added an entry, so now we're balanced: + root->fBalance = 0; + } else { + // We were unbalanced (by 1) on the side where we added an entry, so now we're unbalanced by 2, and have to rebalance: + if (root->fSubTree[direction]->fBalance == -1 + 2*direction) { // -1 for "direction" 0; 1 for "direction" 1 + // We're 'doubly-unbalanced' on this side, so perform a single rotation in the opposite direction: + root->fBalance = root->fSubTree[direction]->fBalance = 0; + rotate(1-direction, root); + } else { + // This is the Left-Right case (for "direction" 0) or the Right-Left case (for "direction" 1); perform two rotations: + char newParentCurBalance = root->fSubTree[direction]->fSubTree[1-direction]->fBalance; + if (newParentCurBalance == 1 - 2*direction) { // 1 for "direction" 0; -1 for "direction" 1 + root->fBalance = 0; + root->fSubTree[direction]->fBalance = -1 + 2*direction; // -1 for "direction" 0; 1 for "direction" 1 + } else if (newParentCurBalance == 0) { + root->fBalance = 0; + root->fSubTree[direction]->fBalance = 0; + } else { + root->fBalance = 1 - 2*direction; // 1 for "direction" 0; -1 for "direction" 1 + root->fSubTree[direction]->fBalance = 0; + } + rotate(direction, root->fSubTree[direction]); + + root->fSubTree[direction]->fBalance = 0; // the new root will be balanced + rotate(1-direction, root); + } + } + } + } +} + +Boolean CuePoint::lookup(double& cueTime, u_int64_t& resultClusterOffsetInFile, unsigned& resultBlockNumWithinCluster) { + if (cueTime < fCueTime) { + if (left() == NULL) { + resultClusterOffsetInFile = 0; + resultBlockNumWithinCluster = 0; + return False; + } else { + return 
left()->lookup(cueTime, resultClusterOffsetInFile, resultBlockNumWithinCluster); + } + } else { + if (right() == NULL || !right()->lookup(cueTime, resultClusterOffsetInFile, resultBlockNumWithinCluster)) { + // Use this record: + cueTime = fCueTime; + resultClusterOffsetInFile = fClusterOffsetInFile; + resultBlockNumWithinCluster = fBlockNumWithinCluster; + } + return True; + } +} + +void CuePoint::fprintf(FILE* fid, CuePoint* cuePoint) { + if (cuePoint != NULL) { + ::fprintf(fid, "["); + fprintf(fid, cuePoint->left()); + + ::fprintf(fid, ",%.1f{%d},", cuePoint->fCueTime, cuePoint->fBalance); + + fprintf(fid, cuePoint->right()); + ::fprintf(fid, "]"); + } +} + +void CuePoint::rotate(unsigned direction/*0 => left; 1 => right*/, CuePoint*& root) { + CuePoint* pivot = root->fSubTree[1-direction]; // ASSERT: pivot != NULL + root->fSubTree[1-direction] = pivot->fSubTree[direction]; + pivot->fSubTree[direction] = root; + root = pivot; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MatroskaFileParser.cpp b/AnyCore/lib_rtsp/liveMedia/MatroskaFileParser.cpp new file mode 100644 index 0000000..a8823dd --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MatroskaFileParser.cpp @@ -0,0 +1,1433 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A parser for a Matroska file. +// Implementation + +#include "MatroskaFileParser.hh" +#include "MatroskaDemuxedTrack.hh" +#include <ByteStreamFileSource.hh> +#include <GroupsockHelper.hh> // for "gettimeofday() + +MatroskaFileParser::MatroskaFileParser(MatroskaFile& ourFile, FramedSource* inputSource, + FramedSource::onCloseFunc* onEndFunc, void* onEndClientData, + MatroskaDemux* ourDemux) + : StreamParser(inputSource, onEndFunc, onEndClientData, continueParsing, this), + fOurFile(ourFile), fInputSource(inputSource), + fOnEndFunc(onEndFunc), fOnEndClientData(onEndClientData), + fOurDemux(ourDemux), + fCurOffsetInFile(0), fSavedCurOffsetInFile(0), fLimitOffsetInFile(0), + fNumHeaderBytesToSkip(0), fClusterTimecode(0), fBlockTimecode(0), + fFrameSizesWithinBlock(NULL), + fPresentationTimeOffset(0.0) { + if (ourDemux == NULL) { + // Initialization + fCurrentParseState = PARSING_START_OF_FILE; + continueParsing(); + } else { + fCurrentParseState = LOOKING_FOR_CLUSTER; + // In this case, parsing (of track data) doesn't start until a client starts reading from a track.
+ } +} + +MatroskaFileParser::~MatroskaFileParser() { + delete[] fFrameSizesWithinBlock; + Medium::close(fInputSource); +} + +void MatroskaFileParser::seekToTime(double& seekNPT) { +#ifdef DEBUG + fprintf(stderr, "seekToTime(%f)\n", seekNPT); +#endif + if (seekNPT <= 0.0) { +#ifdef DEBUG + fprintf(stderr, "\t=> start of file\n"); +#endif + seekNPT = 0.0; + seekToFilePosition(0); + } else if (seekNPT >= fOurFile.fileDuration()) { +#ifdef DEBUG + fprintf(stderr, "\t=> end of file\n"); +#endif + seekNPT = fOurFile.fileDuration(); + seekToEndOfFile(); + } else { + u_int64_t clusterOffsetInFile; + unsigned blockNumWithinCluster; + if (!fOurFile.lookupCuePoint(seekNPT, clusterOffsetInFile, blockNumWithinCluster)) { +#ifdef DEBUG + fprintf(stderr, "\t=> not supported\n"); +#endif + return; // seeking not supported + } + +#ifdef DEBUG + fprintf(stderr, "\t=> seek time %f, file position %llu, block number within cluster %d\n", seekNPT, clusterOffsetInFile, blockNumWithinCluster); +#endif + seekToFilePosition(clusterOffsetInFile); + fCurrentParseState = LOOKING_FOR_BLOCK; + // LATER handle "blockNumWithinCluster"; for now, we assume that it's 0 ##### + } +} + +void MatroskaFileParser +::continueParsing(void* clientData, unsigned char* /*ptr*/, unsigned /*size*/, struct timeval /*presentationTime*/) { + ((MatroskaFileParser*)clientData)->continueParsing(); +} + +void MatroskaFileParser::continueParsing() { + if (fInputSource != NULL) { + if (fInputSource->isCurrentlyAwaitingData()) return; // Our input source is currently being read. Wait until that read completes + + if (!parse()) { + // We didn't complete the parsing, because we had to read more data from the source, or because we're waiting for + // another read from downstream. Once that happens, we'll get called again. + return; + } + } + + // We successfully parsed the file. 
Call our 'done' function now: + if (fOnEndFunc != NULL) (*fOnEndFunc)(fOnEndClientData); +} + +Boolean MatroskaFileParser::parse() { + Boolean areDone = False; + + try { + skipRemainingHeaderBytes(True); // if any + do { + switch (fCurrentParseState) { + case PARSING_START_OF_FILE: { + areDone = parseStartOfFile(); + break; + } + case LOOKING_FOR_TRACKS: { + lookForNextTrack(); + break; + } + case PARSING_TRACK: { + areDone = parseTrack(); + if (areDone && fOurFile.fCuesOffset > 0) { + // We've finished parsing the 'Track' information. There are also 'Cues' in the file, so parse those before finishing: + // Seek to the specified position in the file. We were already told that the 'Cues' begins there: +#ifdef DEBUG + fprintf(stderr, "Seeking to file position %llu (the previously-reported location of 'Cues')\n", fOurFile.fCuesOffset); +#endif + seekToFilePosition(fOurFile.fCuesOffset); + fCurrentParseState = PARSING_CUES; + areDone = False; + } + break; + } + case PARSING_CUES: { + areDone = parseCues(); + break; + } + case LOOKING_FOR_CLUSTER: { + if (fOurFile.fClusterOffset > 0) { + // Optimization: Seek to the specified position in the file. We were already told that the 'Cluster' begins there: +#ifdef DEBUG + fprintf(stderr, "Optimization: Seeking to file position %llu (the previously-reported location of a 'Cluster')\n", fOurFile.fClusterOffset); +#endif + seekToFilePosition(fOurFile.fClusterOffset); + } + fCurrentParseState = LOOKING_FOR_BLOCK; + break; + } + case LOOKING_FOR_BLOCK: { + lookForNextBlock(); + break; + } + case PARSING_BLOCK: { + parseBlock(); + break; + } + case DELIVERING_FRAME_WITHIN_BLOCK: { + if (!deliverFrameWithinBlock()) return False; + break; + } + case DELIVERING_FRAME_BYTES: { + deliverFrameBytes(); + return False; // Halt parsing for now. A new 'read' from downstream will cause parsing to resume. 
+ break; + } + } + } while (!areDone); + + return True; + } catch (int /*e*/) { +#ifdef DEBUG + fprintf(stderr, "MatroskaFileParser::parse() EXCEPTION (This is normal behavior - *not* an error)\n"); +#endif + return False; // the parsing got interrupted + } +} + +Boolean MatroskaFileParser::parseStartOfFile() { +#ifdef DEBUG + fprintf(stderr, "parsing start of file\n"); +#endif + EBMLId id; + EBMLDataSize size; + + // The file must begin with the standard EBML header (which we skip): + if (!parseEBMLIdAndSize(id, size) || id != MATROSKA_ID_EBML) { + fOurFile.envir() << "ERROR: File does not begin with an EBML header\n"; + return True; // We're done with the file, because it's not valid + } +#ifdef DEBUG + fprintf(stderr, "MatroskaFileParser::parseStartOfFile(): Parsed id 0x%s (%s), size: %lld\n", id.hexString(), id.stringName(), size.val()); +#endif + + fCurrentParseState = LOOKING_FOR_TRACKS; + skipHeader(size); + + return False; // because we have more parsing to do - inside the 'Track' header +} + +void MatroskaFileParser::lookForNextTrack() { +#ifdef DEBUG + fprintf(stderr, "looking for Track\n"); +#endif + EBMLId id; + EBMLDataSize size; + + // Read and skip over (or enter) each Matroska header, until we get to a 'Track'. 
+ while (fCurrentParseState == LOOKING_FOR_TRACKS) { + while (!parseEBMLIdAndSize(id, size)) {} +#ifdef DEBUG + fprintf(stderr, "MatroskaFileParser::lookForNextTrack(): Parsed id 0x%s (%s), size: %lld\n", id.hexString(), id.stringName(), size.val()); +#endif + switch (id.val()) { + case MATROSKA_ID_SEGMENT: { // 'Segment' header: enter this + // Remember the position, within the file, of the start of Segment data, because Seek Positions are relative to this: + fOurFile.fSegmentDataOffset = fCurOffsetInFile; + break; + } + case MATROSKA_ID_SEEK_HEAD: { // 'Seek Head' header: enter this + break; + } + case MATROSKA_ID_SEEK: { // 'Seek' header: enter this + break; + } + case MATROSKA_ID_SEEK_ID: { // 'Seek ID' header: get this value + if (parseEBMLNumber(fLastSeekId)) { +#ifdef DEBUG + fprintf(stderr, "\tSeek ID 0x%s:\t%s\n", fLastSeekId.hexString(), fLastSeekId.stringName()); +#endif + } + break; + } + case MATROSKA_ID_SEEK_POSITION: { // 'Seek Position' header: get this value + u_int64_t seekPosition; + if (parseEBMLVal_unsigned64(size, seekPosition)) { + u_int64_t offsetInFile = fOurFile.fSegmentDataOffset + seekPosition; +#ifdef DEBUG + fprintf(stderr, "\tSeek Position %llu (=> offset within the file: %llu (0x%llx))\n", seekPosition, offsetInFile, offsetInFile); +#endif + // The only 'Seek Position's that we care about are for 'Cluster' and 'Cues': + if (fLastSeekId == MATROSKA_ID_CLUSTER) { + fOurFile.fClusterOffset = offsetInFile; + } else if (fLastSeekId == MATROSKA_ID_CUES) { + fOurFile.fCuesOffset = offsetInFile; + } + } + break; + } + case MATROSKA_ID_INFO: { // 'Segment Info' header: enter this + break; + } + case MATROSKA_ID_TIMECODE_SCALE: { // 'Timecode Scale' header: get this value + unsigned timecodeScale; + if (parseEBMLVal_unsigned(size, timecodeScale) && timecodeScale > 0) { + fOurFile.fTimecodeScale = timecodeScale; +#ifdef DEBUG + fprintf(stderr, "\tTimecode Scale %u ns (=> Segment Duration == %f seconds)\n", + fOurFile.timecodeScale(), 
fOurFile.segmentDuration()*(fOurFile.fTimecodeScale/1000000000.0f)); +#endif + } + break; + } + case MATROSKA_ID_DURATION: { // 'Segment Duration' header: get this value + if (parseEBMLVal_float(size, fOurFile.fSegmentDuration)) { +#ifdef DEBUG + fprintf(stderr, "\tSegment Duration %f (== %f seconds)\n", + fOurFile.segmentDuration(), fOurFile.segmentDuration()*(fOurFile.fTimecodeScale/1000000000.0f)); +#endif + } + break; + } +#ifdef DEBUG + case MATROSKA_ID_TITLE: { // 'Segment Title': display this value + char* title; + if (parseEBMLVal_string(size, title)) { +#ifdef DEBUG + fprintf(stderr, "\tTitle: %s\n", title); +#endif + delete[] title; + } + break; + } +#endif + case MATROSKA_ID_TRACKS: { // enter this, and move on to parsing 'Tracks' + fLimitOffsetInFile = fCurOffsetInFile + size.val(); // Make sure we don't read past the end of this header + fCurrentParseState = PARSING_TRACK; + break; + } + default: { // skip over this header + skipHeader(size); + break; + } + } + setParseState(); + } +} + +Boolean MatroskaFileParser::parseTrack() { +#ifdef DEBUG + fprintf(stderr, "parsing Track\n"); +#endif + // Read and process each Matroska header, until we get to the end of the Track: + MatroskaTrack* track = NULL; + EBMLId id; + EBMLDataSize size; + while (fCurOffsetInFile < fLimitOffsetInFile) { + while (!parseEBMLIdAndSize(id, size)) {} +#ifdef DEBUG + if (id == MATROSKA_ID_TRACK_ENTRY) fprintf(stderr, "\n"); // makes debugging output easier to read + fprintf(stderr, "MatroskaFileParser::parseTrack(): Parsed id 0x%s (%s), size: %lld\n", id.hexString(), id.stringName(), size.val()); +#endif + switch (id.val()) { + case MATROSKA_ID_TRACK_ENTRY: { // 'Track Entry' header: enter this + // Create a new "MatroskaTrack" object for this entry: + if (track != NULL && track->trackNumber == 0) delete track; // We had a previous "MatroskaTrack" object that was never used + track = new MatroskaTrack; + break; + } + case MATROSKA_ID_TRACK_NUMBER: { + unsigned trackNumber; + if 
(parseEBMLVal_unsigned(size, trackNumber)) { +#ifdef DEBUG + fprintf(stderr, "\tTrack Number %d\n", trackNumber); +#endif + if (track != NULL && trackNumber != 0) { + track->trackNumber = trackNumber; + fOurFile.addTrack(track, trackNumber); + } + } + break; + } + case MATROSKA_ID_TRACK_TYPE: { + unsigned trackType; + if (parseEBMLVal_unsigned(size, trackType) && track != NULL) { + // We convert the Matroska 'track type' code into our own code (which we can use as a bitmap): + track->trackType + = trackType == 1 ? MATROSKA_TRACK_TYPE_VIDEO : trackType == 2 ? MATROSKA_TRACK_TYPE_AUDIO + : trackType == 0x11 ? MATROSKA_TRACK_TYPE_SUBTITLE : MATROSKA_TRACK_TYPE_OTHER; +#ifdef DEBUG + fprintf(stderr, "\tTrack Type 0x%02x (%s)\n", trackType, + track->trackType == MATROSKA_TRACK_TYPE_VIDEO ? "video" : + track->trackType == MATROSKA_TRACK_TYPE_AUDIO ? "audio" : + track->trackType == MATROSKA_TRACK_TYPE_SUBTITLE ? "subtitle" : + ""); +#endif + } + break; + } + case MATROSKA_ID_FLAG_ENABLED: { + unsigned flagEnabled; + if (parseEBMLVal_unsigned(size, flagEnabled)) { +#ifdef DEBUG + fprintf(stderr, "\tTrack is Enabled: %d\n", flagEnabled); +#endif + if (track != NULL) track->isEnabled = flagEnabled != 0; + } + break; + } + case MATROSKA_ID_FLAG_DEFAULT: { + unsigned flagDefault; + if (parseEBMLVal_unsigned(size, flagDefault)) { +#ifdef DEBUG + fprintf(stderr, "\tTrack is Default: %d\n", flagDefault); +#endif + if (track != NULL) track->isDefault = flagDefault != 0; + } + break; + } + case MATROSKA_ID_FLAG_FORCED: { + unsigned flagForced; + if (parseEBMLVal_unsigned(size, flagForced)) { +#ifdef DEBUG + fprintf(stderr, "\tTrack is Forced: %d\n", flagForced); +#endif + if (track != NULL) track->isForced = flagForced != 0; + } + break; + } + case MATROSKA_ID_DEFAULT_DURATION: { + unsigned defaultDuration; + if (parseEBMLVal_unsigned(size, defaultDuration)) { +#ifdef DEBUG + fprintf(stderr, "\tDefault duration %f ms\n", defaultDuration/1000000.0); +#endif + if (track != NULL) 
track->defaultDuration = defaultDuration; + } + break; + } + case MATROSKA_ID_MAX_BLOCK_ADDITION_ID: { + unsigned maxBlockAdditionID; + if (parseEBMLVal_unsigned(size, maxBlockAdditionID)) { +#ifdef DEBUG + fprintf(stderr, "\tMax Block Addition ID: %u\n", maxBlockAdditionID); +#endif + } + break; + } + case MATROSKA_ID_NAME: { + char* name; + if (parseEBMLVal_string(size, name)) { +#ifdef DEBUG + fprintf(stderr, "\tName: %s\n", name); +#endif + if (track != NULL) { + delete[] track->name; track->name = name; + } else { + delete[] name; + } + } + break; + } + case MATROSKA_ID_LANGUAGE: { + char* language; + if (parseEBMLVal_string(size, language)) { +#ifdef DEBUG + fprintf(stderr, "\tLanguage: %s\n", language); +#endif + if (track != NULL) { + delete[] track->language; track->language = language; + } else { + delete[] language; + } + } + break; + } + case MATROSKA_ID_CODEC: { + char* codecID; + if (parseEBMLVal_string(size, codecID)) { +#ifdef DEBUG + fprintf(stderr, "\tCodec ID: %s\n", codecID); +#endif + if (track != NULL) { + delete[] track->codecID; track->codecID = codecID; + + // Also set the track's "mimeType" field, if we can deduce it from the "codecID": + if (strncmp(codecID, "A_MPEG", 6) == 0) { + track->mimeType = "audio/MPEG"; + } else if (strncmp(codecID, "A_AAC", 5) == 0) { + track->mimeType = "audio/AAC"; + } else if (strncmp(codecID, "A_AC3", 5) == 0) { + track->mimeType = "audio/AC3"; + } else if (strncmp(codecID, "A_VORBIS", 8) == 0) { + track->mimeType = "audio/VORBIS"; + } else if (strcmp(codecID, "A_OPUS") == 0) { + track->mimeType = "audio/OPUS"; + track->codecIsOpus = True; + } else if (strcmp(codecID, "V_MPEG4/ISO/AVC") == 0) { + track->mimeType = "video/H264"; + } else if (strcmp(codecID, "V_MPEGH/ISO/HEVC") == 0) { + track->mimeType = "video/H265"; + } else if (strncmp(codecID, "V_VP8", 5) == 0) { + track->mimeType = "video/VP8"; + } else if (strncmp(codecID, "V_THEORA", 8) == 0) { + track->mimeType = "video/THEORA"; + } else if 
(strncmp(codecID, "S_TEXT", 6) == 0) { + track->mimeType = "text/T140"; + } + } else { + delete[] codecID; + } + } + break; + } + case MATROSKA_ID_CODEC_PRIVATE: { + u_int8_t* codecPrivate; + unsigned codecPrivateSize; + if (parseEBMLVal_binary(size, codecPrivate)) { + codecPrivateSize = (unsigned)size.val(); +#ifdef DEBUG + fprintf(stderr, "\tCodec Private: "); + for (unsigned i = 0; i < codecPrivateSize; ++i) fprintf(stderr, "%02x:", codecPrivate[i]); + fprintf(stderr, "\n"); +#endif + if (track != NULL) { + delete[] track->codecPrivate; track->codecPrivate = codecPrivate; + track->codecPrivateSize = codecPrivateSize; + + // Hack for H.264 and H.265: The 'codec private' data contains + // the size of NAL unit lengths: + if (track->codecID != NULL) { + if (strcmp(track->codecID, "V_MPEG4/ISO/AVC") == 0) { // H.264 + // Byte 4 of the 'codec private' data contains 'lengthSizeMinusOne' (parentheses are required: '&' binds looser than '+'): + if (codecPrivateSize >= 5) track->subframeSizeSize = ((codecPrivate[4])&0x3) + 1; + } else if (strcmp(track->codecID, "V_MPEGH/ISO/HEVC") == 0) { // H.265 + // H.265 'codec private' data is *supposed* to use the format that's described in + // http://lists.matroska.org/pipermail/matroska-devel/2013-September/004567.html + // However, some Matroska files use the same format that was used for H.264. 
+ // We check for this here, by checking various fields that are supposed to be + // 'all-1' in the 'correct' format: + if (codecPrivateSize < 23 || (codecPrivate[13]&0xF0) != 0xF0 || + (codecPrivate[15]&0xFC) != 0xFC || (codecPrivate[16]&0xFC) != 0xFC || + (codecPrivate[17]&0xF8) != 0xF8 || (codecPrivate[18]&0xF8) != 0xF8) { + // The 'correct' format isn't being used, so assume the H.264 format instead: + track->codecPrivateUsesH264FormatForH265 = True; + + // Byte 4 of the 'codec private' data contains 'lengthSizeMinusOne' (parentheses are required: '&' binds looser than '+'): + if (codecPrivateSize >= 5) track->subframeSizeSize = ((codecPrivate[4])&0x3) + 1; + } else { + // This looks like the 'correct' format: + track->codecPrivateUsesH264FormatForH265 = False; + + // Byte 21 of the 'codec private' data contains 'lengthSizeMinusOne' (parentheses are required: '&' binds looser than '+'): + track->subframeSizeSize = ((codecPrivate[21])&0x3) + 1; + } + } + } + } else { + delete[] codecPrivate; + } + } + break; + } + case MATROSKA_ID_VIDEO: { // 'Video settings' header: enter this + break; + } + case MATROSKA_ID_PIXEL_WIDTH: { + unsigned pixelWidth; + if (parseEBMLVal_unsigned(size, pixelWidth)) { +#ifdef DEBUG + fprintf(stderr, "\tPixel Width %d\n", pixelWidth); +#endif + } + break; + } + case MATROSKA_ID_PIXEL_HEIGHT: { + unsigned pixelHeight; + if (parseEBMLVal_unsigned(size, pixelHeight)) { +#ifdef DEBUG + fprintf(stderr, "\tPixel Height %d\n", pixelHeight); +#endif + } + break; + } + case MATROSKA_ID_DISPLAY_WIDTH: { + unsigned displayWidth; + if (parseEBMLVal_unsigned(size, displayWidth)) { +#ifdef DEBUG + fprintf(stderr, "\tDisplay Width %d\n", displayWidth); +#endif + } + break; + } + case MATROSKA_ID_DISPLAY_HEIGHT: { + unsigned displayHeight; + if (parseEBMLVal_unsigned(size, displayHeight)) { +#ifdef DEBUG + fprintf(stderr, "\tDisplay Height %d\n", displayHeight); +#endif + } + break; + } + case MATROSKA_ID_DISPLAY_UNIT: { + unsigned displayUnit; + if (parseEBMLVal_unsigned(size, displayUnit)) { +#ifdef DEBUG + fprintf(stderr, "\tDisplay Unit %d\n", displayUnit);
+#endif + } + break; + } + case MATROSKA_ID_AUDIO: { // 'Audio settings' header: enter this + break; + } + case MATROSKA_ID_SAMPLING_FREQUENCY: { + float samplingFrequency; + if (parseEBMLVal_float(size, samplingFrequency)) { + if (track != NULL) { + track->samplingFrequency = (unsigned)samplingFrequency; +#ifdef DEBUG + fprintf(stderr, "\tSampling frequency %f (->%d)\n", samplingFrequency, track->samplingFrequency); +#endif + } + } + break; + } + case MATROSKA_ID_OUTPUT_SAMPLING_FREQUENCY: { + float outputSamplingFrequency; + if (parseEBMLVal_float(size, outputSamplingFrequency)) { +#ifdef DEBUG + fprintf(stderr, "\tOutput sampling frequency %f\n", outputSamplingFrequency); +#endif + } + break; + } + case MATROSKA_ID_CHANNELS: { + unsigned numChannels; + if (parseEBMLVal_unsigned(size, numChannels)) { +#ifdef DEBUG + fprintf(stderr, "\tChannels %d\n", numChannels); +#endif + if (track != NULL) track->numChannels = numChannels; + } + break; + } + case MATROSKA_ID_BIT_DEPTH: { + unsigned bitDepth; + if (parseEBMLVal_unsigned(size, bitDepth)) { +#ifdef DEBUG + fprintf(stderr, "\tBit Depth %d\n", bitDepth); +#endif + } + break; + } + case MATROSKA_ID_CONTENT_ENCODINGS: + case MATROSKA_ID_CONTENT_ENCODING: { // 'Content Encodings' or 'Content Encoding' header: enter this + break; + } + case MATROSKA_ID_CONTENT_COMPRESSION: { // 'Content Compression' header: enter this + // Note: We currently support only 'Header Stripping' compression, not 'zlib' compression (the default algorithm). + // Therefore, we disable this track, unless/until we later see that 'Header Stripping' is supported: + if (track != NULL) track->isEnabled = False; + break; + } + case MATROSKA_ID_CONTENT_COMP_ALGO: { + unsigned contentCompAlgo; + if (parseEBMLVal_unsigned(size, contentCompAlgo)) { +#ifdef DEBUG + fprintf(stderr, "\tContent Compression Algorithm %d (%s)\n", contentCompAlgo, + contentCompAlgo == 0 ? "zlib" : contentCompAlgo == 3 ? 
"Header Stripping" : ""); +#endif + // The only compression algorithm that we support is #3: Header Stripping; disable the track otherwise + if (track != NULL) track->isEnabled = contentCompAlgo == 3; + } + break; + } + case MATROSKA_ID_CONTENT_COMP_SETTINGS: { + u_int8_t* headerStrippedBytes; + unsigned headerStrippedBytesSize; + if (parseEBMLVal_binary(size, headerStrippedBytes)) { + headerStrippedBytesSize = (unsigned)size.val(); +#ifdef DEBUG + fprintf(stderr, "\tHeader Stripped Bytes: "); + for (unsigned i = 0; i < headerStrippedBytesSize; ++i) fprintf(stderr, "%02x:", headerStrippedBytes[i]); + fprintf(stderr, "\n"); +#endif + if (track != NULL) { + delete[] track->headerStrippedBytes; track->headerStrippedBytes = headerStrippedBytes; + track->headerStrippedBytesSize = headerStrippedBytesSize; + } else { + delete[] headerStrippedBytes; + } + } + break; + } + case MATROSKA_ID_CONTENT_ENCRYPTION: { // 'Content Encrpytion' header: skip this + // Note: We don't currently support encryption at all. Therefore, we disable this track: + if (track != NULL) track->isEnabled = False; + // Fall through to... 
+ } + default: { // We don't process this header, so just skip over it: + skipHeader(size); + break; + } + } + setParseState(); + } + + fLimitOffsetInFile = 0; // reset + if (track != NULL && track->trackNumber == 0) delete track; // We had a previous "MatroskaTrack" object that was never used + return True; // we're done parsing track entries +} + +void MatroskaFileParser::lookForNextBlock() { +#ifdef DEBUG + fprintf(stderr, "looking for Block\n"); +#endif + // Read and skip over each Matroska header, until we get to a 'Cluster': + EBMLId id; + EBMLDataSize size; + while (fCurrentParseState == LOOKING_FOR_BLOCK) { + while (!parseEBMLIdAndSize(id, size)) {} +#ifdef DEBUG + fprintf(stderr, "MatroskaFileParser::lookForNextBlock(): Parsed id 0x%s (%s), size: %lld\n", id.hexString(), id.stringName(), size.val()); +#endif + switch (id.val()) { + case MATROSKA_ID_SEGMENT: { // 'Segment' header: enter this + break; + } + case MATROSKA_ID_CLUSTER: { // 'Cluster' header: enter this + break; + } + case MATROSKA_ID_TIMECODE: { // 'Timecode' header: get this value + unsigned timecode; + if (parseEBMLVal_unsigned(size, timecode)) { + fClusterTimecode = timecode; +#ifdef DEBUG + fprintf(stderr, "\tCluster timecode: %d (== %f seconds)\n", fClusterTimecode, fClusterTimecode*(fOurFile.fTimecodeScale/1000000000.0)); +#endif + } + break; + } + case MATROSKA_ID_BLOCK_GROUP: { // 'Block Group' header: enter this + break; + } + case MATROSKA_ID_SIMPLEBLOCK: + case MATROSKA_ID_BLOCK: { // 'SimpleBlock' or 'Block' header: enter this (and we're done) + fBlockSize = (unsigned)size.val(); + fCurrentParseState = PARSING_BLOCK; + break; + } + case MATROSKA_ID_BLOCK_DURATION: { // 'Block Duration' header: get this value (but we currently don't do anything with it) + unsigned blockDuration; + if (parseEBMLVal_unsigned(size, blockDuration)) { +#ifdef DEBUG + fprintf(stderr, "\tblock duration: %d (== %f ms)\n", blockDuration, (float)(blockDuration*fOurFile.fTimecodeScale/1000000.0)); +#endif + } + 
break; + } + // Attachments are parsed only if we're in DEBUG mode (otherwise we just skip over them): +#ifdef DEBUG + case MATROSKA_ID_ATTACHMENTS: { // 'Attachments': enter this + break; + } + case MATROSKA_ID_ATTACHED_FILE: { // 'Attached File': enter this + break; + } + case MATROSKA_ID_FILE_DESCRIPTION: { // 'File Description': get this value + char* fileDescription; + if (parseEBMLVal_string(size, fileDescription)) { +#ifdef DEBUG + fprintf(stderr, "\tFile Description: %s\n", fileDescription); +#endif + delete[] fileDescription; + } + break; + } + case MATROSKA_ID_FILE_NAME: { // 'File Name': get this value + char* fileName; + if (parseEBMLVal_string(size, fileName)) { +#ifdef DEBUG + fprintf(stderr, "\tFile Name: %s\n", fileName); +#endif + delete[] fileName; + } + break; + } + case MATROSKA_ID_FILE_MIME_TYPE: { // 'File MIME Type': get this value + char* fileMIMEType; + if (parseEBMLVal_string(size, fileMIMEType)) { +#ifdef DEBUG + fprintf(stderr, "\tFile MIME Type: %s\n", fileMIMEType); +#endif + delete[] fileMIMEType; + } + break; + } + case MATROSKA_ID_FILE_UID: { // 'File UID': get this value + unsigned fileUID; + if (parseEBMLVal_unsigned(size, fileUID)) { +#ifdef DEBUG + fprintf(stderr, "\tFile UID: 0x%x\n", fileUID); +#endif + } + break; + } +#endif + default: { // skip over this header + skipHeader(size); + break; + } + } + setParseState(); + } +} + +Boolean MatroskaFileParser::parseCues() { +#if defined(DEBUG) || defined(DEBUG_CUES) + fprintf(stderr, "parsing Cues\n"); +#endif + EBMLId id; + EBMLDataSize size; + + // Read the next header, which should be MATROSKA_ID_CUES: + if (!parseEBMLIdAndSize(id, size) || id != MATROSKA_ID_CUES) return True; // The header wasn't what we expected, so we're done + fLimitOffsetInFile = fCurOffsetInFile + size.val(); // Make sure we don't read past the end of this header + + double currentCueTime = 0.0; + u_int64_t currentClusterOffsetInFile = 0; + + while (fCurOffsetInFile < fLimitOffsetInFile) { + while 
(!parseEBMLIdAndSize(id, size)) {} +#ifdef DEBUG_CUES + if (id == MATROSKA_ID_CUE_POINT) fprintf(stderr, "\n"); // makes debugging output easier to read + fprintf(stderr, "MatroskaFileParser::parseCues(): Parsed id 0x%s (%s), size: %lld\n", id.hexString(), id.stringName(), size.val()); +#endif + switch (id.val()) { + case MATROSKA_ID_CUE_POINT: { // 'Cue Point' header: enter this + break; + } + case MATROSKA_ID_CUE_TIME: { // 'Cue Time' header: get this value + unsigned cueTime; + if (parseEBMLVal_unsigned(size, cueTime)) { + currentCueTime = cueTime*(fOurFile.fTimecodeScale/1000000000.0); +#ifdef DEBUG_CUES + fprintf(stderr, "\tCue Time %d (== %f seconds)\n", cueTime, currentCueTime); +#endif + } + break; + } + case MATROSKA_ID_CUE_TRACK_POSITIONS: { // 'Cue Track Positions' header: enter this + break; + } + case MATROSKA_ID_CUE_TRACK: { // 'Cue Track' header: get this value (but only for debugging; we don't do anything with it) + unsigned cueTrack; + if (parseEBMLVal_unsigned(size, cueTrack)) { +#ifdef DEBUG_CUES + fprintf(stderr, "\tCue Track %d\n", cueTrack); +#endif + } + break; + } + case MATROSKA_ID_CUE_CLUSTER_POSITION: { // 'Cue Cluster Position' header: get this value + u_int64_t cueClusterPosition; + if (parseEBMLVal_unsigned64(size, cueClusterPosition)) { + currentClusterOffsetInFile = fOurFile.fSegmentDataOffset + cueClusterPosition; +#ifdef DEBUG_CUES + fprintf(stderr, "\tCue Cluster Position %llu (=> offset within the file: %llu (0x%llx))\n", cueClusterPosition, currentClusterOffsetInFile, currentClusterOffsetInFile); +#endif + // Record this cue point: + fOurFile.addCuePoint(currentCueTime, currentClusterOffsetInFile, 1/*default block number within cluster*/); + } + break; + } + case MATROSKA_ID_CUE_BLOCK_NUMBER: { // 'Cue Block Number' header: get this value + unsigned cueBlockNumber; + if (parseEBMLVal_unsigned(size, cueBlockNumber) && cueBlockNumber != 0) { +#ifdef DEBUG_CUES + fprintf(stderr, "\tCue Block Number %d\n", cueBlockNumber); +#endif + 
// Record this cue point (overwriting any existing entry for this cue time): + fOurFile.addCuePoint(currentCueTime, currentClusterOffsetInFile, cueBlockNumber); + } + break; + } + default: { // We don't process this header, so just skip over it: + skipHeader(size); + break; + } + } + setParseState(); + } + + fLimitOffsetInFile = 0; // reset +#if defined(DEBUG) || defined(DEBUG_CUES) + fprintf(stderr, "done parsing Cues\n"); +#endif +#ifdef DEBUG_CUES + fprintf(stderr, "Cue Point tree: "); + fOurFile.printCuePoints(stderr); + fprintf(stderr, "\n"); +#endif + return True; // we're done parsing Cues +} + +typedef enum { NoLacing, XiphLacing, FixedSizeLacing, EBMLLacing } MatroskaLacingType; + +void MatroskaFileParser::parseBlock() { +#ifdef DEBUG + fprintf(stderr, "parsing SimpleBlock or Block\n"); +#endif + do { + unsigned blockStartPos = curOffset(); + + // The block begins with the track number: + EBMLNumber trackNumber; + if (!parseEBMLNumber(trackNumber)) break; + fBlockTrackNumber = (unsigned)trackNumber.val(); + + // If this track is not being read, then skip the rest of this block, and look for another one: + if (fOurDemux->lookupDemuxedTrack(fBlockTrackNumber) == NULL) { + unsigned headerBytesSeen = curOffset() - blockStartPos; + if (headerBytesSeen < fBlockSize) { + skipBytes(fBlockSize - headerBytesSeen); + } +#ifdef DEBUG + fprintf(stderr, "\tSkipped block for unused track number %d\n", fBlockTrackNumber); +#endif + fCurrentParseState = LOOKING_FOR_BLOCK; + setParseState(); + return; + } + + MatroskaTrack* track = fOurFile.lookup(fBlockTrackNumber); + if (track == NULL) break; // shouldn't happen + + // The next two bytes are the block's timecode (relative to the cluster timecode) + fBlockTimecode = (get1Byte()<<8)|get1Byte(); + + // The next byte indicates the type of 'lacing' used: + u_int8_t c = get1Byte(); + c &= 0x6; // we're interested in bits 5-6 only + MatroskaLacingType lacingType = (c==0x0)?NoLacing : (c==0x02)?XiphLacing : 
(c==0x04)?FixedSizeLacing : EBMLLacing; +#ifdef DEBUG + fprintf(stderr, "\ttrack number %d, timecode %d (=> %f seconds), %s lacing\n", fBlockTrackNumber, fBlockTimecode, (fClusterTimecode+fBlockTimecode)*(fOurFile.fTimecodeScale/1000000000.0), (lacingType==NoLacing)?"no" : (lacingType==XiphLacing)?"Xiph" : (lacingType==FixedSizeLacing)?"fixed-size" : "EBML"); +#endif + + if (lacingType == NoLacing) { + fNumFramesInBlock = 1; + } else { + // The next byte tells us how many frames are present in this block + fNumFramesInBlock = get1Byte() + 1; + } + delete[] fFrameSizesWithinBlock; fFrameSizesWithinBlock = new unsigned[fNumFramesInBlock]; + if (fFrameSizesWithinBlock == NULL) break; + + if (lacingType == NoLacing) { + unsigned headerBytesSeen = curOffset() - blockStartPos; + if (headerBytesSeen > fBlockSize) break; + + fFrameSizesWithinBlock[0] = fBlockSize - headerBytesSeen; + } else if (lacingType == FixedSizeLacing) { + unsigned headerBytesSeen = curOffset() - blockStartPos; + if (headerBytesSeen > fBlockSize) break; + + unsigned frameBytesAvailable = fBlockSize - headerBytesSeen; + unsigned constantFrameSize = frameBytesAvailable/fNumFramesInBlock; + + for (unsigned i = 0; i < fNumFramesInBlock; ++i) { + fFrameSizesWithinBlock[i] = constantFrameSize; + } + // If there are any bytes left over, assign them to the last frame: + fFrameSizesWithinBlock[fNumFramesInBlock-1] += frameBytesAvailable%fNumFramesInBlock; + } else { // EBML or Xiph lacing + unsigned curFrameSize = 0; + unsigned frameSizesTotal = 0; + unsigned i; + + for (i = 0; i < fNumFramesInBlock-1; ++i) { + if (lacingType == EBMLLacing) { + EBMLNumber frameSize; + if (!parseEBMLNumber(frameSize)) break; + unsigned fsv = (unsigned)frameSize.val(); + + if (i == 0) { + curFrameSize = fsv; + } else { + // The value we read is a signed value, that's added to the previous frame size, to get the current frame size: + unsigned toSubtract = (fsv>0xFFFFFF)?0x07FFFFFF : (fsv>0xFFFF)?0x0FFFFF : (fsv>0xFF)?0x1FFF : 
0x3F; + int fsv_signed = fsv - toSubtract; + curFrameSize += fsv_signed; + if ((int)curFrameSize < 0) break; + } + } else { // Xiph lacing + curFrameSize = 0; + u_int8_t c; + do { + c = get1Byte(); + curFrameSize += c; + } while (c == 0xFF); + } + fFrameSizesWithinBlock[i] = curFrameSize; + frameSizesTotal += curFrameSize; + } + if (i != fNumFramesInBlock-1) break; // an error occurred within the "for" loop + + // Compute the size of the final frame within the block (from the block's size, and the frame sizes already computed):) + unsigned headerBytesSeen = curOffset() - blockStartPos; + if (headerBytesSeen + frameSizesTotal > fBlockSize) break; + fFrameSizesWithinBlock[i] = fBlockSize - (headerBytesSeen + frameSizesTotal); + } + + // We're done parsing headers within the block, and (as a result) we now know the sizes of all frames within the block. + // If we have 'stripped bytes' that are common to (the front of) all frames, then count them now: + if (track->headerStrippedBytesSize != 0) { + for (unsigned i = 0; i < fNumFramesInBlock; ++i) fFrameSizesWithinBlock[i] += track->headerStrippedBytesSize; + } +#ifdef DEBUG + fprintf(stderr, "\tThis block contains %d frame(s); size(s):", fNumFramesInBlock); + unsigned frameSizesTotal = 0; + for (unsigned i = 0; i < fNumFramesInBlock; ++i) { + fprintf(stderr, " %d", fFrameSizesWithinBlock[i]); + frameSizesTotal += fFrameSizesWithinBlock[i]; + } + if (fNumFramesInBlock > 1) fprintf(stderr, " (total: %u)", frameSizesTotal); + fprintf(stderr, " bytes\n"); +#endif + // Next, start delivering these frames: + fCurrentParseState = DELIVERING_FRAME_WITHIN_BLOCK; + fCurOffsetWithinFrame = fNextFrameNumberToDeliver = 0; + setParseState(); + return; + } while (0); + + // An error occurred. 
Try to recover: +#ifdef DEBUG + fprintf(stderr, "parseBlock(): Error parsing data; trying to recover...\n"); +#endif + fCurrentParseState = LOOKING_FOR_BLOCK; +} + +Boolean MatroskaFileParser::deliverFrameWithinBlock() { +#ifdef DEBUG + fprintf(stderr, "delivering frame within SimpleBlock or Block\n"); +#endif + do { + MatroskaTrack* track = fOurFile.lookup(fBlockTrackNumber); + if (track == NULL) break; // shouldn't happen + + MatroskaDemuxedTrack* demuxedTrack = fOurDemux->lookupDemuxedTrack(fBlockTrackNumber); + if (demuxedTrack == NULL) break; // shouldn't happen + if (!demuxedTrack->isCurrentlyAwaitingData()) { + // Someone has been reading this stream, but isn't right now. + // We can't deliver this frame until he asks for it, so punt for now. + // The next time he asks for a frame, he'll get it. +#ifdef DEBUG + fprintf(stderr, "\tdeferring delivery of frame #%d (%d bytes)", fNextFrameNumberToDeliver, fFrameSizesWithinBlock[fNextFrameNumberToDeliver]); + if (track->haveSubframes()) fprintf(stderr, "[offset %d]", fCurOffsetWithinFrame); + fprintf(stderr, "\n"); +#endif + restoreSavedParserState(); // so we read from the beginning next time + return False; + } + + unsigned frameSize; + u_int8_t const* specialFrameSource = NULL; + u_int8_t const opusCommentHeader[16] + = {'O','p','u','s','T','a','g','s', 0, 0, 0, 0, 0, 0, 0, 0}; + if (track->codecIsOpus && demuxedTrack->fOpusTrackNumber < 2) { + // Special case for Opus audio. The first frame (the 'configuration' header) comes from + // the 'private data'. 
The second frame (the 'comment' header) is synthesized by
Compute an offset to make the presentation times aligned + // with 'wall clock' time: + struct timeval timeNow; + gettimeofday(&timeNow, NULL); + double ptNow = timeNow.tv_sec + timeNow.tv_usec/1000000.0; + fPresentationTimeOffset = ptNow - pt; + } + pt += fPresentationTimeOffset; + struct timeval presentationTime; + presentationTime.tv_sec = (unsigned)pt; + presentationTime.tv_usec = (unsigned)((pt - presentationTime.tv_sec)*1000000); + unsigned durationInMicroseconds; + if (specialFrameSource != NULL) { + durationInMicroseconds = 0; + } else { // normal case + durationInMicroseconds = track->defaultDuration/1000; + if (track->haveSubframes()) { + // If this is a 'subframe', use a duration of 0 instead (unless it's the last 'subframe'): + if (fCurOffsetWithinFrame + frameSize + track->subframeSizeSize < fFrameSizesWithinBlock[fNextFrameNumberToDeliver]) { + // There's room for at least one more subframe after this, so give this subframe a duration of 0 + durationInMicroseconds = 0; + } + } + } + + if (track->defaultDuration == 0) { + // Adjust the frame duration to keep the sum of frame durations aligned with presentation times. + if (demuxedTrack->prevPresentationTime().tv_sec != 0) { // not the first time for this track + demuxedTrack->durationImbalance() + += (presentationTime.tv_sec - demuxedTrack->prevPresentationTime().tv_sec)*1000000 + + (presentationTime.tv_usec - demuxedTrack->prevPresentationTime().tv_usec); + } + int adjustment = 0; + if (demuxedTrack->durationImbalance() > 0) { + // The duration needs to be increased. + int const adjustmentThreshold = 100000; // don't increase the duration by more than this amount (in case there's a mistake) + adjustment = demuxedTrack->durationImbalance() > adjustmentThreshold + ? adjustmentThreshold : demuxedTrack->durationImbalance(); + } else if (demuxedTrack->durationImbalance() < 0) { + // The duration needs to be decreased. 
+ adjustment = (unsigned)(-demuxedTrack->durationImbalance()) < durationInMicroseconds + ? demuxedTrack->durationImbalance() : -(int)durationInMicroseconds; + } + durationInMicroseconds += adjustment; + demuxedTrack->durationImbalance() -= durationInMicroseconds; // for next time + demuxedTrack->prevPresentationTime() = presentationTime; // for next time + } + + demuxedTrack->presentationTime() = presentationTime; + demuxedTrack->durationInMicroseconds() = durationInMicroseconds; + + // Deliver the next block now: + if (frameSize > demuxedTrack->maxSize()) { + demuxedTrack->numTruncatedBytes() = frameSize - demuxedTrack->maxSize(); + demuxedTrack->frameSize() = demuxedTrack->maxSize(); + } else { // normal case + demuxedTrack->numTruncatedBytes() = 0; + demuxedTrack->frameSize() = frameSize; + } + getCommonFrameBytes(track, demuxedTrack->to(), demuxedTrack->frameSize(), demuxedTrack->numTruncatedBytes()); + + // Next, deliver (and/or skip) bytes from the input file: + if (specialFrameSource != NULL) { + memmove(demuxedTrack->to(), specialFrameSource, demuxedTrack->frameSize()); +#ifdef DEBUG + fprintf(stderr, "\tdelivered special frame: %d bytes", demuxedTrack->frameSize()); + if (demuxedTrack->numTruncatedBytes() > 0) fprintf(stderr, " (%d bytes truncated)", demuxedTrack->numTruncatedBytes()); + fprintf(stderr, " @%u.%06u (%.06f from start); duration %u us\n", demuxedTrack->presentationTime().tv_sec, demuxedTrack->presentationTime().tv_usec, demuxedTrack->presentationTime().tv_sec+demuxedTrack->presentationTime().tv_usec/1000000.0-fPresentationTimeOffset, demuxedTrack->durationInMicroseconds()); +#endif + setParseState(); + FramedSource::afterGetting(demuxedTrack); // completes delivery + } else { // normal case + fCurrentParseState = DELIVERING_FRAME_BYTES; + setParseState(); + } + return True; + } while (0); + + // An error occurred. 
Try to recover: +#ifdef DEBUG + fprintf(stderr, "deliverFrameWithinBlock(): Error parsing data; trying to recover...\n"); +#endif + fCurrentParseState = LOOKING_FOR_BLOCK; + return True; +} + +void MatroskaFileParser::deliverFrameBytes() { + do { + MatroskaTrack* track = fOurFile.lookup(fBlockTrackNumber); + if (track == NULL) break; // shouldn't happen + + MatroskaDemuxedTrack* demuxedTrack = fOurDemux->lookupDemuxedTrack(fBlockTrackNumber); + if (demuxedTrack == NULL) break; // shouldn't happen + + unsigned const BANK_SIZE = bankSize(); + while (fCurFrameNumBytesToGet > 0) { + // Hack: We can get no more than BANK_SIZE bytes at a time: + unsigned numBytesToGet = fCurFrameNumBytesToGet > BANK_SIZE ? BANK_SIZE : fCurFrameNumBytesToGet; + getBytes(fCurFrameTo, numBytesToGet); + fCurFrameTo += numBytesToGet; + fCurFrameNumBytesToGet -= numBytesToGet; + fCurOffsetWithinFrame += numBytesToGet; + setParseState(); + } + while (fCurFrameNumBytesToSkip > 0) { + // Hack: We can skip no more than BANK_SIZE bytes at a time: + unsigned numBytesToSkip = fCurFrameNumBytesToSkip > BANK_SIZE ? 
BANK_SIZE : fCurFrameNumBytesToSkip; + skipBytes(numBytesToSkip); + fCurFrameNumBytesToSkip -= numBytesToSkip; + fCurOffsetWithinFrame += numBytesToSkip; + setParseState(); + } +#ifdef DEBUG + fprintf(stderr, "\tdelivered frame #%d: %d bytes", fNextFrameNumberToDeliver, demuxedTrack->frameSize()); + if (track->haveSubframes()) fprintf(stderr, "[offset %d]", fCurOffsetWithinFrame - track->subframeSizeSize - demuxedTrack->frameSize() - demuxedTrack->numTruncatedBytes()); + if (demuxedTrack->numTruncatedBytes() > 0) fprintf(stderr, " (%d bytes truncated)", demuxedTrack->numTruncatedBytes()); + fprintf(stderr, " @%u.%06u (%.06f from start); duration %u us\n", demuxedTrack->presentationTime().tv_sec, demuxedTrack->presentationTime().tv_usec, demuxedTrack->presentationTime().tv_sec+demuxedTrack->presentationTime().tv_usec/1000000.0-fPresentationTimeOffset, demuxedTrack->durationInMicroseconds()); +#endif + + if (!track->haveSubframes() + || fCurOffsetWithinFrame + track->subframeSizeSize >= fFrameSizesWithinBlock[fNextFrameNumberToDeliver]) { + // Either we don't have subframes, or there's no more room for another subframe => We're completely done with this frame now: + ++fNextFrameNumberToDeliver; + fCurOffsetWithinFrame = 0; + } + if (fNextFrameNumberToDeliver == fNumFramesInBlock) { + // We've delivered all of the frames from this block. Look for another block next: + fCurrentParseState = LOOKING_FOR_BLOCK; + } else { + fCurrentParseState = DELIVERING_FRAME_WITHIN_BLOCK; + } + + setParseState(); + FramedSource::afterGetting(demuxedTrack); // completes delivery + return; + } while (0); + + // An error occurred. 
Try to recover: +#ifdef DEBUG + fprintf(stderr, "deliverFrameBytes(): Error parsing data; trying to recover...\n"); +#endif + fCurrentParseState = LOOKING_FOR_BLOCK; +} + +void MatroskaFileParser +::getCommonFrameBytes(MatroskaTrack* track, u_int8_t* to, unsigned numBytesToGet, unsigned numBytesToSkip) { + if (track->headerStrippedBytesSize > fCurOffsetWithinFrame) { + // We have some common 'header stripped' bytes that remain to be prepended to the frame. Use these first: + unsigned numRemainingHeaderStrippedBytes = track->headerStrippedBytesSize - fCurOffsetWithinFrame; + unsigned numHeaderStrippedBytesToGet; + if (numBytesToGet <= numRemainingHeaderStrippedBytes) { + numHeaderStrippedBytesToGet = numBytesToGet; + numBytesToGet = 0; + if (numBytesToGet + numBytesToSkip <= numRemainingHeaderStrippedBytes) { + numBytesToSkip = 0; + } else { + numBytesToSkip = numBytesToGet + numBytesToSkip - numRemainingHeaderStrippedBytes; + } + } else { + numHeaderStrippedBytesToGet = numRemainingHeaderStrippedBytes; + numBytesToGet = numBytesToGet - numRemainingHeaderStrippedBytes; + } + + if (numHeaderStrippedBytesToGet > 0) { + memmove(to, &track->headerStrippedBytes[fCurOffsetWithinFrame], numHeaderStrippedBytesToGet); + to += numHeaderStrippedBytesToGet; + fCurOffsetWithinFrame += numHeaderStrippedBytesToGet; + } + } + + fCurFrameTo = to; + fCurFrameNumBytesToGet = numBytesToGet; + fCurFrameNumBytesToSkip = numBytesToSkip; +} + +Boolean MatroskaFileParser::parseEBMLNumber(EBMLNumber& num) { + unsigned i; + u_int8_t bitmask = 0x80; + for (i = 0; i < EBML_NUMBER_MAX_LEN; ++i) { + while (1) { + if (fLimitOffsetInFile > 0 && fCurOffsetInFile > fLimitOffsetInFile) return False; // We've hit our pre-set limit + num.data[i] = get1Byte(); + ++fCurOffsetInFile; + + // If we're looking for an id, skip any leading bytes that don't contain a '1' in the first 4 bits: + if (i == 0/*we're a leading byte*/ && !num.stripLeading1/*we're looking for an id*/ && (num.data[i]&0xF0) == 0) { + 
setParseState(); // ensures that we make forward progress if the parsing gets interrupted + continue; + } + break; + } + if ((num.data[0]&bitmask) != 0) { + // num[i] is the last byte of the id + if (num.stripLeading1) num.data[0] &=~ bitmask; + break; + } + bitmask >>= 1; + } + if (i == EBML_NUMBER_MAX_LEN) return False; + + num.len = i+1; + return True; +} + +Boolean MatroskaFileParser::parseEBMLIdAndSize(EBMLId& id, EBMLDataSize& size) { + return parseEBMLNumber(id) && parseEBMLNumber(size); +} + +Boolean MatroskaFileParser::parseEBMLVal_unsigned64(EBMLDataSize& size, u_int64_t& result) { + u_int64_t sv = size.val(); + if (sv > 8) return False; // size too large + + result = 0; // initially + for (unsigned i = (unsigned)sv; i > 0; --i) { + if (fLimitOffsetInFile > 0 && fCurOffsetInFile > fLimitOffsetInFile) return False; // We've hit our pre-set limit + + u_int8_t c = get1Byte(); + ++fCurOffsetInFile; + + result = result*256 + c; + } + + return True; +} + +Boolean MatroskaFileParser::parseEBMLVal_unsigned(EBMLDataSize& size, unsigned& result) { + if (size.val() > 4) return False; // size too large + + u_int64_t result64; + if (!parseEBMLVal_unsigned64(size, result64)) return False; + + result = (unsigned)result64; + + return True; +} + +Boolean MatroskaFileParser::parseEBMLVal_float(EBMLDataSize& size, float& result) { + if (size.val() == 4) { + // Normal case. 
Read the value as if it were a 4-byte integer, then copy it to the 'float' result: + unsigned resultAsUnsigned; + if (!parseEBMLVal_unsigned(size, resultAsUnsigned)) return False; + + if (sizeof result != sizeof resultAsUnsigned) return False; + memcpy(&result, &resultAsUnsigned, sizeof result); + return True; + } else if (size.val() == 8) { + // Read the value as if it were an 8-byte integer, then copy it to a 'double', the convert that to the 'float' result: + u_int64_t resultAsUnsigned64; + if (!parseEBMLVal_unsigned64(size, resultAsUnsigned64)) return False; + + double resultDouble; + if (sizeof resultDouble != sizeof resultAsUnsigned64) return False; + memcpy(&resultDouble, &resultAsUnsigned64, sizeof resultDouble); + + result = (float)resultDouble; + return True; + } else { + // Unworkable size + return False; + } +} + +Boolean MatroskaFileParser::parseEBMLVal_string(EBMLDataSize& size, char*& result) { + unsigned resultLength = (unsigned)size.val(); + result = new char[resultLength + 1]; // allow for the trailing '\0' + if (result == NULL) return False; + + char* p = result; + unsigned i; + for (i = 0; i < resultLength; ++i) { + if (fLimitOffsetInFile > 0 && fCurOffsetInFile > fLimitOffsetInFile) break; // We've hit our pre-set limit + + u_int8_t c = get1Byte(); + ++fCurOffsetInFile; + + *p++ = c; + } + if (i < resultLength) { // an error occurred + delete[] result; + result = NULL; + return False; + } + *p = '\0'; + + return True; +} + +Boolean MatroskaFileParser::parseEBMLVal_binary(EBMLDataSize& size, u_int8_t*& result) { + unsigned resultLength = (unsigned)size.val(); + result = new u_int8_t[resultLength]; + if (result == NULL) return False; + + u_int8_t* p = result; + unsigned i; + for (i = 0; i < resultLength; ++i) { + if (fLimitOffsetInFile > 0 && fCurOffsetInFile > fLimitOffsetInFile) break; // We've hit our pre-set limit + + u_int8_t c = get1Byte(); + ++fCurOffsetInFile; + + *p++ = c; + } + if (i < resultLength) { // an error occurred + delete[] 
result; + result = NULL; + return False; + } + + return True; +} + +void MatroskaFileParser::skipHeader(EBMLDataSize const& size) { + u_int64_t sv = (unsigned)size.val(); +#ifdef DEBUG + fprintf(stderr, "\tskipping %llu bytes\n", sv); +#endif + + fNumHeaderBytesToSkip = sv; + skipRemainingHeaderBytes(False); +} + +void MatroskaFileParser::skipRemainingHeaderBytes(Boolean isContinuation) { + if (fNumHeaderBytesToSkip == 0) return; // common case + + // Hack: To avoid tripping into a parser 'internal error' if we try to skip an excessively large + // distance, break up the skipping into manageable chunks, to ensure forward progress: + unsigned const maxBytesToSkip = bankSize(); + while (fNumHeaderBytesToSkip > 0) { + unsigned numBytesToSkipNow + = fNumHeaderBytesToSkip < maxBytesToSkip ? (unsigned)fNumHeaderBytesToSkip : maxBytesToSkip; + setParseState(); + skipBytes(numBytesToSkipNow); +#ifdef DEBUG + if (isContinuation || numBytesToSkipNow < fNumHeaderBytesToSkip) { + fprintf(stderr, "\t\t(skipped %u bytes; %llu bytes remaining)\n", + numBytesToSkipNow, fNumHeaderBytesToSkip - numBytesToSkipNow); + } +#endif + fCurOffsetInFile += numBytesToSkipNow; + fNumHeaderBytesToSkip -= numBytesToSkipNow; + } +} + +void MatroskaFileParser::setParseState() { + fSavedCurOffsetInFile = fCurOffsetInFile; + fSavedCurOffsetWithinFrame = fCurOffsetWithinFrame; + saveParserState(); +} + +void MatroskaFileParser::restoreSavedParserState() { + StreamParser::restoreSavedParserState(); + fCurOffsetInFile = fSavedCurOffsetInFile; + fCurOffsetWithinFrame = fSavedCurOffsetWithinFrame; +} + +void MatroskaFileParser::seekToFilePosition(u_int64_t offsetInFile) { + ByteStreamFileSource* fileSource = (ByteStreamFileSource*)fInputSource; // we know it's a "ByteStreamFileSource" + if (fileSource != NULL) { + fileSource->seekToByteAbsolute(offsetInFile); + resetStateAfterSeeking(); + } +} + +void MatroskaFileParser::seekToEndOfFile() { + ByteStreamFileSource* fileSource = 
(ByteStreamFileSource*)fInputSource; // we know it's a "ByteStreamFileSource" + if (fileSource != NULL) { + fileSource->seekToEnd(); + resetStateAfterSeeking(); + } +} + +void MatroskaFileParser::resetStateAfterSeeking() { + // Because we're resuming parsing after seeking to a new position in the file, reset the parser state: + fCurOffsetInFile = fSavedCurOffsetInFile = 0; + fCurOffsetWithinFrame = fSavedCurOffsetWithinFrame = 0; + flushInput(); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MatroskaFileParser.hh b/AnyCore/lib_rtsp/liveMedia/MatroskaFileParser.hh new file mode 100644 index 0000000..cab91c5 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MatroskaFileParser.hh @@ -0,0 +1,134 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A parser for a Matroska file. 
+// C++ header + +#ifndef _MATROSKA_FILE_PARSER_HH + +#ifndef _STREAM_PARSER_HH +#include "StreamParser.hh" +#endif +#ifndef _MATROSKA_FILE_HH +#include "MatroskaFile.hh" +#endif +#ifndef _EBML_NUMBER_HH +#include "EBMLNumber.hh" +#endif + +// An enum representing the current state of the parser: +enum MatroskaParseState { + PARSING_START_OF_FILE, + LOOKING_FOR_TRACKS, + PARSING_TRACK, + PARSING_CUES, + LOOKING_FOR_CLUSTER, + LOOKING_FOR_BLOCK, + PARSING_BLOCK, + DELIVERING_FRAME_WITHIN_BLOCK, + DELIVERING_FRAME_BYTES +}; + +class MatroskaFileParser: public StreamParser { +public: + MatroskaFileParser(MatroskaFile& ourFile, FramedSource* inputSource, + FramedSource::onCloseFunc* onEndFunc, void* onEndClientData, + MatroskaDemux* ourDemux = NULL); + virtual ~MatroskaFileParser(); + + void seekToTime(double& seekNPT); + + // StreamParser 'client continue' function: + static void continueParsing(void* clientData, unsigned char* ptr, unsigned size, struct timeval presentationTime); + void continueParsing(); + +private: + // Parsing functions: + Boolean parse(); + // returns True iff we have finished parsing to the end of all 'Track' headers (on initialization) + + Boolean parseStartOfFile(); + void lookForNextTrack(); + Boolean parseTrack(); + Boolean parseCues(); + + void lookForNextBlock(); + void parseBlock(); + Boolean deliverFrameWithinBlock(); + void deliverFrameBytes(); + + void getCommonFrameBytes(MatroskaTrack* track, u_int8_t* to, unsigned numBytesToGet, unsigned numBytesToSkip); + + Boolean parseEBMLNumber(EBMLNumber& num); + Boolean parseEBMLIdAndSize(EBMLId& id, EBMLDataSize& size); + Boolean parseEBMLVal_unsigned64(EBMLDataSize& size, u_int64_t& result); + Boolean parseEBMLVal_unsigned(EBMLDataSize& size, unsigned& result); + Boolean parseEBMLVal_float(EBMLDataSize& size, float& result); + Boolean parseEBMLVal_string(EBMLDataSize& size, char*& result); + // Note: "result" is dynamically allocated; the caller must delete[] it later + Boolean 
parseEBMLVal_binary(EBMLDataSize& size, u_int8_t*& result); + // Note: "result" is dynamically allocated; the caller must delete[] it later + void skipHeader(EBMLDataSize const& size); + void skipRemainingHeaderBytes(Boolean isContinuation); + + void setParseState(); + + void seekToFilePosition(u_int64_t offsetInFile); + void seekToEndOfFile(); + void resetStateAfterSeeking(); // common code, called by both of the above + +private: // redefined virtual functions + virtual void restoreSavedParserState(); + +private: + // General state for parsing: + MatroskaFile& fOurFile; + FramedSource* fInputSource; + FramedSource::onCloseFunc* fOnEndFunc; + void* fOnEndClientData; + MatroskaDemux* fOurDemux; + MatroskaParseState fCurrentParseState; + u_int64_t fCurOffsetInFile, fSavedCurOffsetInFile, fLimitOffsetInFile; + + // For skipping over (possibly large) headers: + u_int64_t fNumHeaderBytesToSkip; + + // For parsing 'Seek ID's: + EBMLId fLastSeekId; + + // Parameters of the most recently-parsed 'Cluster': + unsigned fClusterTimecode; + + // Parameters of the most recently-parsed 'Block': + unsigned fBlockSize; + unsigned fBlockTrackNumber; + short fBlockTimecode; + unsigned fNumFramesInBlock; + unsigned* fFrameSizesWithinBlock; + + // Parameters of the most recently-parsed frame within a 'Block': + double fPresentationTimeOffset; + unsigned fNextFrameNumberToDeliver; + unsigned fCurOffsetWithinFrame, fSavedCurOffsetWithinFrame; // used if track->haveSubframes() + + // Parameters of the (sub)frame that's currently being delivered: + u_int8_t* fCurFrameTo; + unsigned fCurFrameNumBytesToGet; + unsigned fCurFrameNumBytesToSkip; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/MatroskaFileServerDemux.cpp b/AnyCore/lib_rtsp/liveMedia/MatroskaFileServerDemux.cpp new file mode 100644 index 0000000..0da966d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MatroskaFileServerDemux.cpp @@ -0,0 +1,121 @@ +/********** +This library is free software; you can redistribute it and/or 
modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A server demultiplexor for a Matroska file +// Implementation + +#include "MatroskaFileServerDemux.hh" +#include "MP3AudioMatroskaFileServerMediaSubsession.hh" +#include "MatroskaFileServerMediaSubsession.hh" + +void MatroskaFileServerDemux +::createNew(UsageEnvironment& env, char const* fileName, + onCreationFunc* onCreation, void* onCreationClientData, + char const* preferredLanguage) { + (void)new MatroskaFileServerDemux(env, fileName, + onCreation, onCreationClientData, + preferredLanguage); +} + +ServerMediaSubsession* MatroskaFileServerDemux::newServerMediaSubsession() { + unsigned dummyResultTrackNumber; + return newServerMediaSubsession(dummyResultTrackNumber); +} + +ServerMediaSubsession* MatroskaFileServerDemux +::newServerMediaSubsession(unsigned& resultTrackNumber) { + ServerMediaSubsession* result; + resultTrackNumber = 0; + + for (result = NULL; result == NULL && fNextTrackTypeToCheck != MATROSKA_TRACK_TYPE_OTHER; fNextTrackTypeToCheck <<= 1) { + if (fNextTrackTypeToCheck == MATROSKA_TRACK_TYPE_VIDEO) resultTrackNumber = fOurMatroskaFile->chosenVideoTrackNumber(); + else if (fNextTrackTypeToCheck == MATROSKA_TRACK_TYPE_AUDIO) resultTrackNumber = fOurMatroskaFile->chosenAudioTrackNumber(); + else if 
(fNextTrackTypeToCheck == MATROSKA_TRACK_TYPE_SUBTITLE) resultTrackNumber = fOurMatroskaFile->chosenSubtitleTrackNumber(); + + result = newServerMediaSubsessionByTrackNumber(resultTrackNumber); + } + + return result; +} + +ServerMediaSubsession* MatroskaFileServerDemux +::newServerMediaSubsessionByTrackNumber(unsigned trackNumber) { + MatroskaTrack* track = fOurMatroskaFile->lookup(trackNumber); + if (track == NULL) return NULL; + + // Use the track's "codecID" string to figure out which "ServerMediaSubsession" subclass to use: + ServerMediaSubsession* result = NULL; + if (strcmp(track->mimeType, "audio/MPEG") == 0) { + result = MP3AudioMatroskaFileServerMediaSubsession::createNew(*this, track); + } else { + result = MatroskaFileServerMediaSubsession::createNew(*this, track); + } + + if (result != NULL) { +#ifdef DEBUG + fprintf(stderr, "Created 'ServerMediaSubsession' object for track #%d: %s (%s)\n", track->trackNumber, track->codecID, track->mimeType); +#endif + } + + return result; +} + +FramedSource* MatroskaFileServerDemux::newDemuxedTrack(unsigned clientSessionId, unsigned trackNumber) { + MatroskaDemux* demuxToUse = NULL; + + if (clientSessionId != 0 && clientSessionId == fLastClientSessionId) { + demuxToUse = fLastCreatedDemux; // use the same demultiplexor as before + // Note: This code relies upon the fact that the creation of streams for different + // client sessions do not overlap - so all demuxed tracks are created for one "MatroskaDemux" at a time. + // Also, the "clientSessionId != 0" test is a hack, because 'session 0' is special; its audio and video streams + // are created and destroyed one-at-a-time, rather than both streams being + // created, and then (later) both streams being destroyed (as is the case + // for other ('real') session ids). Because of this, a separate demultiplexor is used for each 'session 0' track. 
+ } + + if (demuxToUse == NULL) demuxToUse = fOurMatroskaFile->newDemux(); + + fLastClientSessionId = clientSessionId; + fLastCreatedDemux = demuxToUse; + + return demuxToUse->newDemuxedTrackByTrackNumber(trackNumber); +} + +MatroskaFileServerDemux +::MatroskaFileServerDemux(UsageEnvironment& env, char const* fileName, + onCreationFunc* onCreation, void* onCreationClientData, + char const* preferredLanguage) + : Medium(env), + fFileName(fileName), fOnCreation(onCreation), fOnCreationClientData(onCreationClientData), + fNextTrackTypeToCheck(0x1), fLastClientSessionId(0), fLastCreatedDemux(NULL) { + MatroskaFile::createNew(env, fileName, onMatroskaFileCreation, this, preferredLanguage); +} + +MatroskaFileServerDemux::~MatroskaFileServerDemux() { + Medium::close(fOurMatroskaFile); +} + +void MatroskaFileServerDemux::onMatroskaFileCreation(MatroskaFile* newFile, void* clientData) { + ((MatroskaFileServerDemux*)clientData)->onMatroskaFileCreation(newFile); +} + +void MatroskaFileServerDemux::onMatroskaFileCreation(MatroskaFile* newFile) { + fOurMatroskaFile = newFile; + + // Now, call our own creation notification function: + if (fOnCreation != NULL) (*fOnCreation)(this, fOnCreationClientData); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MatroskaFileServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/MatroskaFileServerMediaSubsession.cpp new file mode 100644 index 0000000..2897c70 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MatroskaFileServerMediaSubsession.cpp @@ -0,0 +1,65 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a track within a Matroska file. +// Implementation + +#include "MatroskaFileServerMediaSubsession.hh" +#include "MatroskaDemuxedTrack.hh" +#include "FramedFilter.hh" + +MatroskaFileServerMediaSubsession* MatroskaFileServerMediaSubsession +::createNew(MatroskaFileServerDemux& demux, MatroskaTrack* track) { + return new MatroskaFileServerMediaSubsession(demux, track); +} + +MatroskaFileServerMediaSubsession +::MatroskaFileServerMediaSubsession(MatroskaFileServerDemux& demux, MatroskaTrack* track) + : FileServerMediaSubsession(demux.envir(), demux.fileName(), False), + fOurDemux(demux), fTrack(track), fNumFiltersInFrontOfTrack(0) { +} + +MatroskaFileServerMediaSubsession::~MatroskaFileServerMediaSubsession() { +} + +float MatroskaFileServerMediaSubsession::duration() const { return fOurDemux.fileDuration(); } + +void MatroskaFileServerMediaSubsession +::seekStreamSource(FramedSource* inputSource, double& seekNPT, double /*streamDuration*/, u_int64_t& /*numBytes*/) { + for (unsigned i = 0; i < fNumFiltersInFrontOfTrack; ++i) { + // "inputSource" is a filter. 
Go back to *its* source: + inputSource = ((FramedFilter*)inputSource)->inputSource(); + } + ((MatroskaDemuxedTrack*)inputSource)->seekToTime(seekNPT); +} + +FramedSource* MatroskaFileServerMediaSubsession +::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate) { + FramedSource* baseSource = fOurDemux.newDemuxedTrack(clientSessionId, fTrack->trackNumber); + if (baseSource == NULL) return NULL; + + return fOurDemux.ourMatroskaFile() + ->createSourceForStreaming(baseSource, fTrack->trackNumber, + estBitrate, fNumFiltersInFrontOfTrack); +} + +RTPSink* MatroskaFileServerMediaSubsession +::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* /*inputSource*/) { + return fOurDemux.ourMatroskaFile() + ->createRTPSinkForTrackNumber(fTrack->trackNumber, rtpGroupsock, rtpPayloadTypeIfDynamic); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MatroskaFileServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/MatroskaFileServerMediaSubsession.hh new file mode 100644 index 0000000..33bdf7f --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MatroskaFileServerMediaSubsession.hh @@ -0,0 +1,55 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a track within a Matroska file. +// C++ header + +#ifndef _MATROSKA_FILE_SERVER_MEDIA_SUBSESSION_HH +#define _MATROSKA_FILE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _FILE_SERVER_MEDIA_SUBSESSION_HH +#include "FileServerMediaSubsession.hh" +#endif +#ifndef _MATROSKA_FILE_SERVER_DEMUX_HH +#include "MatroskaFileServerDemux.hh" +#endif + +class MatroskaFileServerMediaSubsession: public FileServerMediaSubsession { +public: + static MatroskaFileServerMediaSubsession* + createNew(MatroskaFileServerDemux& demux, MatroskaTrack* track); + +protected: + MatroskaFileServerMediaSubsession(MatroskaFileServerDemux& demux, MatroskaTrack* track); + // called only by createNew(), or by subclass constructors + virtual ~MatroskaFileServerMediaSubsession(); + +protected: // redefined virtual functions + virtual float duration() const; + virtual void seekStreamSource(FramedSource* inputSource, double& seekNPT, double streamDuration, u_int64_t& numBytes); + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource); + +protected: + MatroskaFileServerDemux& fOurDemux; + MatroskaTrack* fTrack; + unsigned fNumFiltersInFrontOfTrack; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/Media.cpp b/AnyCore/lib_rtsp/liveMedia/Media.cpp new file mode 100644 index 0000000..058e746 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/Media.cpp @@ -0,0 +1,167 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Media +// Implementation + +#include "Media.hh" +#include "HashTable.hh" + +////////// Medium ////////// + +Medium::Medium(UsageEnvironment& env) + : fEnviron(env), fNextTask(NULL) { + // First generate a name for the new medium: + MediaLookupTable::ourMedia(env)->generateNewName(fMediumName, mediumNameMaxLen); + env.setResultMsg(fMediumName); + + // Then add it to our table: + MediaLookupTable::ourMedia(env)->addNew(this, fMediumName); +} + +Medium::~Medium() { + // Remove any tasks that might be pending for us: + fEnviron.taskScheduler().unscheduleDelayedTask(fNextTask); +} + +Boolean Medium::lookupByName(UsageEnvironment& env, char const* mediumName, + Medium*& resultMedium) { + resultMedium = MediaLookupTable::ourMedia(env)->lookup(mediumName); + if (resultMedium == NULL) { + env.setResultMsg("Medium ", mediumName, " does not exist"); + return False; + } + + return True; +} + +void Medium::close(UsageEnvironment& env, char const* name) { + MediaLookupTable::ourMedia(env)->remove(name); +} + +void Medium::close(Medium* medium) { + if (medium == NULL) return; + + close(medium->envir(), medium->name()); +} + +Boolean Medium::isSource() const { + return False; // default implementation +} + +Boolean Medium::isSink() const { + return False; // default implementation +} + +Boolean Medium::isRTCPInstance() const { + return False; // default implementation +} + +Boolean Medium::isRTSPClient() const { + return False; 
// default implementation +} + +Boolean Medium::isRTSPServer() const { + return False; // default implementation +} + +Boolean Medium::isMediaSession() const { + return False; // default implementation +} + +Boolean Medium::isServerMediaSession() const { + return False; // default implementation +} + +Boolean Medium::isDarwinInjector() const { + return False; // default implementation +} + + +////////// _Tables implementation ////////// + +_Tables* _Tables::getOurTables(UsageEnvironment& env, Boolean createIfNotPresent) { + if (env.liveMediaPriv == NULL && createIfNotPresent) { + env.liveMediaPriv = new _Tables(env); + } + return (_Tables*)(env.liveMediaPriv); +} + +void _Tables::reclaimIfPossible() { + if (mediaTable == NULL && socketTable == NULL) { + fEnv.liveMediaPriv = NULL; + delete this; + } +} + +_Tables::_Tables(UsageEnvironment& env) + : mediaTable(NULL), socketTable(NULL), fEnv(env) { +} + +_Tables::~_Tables() { +} + + +////////// MediaLookupTable implementation ////////// + +MediaLookupTable* MediaLookupTable::ourMedia(UsageEnvironment& env) { + _Tables* ourTables = _Tables::getOurTables(env); + if (ourTables->mediaTable == NULL) { + // Create a new table to record the media that are to be created in + // this environment: + ourTables->mediaTable = new MediaLookupTable(env); + } + return ourTables->mediaTable; +} + +Medium* MediaLookupTable::lookup(char const* name) const { + return (Medium*)(fTable->Lookup(name)); +} + +void MediaLookupTable::addNew(Medium* medium, char* mediumName) { + fTable->Add(mediumName, (void*)medium); +} + +void MediaLookupTable::remove(char const* name) { + Medium* medium = lookup(name); + if (medium != NULL) { + fTable->Remove(name); + if (fTable->IsEmpty()) { + // We can also delete ourselves (to reclaim space): + _Tables* ourTables = _Tables::getOurTables(fEnv); + delete this; + ourTables->mediaTable = NULL; + ourTables->reclaimIfPossible(); + } + + delete medium; + } +} + +void MediaLookupTable::generateNewName(char* 
mediumName, + unsigned /*maxLen*/) { + // We should really use snprintf() here, but not all systems have it + sprintf(mediumName, "liveMedia%d", fNameGenerator++); +} + +MediaLookupTable::MediaLookupTable(UsageEnvironment& env) + : fEnv(env), fTable(HashTable::create(STRING_HASH_KEYS)), fNameGenerator(0) { +} + +MediaLookupTable::~MediaLookupTable() { + delete fTable; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MediaSession.cpp b/AnyCore/lib_rtsp/liveMedia/MediaSession.cpp new file mode 100644 index 0000000..b120d8c --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MediaSession.cpp @@ -0,0 +1,1446 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A data structure that represents a session that consists of +// potentially multiple (audio and/or video) sub-sessions +// Implementation + +#include "liveMedia.hh" +#include "Locale.hh" +#include "GroupsockHelper.hh" +#include + +////////// MediaSession ////////// + +MediaSession* MediaSession::createNew(UsageEnvironment& env, + char const* sdpDescription) { + MediaSession* newSession = new MediaSession(env); + if (newSession != NULL) { + if (!newSession->initializeWithSDP(sdpDescription)) { + delete newSession; + return NULL; + } + } + + return newSession; +} + +Boolean MediaSession::lookupByName(UsageEnvironment& env, + char const* instanceName, + MediaSession*& resultSession) { + resultSession = NULL; // unless we succeed + + Medium* medium; + if (!Medium::lookupByName(env, instanceName, medium)) return False; + + if (!medium->isMediaSession()) { + env.setResultMsg(instanceName, " is not a 'MediaSession' object"); + return False; + } + + resultSession = (MediaSession*)medium; + return True; +} + +MediaSession::MediaSession(UsageEnvironment& env) + : Medium(env), + fSubsessionsHead(NULL), fSubsessionsTail(NULL), + fConnectionEndpointName(NULL), + fMaxPlayStartTime(0.0f), fMaxPlayEndTime(0.0f), fAbsStartTime(NULL), fAbsEndTime(NULL), + fScale(1.0f), fMediaSessionType(NULL), fSessionName(NULL), fSessionDescription(NULL), + fControlPath(NULL) { + fSourceFilterAddr.s_addr = 0; + + // Get our host name, and use this for the RTCP CNAME: + const unsigned maxCNAMElen = 100; + char CNAME[maxCNAMElen+1]; +#ifndef CRIS + gethostname((char*)CNAME, maxCNAMElen); +#else + // "gethostname()" isn't defined for this platform + sprintf(CNAME, "unknown host %d", (unsigned)(our_random()*0x7FFFFFFF)); +#endif + CNAME[maxCNAMElen] = '\0'; // just in case + fCNAME = strDup(CNAME); +} + +MediaSession::~MediaSession() { + delete fSubsessionsHead; + delete[] fCNAME; + delete[] fConnectionEndpointName; + delete[] fAbsStartTime; delete[] fAbsEndTime; + delete[] fMediaSessionType; + 
delete[] fSessionName; + delete[] fSessionDescription; + delete[] fControlPath; +} + +Boolean MediaSession::isMediaSession() const { + return True; +} + +MediaSubsession* MediaSession::createNewMediaSubsession() { + // default implementation: + return new MediaSubsession(*this); +} + +Boolean MediaSession::initializeWithSDP(char const* sdpDescription) { + if (sdpDescription == NULL) return False; + + // Begin by processing all SDP lines until we see the first "m=" + char const* sdpLine = sdpDescription; + char const* nextSDPLine; + while (1) { + if (!parseSDPLine(sdpLine, nextSDPLine)) return False; + //##### We should really check for the correct SDP version (v=0) + if (sdpLine[0] == 'm') break; + sdpLine = nextSDPLine; + if (sdpLine == NULL) break; // there are no m= lines at all + + // Check for various special SDP lines that we understand: + if (parseSDPLine_s(sdpLine)) continue; + if (parseSDPLine_i(sdpLine)) continue; + if (parseSDPLine_c(sdpLine)) continue; + if (parseSDPAttribute_control(sdpLine)) continue; + if (parseSDPAttribute_range(sdpLine)) continue; + if (parseSDPAttribute_type(sdpLine)) continue; + if (parseSDPAttribute_source_filter(sdpLine)) continue; + } + + while (sdpLine != NULL) { + // We have a "m=" line, representing a new sub-session: + MediaSubsession* subsession = createNewMediaSubsession(); + if (subsession == NULL) { + envir().setResultMsg("Unable to create new MediaSubsession"); + return False; + } + + // Parse the line as "m= RTP/AVP " + // or "m= / RTP/AVP " + // (Should we be checking for >1 payload format number here?)##### + char* mediumName = strDupSize(sdpLine); // ensures we have enough space + char const* protocolName = NULL; + unsigned payloadFormat; + if ((sscanf(sdpLine, "m=%s %hu RTP/AVP %u", + mediumName, &subsession->fClientPortNum, &payloadFormat) == 3 || + sscanf(sdpLine, "m=%s %hu/%*u RTP/AVP %u", + mediumName, &subsession->fClientPortNum, &payloadFormat) == 3) + && payloadFormat <= 127) { + protocolName = "RTP"; + } 
else if ((sscanf(sdpLine, "m=%s %hu UDP %u", + mediumName, &subsession->fClientPortNum, &payloadFormat) == 3 || + sscanf(sdpLine, "m=%s %hu udp %u", + mediumName, &subsession->fClientPortNum, &payloadFormat) == 3 || + sscanf(sdpLine, "m=%s %hu RAW/RAW/UDP %u", + mediumName, &subsession->fClientPortNum, &payloadFormat) == 3) + && payloadFormat <= 127) { + // This is a RAW UDP source + protocolName = "UDP"; + } else { + // This "m=" line is bad; output an error message saying so: + char* sdpLineStr; + if (nextSDPLine == NULL) { + sdpLineStr = (char*)sdpLine; + } else { + sdpLineStr = strDup(sdpLine); + sdpLineStr[nextSDPLine-sdpLine] = '\0'; + } + envir() << "Bad SDP \"m=\" line: " << sdpLineStr << "\n"; + if (sdpLineStr != (char*)sdpLine) delete[] sdpLineStr; + + delete[] mediumName; + delete subsession; + + // Skip the following SDP lines, up until the next "m=": + while (1) { + sdpLine = nextSDPLine; + if (sdpLine == NULL) break; // we've reached the end + if (!parseSDPLine(sdpLine, nextSDPLine)) return False; + + if (sdpLine[0] == 'm') break; // we've reached the next subsession + } + continue; + } + + // Insert this subsession at the end of the list: + if (fSubsessionsTail == NULL) { + fSubsessionsHead = fSubsessionsTail = subsession; + } else { + fSubsessionsTail->setNext(subsession); + fSubsessionsTail = subsession; + } + + subsession->serverPortNum = subsession->fClientPortNum; // by default + + char const* mStart = sdpLine; + subsession->fSavedSDPLines = strDup(mStart); + + subsession->fMediumName = strDup(mediumName); + delete[] mediumName; + subsession->fProtocolName = strDup(protocolName); + subsession->fRTPPayloadFormat = payloadFormat; + + // Process the following SDP lines, up until the next "m=": + while (1) { + sdpLine = nextSDPLine; + if (sdpLine == NULL) break; // we've reached the end + if (!parseSDPLine(sdpLine, nextSDPLine)) return False; + + if (sdpLine[0] == 'm') break; // we've reached the next subsession + + // Check for various special SDP 
lines that we understand: + if (subsession->parseSDPLine_c(sdpLine)) continue; + if (subsession->parseSDPLine_b(sdpLine)) continue; + if (subsession->parseSDPAttribute_rtpmap(sdpLine)) continue; + if (subsession->parseSDPAttribute_rtcpmux(sdpLine)) continue; + if (subsession->parseSDPAttribute_control(sdpLine)) continue; + if (subsession->parseSDPAttribute_range(sdpLine)) continue; + if (subsession->parseSDPAttribute_fmtp(sdpLine)) continue; + if (subsession->parseSDPAttribute_source_filter(sdpLine)) continue; + if (subsession->parseSDPAttribute_x_dimensions(sdpLine)) continue; + if (subsession->parseSDPAttribute_framerate(sdpLine)) continue; + + // (Later, check for malformed lines, and other valid SDP lines#####) + } + if (sdpLine != NULL) subsession->fSavedSDPLines[sdpLine-mStart] = '\0'; + + // If we don't yet know the codec name, try looking it up from the + // list of static payload types: + if (subsession->fCodecName == NULL) { + subsession->fCodecName + = lookupPayloadFormat(subsession->fRTPPayloadFormat, + subsession->fRTPTimestampFrequency, + subsession->fNumChannels); + if (subsession->fCodecName == NULL) { + char typeStr[20]; + sprintf(typeStr, "%d", subsession->fRTPPayloadFormat); + envir().setResultMsg("Unknown codec name for RTP payload type ", + typeStr); + return False; + } + } + + // If we don't yet know this subsession's RTP timestamp frequency + // (because it uses a dynamic payload type and the corresponding + // SDP "rtpmap" attribute erroneously didn't specify it), + // then guess it now: + if (subsession->fRTPTimestampFrequency == 0) { + subsession->fRTPTimestampFrequency + = guessRTPTimestampFrequency(subsession->fMediumName, + subsession->fCodecName); + } + } + + return True; +} + +Boolean MediaSession::parseSDPLine(char const* inputLine, + char const*& nextLine){ + // Begin by finding the start of the next line (if any): + nextLine = NULL; + for (char const* ptr = inputLine; *ptr != '\0'; ++ptr) { + if (*ptr == '\r' || *ptr == '\n') { + 
// We found the end of the line + ++ptr; + while (*ptr == '\r' || *ptr == '\n') ++ptr; + nextLine = ptr; + if (nextLine[0] == '\0') nextLine = NULL; // special case for end + break; + } + } + + // Then, check that this line is a SDP line of the form = + // (However, we also accept blank lines in the input.) + if (inputLine[0] == '\r' || inputLine[0] == '\n') return True; + if (strlen(inputLine) < 2 || inputLine[1] != '=' + || inputLine[0] < 'a' || inputLine[0] > 'z') { + envir().setResultMsg("Invalid SDP line: ", inputLine); + return False; + } + + return True; +} + +static char* parseCLine(char const* sdpLine) { + char* resultStr = NULL; + char* buffer = strDupSize(sdpLine); // ensures we have enough space + if (sscanf(sdpLine, "c=IN IP4 %[^/\r\n]", buffer) == 1) { + // Later, handle the optional / and / ##### + resultStr = strDup(buffer); + } + delete[] buffer; + + return resultStr; +} + +Boolean MediaSession::parseSDPLine_s(char const* sdpLine) { + // Check for "s=" line + char* buffer = strDupSize(sdpLine); + Boolean parseSuccess = False; + + if (sscanf(sdpLine, "s=%[^\r\n]", buffer) == 1) { + delete[] fSessionName; fSessionName = strDup(buffer); + parseSuccess = True; + } + delete[] buffer; + + return parseSuccess; +} + +Boolean MediaSession::parseSDPLine_i(char const* sdpLine) { + // Check for "i=" line + char* buffer = strDupSize(sdpLine); + Boolean parseSuccess = False; + + if (sscanf(sdpLine, "i=%[^\r\n]", buffer) == 1) { + delete[] fSessionDescription; fSessionDescription = strDup(buffer); + parseSuccess = True; + } + delete[] buffer; + + return parseSuccess; +} + +Boolean MediaSession::parseSDPLine_c(char const* sdpLine) { + // Check for "c=IN IP4 " + // or "c=IN IP4 /" + // (Later, do something with also #####) + char* connectionEndpointName = parseCLine(sdpLine); + if (connectionEndpointName != NULL) { + delete[] fConnectionEndpointName; + fConnectionEndpointName = connectionEndpointName; + return True; + } + + return False; +} + +Boolean 
MediaSession::parseSDPAttribute_type(char const* sdpLine) { + // Check for a "a=type:broadcast|meeting|moderated|test|H.332|recvonly" line: + Boolean parseSuccess = False; + + char* buffer = strDupSize(sdpLine); + if (sscanf(sdpLine, "a=type: %[^ ]", buffer) == 1) { + delete[] fMediaSessionType; + fMediaSessionType = strDup(buffer); + parseSuccess = True; + } + delete[] buffer; + + return parseSuccess; +} + +Boolean MediaSession::parseSDPAttribute_control(char const* sdpLine) { + // Check for a "a=control:" line: + Boolean parseSuccess = False; + + char* controlPath = strDupSize(sdpLine); // ensures we have enough space + if (sscanf(sdpLine, "a=control: %s", controlPath) == 1) { + parseSuccess = True; + delete[] fControlPath; fControlPath = strDup(controlPath); + } + delete[] controlPath; + + return parseSuccess; +} + +static Boolean parseRangeAttribute(char const* sdpLine, double& startTime, double& endTime) { + return sscanf(sdpLine, "a=range: npt = %lg - %lg", &startTime, &endTime) == 2; +} + +static Boolean parseRangeAttribute(char const* sdpLine, char*& absStartTime, char*& absEndTime) { + size_t len = strlen(sdpLine) + 1; + char* as = new char[len]; + char* ae = new char[len]; + int sscanfResult = sscanf(sdpLine, "a=range: clock = %[^-\r\n]-%[^\r\n]", as, ae); + if (sscanfResult == 2) { + absStartTime = as; + absEndTime = ae; + } else if (sscanfResult == 1) { + absStartTime = as; + delete[] ae; + } else { + delete[] as; delete[] ae; + return False; + } + + return True; +} + +Boolean MediaSession::parseSDPAttribute_range(char const* sdpLine) { + // Check for a "a=range:npt=-" line: + // (Later handle other kinds of "a=range" attributes also???#####) + Boolean parseSuccess = False; + + double playStartTime; + double playEndTime; + if (parseRangeAttribute(sdpLine, playStartTime, playEndTime)) { + parseSuccess = True; + if (playStartTime > fMaxPlayStartTime) { + fMaxPlayStartTime = playStartTime; + } + if (playEndTime > fMaxPlayEndTime) { + fMaxPlayEndTime = 
playEndTime; + } + } else if (parseRangeAttribute(sdpLine, _absStartTime(), _absEndTime())) { + parseSuccess = True; + } + + return parseSuccess; +} + +static Boolean parseSourceFilterAttribute(char const* sdpLine, + struct in_addr& sourceAddr) { + // Check for a "a=source-filter:incl IN IP4 " line. + // Note: At present, we don't check that really matches + // one of our multicast addresses. We also don't support more than + // one ##### + Boolean result = False; // until we succeed + char* sourceName = strDupSize(sdpLine); // ensures we have enough space + do { + if (sscanf(sdpLine, "a=source-filter: incl IN IP4 %*s %s", + sourceName) != 1) break; + + // Now, convert this name to an address, if we can: + NetAddressList addresses(sourceName); + if (addresses.numAddresses() == 0) break; + + netAddressBits sourceAddrBits + = *(netAddressBits*)(addresses.firstAddress()->data()); + if (sourceAddrBits == 0) break; + + sourceAddr.s_addr = sourceAddrBits; + result = True; + } while (0); + + delete[] sourceName; + return result; +} + +Boolean MediaSession +::parseSDPAttribute_source_filter(char const* sdpLine) { + return parseSourceFilterAttribute(sdpLine, fSourceFilterAddr); +} + +char* MediaSession::lookupPayloadFormat(unsigned char rtpPayloadType, + unsigned& freq, unsigned& nCh) { + // Look up the codec name and timestamp frequency for known (static) + // RTP payload formats. 
+ char const* temp = NULL; + switch (rtpPayloadType) { + case 0: {temp = "PCMU"; freq = 8000; nCh = 1; break;} + case 2: {temp = "G726-32"; freq = 8000; nCh = 1; break;} + case 3: {temp = "GSM"; freq = 8000; nCh = 1; break;} + case 4: {temp = "G723"; freq = 8000; nCh = 1; break;} + case 5: {temp = "DVI4"; freq = 8000; nCh = 1; break;} + case 6: {temp = "DVI4"; freq = 16000; nCh = 1; break;} + case 7: {temp = "LPC"; freq = 8000; nCh = 1; break;} + case 8: {temp = "PCMA"; freq = 8000; nCh = 1; break;} + case 9: {temp = "G722"; freq = 8000; nCh = 1; break;} + case 10: {temp = "L16"; freq = 44100; nCh = 2; break;} + case 11: {temp = "L16"; freq = 44100; nCh = 1; break;} + case 12: {temp = "QCELP"; freq = 8000; nCh = 1; break;} + case 14: {temp = "MPA"; freq = 90000; nCh = 1; break;} + // 'number of channels' is actually encoded in the media stream + case 15: {temp = "G728"; freq = 8000; nCh = 1; break;} + case 16: {temp = "DVI4"; freq = 11025; nCh = 1; break;} + case 17: {temp = "DVI4"; freq = 22050; nCh = 1; break;} + case 18: {temp = "G729"; freq = 8000; nCh = 1; break;} + case 25: {temp = "CELB"; freq = 90000; nCh = 1; break;} + case 26: {temp = "JPEG"; freq = 90000; nCh = 1; break;} + case 28: {temp = "NV"; freq = 90000; nCh = 1; break;} + case 31: {temp = "H261"; freq = 90000; nCh = 1; break;} + case 32: {temp = "MPV"; freq = 90000; nCh = 1; break;} + case 33: {temp = "MP2T"; freq = 90000; nCh = 1; break;} + case 34: {temp = "H263"; freq = 90000; nCh = 1; break;} + }; + + return strDup(temp); +} + +unsigned MediaSession::guessRTPTimestampFrequency(char const* mediumName, + char const* codecName) { + // By default, we assume that audio sessions use a frequency of 8000, + // video sessions use a frequency of 90000, + // and text sessions use a frequency of 1000. 
+ // Begin by checking for known exceptions to this rule + // (where the frequency is known unambiguously (e.g., not like "DVI4")) + if (strcmp(codecName, "L16") == 0) return 44100; + if (strcmp(codecName, "MPA") == 0 + || strcmp(codecName, "MPA-ROBUST") == 0 + || strcmp(codecName, "X-MP3-DRAFT-00") == 0) return 90000; + + // Now, guess default values: + if (strcmp(mediumName, "video") == 0) return 90000; + else if (strcmp(mediumName, "text") == 0) return 1000; + return 8000; // for "audio", and any other medium +} + +char* MediaSession::absStartTime() const { + if (fAbsStartTime != NULL) return fAbsStartTime; + + // If a subsession has an 'absolute' start time, then use that: + MediaSubsessionIterator iter(*this); + MediaSubsession* subsession; + while ((subsession = iter.next()) != NULL) { + if (subsession->_absStartTime() != NULL) return subsession->_absStartTime(); + } + return NULL; +} + +char* MediaSession::absEndTime() const { + if (fAbsEndTime != NULL) return fAbsEndTime; + + // If a subsession has an 'absolute' end time, then use that: + MediaSubsessionIterator iter(*this); + MediaSubsession* subsession; + while ((subsession = iter.next()) != NULL) { + if (subsession->_absEndTime() != NULL) return subsession->_absEndTime(); + } + return NULL; +} + +Boolean MediaSession +::initiateByMediaType(char const* mimeType, + MediaSubsession*& resultSubsession, + int useSpecialRTPoffset) { + // Look through this session's subsessions for media that match "mimeType" + resultSubsession = NULL; + MediaSubsessionIterator iter(*this); + MediaSubsession* subsession; + while ((subsession = iter.next()) != NULL) { + Boolean wasAlreadyInitiated = subsession->readSource() != NULL; + if (!wasAlreadyInitiated) { + // Try to create a source for this subsession: + if (!subsession->initiate(useSpecialRTPoffset)) return False; + } + + // Make sure the source's MIME type is one that we handle: + if (strcmp(subsession->readSource()->MIMEtype(), mimeType) != 0) { + if 
(!wasAlreadyInitiated) subsession->deInitiate(); + continue; + } + + resultSubsession = subsession; + break; // use this + } + + if (resultSubsession == NULL) { + envir().setResultMsg("Session has no usable media subsession"); + return False; + } + + return True; +} + + +////////// MediaSubsessionIterator ////////// + +MediaSubsessionIterator::MediaSubsessionIterator(MediaSession const& session) + : fOurSession(session) { + reset(); +} + +MediaSubsessionIterator::~MediaSubsessionIterator() { +} + +MediaSubsession* MediaSubsessionIterator::next() { + MediaSubsession* result = fNextPtr; + + if (fNextPtr != NULL) fNextPtr = fNextPtr->fNext; + + return result; +} + +void MediaSubsessionIterator::reset() { + fNextPtr = fOurSession.fSubsessionsHead; +} + + +////////// SDPAttribute definition ////////// + +class SDPAttribute { +public: + SDPAttribute(char const* strValue, Boolean valueIsHexadecimal); + virtual ~SDPAttribute(); + + char const* strValue() const { return fStrValue; } + char const* strValueToLower() const { return fStrValueToLower; } + int intValue() const { return fIntValue; } + Boolean valueIsHexadecimal() const { return fValueIsHexadecimal; } + +private: + char* fStrValue; + char* fStrValueToLower; + int fIntValue; + Boolean fValueIsHexadecimal; +}; + + +////////// MediaSubsession ////////// + +MediaSubsession::MediaSubsession(MediaSession& parent) + : serverPortNum(0), sink(NULL), miscPtr(NULL), + fParent(parent), fNext(NULL), + fConnectionEndpointName(NULL), + fClientPortNum(0), fRTPPayloadFormat(0xFF), + fSavedSDPLines(NULL), fMediumName(NULL), fCodecName(NULL), fProtocolName(NULL), + fRTPTimestampFrequency(0), fMultiplexRTCPWithRTP(False), fControlPath(NULL), + fSourceFilterAddr(parent.sourceFilterAddr()), fBandwidth(0), + fPlayStartTime(0.0), fPlayEndTime(0.0), fAbsStartTime(NULL), fAbsEndTime(NULL), + fVideoWidth(0), fVideoHeight(0), fVideoFPS(0), fNumChannels(1), fScale(1.0f), fNPT_PTS_Offset(0.0f), + 
fAttributeTable(HashTable::create(STRING_HASH_KEYS)), + fRTPSocket(NULL), fRTCPSocket(NULL), + fRTPSource(NULL), fRTCPInstance(NULL), fReadSource(NULL), + fReceiveRawMP3ADUs(False), fReceiveRawJPEGFrames(False), + fSessionId(NULL) { + rtpInfo.seqNum = 0; rtpInfo.timestamp = 0; rtpInfo.infoIsNew = False; + + // A few attributes have unusual default values. Set these now: + setAttribute("profile-level-id", "0", True/*value is hexadecimal*/); // used with "video/H264" + // This won't work for MPEG-4 (unless the value is <10), because for MPEG-4, the value + // is assumed to be a decimal string, not a hexadecimal string. NEED TO FIX ##### + setAttribute("profile-id", "1"); // used with "video/H265" + setAttribute("level-id", "93"); // used with "video/H265" + setAttribute("interop-constraints", "B00000000000"); // used with "video/H265" +} + +MediaSubsession::~MediaSubsession() { + deInitiate(); + + delete[] fConnectionEndpointName; delete[] fSavedSDPLines; + delete[] fMediumName; delete[] fCodecName; delete[] fProtocolName; + delete[] fControlPath; + delete[] fAbsStartTime; delete[] fAbsEndTime; + delete[] fSessionId; + + // Empty and delete our 'attributes table': + SDPAttribute* attr; + while ((attr = (SDPAttribute*)fAttributeTable->RemoveNext()) != NULL) { + delete attr; + } + delete fAttributeTable; + + delete fNext; +} + +void MediaSubsession::addFilter(FramedFilter* filter){ + if (filter == NULL || filter->inputSource() != fReadSource) return; // sanity check + fReadSource = filter; +} + +double MediaSubsession::playStartTime() const { + if (fPlayStartTime > 0) return fPlayStartTime; + + return fParent.playStartTime(); +} + +double MediaSubsession::playEndTime() const { + if (fPlayEndTime > 0) return fPlayEndTime; + + return fParent.playEndTime(); +} + +char* MediaSubsession::absStartTime() const { + if (fAbsStartTime != NULL) return fAbsStartTime; + + return fParent.absStartTime(); +} + +char* MediaSubsession::absEndTime() const { + if (fAbsEndTime != NULL) 
return fAbsEndTime; + + return fParent.absEndTime(); +} + +static Boolean const honorSDPPortChoice +#ifdef IGNORE_UNICAST_SDP_PORTS += False; +#else += True; +#endif + +Boolean MediaSubsession::initiate(int useSpecialRTPoffset) { + if (fReadSource != NULL) return True; // has already been initiated + + do { + if (fCodecName == NULL) { + env().setResultMsg("Codec is unspecified"); + break; + } + + // Create RTP and RTCP 'Groupsocks' on which to receive incoming data. + // (Groupsocks will work even for unicast addresses) + struct in_addr tempAddr; + tempAddr.s_addr = connectionEndpointAddress(); + // This could get changed later, as a result of a RTSP "SETUP" + + if (fClientPortNum != 0 && (honorSDPPortChoice || IsMulticastAddress(tempAddr.s_addr))) { + // The sockets' port numbers were specified for us. Use these: + Boolean const protocolIsRTP = strcmp(fProtocolName, "RTP") == 0; + if (protocolIsRTP && !fMultiplexRTCPWithRTP) { + fClientPortNum = fClientPortNum&~1; + // use an even-numbered port for RTP, and the next (odd-numbered) port for RTCP + } + if (isSSM()) { + fRTPSocket = new Groupsock(env(), tempAddr, fSourceFilterAddr, fClientPortNum); + } else { + fRTPSocket = new Groupsock(env(), tempAddr, fClientPortNum, 255); + } + if (fRTPSocket == NULL) { + env().setResultMsg("Failed to create RTP socket"); + break; + } + + if (protocolIsRTP) { + if (fMultiplexRTCPWithRTP) { + // Use the RTP 'groupsock' object for RTCP as well: + fRTCPSocket = fRTPSocket; + } else { + // Set our RTCP port to be the RTP port + 1: + portNumBits const rtcpPortNum = fClientPortNum|1; + if (isSSM()) { + fRTCPSocket = new Groupsock(env(), tempAddr, fSourceFilterAddr, rtcpPortNum); + } else { + fRTCPSocket = new Groupsock(env(), tempAddr, rtcpPortNum, 255); + } + } + } + } else { + // Port numbers were not specified in advance, so we use ephemeral port numbers. + // Create sockets until we get a port-number pair (even: RTP; even+1: RTCP). 
+ // (However, if we're multiplexing RTCP with RTP, then we create only one socket, + // and the port number can be even or odd.) + // We need to make sure that we don't keep trying to use the same bad port numbers over + // and over again, so we store bad sockets in a table, and delete them all when we're done. + HashTable* socketHashTable = HashTable::create(ONE_WORD_HASH_KEYS); + if (socketHashTable == NULL) break; + Boolean success = False; + NoReuse dummy(env()); + // ensures that our new ephemeral port number won't be one that's already in use + + while (1) { + // Create a new socket: + if (isSSM()) { + fRTPSocket = new Groupsock(env(), tempAddr, fSourceFilterAddr, 0); + } else { + fRTPSocket = new Groupsock(env(), tempAddr, 0, 255); + } + if (fRTPSocket == NULL) { + env().setResultMsg("MediaSession::initiate(): unable to create RTP and RTCP sockets"); + break; + } + + // Get the client port number: + Port clientPort(0); + if (!getSourcePort(env(), fRTPSocket->socketNum(), clientPort)) { + break; + } + fClientPortNum = ntohs(clientPort.num()); + + if (fMultiplexRTCPWithRTP) { + // Use this RTP 'groupsock' object for RTCP as well: + fRTCPSocket = fRTPSocket; + success = True; + break; + } + + // To be usable for RTP, the client port number must be even: + if ((fClientPortNum&1) != 0) { // it's odd + // Record this socket in our table, and keep trying: + unsigned key = (unsigned)fClientPortNum; + Groupsock* existing = (Groupsock*)socketHashTable->Add((char const*)key, fRTPSocket); + delete existing; // in case it wasn't NULL + continue; + } + + // Make sure we can use the next (i.e., odd) port number, for RTCP: + portNumBits rtcpPortNum = fClientPortNum|1; + if (isSSM()) { + fRTCPSocket = new Groupsock(env(), tempAddr, fSourceFilterAddr, rtcpPortNum); + } else { + fRTCPSocket = new Groupsock(env(), tempAddr, rtcpPortNum, 255); + } + if (fRTCPSocket != NULL && fRTCPSocket->socketNum() >= 0) { + // Success! Use these two sockets. 
+ success = True; + break; + } else { + // We couldn't create the RTCP socket (perhaps that port number's already in use elsewhere?). + delete fRTCPSocket; fRTCPSocket = NULL; + + // Record the first socket in our table, and keep trying: + unsigned key = (unsigned)fClientPortNum; + Groupsock* existing = (Groupsock*)socketHashTable->Add((char const*)key, fRTPSocket); + delete existing; // in case it wasn't NULL + continue; + } + } + + // Clean up the socket hash table (and contents): + Groupsock* oldGS; + while ((oldGS = (Groupsock*)socketHashTable->RemoveNext()) != NULL) { + delete oldGS; + } + delete socketHashTable; + + if (!success) break; // a fatal error occurred trying to create the RTP and RTCP sockets; we can't continue + } + + // Try to use a big receive buffer for RTP - at least 0.1 second of + // specified bandwidth and at least 50 KB + unsigned rtpBufSize = fBandwidth * 25 / 2; // 1 kbps * 0.1 s = 12.5 bytes + if (rtpBufSize < 50 * 1024) + rtpBufSize = 50 * 1024; + increaseReceiveBufferTo(env(), fRTPSocket->socketNum(), rtpBufSize); + + if (isSSM() && fRTCPSocket != NULL) { + // Special case for RTCP SSM: Send RTCP packets back to the source via unicast: + fRTCPSocket->changeDestinationParameters(fSourceFilterAddr,0,~0); + } + + // Create "fRTPSource" and "fReadSource": + if (!createSourceObjects(useSpecialRTPoffset)) break; + + if (fReadSource == NULL) { + env().setResultMsg("Failed to create read source"); + break; + } + + // Finally, create our RTCP instance. (It starts running automatically) + if (fRTPSource != NULL && fRTCPSocket != NULL) { + // If bandwidth is specified, use it and add 5% for RTCP overhead. + // Otherwise make a guess at 500 kbps. + unsigned totSessionBandwidth + = fBandwidth ? 
fBandwidth + fBandwidth / 20 : 500; + fRTCPInstance = RTCPInstance::createNew(env(), fRTCPSocket, + totSessionBandwidth, + (unsigned char const*) + fParent.CNAME(), + NULL /* we're a client */, + fRTPSource); + if (fRTCPInstance == NULL) { + env().setResultMsg("Failed to create RTCP instance"); + break; + } + } + + return True; + } while (0); + + deInitiate(); + fClientPortNum = 0; + return False; +} + +void MediaSubsession::deInitiate() { + Medium::close(fRTCPInstance); fRTCPInstance = NULL; + + Medium::close(fReadSource); // this is assumed to also close fRTPSource + fReadSource = NULL; fRTPSource = NULL; + + delete fRTPSocket; + if (fRTCPSocket != fRTPSocket) delete fRTCPSocket; + fRTPSocket = NULL; fRTCPSocket = NULL; +} + +Boolean MediaSubsession::setClientPortNum(unsigned short portNum) { + if (fReadSource != NULL) { + env().setResultMsg("A read source has already been created"); + return False; + } + + fClientPortNum = portNum; + return True; +} + +char const* MediaSubsession::attrVal_str(char const* attrName) const { + SDPAttribute* attr = (SDPAttribute*)(fAttributeTable->Lookup(attrName)); + if (attr == NULL) return ""; + + return attr->strValue(); +} + +char const* MediaSubsession::attrVal_strToLower(char const* attrName) const { + SDPAttribute* attr = (SDPAttribute*)(fAttributeTable->Lookup(attrName)); + if (attr == NULL) return ""; + + return attr->strValueToLower(); +} + +unsigned MediaSubsession::attrVal_int(char const* attrName) const { + SDPAttribute* attr = (SDPAttribute*)(fAttributeTable->Lookup(attrName)); + if (attr == NULL) return 0; + + return attr->intValue(); +} + +char const* MediaSubsession::fmtp_config() const { + char const* result = attrVal_str("config"); + if (result[0] == '\0') result = attrVal_str("configuration"); + + return result; +} + +netAddressBits MediaSubsession::connectionEndpointAddress() const { + do { + // Get the endpoint name from with us, or our parent session: + char const* endpointString = connectionEndpointName(); + 
if (endpointString == NULL) { + endpointString = parentSession().connectionEndpointName(); + } + if (endpointString == NULL) break; + + // Now, convert this name to an address, if we can: + NetAddressList addresses(endpointString); + if (addresses.numAddresses() == 0) break; + + return *(netAddressBits*)(addresses.firstAddress()->data()); + } while (0); + + // No address known: + return 0; +} + +void MediaSubsession::setDestinations(netAddressBits defaultDestAddress) { + // Get the destination address from the connection endpoint name + // (This will be 0 if it's not known, in which case we use the default) + netAddressBits destAddress = connectionEndpointAddress(); + if (destAddress == 0) destAddress = defaultDestAddress; + struct in_addr destAddr; destAddr.s_addr = destAddress; + + // The destination TTL remains unchanged: + int destTTL = ~0; // means: don't change + + if (fRTPSocket != NULL) { + Port destPort(serverPortNum); + fRTPSocket->changeDestinationParameters(destAddr, destPort, destTTL); + } + if (fRTCPSocket != NULL && !isSSM() && !fMultiplexRTCPWithRTP) { + // Note: For SSM sessions, the dest address for RTCP was already set. + Port destPort(serverPortNum+1); + fRTCPSocket->changeDestinationParameters(destAddr, destPort, destTTL); + } +} + +void MediaSubsession::setSessionId(char const* sessionId) { + delete[] fSessionId; + fSessionId = strDup(sessionId); +} + +double MediaSubsession::getNormalPlayTime(struct timeval const& presentationTime) { + if (rtpSource() == NULL || rtpSource()->timestampFrequency() == 0) return 0.0; // no RTP source, or bad freq! + + // First, check whether our "RTPSource" object has already been synchronized using RTCP. + // If it hasn't, then - as a special case - we need to use the RTP timestamp to compute the NPT. 
+ if (!rtpSource()->hasBeenSynchronizedUsingRTCP()) { + if (!rtpInfo.infoIsNew) return 0.0; // the "rtpInfo" structure has not been filled in + u_int32_t timestampOffset = rtpSource()->curPacketRTPTimestamp() - rtpInfo.timestamp; + double nptOffset = (timestampOffset/(double)(rtpSource()->timestampFrequency()))*scale(); + double npt = playStartTime() + nptOffset; + + return npt; + } else { + // Common case: We have been synchronized using RTCP. This means that the "presentationTime" parameter + // will be accurate, and so we should use this to compute the NPT. + double ptsDouble = (double)(presentationTime.tv_sec + presentationTime.tv_usec/1000000.0); + + if (rtpInfo.infoIsNew) { + // This is the first time we've been called with a synchronized presentation time since the "rtpInfo" + // structure was last filled in. Use this "presentationTime" to compute "fNPT_PTS_Offset": + if (seqNumLT(rtpSource()->curPacketRTPSeqNum(), rtpInfo.seqNum)) return -0.1; // sanity check; ignore old packets + u_int32_t timestampOffset = rtpSource()->curPacketRTPTimestamp() - rtpInfo.timestamp; + double nptOffset = (timestampOffset/(double)(rtpSource()->timestampFrequency()))*scale(); + double npt = playStartTime() + nptOffset; + fNPT_PTS_Offset = npt - ptsDouble*scale(); + rtpInfo.infoIsNew = False; // for next time + + return npt; + } else { + // Use the precomputed "fNPT_PTS_Offset" to compute the NPT from the PTS: + if (fNPT_PTS_Offset == 0.0) return 0.0; // error: The "rtpInfo" structure was apparently never filled in + return (double)(ptsDouble*scale() + fNPT_PTS_Offset); + } + } +} + +void MediaSubsession +::setAttribute(char const* name, char const* value, Boolean valueIsHexadecimal) { + // Replace any existing attribute record with this name (except that the 'valueIsHexadecimal' + // property will be inherited from it, if it exists). 
+ SDPAttribute* oldAttr = (SDPAttribute*)fAttributeTable->Lookup(name); + if (oldAttr != NULL) { + valueIsHexadecimal = oldAttr->valueIsHexadecimal(); + fAttributeTable->Remove(name); + delete oldAttr; + } + + SDPAttribute* newAttr = new SDPAttribute(value, valueIsHexadecimal); + (void)fAttributeTable->Add(name, newAttr); +} + +Boolean MediaSubsession::parseSDPLine_c(char const* sdpLine) { + // Check for "c=IN IP4 " + // or "c=IN IP4 /" + // (Later, do something with also #####) + char* connectionEndpointName = parseCLine(sdpLine); + if (connectionEndpointName != NULL) { + delete[] fConnectionEndpointName; + fConnectionEndpointName = connectionEndpointName; + return True; + } + + return False; +} + +Boolean MediaSubsession::parseSDPLine_b(char const* sdpLine) { + // Check for "b=:" line + // RTP applications are expected to use bwtype="AS" + return sscanf(sdpLine, "b=AS:%u", &fBandwidth) == 1; +} + +Boolean MediaSubsession::parseSDPAttribute_rtpmap(char const* sdpLine) { + // Check for a "a=rtpmap: /" line: + // (Also check without the "/"; RealNetworks omits this) + // Also check for a trailing "/". 
+ Boolean parseSuccess = False; + + unsigned rtpmapPayloadFormat; + char* codecName = strDupSize(sdpLine); // ensures we have enough space + unsigned rtpTimestampFrequency = 0; + unsigned numChannels = 1; + if (sscanf(sdpLine, "a=rtpmap: %u %[^/]/%u/%u", + &rtpmapPayloadFormat, codecName, &rtpTimestampFrequency, + &numChannels) == 4 + || sscanf(sdpLine, "a=rtpmap: %u %[^/]/%u", + &rtpmapPayloadFormat, codecName, &rtpTimestampFrequency) == 3 + || sscanf(sdpLine, "a=rtpmap: %u %s", + &rtpmapPayloadFormat, codecName) == 2) { + parseSuccess = True; + if (rtpmapPayloadFormat == fRTPPayloadFormat) { + // This "rtpmap" matches our payload format, so set our + // codec name and timestamp frequency: + // (First, make sure the codec name is upper case) + { + Locale l("POSIX"); + for (char* p = codecName; *p != '\0'; ++p) *p = toupper(*p); + } + delete[] fCodecName; fCodecName = strDup(codecName); + fRTPTimestampFrequency = rtpTimestampFrequency; + fNumChannels = numChannels; + } + } + delete[] codecName; + + return parseSuccess; +} + +Boolean MediaSubsession::parseSDPAttribute_rtcpmux(char const* sdpLine) { + if (strncmp(sdpLine, "a=rtcp-mux", 10) == 0) { + fMultiplexRTCPWithRTP = True; + return True; + } + + return False; +} + +Boolean MediaSubsession::parseSDPAttribute_control(char const* sdpLine) { + // Check for a "a=control:" line: + Boolean parseSuccess = False; + + char* controlPath = strDupSize(sdpLine); // ensures we have enough space + if (sscanf(sdpLine, "a=control: %s", controlPath) == 1) { + parseSuccess = True; + delete[] fControlPath; fControlPath = strDup(controlPath); + } + delete[] controlPath; + + return parseSuccess; +} + +Boolean MediaSubsession::parseSDPAttribute_range(char const* sdpLine) { + // Check for a "a=range:npt=-" line: + // (Later handle other kinds of "a=range" attributes also???#####) + Boolean parseSuccess = False; + + double playStartTime; + double playEndTime; + if (parseRangeAttribute(sdpLine, playStartTime, playEndTime)) { + 
parseSuccess = True; + if (playStartTime > fPlayStartTime) { + fPlayStartTime = playStartTime; + if (playStartTime > fParent.playStartTime()) { + fParent.playStartTime() = playStartTime; + } + } + if (playEndTime > fPlayEndTime) { + fPlayEndTime = playEndTime; + if (playEndTime > fParent.playEndTime()) { + fParent.playEndTime() = playEndTime; + } + } + } else if (parseRangeAttribute(sdpLine, _absStartTime(), _absEndTime())) { + parseSuccess = True; + } + + return parseSuccess; +} + +Boolean MediaSubsession::parseSDPAttribute_fmtp(char const* sdpLine) { + // Check for a "a=fmtp:" line: + // Later: Check that payload format number matches; ##### + do { + if (strncmp(sdpLine, "a=fmtp:", 7) != 0) break; sdpLine += 7; + while (isdigit(*sdpLine)) ++sdpLine; + + // The remaining "sdpLine" should be a sequence of + // =; + // or + // ; + // parameter assignments. Look at each of these. + unsigned const sdpLineLen = strlen(sdpLine); + char* nameStr = new char[sdpLineLen+1]; + char* valueStr = new char[sdpLineLen+1]; + + while (*sdpLine != '\0' && *sdpLine != '\r' && *sdpLine != '\n') { + int sscanfResult = sscanf(sdpLine, " %[^=; \t\r\n] = %[^; \t\r\n]", nameStr, valueStr); + if (sscanfResult >= 1) { + // or = + // Convert to lower-case, to ease comparison: + Locale l("POSIX"); + for (char* c = nameStr; *c != '\0'; ++c) *c = tolower(*c); + + if (sscanfResult == 1) { + // + setAttribute(nameStr); + } else { + // = + setAttribute(nameStr, valueStr); + } + } + + // Move to the next parameter assignment string: + while (*sdpLine != '\0' && *sdpLine != '\r' && *sdpLine != '\n' && *sdpLine != ';') ++sdpLine; + while (*sdpLine == ';') ++sdpLine; + } + delete[] nameStr; delete[] valueStr; + return True; + } while (0); + + return False; +} + +Boolean MediaSubsession +::parseSDPAttribute_source_filter(char const* sdpLine) { + return parseSourceFilterAttribute(sdpLine, fSourceFilterAddr); +} + +Boolean MediaSubsession::parseSDPAttribute_x_dimensions(char const* sdpLine) { + // Check 
for a "a=x-dimensions:," line: + Boolean parseSuccess = False; + + int width, height; + if (sscanf(sdpLine, "a=x-dimensions:%d,%d", &width, &height) == 2) { + parseSuccess = True; + fVideoWidth = (unsigned short)width; + fVideoHeight = (unsigned short)height; + } + + return parseSuccess; +} + +Boolean MediaSubsession::parseSDPAttribute_framerate(char const* sdpLine) { + // Check for a "a=framerate: " or "a=x-framerate: " line: + Boolean parseSuccess = False; + + float frate; + int rate; + if (sscanf(sdpLine, "a=framerate: %f", &frate) == 1 || sscanf(sdpLine, "a=framerate:%f", &frate) == 1) { + parseSuccess = True; + fVideoFPS = (unsigned)frate; + } else if (sscanf(sdpLine, "a=x-framerate: %d", &rate) == 1) { + parseSuccess = True; + fVideoFPS = (unsigned)rate; + } + + return parseSuccess; +} + +Boolean MediaSubsession::createSourceObjects(int useSpecialRTPoffset) { + do { + // First, check "fProtocolName" + if (strcmp(fProtocolName, "UDP") == 0) { + // A UDP-packetized stream (*not* a RTP stream) + fReadSource = BasicUDPSource::createNew(env(), fRTPSocket); + fRTPSource = NULL; // Note! 
+ + if (strcmp(fCodecName, "MP2T") == 0) { // MPEG-2 Transport Stream + fReadSource = MPEG2TransportStreamFramer::createNew(env(), fReadSource); + // this sets "durationInMicroseconds" correctly, based on the PCR values + } + } else { + // Check "fCodecName" against the set of codecs that we support, + // and create our RTP source accordingly + // (Later make this code more efficient, as this set grows #####) + // (Also, add more fmts that can be implemented by SimpleRTPSource#####) + Boolean createSimpleRTPSource = False; // by default; can be changed below + Boolean doNormalMBitRule = False; // default behavior if "createSimpleRTPSource" is True + if (strcmp(fCodecName, "QCELP") == 0) { // QCELP audio + fReadSource = + QCELPAudioRTPSource::createNew(env(), fRTPSocket, fRTPSource, + fRTPPayloadFormat, + fRTPTimestampFrequency); + // Note that fReadSource will differ from fRTPSource in this case + } else if (strcmp(fCodecName, "AMR") == 0) { // AMR audio (narrowband) + fReadSource = + AMRAudioRTPSource::createNew(env(), fRTPSocket, fRTPSource, + fRTPPayloadFormat, False /*isWideband*/, + fNumChannels, attrVal_bool("octet-align"), + attrVal_unsigned("interleaving"), + attrVal_bool("robust-sorting"), + attrVal_bool("crc")); + // Note that fReadSource will differ from fRTPSource in this case + } else if (strcmp(fCodecName, "AMR-WB") == 0) { // AMR audio (wideband) + fReadSource = + AMRAudioRTPSource::createNew(env(), fRTPSocket, fRTPSource, + fRTPPayloadFormat, True /*isWideband*/, + fNumChannels, attrVal_bool("octet-align"), + attrVal_unsigned("interleaving"), + attrVal_bool("robust-sorting"), + attrVal_bool("crc")); + // Note that fReadSource will differ from fRTPSource in this case + } else if (strcmp(fCodecName, "MPA") == 0) { // MPEG-1 or 2 audio + fReadSource = fRTPSource + = MPEG1or2AudioRTPSource::createNew(env(), fRTPSocket, + fRTPPayloadFormat, + fRTPTimestampFrequency); + } else if (strcmp(fCodecName, "MPA-ROBUST") == 0) { // robust MP3 audio + fReadSource 
= fRTPSource + = MP3ADURTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat, + fRTPTimestampFrequency); + if (fRTPSource == NULL) break; + + if (!fReceiveRawMP3ADUs) { + // Add a filter that deinterleaves the ADUs after depacketizing them: + MP3ADUdeinterleaver* deinterleaver + = MP3ADUdeinterleaver::createNew(env(), fRTPSource); + if (deinterleaver == NULL) break; + + // Add another filter that converts these ADUs to MP3 frames: + fReadSource = MP3FromADUSource::createNew(env(), deinterleaver); + } + } else if (strcmp(fCodecName, "X-MP3-DRAFT-00") == 0) { + // a non-standard variant of "MPA-ROBUST" used by RealNetworks + // (one 'ADU'ized MP3 frame per packet; no headers) + fRTPSource + = SimpleRTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat, + fRTPTimestampFrequency, + "audio/MPA-ROBUST" /*hack*/); + if (fRTPSource == NULL) break; + + // Add a filter that converts these ADUs to MP3 frames: + fReadSource = MP3FromADUSource::createNew(env(), fRTPSource, + False /*no ADU header*/); + } else if (strcmp(fCodecName, "MP4A-LATM") == 0) { // MPEG-4 LATM audio + fReadSource = fRTPSource + = MPEG4LATMAudioRTPSource::createNew(env(), fRTPSocket, + fRTPPayloadFormat, + fRTPTimestampFrequency); + } else if (strcmp(fCodecName, "VORBIS") == 0) { // Vorbis audio + fReadSource = fRTPSource + = VorbisAudioRTPSource::createNew(env(), fRTPSocket, + fRTPPayloadFormat, + fRTPTimestampFrequency); + } else if (strcmp(fCodecName, "THEORA") == 0) { // Theora video + fReadSource = fRTPSource + = TheoraVideoRTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat); + } else if (strcmp(fCodecName, "VP8") == 0) { // VP8 video + fReadSource = fRTPSource + = VP8VideoRTPSource::createNew(env(), fRTPSocket, + fRTPPayloadFormat, + fRTPTimestampFrequency); + } else if (strcmp(fCodecName, "AC3") == 0 || strcmp(fCodecName, "EAC3") == 0) { // AC3 audio + fReadSource = fRTPSource + = AC3AudioRTPSource::createNew(env(), fRTPSocket, + fRTPPayloadFormat, + fRTPTimestampFrequency); + } else 
if (strcmp(fCodecName, "MP4V-ES") == 0) { // MPEG-4 Elementary Stream video + fReadSource = fRTPSource + = MPEG4ESVideoRTPSource::createNew(env(), fRTPSocket, + fRTPPayloadFormat, + fRTPTimestampFrequency); + } else if (strcmp(fCodecName, "MPEG4-GENERIC") == 0) { + fReadSource = fRTPSource + = MPEG4GenericRTPSource::createNew(env(), fRTPSocket, + fRTPPayloadFormat, + fRTPTimestampFrequency, + fMediumName, attrVal_strToLower("mode"), + attrVal_unsigned("sizelength"), + attrVal_unsigned("indexlength"), + attrVal_unsigned("indexdeltalength")); + } else if (strcmp(fCodecName, "MPV") == 0) { // MPEG-1 or 2 video + fReadSource = fRTPSource + = MPEG1or2VideoRTPSource::createNew(env(), fRTPSocket, + fRTPPayloadFormat, + fRTPTimestampFrequency); + } else if (strcmp(fCodecName, "MP2T") == 0) { // MPEG-2 Transport Stream + fRTPSource = SimpleRTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat, + fRTPTimestampFrequency, "video/MP2T", + 0, False); + fReadSource = MPEG2TransportStreamFramer::createNew(env(), fRTPSource); + // this sets "durationInMicroseconds" correctly, based on the PCR values + } else if (strcmp(fCodecName, "H261") == 0) { // H.261 + fReadSource = fRTPSource + = H261VideoRTPSource::createNew(env(), fRTPSocket, + fRTPPayloadFormat, + fRTPTimestampFrequency); + } else if (strcmp(fCodecName, "H263-1998") == 0 || + strcmp(fCodecName, "H263-2000") == 0) { // H.263+ + fReadSource = fRTPSource + = H263plusVideoRTPSource::createNew(env(), fRTPSocket, + fRTPPayloadFormat, + fRTPTimestampFrequency); + } else if (strcmp(fCodecName, "H264") == 0) { + fReadSource = fRTPSource + = H264VideoRTPSource::createNew(env(), fRTPSocket, + fRTPPayloadFormat, + fRTPTimestampFrequency); + } else if (strcmp(fCodecName, "H265") == 0) { + Boolean expectDONFields = attrVal_unsigned("sprop-depack-buf-nalus") > 0; + fReadSource = fRTPSource + = H265VideoRTPSource::createNew(env(), fRTPSocket, + fRTPPayloadFormat, + expectDONFields, + fRTPTimestampFrequency); + } else if 
(strcmp(fCodecName, "DV") == 0) { + fReadSource = fRTPSource + = DVVideoRTPSource::createNew(env(), fRTPSocket, + fRTPPayloadFormat, + fRTPTimestampFrequency); + } else if (strcmp(fCodecName, "JPEG") == 0) { // motion JPEG + if (fReceiveRawJPEGFrames) { + // Special case (used when proxying JPEG/RTP streams): Receive each JPEG/RTP packet, including the special RTP headers: + fReadSource = fRTPSource + = SimpleRTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat, + fRTPTimestampFrequency, "video/JPEG", + 0/*special offset*/, False/*doNormalMBitRule => ignore the 'M' bit*/); + } else { + // Normal case: Receive each JPEG frame as a complete, displayable JPEG image: + fReadSource = fRTPSource + = JPEGVideoRTPSource::createNew(env(), fRTPSocket, + fRTPPayloadFormat, + fRTPTimestampFrequency, + videoWidth(), + videoHeight()); + } + } else if (strcmp(fCodecName, "X-QT") == 0 + || strcmp(fCodecName, "X-QUICKTIME") == 0) { + // Generic QuickTime streams, as defined in + // + char* mimeType + = new char[strlen(mediumName()) + strlen(codecName()) + 2] ; + sprintf(mimeType, "%s/%s", mediumName(), codecName()); + fReadSource = fRTPSource + = QuickTimeGenericRTPSource::createNew(env(), fRTPSocket, + fRTPPayloadFormat, + fRTPTimestampFrequency, + mimeType); + delete[] mimeType; + } else if ( strcmp(fCodecName, "PCMU") == 0 // PCM u-law audio + || strcmp(fCodecName, "GSM") == 0 // GSM audio + || strcmp(fCodecName, "DVI4") == 0 // DVI4 (IMA ADPCM) audio + || strcmp(fCodecName, "PCMA") == 0 // PCM a-law audio + || strcmp(fCodecName, "MP1S") == 0 // MPEG-1 System Stream + || strcmp(fCodecName, "MP2P") == 0 // MPEG-2 Program Stream + || strcmp(fCodecName, "L8") == 0 // 8-bit linear audio + || strcmp(fCodecName, "L16") == 0 // 16-bit linear audio + || strcmp(fCodecName, "L20") == 0 // 20-bit linear audio (RFC 3190) + || strcmp(fCodecName, "L24") == 0 // 24-bit linear audio (RFC 3190) + || strcmp(fCodecName, "G722") == 0 // G.722 audio (RFC 3551) + || strcmp(fCodecName, "G726-16") 
== 0 // G.726, 16 kbps + || strcmp(fCodecName, "G726-24") == 0 // G.726, 24 kbps + || strcmp(fCodecName, "G726-32") == 0 // G.726, 32 kbps + || strcmp(fCodecName, "G726-40") == 0 // G.726, 40 kbps + || strcmp(fCodecName, "SPEEX") == 0 // SPEEX audio + || strcmp(fCodecName, "ILBC") == 0 // iLBC audio + || strcmp(fCodecName, "OPUS") == 0 // Opus audio + || strcmp(fCodecName, "T140") == 0 // T.140 text (RFC 4103) + || strcmp(fCodecName, "DAT12") == 0 // 12-bit nonlinear audio (RFC 3190) + || strcmp(fCodecName, "VND.ONVIF.METADATA") == 0 // 'ONVIF' 'metadata' (a XML document) + ) { + createSimpleRTPSource = True; + useSpecialRTPoffset = 0; + if (strcmp(fCodecName, "VND.ONVIF.METADATA") == 0) { + // This RTP payload format uses the RTP "M" bit to indicate the end of the content (a XML document): + doNormalMBitRule = True; + } + } else if (useSpecialRTPoffset >= 0) { + // We don't know this RTP payload format, but try to receive + // it using a 'SimpleRTPSource' with the specified header offset: + createSimpleRTPSource = True; + } else { + env().setResultMsg("RTP payload format unknown or not supported"); + break; + } + + if (createSimpleRTPSource) { + char* mimeType + = new char[strlen(mediumName()) + strlen(codecName()) + 2] ; + sprintf(mimeType, "%s/%s", mediumName(), codecName()); + fReadSource = fRTPSource + = SimpleRTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat, + fRTPTimestampFrequency, mimeType, + (unsigned)useSpecialRTPoffset, + doNormalMBitRule); + delete[] mimeType; + } + } + + return True; + } while (0); + + return False; // an error occurred +} + + +////////// SDPAttribute implementation ////////// + +SDPAttribute::SDPAttribute(char const* strValue, Boolean valueIsHexadecimal) + : fStrValue(strDup(strValue)), fStrValueToLower(NULL), fValueIsHexadecimal(valueIsHexadecimal) { + if (fStrValue == NULL) { + // No value was given for this attribute, so consider it to be a Boolean, with value True: + fIntValue = 1; + } else { + // Create a 'tolower' 
version of "fStrValue", in case it's needed: + Locale l("POSIX"); + size_t strSize; + + fStrValueToLower = strDupSize(fStrValue, strSize); + for (unsigned i = 0; i < strSize-1; ++i) fStrValueToLower[i] = tolower(fStrValue[i]); + fStrValueToLower[strSize-1] = '\0'; + + // Try to parse "fStrValueToLower" as an integer. If we can't, assume an integer value of 0: + if (sscanf(fStrValueToLower, valueIsHexadecimal ? "%x" : "%d", &fIntValue) != 1) { + fIntValue = 0; + } + } +} + +SDPAttribute::~SDPAttribute() { + delete[] fStrValue; + delete[] fStrValueToLower; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MediaSink.cpp b/AnyCore/lib_rtsp/liveMedia/MediaSink.cpp new file mode 100644 index 0000000..5e241f7 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MediaSink.cpp @@ -0,0 +1,225 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Media Sinks +// Implementation + +#include "MediaSink.hh" +#include "GroupsockHelper.hh" +#include + +////////// MediaSink ////////// + +MediaSink::MediaSink(UsageEnvironment& env) + : Medium(env), fSource(NULL) { +} + +MediaSink::~MediaSink() { + stopPlaying(); +} + +Boolean MediaSink::isSink() const { + return True; +} + +Boolean MediaSink::lookupByName(UsageEnvironment& env, char const* sinkName, + MediaSink*& resultSink) { + resultSink = NULL; // unless we succeed + + Medium* medium; + if (!Medium::lookupByName(env, sinkName, medium)) return False; + + if (!medium->isSink()) { + env.setResultMsg(sinkName, " is not a media sink"); + return False; + } + + resultSink = (MediaSink*)medium; + return True; +} + +Boolean MediaSink::sourceIsCompatibleWithUs(MediaSource& source) { + // We currently support only framed sources. + return source.isFramedSource(); +} + +Boolean MediaSink::startPlaying(MediaSource& source, + afterPlayingFunc* afterFunc, + void* afterClientData) { + // Make sure we're not already being played: + if (fSource != NULL) { + envir().setResultMsg("This sink is already being played"); + return False; + } + + // Make sure our source is compatible: + if (!sourceIsCompatibleWithUs(source)) { + envir().setResultMsg("MediaSink::startPlaying(): source is not compatible!"); + return False; + } + fSource = (FramedSource*)&source; + + fAfterFunc = afterFunc; + fAfterClientData = afterClientData; + return continuePlaying(); +} + +void MediaSink::stopPlaying() { + // First, tell the source that we're no longer interested: + if (fSource != NULL) fSource->stopGettingFrames(); + + // Cancel any pending tasks: + envir().taskScheduler().unscheduleDelayedTask(nextTask()); + + fSource = NULL; // indicates that we can be played again + fAfterFunc = NULL; +} + +void MediaSink::onSourceClosure(void* clientData) { + MediaSink* sink = (MediaSink*)clientData; + sink->onSourceClosure(); +} + +void MediaSink::onSourceClosure() { + // Cancel any pending tasks: + 
envir().taskScheduler().unscheduleDelayedTask(nextTask()); + + fSource = NULL; // indicates that we can be played again + if (fAfterFunc != NULL) { + (*fAfterFunc)(fAfterClientData); + } +} + +Boolean MediaSink::isRTPSink() const { + return False; // default implementation +} + +////////// OutPacketBuffer ////////// + +unsigned OutPacketBuffer::maxSize = 60000; // by default + +OutPacketBuffer +::OutPacketBuffer(unsigned preferredPacketSize, unsigned maxPacketSize, unsigned maxBufferSize) + : fPreferred(preferredPacketSize), fMax(maxPacketSize), + fOverflowDataSize(0) { + if (maxBufferSize == 0) maxBufferSize = maxSize; + unsigned maxNumPackets = (maxBufferSize + (maxPacketSize-1))/maxPacketSize; + fLimit = maxNumPackets*maxPacketSize; + fBuf = new unsigned char[fLimit]; + resetPacketStart(); + resetOffset(); + resetOverflowData(); +} + +OutPacketBuffer::~OutPacketBuffer() { + delete[] fBuf; +} + +void OutPacketBuffer::enqueue(unsigned char const* from, unsigned numBytes) { + if (numBytes > totalBytesAvailable()) { +#ifdef DEBUG + fprintf(stderr, "OutPacketBuffer::enqueue() warning: %d > %d\n", numBytes, totalBytesAvailable()); +#endif + numBytes = totalBytesAvailable(); + } + + if (curPtr() != from) memmove(curPtr(), from, numBytes); + increment(numBytes); +} + +void OutPacketBuffer::enqueueWord(u_int32_t word) { + u_int32_t nWord = htonl(word); + enqueue((unsigned char*)&nWord, 4); +} + +void OutPacketBuffer::insert(unsigned char const* from, unsigned numBytes, + unsigned toPosition) { + unsigned realToPosition = fPacketStart + toPosition; + if (realToPosition + numBytes > fLimit) { + if (realToPosition > fLimit) return; // we can't do this + numBytes = fLimit - realToPosition; + } + + memmove(&fBuf[realToPosition], from, numBytes); + if (toPosition + numBytes > fCurOffset) { + fCurOffset = toPosition + numBytes; + } +} + +void OutPacketBuffer::insertWord(u_int32_t word, unsigned toPosition) { + u_int32_t nWord = htonl(word); + insert((unsigned char*)&nWord, 4, 
toPosition); +} + +void OutPacketBuffer::extract(unsigned char* to, unsigned numBytes, + unsigned fromPosition) { + unsigned realFromPosition = fPacketStart + fromPosition; + if (realFromPosition + numBytes > fLimit) { // sanity check + if (realFromPosition > fLimit) return; // we can't do this + numBytes = fLimit - realFromPosition; + } + + memmove(to, &fBuf[realFromPosition], numBytes); +} + +u_int32_t OutPacketBuffer::extractWord(unsigned fromPosition) { + u_int32_t nWord; + extract((unsigned char*)&nWord, 4, fromPosition); + return ntohl(nWord); +} + +void OutPacketBuffer::skipBytes(unsigned numBytes) { + if (numBytes > totalBytesAvailable()) { + numBytes = totalBytesAvailable(); + } + + increment(numBytes); +} + +void OutPacketBuffer +::setOverflowData(unsigned overflowDataOffset, + unsigned overflowDataSize, + struct timeval const& presentationTime, + unsigned durationInMicroseconds) { + fOverflowDataOffset = overflowDataOffset; + fOverflowDataSize = overflowDataSize; + fOverflowPresentationTime = presentationTime; + fOverflowDurationInMicroseconds = durationInMicroseconds; +} + +void OutPacketBuffer::useOverflowData() { + enqueue(&fBuf[fPacketStart + fOverflowDataOffset], fOverflowDataSize); + fCurOffset -= fOverflowDataSize; // undoes increment performed by "enqueue" + resetOverflowData(); +} + +void OutPacketBuffer::adjustPacketStart(unsigned numBytes) { + fPacketStart += numBytes; + if (fOverflowDataOffset >= numBytes) { + fOverflowDataOffset -= numBytes; + } else { + fOverflowDataOffset = 0; + fOverflowDataSize = 0; // an error otherwise + } +} + +void OutPacketBuffer::resetPacketStart() { + if (fOverflowDataSize > 0) { + fOverflowDataOffset += fPacketStart; + } + fPacketStart = 0; +} diff --git a/AnyCore/lib_rtsp/liveMedia/MediaSource.cpp b/AnyCore/lib_rtsp/liveMedia/MediaSource.cpp new file mode 100644 index 0000000..d5b5947 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MediaSource.cpp @@ -0,0 +1,88 @@ +/********** +This library is free software; you 
can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// "liveMedia"
// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved.
// Media Sources
// Implementation

#include "MediaSource.hh"

////////// MediaSource //////////

// Base class for all media sources.
MediaSource::MediaSource(UsageEnvironment& env)
  : Medium(env) {
}

MediaSource::~MediaSource() {
}

Boolean MediaSource::isSource() const {
  return True;
}

// Default MIME type; subclasses override this with their real payload type.
char const* MediaSource::MIMEtype() const {
  return "application/OCTET-STREAM"; // default type
}

// The following "isXXX()" predicates all return False here; each subclass
// that actually implements the corresponding kind of source overrides its own:
Boolean MediaSource::isFramedSource() const {
  return False; // default implementation
}
Boolean MediaSource::isRTPSource() const {
  return False; // default implementation
}
Boolean MediaSource::isMPEG1or2VideoStreamFramer() const {
  return False; // default implementation
}
Boolean MediaSource::isMPEG4VideoStreamFramer() const {
  return False; // default implementation
}
Boolean MediaSource::isH264VideoStreamFramer() const {
  return False; // default implementation
}
Boolean MediaSource::isH265VideoStreamFramer() const {
  return False; // default implementation
}
Boolean MediaSource::isDVVideoStreamFramer() const {
  return False; // default implementation
}
Boolean MediaSource::isJPEGVideoSource() const {
  return False; // default implementation
}
Boolean MediaSource::isAMRAudioSource() const {
  return False; // default implementation
}

// Looks up a previously-created medium by name and checks that it is a
// source.  On success, sets "resultSource" and returns True; otherwise sets
// an error message in "env" and returns False.
Boolean MediaSource::lookupByName(UsageEnvironment& env,
				  char const* sourceName,
				  MediaSource*& resultSource) {
  resultSource = NULL; // unless we succeed

  Medium* medium;
  if (!Medium::lookupByName(env, sourceName, medium)) return False;

  if (!medium->isSource()) {
    env.setResultMsg(sourceName, " is not a media source");
    return False;
  }

  resultSource = (MediaSource*)medium;
  return True;
}

void MediaSource::getAttributes() const {
  // Default implementation: report no attributes (an empty result message)
  envir().setResultMsg("");
}

/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// "liveMedia"
// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved.
// RTP sink for a common kind of payload format: Those which pack multiple,
// complete codec frames (as many as possible) into each RTP packet.
+// Implementation + +#include "MultiFramedRTPSink.hh" +#include "GroupsockHelper.hh" + +////////// MultiFramedRTPSink ////////// + +void MultiFramedRTPSink::setPacketSizes(unsigned preferredPacketSize, + unsigned maxPacketSize) { + if (preferredPacketSize > maxPacketSize || preferredPacketSize == 0) return; + // sanity check + + delete fOutBuf; + fOutBuf = new OutPacketBuffer(preferredPacketSize, maxPacketSize); + fOurMaxPacketSize = maxPacketSize; // save value, in case subclasses need it +} + +MultiFramedRTPSink::MultiFramedRTPSink(UsageEnvironment& env, + Groupsock* rtpGS, + unsigned char rtpPayloadType, + unsigned rtpTimestampFrequency, + char const* rtpPayloadFormatName, + unsigned numChannels) + : RTPSink(env, rtpGS, rtpPayloadType, rtpTimestampFrequency, + rtpPayloadFormatName, numChannels), + fOutBuf(NULL), fCurFragmentationOffset(0), fPreviousFrameEndedFragmentation(False), + fOnSendErrorFunc(NULL), fOnSendErrorData(NULL) { + setPacketSizes(1000, 1448); + // Default max packet size (1500, minus allowance for IP, UDP, UMTP headers) + // (Also, make it a multiple of 4 bytes, just in case that matters.) 
+} + +MultiFramedRTPSink::~MultiFramedRTPSink() { + delete fOutBuf; +} + +void MultiFramedRTPSink +::doSpecialFrameHandling(unsigned /*fragmentationOffset*/, + unsigned char* /*frameStart*/, + unsigned /*numBytesInFrame*/, + struct timeval framePresentationTime, + unsigned /*numRemainingBytes*/) { + // default implementation: If this is the first frame in the packet, + // use its presentationTime for the RTP timestamp: + if (isFirstFrameInPacket()) { + setTimestamp(framePresentationTime); + } +} + +Boolean MultiFramedRTPSink::allowFragmentationAfterStart() const { + return False; // by default +} + +Boolean MultiFramedRTPSink::allowOtherFramesAfterLastFragment() const { + return False; // by default +} + +Boolean MultiFramedRTPSink +::frameCanAppearAfterPacketStart(unsigned char const* /*frameStart*/, + unsigned /*numBytesInFrame*/) const { + return True; // by default +} + +unsigned MultiFramedRTPSink::specialHeaderSize() const { + // default implementation: Assume no special header: + return 0; +} + +unsigned MultiFramedRTPSink::frameSpecificHeaderSize() const { + // default implementation: Assume no frame-specific header: + return 0; +} + +unsigned MultiFramedRTPSink::computeOverflowForNewFrame(unsigned newFrameSize) const { + // default implementation: Just call numOverflowBytes() + return fOutBuf->numOverflowBytes(newFrameSize); +} + +void MultiFramedRTPSink::setMarkerBit() { + unsigned rtpHdr = fOutBuf->extractWord(0); + rtpHdr |= 0x00800000; + fOutBuf->insertWord(rtpHdr, 0); +} + +void MultiFramedRTPSink::setTimestamp(struct timeval framePresentationTime) { + // First, convert the presentation time to a 32-bit RTP timestamp: + fCurrentTimestamp = convertToRTPTimestamp(framePresentationTime); + + // Then, insert it into the RTP packet: + fOutBuf->insertWord(fCurrentTimestamp, fTimestampPosition); +} + +void MultiFramedRTPSink::setSpecialHeaderWord(unsigned word, + unsigned wordPosition) { + fOutBuf->insertWord(word, fSpecialHeaderPosition + 4*wordPosition); 
+} + +void MultiFramedRTPSink::setSpecialHeaderBytes(unsigned char const* bytes, + unsigned numBytes, + unsigned bytePosition) { + fOutBuf->insert(bytes, numBytes, fSpecialHeaderPosition + bytePosition); +} + +void MultiFramedRTPSink::setFrameSpecificHeaderWord(unsigned word, + unsigned wordPosition) { + fOutBuf->insertWord(word, fCurFrameSpecificHeaderPosition + 4*wordPosition); +} + +void MultiFramedRTPSink::setFrameSpecificHeaderBytes(unsigned char const* bytes, + unsigned numBytes, + unsigned bytePosition) { + fOutBuf->insert(bytes, numBytes, fCurFrameSpecificHeaderPosition + bytePosition); +} + +void MultiFramedRTPSink::setFramePadding(unsigned numPaddingBytes) { + if (numPaddingBytes > 0) { + // Add the padding bytes (with the last one being the padding size): + unsigned char paddingBuffer[255]; //max padding + memset(paddingBuffer, 0, numPaddingBytes); + paddingBuffer[numPaddingBytes-1] = numPaddingBytes; + fOutBuf->enqueue(paddingBuffer, numPaddingBytes); + + // Set the RTP padding bit: + unsigned rtpHdr = fOutBuf->extractWord(0); + rtpHdr |= 0x20000000; + fOutBuf->insertWord(rtpHdr, 0); + } +} + +Boolean MultiFramedRTPSink::continuePlaying() { + // Send the first packet. + // (This will also schedule any future sends.) + buildAndSendPacket(True); + return True; +} + +void MultiFramedRTPSink::stopPlaying() { + fOutBuf->resetPacketStart(); + fOutBuf->resetOffset(); + fOutBuf->resetOverflowData(); + + // Then call the default "stopPlaying()" function: + MediaSink::stopPlaying(); +} + +void MultiFramedRTPSink::buildAndSendPacket(Boolean isFirstPacket) { + fIsFirstPacket = isFirstPacket; + + // Set up the RTP header: + unsigned rtpHdr = 0x80000000; // RTP version 2; marker ('M') bit not set (by default; it can be set later) + rtpHdr |= (fRTPPayloadType<<16); + rtpHdr |= fSeqNo; // sequence number + fOutBuf->enqueueWord(rtpHdr); + + // Note where the RTP timestamp will go. + // (We can't fill this in until we start packing payload frames.) 
+ fTimestampPosition = fOutBuf->curPacketSize(); + fOutBuf->skipBytes(4); // leave a hole for the timestamp + + fOutBuf->enqueueWord(SSRC()); + + // Allow for a special, payload-format-specific header following the + // RTP header: + fSpecialHeaderPosition = fOutBuf->curPacketSize(); + fSpecialHeaderSize = specialHeaderSize(); + fOutBuf->skipBytes(fSpecialHeaderSize); + + // Begin packing as many (complete) frames into the packet as we can: + fTotalFrameSpecificHeaderSizes = 0; + fNoFramesLeft = False; + fNumFramesUsedSoFar = 0; + packFrame(); +} + +void MultiFramedRTPSink::packFrame() { + // Get the next frame. + + // First, see if we have an overflow frame that was too big for the last pkt + if (fOutBuf->haveOverflowData()) { + // Use this frame before reading a new one from the source + unsigned frameSize = fOutBuf->overflowDataSize(); + struct timeval presentationTime = fOutBuf->overflowPresentationTime(); + unsigned durationInMicroseconds = fOutBuf->overflowDurationInMicroseconds(); + fOutBuf->useOverflowData(); + + afterGettingFrame1(frameSize, 0, presentationTime, durationInMicroseconds); + } else { + // Normal case: we need to read a new frame from the source + if (fSource == NULL) return; + + fCurFrameSpecificHeaderPosition = fOutBuf->curPacketSize(); + fCurFrameSpecificHeaderSize = frameSpecificHeaderSize(); + fOutBuf->skipBytes(fCurFrameSpecificHeaderSize); + fTotalFrameSpecificHeaderSizes += fCurFrameSpecificHeaderSize; + + fSource->getNextFrame(fOutBuf->curPtr(), fOutBuf->totalBytesAvailable(), + afterGettingFrame, this, ourHandleClosure, this); + } +} + +void MultiFramedRTPSink +::afterGettingFrame(void* clientData, unsigned numBytesRead, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + MultiFramedRTPSink* sink = (MultiFramedRTPSink*)clientData; + sink->afterGettingFrame1(numBytesRead, numTruncatedBytes, + presentationTime, durationInMicroseconds); +} + +void MultiFramedRTPSink 
+::afterGettingFrame1(unsigned frameSize, unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + if (fIsFirstPacket) { + // Record the fact that we're starting to play now: + gettimeofday(&fNextSendTime, NULL); + } + + fMostRecentPresentationTime = presentationTime; + if (fInitialPresentationTime.tv_sec == 0 && fInitialPresentationTime.tv_usec == 0) { + fInitialPresentationTime = presentationTime; + } + + if (numTruncatedBytes > 0) { + unsigned const bufferSize = fOutBuf->totalBytesAvailable(); + envir() << "MultiFramedRTPSink::afterGettingFrame1(): The input frame data was too large for our buffer size (" + << bufferSize << "). " + << numTruncatedBytes << " bytes of trailing data was dropped! Correct this by increasing \"OutPacketBuffer::maxSize\" to at least " + << OutPacketBuffer::maxSize + numTruncatedBytes << ", *before* creating this 'RTPSink'. (Current value is " + << OutPacketBuffer::maxSize << ".)\n"; + } + unsigned curFragmentationOffset = fCurFragmentationOffset; + unsigned numFrameBytesToUse = frameSize; + unsigned overflowBytes = 0; + + // If we have already packed one or more frames into this packet, + // check whether this new frame is eligible to be packed after them. + // (This is independent of whether the packet has enough room for this + // new frame; that check comes later.) 
+ if (fNumFramesUsedSoFar > 0) { + if ((fPreviousFrameEndedFragmentation + && !allowOtherFramesAfterLastFragment()) + || !frameCanAppearAfterPacketStart(fOutBuf->curPtr(), frameSize)) { + // Save away this frame for next time: + numFrameBytesToUse = 0; + fOutBuf->setOverflowData(fOutBuf->curPacketSize(), frameSize, + presentationTime, durationInMicroseconds); + } + } + fPreviousFrameEndedFragmentation = False; + + if (numFrameBytesToUse > 0) { + // Check whether this frame overflows the packet + if (fOutBuf->wouldOverflow(frameSize)) { + // Don't use this frame now; instead, save it as overflow data, and + // send it in the next packet instead. However, if the frame is too + // big to fit in a packet by itself, then we need to fragment it (and + // use some of it in this packet, if the payload format permits this.) + if (isTooBigForAPacket(frameSize) + && (fNumFramesUsedSoFar == 0 || allowFragmentationAfterStart())) { + // We need to fragment this frame, and use some of it now: + overflowBytes = computeOverflowForNewFrame(frameSize); + numFrameBytesToUse -= overflowBytes; + fCurFragmentationOffset += numFrameBytesToUse; + } else { + // We don't use any of this frame now: + overflowBytes = frameSize; + numFrameBytesToUse = 0; + } + fOutBuf->setOverflowData(fOutBuf->curPacketSize() + numFrameBytesToUse, + overflowBytes, presentationTime, durationInMicroseconds); + } else if (fCurFragmentationOffset > 0) { + // This is the last fragment of a frame that was fragmented over + // more than one packet. 
Do any special handling for this case: + fCurFragmentationOffset = 0; + fPreviousFrameEndedFragmentation = True; + } + } + + if (numFrameBytesToUse == 0 && frameSize > 0) { + // Send our packet now, because we have filled it up: + sendPacketIfNecessary(); + } else { + // Use this frame in our outgoing packet: + unsigned char* frameStart = fOutBuf->curPtr(); + fOutBuf->increment(numFrameBytesToUse); + // do this now, in case "doSpecialFrameHandling()" calls "setFramePadding()" to append padding bytes + + // Here's where any payload format specific processing gets done: + doSpecialFrameHandling(curFragmentationOffset, frameStart, + numFrameBytesToUse, presentationTime, + overflowBytes); + + ++fNumFramesUsedSoFar; + + // Update the time at which the next packet should be sent, based + // on the duration of the frame that we just packed into it. + // However, if this frame has overflow data remaining, then don't + // count its duration yet. + if (overflowBytes == 0) { + fNextSendTime.tv_usec += durationInMicroseconds; + fNextSendTime.tv_sec += fNextSendTime.tv_usec/1000000; + fNextSendTime.tv_usec %= 1000000; + } + + // Send our packet now if (i) it's already at our preferred size, or + // (ii) (heuristic) another frame of the same size as the one we just + // read would overflow the packet, or + // (iii) it contains the last fragment of a fragmented frame, and we + // don't allow anything else to follow this or + // (iv) one frame per packet is allowed: + if (fOutBuf->isPreferredSize() + || fOutBuf->wouldOverflow(numFrameBytesToUse) + || (fPreviousFrameEndedFragmentation && + !allowOtherFramesAfterLastFragment()) + || !frameCanAppearAfterPacketStart(fOutBuf->curPtr() - frameSize, + frameSize) ) { + // The packet is ready to be sent now + sendPacketIfNecessary(); + } else { + // There's room for more frames; try getting another: + packFrame(); + } + } +} + +static unsigned const rtpHeaderSize = 12; + +Boolean MultiFramedRTPSink::isTooBigForAPacket(unsigned numBytes) 
const { + // Check whether a 'numBytes'-byte frame - together with a RTP header and + // (possible) special headers - would be too big for an output packet: + // (Later allow for RTP extension header!) ##### + numBytes += rtpHeaderSize + specialHeaderSize() + frameSpecificHeaderSize(); + return fOutBuf->isTooBigForAPacket(numBytes); +} + +void MultiFramedRTPSink::sendPacketIfNecessary() { + if (fNumFramesUsedSoFar > 0) { + // Send the packet: +#ifdef TEST_LOSS + if ((our_random()%10) != 0) // simulate 10% packet loss ##### +#endif + if (!fRTPInterface.sendPacket(fOutBuf->packet(), fOutBuf->curPacketSize())) { + // if failure handler has been specified, call it + if (fOnSendErrorFunc != NULL) (*fOnSendErrorFunc)(fOnSendErrorData); + } + ++fPacketCount; + fTotalOctetCount += fOutBuf->curPacketSize(); + fOctetCount += fOutBuf->curPacketSize() + - rtpHeaderSize - fSpecialHeaderSize - fTotalFrameSpecificHeaderSizes; + + ++fSeqNo; // for next time + } + + if (fOutBuf->haveOverflowData() + && fOutBuf->totalBytesAvailable() > fOutBuf->totalBufferSize()/2) { + // Efficiency hack: Reset the packet start pointer to just in front of + // the overflow data (allowing for the RTP header and special headers), + // so that we probably don't have to "memmove()" the overflow data + // into place when building the next packet: + unsigned newPacketStart = fOutBuf->curPacketSize() + - (rtpHeaderSize + fSpecialHeaderSize + frameSpecificHeaderSize()); + fOutBuf->adjustPacketStart(newPacketStart); + } else { + // Normal case: Reset the packet start pointer back to the start: + fOutBuf->resetPacketStart(); + } + fOutBuf->resetOffset(); + fNumFramesUsedSoFar = 0; + + if (fNoFramesLeft) { + // We're done: + onSourceClosure(); + } else { + // We have more frames left to send. Figure out when the next frame + // is due to start playing, then make sure that we wait this long before + // sending the next packet. 
+ struct timeval timeNow; + gettimeofday(&timeNow, NULL); + int secsDiff = fNextSendTime.tv_sec - timeNow.tv_sec; + int64_t uSecondsToGo = secsDiff*1000000 + (fNextSendTime.tv_usec - timeNow.tv_usec); + if (uSecondsToGo < 0 || secsDiff < 0) { // sanity check: Make sure that the time-to-delay is non-negative: + uSecondsToGo = 0; + } + + // Delay this amount of time: + nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecondsToGo, (TaskFunc*)sendNext, this); + } +} + +// The following is called after each delay between packet sends: +void MultiFramedRTPSink::sendNext(void* firstArg) { + MultiFramedRTPSink* sink = (MultiFramedRTPSink*)firstArg; + sink->buildAndSendPacket(False); +} + +void MultiFramedRTPSink::ourHandleClosure(void* clientData) { + MultiFramedRTPSink* sink = (MultiFramedRTPSink*)clientData; + // There are no frames left, but we may have a partially built packet + // to send + sink->fNoFramesLeft = True; + sink->sendPacketIfNecessary(); +} diff --git a/AnyCore/lib_rtsp/liveMedia/MultiFramedRTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/MultiFramedRTPSource.cpp new file mode 100644 index 0000000..c0cf5e9 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/MultiFramedRTPSource.cpp @@ -0,0 +1,627 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP source for a common kind of payload format: Those that pack multiple, +// complete codec frames (as many as possible) into each RTP packet. +// Implementation + +#include "MultiFramedRTPSource.hh" +#include "RTCP.hh" +#include "GroupsockHelper.hh" +#include + +////////// ReorderingPacketBuffer definition ////////// + +class ReorderingPacketBuffer { +public: + ReorderingPacketBuffer(BufferedPacketFactory* packetFactory); + virtual ~ReorderingPacketBuffer(); + void reset(); + + BufferedPacket* getFreePacket(MultiFramedRTPSource* ourSource); + Boolean storePacket(BufferedPacket* bPacket); + BufferedPacket* getNextCompletedPacket(Boolean& packetLossPreceded); + void releaseUsedPacket(BufferedPacket* packet); + void freePacket(BufferedPacket* packet) { + if (packet != fSavedPacket) { + delete packet; + } else { + fSavedPacketFree = True; + } + } + Boolean isEmpty() const { return fHeadPacket == NULL; } + + void setThresholdTime(unsigned uSeconds) { fThresholdTime = uSeconds; } + void resetHaveSeenFirstPacket() { fHaveSeenFirstPacket = False; } + +private: + BufferedPacketFactory* fPacketFactory; + unsigned fThresholdTime; // uSeconds + Boolean fHaveSeenFirstPacket; // used to set initial "fNextExpectedSeqNo" + unsigned short fNextExpectedSeqNo; + BufferedPacket* fHeadPacket; + BufferedPacket* fTailPacket; + BufferedPacket* fSavedPacket; + // to avoid calling new/free in the common case + Boolean fSavedPacketFree; +}; + + +////////// MultiFramedRTPSource implementation ////////// + +MultiFramedRTPSource +::MultiFramedRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency, + 
BufferedPacketFactory* packetFactory) + : RTPSource(env, RTPgs, rtpPayloadFormat, rtpTimestampFrequency) { + reset(); + fReorderingBuffer = new ReorderingPacketBuffer(packetFactory); + + // Try to use a big receive buffer for RTP: + increaseReceiveBufferTo(env, RTPgs->socketNum(), 50*1024); +} + +void MultiFramedRTPSource::reset() { + fCurrentPacketBeginsFrame = True; // by default + fCurrentPacketCompletesFrame = True; // by default + fAreDoingNetworkReads = False; + fPacketReadInProgress = NULL; + fNeedDelivery = False; + fPacketLossInFragmentedFrame = False; +} + +MultiFramedRTPSource::~MultiFramedRTPSource() { + delete fReorderingBuffer; +} + +Boolean MultiFramedRTPSource +::processSpecialHeader(BufferedPacket* /*packet*/, + unsigned& resultSpecialHeaderSize) { + // Default implementation: Assume no special header: + resultSpecialHeaderSize = 0; + return True; +} + +Boolean MultiFramedRTPSource +::packetIsUsableInJitterCalculation(unsigned char* /*packet*/, + unsigned /*packetSize*/) { + // Default implementation: + return True; +} + +void MultiFramedRTPSource::doStopGettingFrames() { + envir().taskScheduler().unscheduleDelayedTask(nextTask()); + fRTPInterface.stopNetworkReading(); + fReorderingBuffer->reset(); + reset(); +} + +void MultiFramedRTPSource::doGetNextFrame() { + if (!fAreDoingNetworkReads) { + // Turn on background read handling of incoming packets: + fAreDoingNetworkReads = True; + TaskScheduler::BackgroundHandlerProc* handler + = (TaskScheduler::BackgroundHandlerProc*)&networkReadHandler; + fRTPInterface.startNetworkReading(handler); + } + + fSavedTo = fTo; + fSavedMaxSize = fMaxSize; + fFrameSize = 0; // for now + fNeedDelivery = True; + doGetNextFrame1(); +} + +void MultiFramedRTPSource::doGetNextFrame1() { + while (fNeedDelivery) { + // If we already have packet data available, then deliver it now. 
+ Boolean packetLossPrecededThis; + BufferedPacket* nextPacket + = fReorderingBuffer->getNextCompletedPacket(packetLossPrecededThis); + if (nextPacket == NULL) break; + + fNeedDelivery = False; + + if (nextPacket->useCount() == 0) { + // Before using the packet, check whether it has a special header + // that needs to be processed: + unsigned specialHeaderSize; + if (!processSpecialHeader(nextPacket, specialHeaderSize)) { + // Something's wrong with the header; reject the packet: + fReorderingBuffer->releaseUsedPacket(nextPacket); + fNeedDelivery = True; + break; + } + nextPacket->skip(specialHeaderSize); + } + + // Check whether we're part of a multi-packet frame, and whether + // there was packet loss that would render this packet unusable: + if (fCurrentPacketBeginsFrame) { + if (packetLossPrecededThis || fPacketLossInFragmentedFrame) { + // We didn't get all of the previous frame. + // Forget any data that we used from it: + fTo = fSavedTo; fMaxSize = fSavedMaxSize; + fFrameSize = 0; + } + fPacketLossInFragmentedFrame = False; + } else if (packetLossPrecededThis) { + // We're in a multi-packet frame, with preceding packet loss + fPacketLossInFragmentedFrame = True; + } + if (fPacketLossInFragmentedFrame) { + // This packet is unusable; reject it: + fReorderingBuffer->releaseUsedPacket(nextPacket); + fNeedDelivery = True; + break; + } + + // The packet is usable. Deliver all or part of it to our caller: + unsigned frameSize; + nextPacket->use(fTo, fMaxSize, frameSize, fNumTruncatedBytes, + fCurPacketRTPSeqNum, fCurPacketRTPTimestamp, + fPresentationTime, fCurPacketHasBeenSynchronizedUsingRTCP, + fCurPacketMarkerBit); + fFrameSize += frameSize; + + if (!nextPacket->hasUsableData()) { + // We're completely done with this packet now + fReorderingBuffer->releaseUsedPacket(nextPacket); + } + + if (fCurrentPacketCompletesFrame) { + // We have all the data that the client wants. 
+ if (fNumTruncatedBytes > 0) { + envir() << "MultiFramedRTPSource::doGetNextFrame1(): The total received frame size exceeds the client's buffer size (" + << fSavedMaxSize << "). " + << fNumTruncatedBytes << " bytes of trailing data will be dropped!\n"; + } + // Call our own 'after getting' function, so that the downstream object can consume the data: + if (fReorderingBuffer->isEmpty()) { + // Common case optimization: There are no more queued incoming packets, so this code will not get + // executed again without having first returned to the event loop. Call our 'after getting' function + // directly, because there's no risk of a long chain of recursion (and thus stack overflow): + afterGetting(this); + } else { + // Special case: Call our 'after getting' function via the event loop. + nextTask() = envir().taskScheduler().scheduleDelayedTask(0, + (TaskFunc*)FramedSource::afterGetting, this); + } + } else { + // This packet contained fragmented data, and does not complete + // the data that the client wants. 
Keep getting data: + fTo += frameSize; fMaxSize -= frameSize; + fNeedDelivery = True; + } + } +} + +void MultiFramedRTPSource +::setPacketReorderingThresholdTime(unsigned uSeconds) { + fReorderingBuffer->setThresholdTime(uSeconds); +} + +#define ADVANCE(n) do { bPacket->skip(n); } while (0) + +void MultiFramedRTPSource::networkReadHandler(MultiFramedRTPSource* source, int /*mask*/) { + source->networkReadHandler1(); +} + +void MultiFramedRTPSource::networkReadHandler1() { + BufferedPacket* bPacket = fPacketReadInProgress; + if (bPacket == NULL) { + // Normal case: Get a free BufferedPacket descriptor to hold the new network packet: + bPacket = fReorderingBuffer->getFreePacket(this); + } + + // Read the network packet, and perform sanity checks on the RTP header: + Boolean readSuccess = False; + do { + struct sockaddr_in fromAddress; + Boolean packetReadWasIncomplete = fPacketReadInProgress != NULL; + if (!bPacket->fillInData(fRTPInterface, fromAddress, packetReadWasIncomplete)) { + if (bPacket->bytesAvailable() == 0) { + envir() << "MultiFramedRTPSource error: Hit limit when reading incoming packet over TCP. 
Increase \"MAX_PACKET_SIZE\"\n"; + } + fPacketReadInProgress = NULL; + break; + } + if (packetReadWasIncomplete) { + // We need additional read(s) before we can process the incoming packet: + fPacketReadInProgress = bPacket; + return; + } else { + fPacketReadInProgress = NULL; + } +#ifdef TEST_LOSS + setPacketReorderingThresholdTime(0); + // don't wait for 'lost' packets to arrive out-of-order later + if ((our_random()%10) == 0) break; // simulate 10% packet loss +#endif + + // Check for the 12-byte RTP header: + if (bPacket->dataSize() < 12) break; + unsigned rtpHdr = ntohl(*(u_int32_t*)(bPacket->data())); ADVANCE(4); + Boolean rtpMarkerBit = (rtpHdr&0x00800000) != 0; + unsigned rtpTimestamp = ntohl(*(u_int32_t*)(bPacket->data()));ADVANCE(4); + unsigned rtpSSRC = ntohl(*(u_int32_t*)(bPacket->data())); ADVANCE(4); + + // Check the RTP version number (it should be 2): + if ((rtpHdr&0xC0000000) != 0x80000000) break; + + // Check the Payload Type. + unsigned char rtpPayloadType = (unsigned char)((rtpHdr&0x007F0000)>>16); + if (rtpPayloadType != rtpPayloadFormat()) { + if (fRTCPInstanceForMultiplexedRTCPPackets != NULL + && rtpPayloadType >= 64 && rtpPayloadType <= 95) { + // This is a multiplexed RTCP packet, and we've been asked to deliver such packets. 
+ // Do so now: + fRTCPInstanceForMultiplexedRTCPPackets + ->injectReport(bPacket->data()-12, bPacket->dataSize()+12, fromAddress); + } + break; + } + + // Skip over any CSRC identifiers in the header: + unsigned cc = (rtpHdr>>24)&0xF; + if (bPacket->dataSize() < cc) break; + ADVANCE(cc*4); + + // Check for (& ignore) any RTP header extension + if (rtpHdr&0x10000000) { + if (bPacket->dataSize() < 4) break; + unsigned extHdr = ntohl(*(u_int32_t*)(bPacket->data())); ADVANCE(4); + unsigned remExtSize = 4*(extHdr&0xFFFF); + if (bPacket->dataSize() < remExtSize) break; + ADVANCE(remExtSize); + } + + // Discard any padding bytes: + if (rtpHdr&0x20000000) { + if (bPacket->dataSize() == 0) break; + unsigned numPaddingBytes + = (unsigned)(bPacket->data())[bPacket->dataSize()-1]; + if (bPacket->dataSize() < numPaddingBytes) break; + bPacket->removePadding(numPaddingBytes); + } + + // The rest of the packet is the usable data. Record and save it: + if (rtpSSRC != fLastReceivedSSRC) { + // The SSRC of incoming packets has changed. 
Unfortunately we don't yet handle streams that contain multiple SSRCs, + // but we can handle a single-SSRC stream where the SSRC changes occasionally: + fLastReceivedSSRC = rtpSSRC; + fReorderingBuffer->resetHaveSeenFirstPacket(); + } + unsigned short rtpSeqNo = (unsigned short)(rtpHdr&0xFFFF); + Boolean usableInJitterCalculation + = packetIsUsableInJitterCalculation((bPacket->data()), + bPacket->dataSize()); + struct timeval presentationTime; // computed by: + Boolean hasBeenSyncedUsingRTCP; // computed by: + receptionStatsDB() + .noteIncomingPacket(rtpSSRC, rtpSeqNo, rtpTimestamp, + timestampFrequency(), + usableInJitterCalculation, presentationTime, + hasBeenSyncedUsingRTCP, bPacket->dataSize()); + + // Fill in the rest of the packet descriptor, and store it: + struct timeval timeNow; + gettimeofday(&timeNow, NULL); + bPacket->assignMiscParams(rtpSeqNo, rtpTimestamp, presentationTime, + hasBeenSyncedUsingRTCP, rtpMarkerBit, + timeNow); + if (!fReorderingBuffer->storePacket(bPacket)) break; + + readSuccess = True; + } while (0); + if (!readSuccess) fReorderingBuffer->freePacket(bPacket); + + doGetNextFrame1(); + // If we didn't get proper data this time, we'll get another chance +} + + +////////// BufferedPacket and BufferedPacketFactory implementation ///// + +#define MAX_PACKET_SIZE 20000 + +BufferedPacket::BufferedPacket() +: fPacketSize(MAX_PACKET_SIZE), +fBuf(new unsigned char[MAX_PACKET_SIZE]), +fNextPacket(NULL) { +} + +BufferedPacket::~BufferedPacket() { + delete fNextPacket; + delete[] fBuf; +} + +void BufferedPacket::reset() { + fHead = fTail = 0; + fUseCount = 0; + fIsFirstPacket = False; // by default +} + +// The following function has been deprecated: +unsigned BufferedPacket +::nextEnclosedFrameSize(unsigned char*& /*framePtr*/, unsigned dataSize) { + // By default, use the entire buffered data, even though it may consist + // of more than one frame, on the assumption that the client doesn't + // care. 
(This is more efficient than delivering a frame at a time) + return dataSize; +} + +void BufferedPacket +::getNextEnclosedFrameParameters(unsigned char*& framePtr, unsigned dataSize, + unsigned& frameSize, + unsigned& frameDurationInMicroseconds) { + // By default, use the entire buffered data, even though it may consist + // of more than one frame, on the assumption that the client doesn't + // care. (This is more efficient than delivering a frame at a time) + + // For backwards-compatibility with existing uses of (the now deprecated) + // "nextEnclosedFrameSize()", call that function to implement this one: + frameSize = nextEnclosedFrameSize(framePtr, dataSize); + + frameDurationInMicroseconds = 0; // by default. Subclasses should correct this. +} + +Boolean BufferedPacket::fillInData(RTPInterface& rtpInterface, struct sockaddr_in& fromAddress, + Boolean& packetReadWasIncomplete) { + if (!packetReadWasIncomplete) reset(); + + unsigned const maxBytesToRead = bytesAvailable(); + if (maxBytesToRead == 0) return False; // exceeded buffer size when reading over TCP + + unsigned numBytesRead; + int tcpSocketNum; // not used + unsigned char tcpStreamChannelId; // not used + if (!rtpInterface.handleRead(&fBuf[fTail], maxBytesToRead, + numBytesRead, fromAddress, + tcpSocketNum, tcpStreamChannelId, + packetReadWasIncomplete)) { + return False; + } + fTail += numBytesRead; + return True; +} + +void BufferedPacket +::assignMiscParams(unsigned short rtpSeqNo, unsigned rtpTimestamp, +struct timeval presentationTime, + Boolean hasBeenSyncedUsingRTCP, Boolean rtpMarkerBit, +struct timeval timeReceived) { + fRTPSeqNo = rtpSeqNo; + fRTPTimestamp = rtpTimestamp; + fPresentationTime = presentationTime; + fHasBeenSyncedUsingRTCP = hasBeenSyncedUsingRTCP; + fRTPMarkerBit = rtpMarkerBit; + fTimeReceived = timeReceived; +} + +void BufferedPacket::skip(unsigned numBytes) { + fHead += numBytes; + if (fHead > fTail) fHead = fTail; +} + +void BufferedPacket::removePadding(unsigned numBytes) 
{ + if (numBytes > fTail-fHead) numBytes = fTail-fHead; + fTail -= numBytes; +} + +void BufferedPacket::appendData(unsigned char* newData, unsigned numBytes) { + if (numBytes > fPacketSize-fTail) numBytes = fPacketSize - fTail; + memmove(&fBuf[fTail], newData, numBytes); + fTail += numBytes; +} + +void BufferedPacket::use(unsigned char* to, unsigned toSize, + unsigned& bytesUsed, unsigned& bytesTruncated, + unsigned short& rtpSeqNo, unsigned& rtpTimestamp, +struct timeval& presentationTime, + Boolean& hasBeenSyncedUsingRTCP, + Boolean& rtpMarkerBit) { + unsigned char* origFramePtr = &fBuf[fHead]; + unsigned char* newFramePtr = origFramePtr; // may change in the call below + unsigned frameSize, frameDurationInMicroseconds; + getNextEnclosedFrameParameters(newFramePtr, fTail - fHead, + frameSize, frameDurationInMicroseconds); + if (frameSize > toSize) { + bytesTruncated += frameSize - toSize; + bytesUsed = toSize; + } else { + bytesTruncated = 0; + bytesUsed = frameSize; + } + + memmove(to, newFramePtr, bytesUsed); + fHead += (newFramePtr - origFramePtr) + frameSize; + ++fUseCount; + + rtpSeqNo = fRTPSeqNo; + rtpTimestamp = fRTPTimestamp; + presentationTime = fPresentationTime; + hasBeenSyncedUsingRTCP = fHasBeenSyncedUsingRTCP; + rtpMarkerBit = fRTPMarkerBit; + + // Update "fPresentationTime" for the next enclosed frame (if any): + fPresentationTime.tv_usec += frameDurationInMicroseconds; + if (fPresentationTime.tv_usec >= 1000000) { + fPresentationTime.tv_sec += fPresentationTime.tv_usec/1000000; + fPresentationTime.tv_usec = fPresentationTime.tv_usec%1000000; + } +} + +BufferedPacketFactory::BufferedPacketFactory() { +} + +BufferedPacketFactory::~BufferedPacketFactory() { +} + +BufferedPacket* BufferedPacketFactory +::createNewPacket(MultiFramedRTPSource* /*ourSource*/) { + return new BufferedPacket; +} + + +////////// ReorderingPacketBuffer implementation ////////// + +ReorderingPacketBuffer +::ReorderingPacketBuffer(BufferedPacketFactory* packetFactory) +: 
fThresholdTime(100000) /* default reordering threshold: 100 ms */, +fHaveSeenFirstPacket(False), fHeadPacket(NULL), fTailPacket(NULL), fSavedPacket(NULL), fSavedPacketFree(True) { + fPacketFactory = (packetFactory == NULL) + ? (new BufferedPacketFactory) + : packetFactory; +} + +ReorderingPacketBuffer::~ReorderingPacketBuffer() { + reset(); + delete fPacketFactory; +} + +void ReorderingPacketBuffer::reset() { + if (fSavedPacketFree) delete fSavedPacket; // because fSavedPacket is not in the list + delete fHeadPacket; // will also delete fSavedPacket if it's in the list + resetHaveSeenFirstPacket(); + fHeadPacket = fTailPacket = fSavedPacket = NULL; +} + +BufferedPacket* ReorderingPacketBuffer::getFreePacket(MultiFramedRTPSource* ourSource) { + if (fSavedPacket == NULL) { // we're being called for the first time + fSavedPacket = fPacketFactory->createNewPacket(ourSource); + fSavedPacketFree = True; + } + + if (fSavedPacketFree == True) { + fSavedPacketFree = False; + return fSavedPacket; + } else { + return fPacketFactory->createNewPacket(ourSource); + } +} + +Boolean ReorderingPacketBuffer::storePacket(BufferedPacket* bPacket) { + unsigned short rtpSeqNo = bPacket->rtpSeqNo(); + + if (!fHaveSeenFirstPacket) { + fNextExpectedSeqNo = rtpSeqNo; // initialization + bPacket->isFirstPacket() = True; + fHaveSeenFirstPacket = True; + } + + // Ignore this packet if its sequence number is less than the one + // that we're looking for (in this case, it's been excessively delayed). 
+ if (seqNumLT(rtpSeqNo, fNextExpectedSeqNo)) return False; + + if (fTailPacket == NULL) { + // Common case: There are no packets in the queue; this will be the first one: + bPacket->nextPacket() = NULL; + fHeadPacket = fTailPacket = bPacket; + return True; + } + + if (seqNumLT(fTailPacket->rtpSeqNo(), rtpSeqNo)) { + // The next-most common case: There are packets already in the queue; this packet arrived in order => put it at the tail: + bPacket->nextPacket() = NULL; + fTailPacket->nextPacket() = bPacket; + fTailPacket = bPacket; + return True; + } + + if (rtpSeqNo == fTailPacket->rtpSeqNo()) { + // This is a duplicate packet - ignore it + return False; + } + + // Rare case: This packet is out-of-order. Run through the list (from the head), to figure out where it belongs: + BufferedPacket* beforePtr = NULL; + BufferedPacket* afterPtr = fHeadPacket; + while (afterPtr != NULL) { + if (seqNumLT(rtpSeqNo, afterPtr->rtpSeqNo())) break; // it comes here + if (rtpSeqNo == afterPtr->rtpSeqNo()) { + // This is a duplicate packet - ignore it + return False; + } + + beforePtr = afterPtr; + afterPtr = afterPtr->nextPacket(); + } + + // Link our new packet between "beforePtr" and "afterPtr": + bPacket->nextPacket() = afterPtr; + if (beforePtr == NULL) { + fHeadPacket = bPacket; + } else { + beforePtr->nextPacket() = bPacket; + } + + return True; +} + +void ReorderingPacketBuffer::releaseUsedPacket(BufferedPacket* packet) { + // ASSERT: packet == fHeadPacket + // ASSERT: fNextExpectedSeqNo == packet->rtpSeqNo() + ++fNextExpectedSeqNo; // because we're finished with this packet now + + fHeadPacket = fHeadPacket->nextPacket(); + if (!fHeadPacket) { + fTailPacket = NULL; + } + packet->nextPacket() = NULL; + + freePacket(packet); +} + +BufferedPacket* ReorderingPacketBuffer +::getNextCompletedPacket(Boolean& packetLossPreceded) { + if (fHeadPacket == NULL) return NULL; + + // Check whether the next packet we want is already at the head + // of the queue: + // ASSERT: 
fHeadPacket->rtpSeqNo() >= fNextExpectedSeqNo + if (fHeadPacket->rtpSeqNo() == fNextExpectedSeqNo) { + packetLossPreceded = fHeadPacket->isFirstPacket(); + // (The very first packet is treated as if there was packet loss beforehand.) + return fHeadPacket; + } + + // We're still waiting for our desired packet to arrive. However, if + // our time threshold has been exceeded, then forget it, and return + // the head packet instead: + Boolean timeThresholdHasBeenExceeded; + if (fThresholdTime == 0) { + timeThresholdHasBeenExceeded = True; // optimization + } else { + struct timeval timeNow; + gettimeofday(&timeNow, NULL); + unsigned uSecondsSinceReceived + = (timeNow.tv_sec - fHeadPacket->timeReceived().tv_sec)*1000000 + + (timeNow.tv_usec - fHeadPacket->timeReceived().tv_usec); + timeThresholdHasBeenExceeded = uSecondsSinceReceived > fThresholdTime; + } + if (timeThresholdHasBeenExceeded) { + fNextExpectedSeqNo = fHeadPacket->rtpSeqNo(); + // we've given up on earlier packets now + packetLossPreceded = True; + return fHeadPacket; + } + + // Otherwise, keep waiting for our desired packet to arrive: + return NULL; +} diff --git a/AnyCore/lib_rtsp/liveMedia/OggDemuxedTrack.cpp b/AnyCore/lib_rtsp/liveMedia/OggDemuxedTrack.cpp new file mode 100644 index 0000000..e6ea8d5 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/OggDemuxedTrack.cpp @@ -0,0 +1,43 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A media track, demultiplexed from an Ogg file +// Implementation + +#include "OggDemuxedTrack.hh" +#include "OggFile.hh" + +OggDemuxedTrack::OggDemuxedTrack(UsageEnvironment& env, unsigned trackNumber, OggDemux& sourceDemux) + : FramedSource(env), + fOurTrackNumber(trackNumber), fOurSourceDemux(sourceDemux), + fCurrentPageIsContinuation(False) { + fNextPresentationTime.tv_sec = 0; fNextPresentationTime.tv_usec = 0; +} + +OggDemuxedTrack::~OggDemuxedTrack() { + fOurSourceDemux.removeTrack(fOurTrackNumber); +} + +void OggDemuxedTrack::doGetNextFrame() { + fOurSourceDemux.continueReading(); +} + +char const* OggDemuxedTrack::MIMEtype() const { + OggTrack* track = fOurSourceDemux.fOurFile.lookup(fOurTrackNumber); + if (track == NULL) return "(unknown)"; // shouldn't happen + return track->mimeType; +} diff --git a/AnyCore/lib_rtsp/liveMedia/OggDemuxedTrack.hh b/AnyCore/lib_rtsp/liveMedia/OggDemuxedTrack.hh new file mode 100644 index 0000000..b74fe31 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/OggDemuxedTrack.hh @@ -0,0 +1,58 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A media track, demultiplexed from an Ogg file +// C++ header + +#ifndef _OGG_DEMUXED_TRACK_HH +#define _OGG_DEMUXED_TRACK_HH + +#ifndef _FRAMED_SOURCE_HH +#include "FramedSource.hh" +#endif + +class OggDemux; // forward + +class OggDemuxedTrack: public FramedSource { +private: // We are created only by a OggDemux (a friend) + friend class OggDemux; + OggDemuxedTrack(UsageEnvironment& env, unsigned trackNumber, OggDemux& sourceDemux); + virtual ~OggDemuxedTrack(); + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + virtual char const* MIMEtype() const; + +private: // We are accessed only by OggDemux and by OggFileParser (a friend) + friend class OggFileParser; + unsigned char*& to() { return fTo; } + unsigned& maxSize() { return fMaxSize; } + unsigned& frameSize() { return fFrameSize; } + unsigned& numTruncatedBytes() { return fNumTruncatedBytes; } + struct timeval& presentationTime() { return fPresentationTime; } + unsigned& durationInMicroseconds() { return fDurationInMicroseconds; } + struct timeval& nextPresentationTime() { return fNextPresentationTime; } + +private: + unsigned fOurTrackNumber; + OggDemux& fOurSourceDemux; + Boolean fCurrentPageIsContinuation; + struct timeval fNextPresentationTime; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/OggFile.cpp b/AnyCore/lib_rtsp/liveMedia/OggFile.cpp new file mode 100644 index 0000000..54ac998 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/OggFile.cpp @@ -0,0 +1,328 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either 
version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A class that encapsulates an Ogg file. +// Implementation + +#include "OggFileParser.hh" +#include "OggDemuxedTrack.hh" +#include "ByteStreamFileSource.hh" +#include "VorbisAudioRTPSink.hh" +#include "SimpleRTPSink.hh" +#include "TheoraVideoRTPSink.hh" + +////////// OggTrackTable definition ///////// + +// For looking up and iterating over the file's tracks: + +class OggTrackTable { +public: + OggTrackTable(); + virtual ~OggTrackTable(); + + void add(OggTrack* newTrack); + OggTrack* lookup(u_int32_t trackNumber); + + unsigned numTracks() const; + +private: + friend class OggTrackTableIterator; + HashTable* fTable; +}; + + +////////// OggFile implementation ////////// + +void OggFile::createNew(UsageEnvironment& env, char const* fileName, + onCreationFunc* onCreation, void* onCreationClientData) { + new OggFile(env, fileName, onCreation, onCreationClientData); +} + +OggTrack* OggFile::lookup(u_int32_t trackNumber) { + return fTrackTable->lookup(trackNumber); +} + +OggDemux* OggFile::newDemux() { + OggDemux* demux = new OggDemux(*this); + fDemuxesTable->Add((char const*)demux, demux); + + return demux; +} + +unsigned OggFile::numTracks() const { + return fTrackTable->numTracks(); +} + +FramedSource* OggFile +::createSourceForStreaming(FramedSource* baseSource, u_int32_t trackNumber, + unsigned& estBitrate, unsigned& 
numFiltersInFrontOfTrack) { + if (baseSource == NULL) return NULL; + + FramedSource* result = baseSource; // by default + numFiltersInFrontOfTrack = 0; // by default + + // Look at the track's MIME type to set its estimated bitrate (for use by RTCP). + // (Later, try to be smarter about figuring out the bitrate.) ##### + // Some MIME types also require adding a special 'framer' in front of the source. + OggTrack* track = lookup(trackNumber); + if (track != NULL) { // should always be true + estBitrate = track->estBitrate; + } + + return result; +} + +RTPSink* OggFile +::createRTPSinkForTrackNumber(u_int32_t trackNumber, Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic) { + OggTrack* track = lookup(trackNumber); + if (track == NULL || track->mimeType == NULL) return NULL; + + RTPSink* result = NULL; // default value for unknown media types + + if (strcmp(track->mimeType, "audio/VORBIS") == 0) { + // For Vorbis audio, we use the special "identification", "comment", and "setup" headers + // that we read when we initially read the headers at the start of the file: + result = VorbisAudioRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + track->samplingFrequency, track->numChannels, + track->vtoHdrs.header[0], track->vtoHdrs.headerSize[0], + track->vtoHdrs.header[1], track->vtoHdrs.headerSize[1], + track->vtoHdrs.header[2], track->vtoHdrs.headerSize[2]); + } else if (strcmp(track->mimeType, "audio/OPUS") == 0) { + result = SimpleRTPSink + ::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + 48000, "audio", "OPUS", 2, False/*only 1 Opus 'packet' in each RTP packet*/); + } else if (strcmp(track->mimeType, "video/THEORA") == 0) { + // For Theora video, we use the special "identification", "comment", and "setup" headers + // that we read when we initially read the headers at the start of the file: + result = TheoraVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + track->vtoHdrs.header[0], 
track->vtoHdrs.headerSize[0], + track->vtoHdrs.header[1], track->vtoHdrs.headerSize[1], + track->vtoHdrs.header[2], track->vtoHdrs.headerSize[2]); + } + + return result; +} + + +OggFile::OggFile(UsageEnvironment& env, char const* fileName, + onCreationFunc* onCreation, void* onCreationClientData) + : Medium(env), + fFileName(strDup(fileName)), + fOnCreation(onCreation), fOnCreationClientData(onCreationClientData) { + fTrackTable = new OggTrackTable; + fDemuxesTable = HashTable::create(ONE_WORD_HASH_KEYS); + + FramedSource* inputSource = ByteStreamFileSource::createNew(envir(), fileName); + if (inputSource == NULL) { + // The specified input file does not exist! + fParserForInitialization = NULL; + handleEndOfBosPageParsing(); // we have no file, and thus no tracks, but we still need to signal this + } else { + // Initialize ourselves by parsing the file's headers: + fParserForInitialization + = new OggFileParser(*this, inputSource, handleEndOfBosPageParsing, this); + } +} + +OggFile::~OggFile() { + delete fParserForInitialization; + + // Delete any outstanding "OggDemux"s, and the table for them: + OggDemux* demux; + while ((demux = (OggDemux*)fDemuxesTable->RemoveNext()) != NULL) { + delete demux; + } + delete fDemuxesTable; + delete fTrackTable; + + delete[] (char*)fFileName; +} + +void OggFile::handleEndOfBosPageParsing(void* clientData) { + ((OggFile*)clientData)->handleEndOfBosPageParsing(); +} + +void OggFile::handleEndOfBosPageParsing() { + // Delete our parser, because it's done its job now: + delete fParserForInitialization; fParserForInitialization = NULL; + + // Finally, signal our caller that we've been created and initialized: + if (fOnCreation != NULL) (*fOnCreation)(this, fOnCreationClientData); +} + +void OggFile::addTrack(OggTrack* newTrack) { + fTrackTable->add(newTrack); +} + +void OggFile::removeDemux(OggDemux* demux) { + fDemuxesTable->Remove((char const*)demux); +} + + +////////// OggTrackTable implementation ///////// + 
+OggTrackTable::OggTrackTable() + : fTable(HashTable::create(ONE_WORD_HASH_KEYS)) { +} + +OggTrackTable::~OggTrackTable() { + // Remove and delete all of our "OggTrack" descriptors, and the hash table itself: + OggTrack* track; + while ((track = (OggTrack*)fTable->RemoveNext()) != NULL) { + delete track; + } + delete fTable; +} + +void OggTrackTable::add(OggTrack* newTrack) { + OggTrack* existingTrack + = (OggTrack*)fTable->Add((char const*)newTrack->trackNumber, newTrack); + delete existingTrack; // if any +} + +OggTrack* OggTrackTable::lookup(u_int32_t trackNumber) { + return (OggTrack*)fTable->Lookup((char const*)trackNumber); +} + +unsigned OggTrackTable::numTracks() const { return fTable->numEntries(); } + +OggTrackTableIterator::OggTrackTableIterator(OggTrackTable& ourTable) { + fIter = HashTable::Iterator::create(*(ourTable.fTable)); +} + +OggTrackTableIterator::~OggTrackTableIterator() { + delete fIter; +} + +OggTrack* OggTrackTableIterator::next() { + char const* key; + return (OggTrack*)fIter->next(key); +} + + +////////// OggTrack implementation ////////// + +OggTrack::OggTrack() + : trackNumber(0), mimeType(NULL), + samplingFrequency(48000), numChannels(2), estBitrate(100) { // default settings + vtoHdrs.header[0] = vtoHdrs.header[1] = vtoHdrs.header[2] = NULL; + vtoHdrs.headerSize[0] = vtoHdrs.headerSize[1] = vtoHdrs.headerSize[2] = 0; + + vtoHdrs.vorbis_mode_count = 0; + vtoHdrs.vorbis_mode_blockflag = NULL; +} + +OggTrack::~OggTrack() { + delete[] vtoHdrs.header[0]; delete[] vtoHdrs.header[1]; delete[] vtoHdrs.header[2]; + delete[] vtoHdrs.vorbis_mode_blockflag; +} + + +///////// OggDemux implementation ///////// + +FramedSource* OggDemux::newDemuxedTrack(u_int32_t& resultTrackNumber) { + OggTrack* nextTrack; + do { + nextTrack = fIter->next(); + } while (nextTrack != NULL && nextTrack->mimeType == NULL); + + if (nextTrack == NULL) { // no more tracks + resultTrackNumber = 0; + return NULL; + } + + resultTrackNumber = nextTrack->trackNumber; + 
FramedSource* trackSource = new OggDemuxedTrack(envir(), resultTrackNumber, *this); + fDemuxedTracksTable->Add((char const*)resultTrackNumber, trackSource); + return trackSource; +} + +FramedSource* OggDemux::newDemuxedTrackByTrackNumber(unsigned trackNumber) { + if (trackNumber == 0) return NULL; + + FramedSource* trackSource = new OggDemuxedTrack(envir(), trackNumber, *this); + fDemuxedTracksTable->Add((char const*)trackNumber, trackSource); + return trackSource; +} + +OggDemuxedTrack* OggDemux::lookupDemuxedTrack(u_int32_t trackNumber) { + return (OggDemuxedTrack*)fDemuxedTracksTable->Lookup((char const*)trackNumber); +} + +OggDemux::OggDemux(OggFile& ourFile) + : Medium(ourFile.envir()), + fOurFile(ourFile), fDemuxedTracksTable(HashTable::create(ONE_WORD_HASH_KEYS)), + fIter(new OggTrackTableIterator(*fOurFile.fTrackTable)) { + FramedSource* fileSource = ByteStreamFileSource::createNew(envir(), ourFile.fileName()); + fOurParser = new OggFileParser(ourFile, fileSource, handleEndOfFile, this, this); +} + +OggDemux::~OggDemux() { + // Begin by acting as if we've reached the end of the source file. + // This should cause all of our demuxed tracks to get closed. + handleEndOfFile(); + + // Then delete our table of "OggDemuxedTrack"s + // - but not the "OggDemuxedTrack"s themselves; that should have already happened: + delete fDemuxedTracksTable; + + delete fIter; + delete fOurParser; + fOurFile.removeDemux(this); +} + +void OggDemux::removeTrack(u_int32_t trackNumber) { + fDemuxedTracksTable->Remove((char const*)trackNumber); + if (fDemuxedTracksTable->numEntries() == 0) { + // We no longer have any demuxed tracks, so delete ourselves now: + delete this; + } +} + +void OggDemux::continueReading() { + fOurParser->continueParsing(); +} + +void OggDemux::handleEndOfFile(void* clientData) { + ((OggDemux*)clientData)->handleEndOfFile(); +} + +void OggDemux::handleEndOfFile() { + // Iterate through all of our 'demuxed tracks', handling 'end of input' on each one. 
+ // Hack: Because this can cause the hash table to get modified underneath us, + // we don't call the handlers until after we've first iterated through all of the tracks. + unsigned numTracks = fDemuxedTracksTable->numEntries(); + if (numTracks == 0) return; + OggDemuxedTrack** tracks = new OggDemuxedTrack*[numTracks]; + + HashTable::Iterator* iter = HashTable::Iterator::create(*fDemuxedTracksTable); + unsigned i; + char const* trackNumber; + + for (i = 0; i < numTracks; ++i) { + tracks[i] = (OggDemuxedTrack*)iter->next(trackNumber); + } + delete iter; + + for (i = 0; i < numTracks; ++i) { + if (tracks[i] == NULL) continue; // sanity check; shouldn't happen + tracks[i]->handleClosure(); + } + + delete[] tracks; +} diff --git a/AnyCore/lib_rtsp/liveMedia/OggFileParser.cpp b/AnyCore/lib_rtsp/liveMedia/OggFileParser.cpp new file mode 100644 index 0000000..237045f --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/OggFileParser.cpp @@ -0,0 +1,1029 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A parser for an Ogg file. 
+// Implementation + +#include "OggFileParser.hh" +#include "OggDemuxedTrack.hh" +#include // for "gettimeofday() + +PacketSizeTable::PacketSizeTable(unsigned number_page_segments) + : numCompletedPackets(0), totSizes(0), nextPacketNumToDeliver(0), + lastPacketIsIncomplete(False) { + size = new unsigned[number_page_segments]; + for (unsigned i = 0; i < number_page_segments; ++i) size[i] = 0; +} + +PacketSizeTable::~PacketSizeTable() { + delete[] size; +} + +OggFileParser::OggFileParser(OggFile& ourFile, FramedSource* inputSource, + FramedSource::onCloseFunc* onEndFunc, void* onEndClientData, + OggDemux* ourDemux) + : StreamParser(inputSource, onEndFunc, onEndClientData, continueParsing, this), + fOurFile(ourFile), fInputSource(inputSource), + fOnEndFunc(onEndFunc), fOnEndClientData(onEndClientData), + fOurDemux(ourDemux), fNumUnfulfilledTracks(0), + fPacketSizeTable(NULL), fCurrentTrackNumber(0), fSavedPacket(NULL) { + if (ourDemux == NULL) { + // Initialization + fCurrentParseState = PARSING_START_OF_FILE; + continueParsing(); + } else { + fCurrentParseState = PARSING_AND_DELIVERING_PAGES; + // In this case, parsing (of page data) doesn't start until a client starts reading from a track. + } +} + +OggFileParser::~OggFileParser() { + delete[] fSavedPacket; + delete fPacketSizeTable; + Medium::close(fInputSource); +} + +void OggFileParser::continueParsing(void* clientData, unsigned char* ptr, unsigned size, struct timeval presentationTime) { + ((OggFileParser*)clientData)->continueParsing(); +} + +void OggFileParser::continueParsing() { + if (fInputSource != NULL) { + if (fInputSource->isCurrentlyAwaitingData()) return; + // Our input source is currently being read. Wait until that read completes + + if (!parse()) { + // We didn't complete the parsing, because we had to read more data from the source, + // or because we're waiting for another read from downstream. + // Once that happens, we'll get called again. 
+ return; + } + } + + // We successfully parsed the file. Call our 'done' function now: + if (fOnEndFunc != NULL) (*fOnEndFunc)(fOnEndClientData); +} + +Boolean OggFileParser::parse() { + try { + while (1) { + switch (fCurrentParseState) { + case PARSING_START_OF_FILE: { + if (parseStartOfFile()) return True; + } + case PARSING_AND_DELIVERING_PAGES: { + parseAndDeliverPages(); + } + case DELIVERING_PACKET_WITHIN_PAGE: { + if (deliverPacketWithinPage()) return False; + } + } + } + } catch (int /*e*/) { +#ifdef DEBUG + fprintf(stderr, "OggFileParser::parse() EXCEPTION (This is normal behavior - *not* an error)\n"); +#endif + return False; // the parsing got interrupted + } +} + +Boolean OggFileParser::parseStartOfFile() { +#ifdef DEBUG + fprintf(stderr, "parsing start of file\n"); +#endif + // Read and parse each 'page', until we see the first non-BOS page, or until we have + // collected all required headers for Vorbis, Theora, or Opus track(s) (if any). + u_int8_t header_type_flag; + do { + header_type_flag = parseInitialPage(); + } while ((header_type_flag&0x02) != 0 || needHeaders()); + +#ifdef DEBUG + fprintf(stderr, "Finished parsing start of file\n"); +#endif + return True; +} + +static u_int32_t byteSwap(u_int32_t x) { + return (x<<24)|((x<<8)&0x00FF0000)|((x>>8)&0x0000FF00)|(x>>24); +} + +u_int8_t OggFileParser::parseInitialPage() { + u_int8_t header_type_flag; + u_int32_t bitstream_serial_number; + parseStartOfPage(header_type_flag, bitstream_serial_number); + + // If this is a BOS page, examine the first 8 bytes of the first 'packet', to see whether + // the track data type is one that we know how to stream: + OggTrack* track; + if ((header_type_flag&0x02) != 0) { // BOS + char const* mimeType = NULL; // if unknown + if (fPacketSizeTable != NULL && fPacketSizeTable->size[0] >= 8) { // sanity check + char buf[8]; + testBytes((u_int8_t*)buf, 8); + + if (strncmp(&buf[1], "vorbis", 6) == 0) { + mimeType = "audio/VORBIS"; + ++fNumUnfulfilledTracks; + } else if 
(strncmp(buf, "OpusHead", 8) == 0) { + mimeType = "audio/OPUS"; + ++fNumUnfulfilledTracks; + } else if (strncmp(&buf[1], "theora", 6) == 0) { + mimeType = "video/THEORA"; + ++fNumUnfulfilledTracks; + } + } + + // Add a new track descriptor for this track: + track = new OggTrack; + track->trackNumber = bitstream_serial_number; + track->mimeType = mimeType; + fOurFile.addTrack(track); + } else { // not a BOS page + // Because this is not a BOS page, the specified track should already have been seen: + track = fOurFile.lookup(bitstream_serial_number); + } + + if (track != NULL) { // sanity check +#ifdef DEBUG + fprintf(stderr, "This track's MIME type: %s\n", + track->mimeType == NULL ? "(unknown)" : track->mimeType); +#endif + if (track->mimeType != NULL && + (strcmp(track->mimeType, "audio/VORBIS") == 0 || + strcmp(track->mimeType, "video/THEORA") == 0 || + strcmp(track->mimeType, "audio/OPUS") == 0)) { + // Special-case handling of Vorbis, Theora, or Opus tracks: + // Make a copy of each packet, until we get the three special headers that we need: + Boolean isVorbis = strcmp(track->mimeType, "audio/VORBIS") == 0; + Boolean isTheora = strcmp(track->mimeType, "video/THEORA") == 0; + + for (unsigned j = 0; j < fPacketSizeTable->numCompletedPackets && track->weNeedHeaders(); ++j) { + unsigned const packetSize = fPacketSizeTable->size[j]; + if (packetSize == 0) continue; // sanity check + + delete[] fSavedPacket/*if any*/; fSavedPacket = new u_int8_t[packetSize]; + getBytes(fSavedPacket, packetSize); + fPacketSizeTable->totSizes -= packetSize; + + // The start of the packet tells us whether its a header that we know about: + Boolean headerIsKnown = False; + unsigned index = 0; + if (isVorbis) { + u_int8_t const firstByte = fSavedPacket[0]; + + headerIsKnown = firstByte == 1 || firstByte == 3 || firstByte == 5; + index = (firstByte-1)/2; // 1, 3, or 5 => 0, 1, or 2 + } else if (isTheora) { + u_int8_t const firstByte = fSavedPacket[0]; + + headerIsKnown = firstByte == 0x80 
|| firstByte == 0x81 || firstByte == 0x82; + index = firstByte &~0x80; // 0x80, 0x81, or 0x82 => 0, 1, or 2 + } else { // Opus + if (strncmp((char const*)fSavedPacket, "OpusHead", 8) == 0) { + headerIsKnown = True; + index = 0; // "identification" header + } else if (strncmp((char const*)fSavedPacket, "OpusTags", 8) == 0) { + headerIsKnown = True; + index = 1; // "comment" header + } + } + if (headerIsKnown) { +#ifdef DEBUG + char const* headerName[3] = { "identification", "comment", "setup" }; + fprintf(stderr, "Saved %d-byte %s \"%s\" header\n", packetSize, track->mimeType, + headerName[index]); +#endif + // This is a header, but first check it for validity: + if (!validateHeader(track, fSavedPacket, packetSize)) continue; + + // Save this header (deleting any old header of the same type that we'd saved before) + delete[] track->vtoHdrs.header[index]; + track->vtoHdrs.header[index] = fSavedPacket; + fSavedPacket = NULL; + track->vtoHdrs.headerSize[index] = packetSize; + + if (!track->weNeedHeaders()) { + // We now have all of the needed Vorbis, Theora, or Opus headers for this track: + --fNumUnfulfilledTracks; + } + // Note: The above code won't work if a required header is fragmented over + // more than one 'page'. We assume that that won't ever happen... + } + } + } + } + + // Skip over any remaining packet data bytes: + if (fPacketSizeTable->totSizes > 0) { +#ifdef DEBUG + fprintf(stderr, "Skipping %d remaining packet data bytes\n", fPacketSizeTable->totSizes); +#endif + skipBytes(fPacketSizeTable->totSizes); + } + + return header_type_flag; +} + +// A simple bit vector class for reading bits in little-endian order. +// (We can't use our usual "BitVector" class, because that's big-endian.) 
+class LEBitVector { +public: + LEBitVector(u_int8_t const* p, unsigned numBytes) + : fPtr(p), fEnd(&p[numBytes]), fNumBitsRemainingInCurrentByte(8) { + } + + u_int32_t getBits(unsigned numBits/*<=32*/) { + if (noMoreBits()) { + return 0; + } else if (numBits == fNumBitsRemainingInCurrentByte) { + u_int32_t result = (*fPtr++)>>(8-fNumBitsRemainingInCurrentByte); + fNumBitsRemainingInCurrentByte = 8; + + return result; + } else if (numBits < fNumBitsRemainingInCurrentByte) { + u_int8_t mask = 0xFF>>(8-numBits); + u_int32_t result = ((*fPtr)>>(8-fNumBitsRemainingInCurrentByte)) & mask; + fNumBitsRemainingInCurrentByte -= numBits; + + return result; + } else { // numBits > fNumBitsRemainingInCurrentByte + // Do two recursive calls to get the result: + unsigned nbr = fNumBitsRemainingInCurrentByte; + u_int32_t firstBits = getBits(nbr); + u_int32_t nextBits = getBits(numBits - nbr); + + return (nextBits< 32) { + (void)getBits(32); + numBits -= 32; + } + (void)getBits(numBits); + } + + unsigned numBitsRemaining() { return (fEnd-fPtr-1)*8 + fNumBitsRemainingInCurrentByte; } + Boolean noMoreBits() const { return fPtr >= fEnd; } + +private: + u_int8_t const* fPtr; + u_int8_t const* fEnd; + unsigned fNumBitsRemainingInCurrentByte; // 1..8 +}; + +static unsigned ilog(int n) { + if (n < 0) return 0; + + unsigned x = (unsigned)n; + unsigned result = 0; + + while (x > 0) { + ++result; + x >>= 1; + } + + return result; +} + +static unsigned lookup1_values(unsigned codebook_entries, unsigned codebook_dimensions) { + // "the greatest integer value for which [return_value] to the power of [codebook_dimensions] + // is less than or equal to [codebook_entries]" + unsigned return_value = 0; + unsigned powerValue; + + do { + ++return_value; + // Compute powerValue = return_value ** codebook_dimensions + if (return_value == 1) powerValue = 1; // optimization + else { + powerValue = 1; + for (unsigned i = 0; i < codebook_dimensions; ++i) { + powerValue *= return_value; + } + } + } while 
(powerValue <= codebook_entries); + return_value -= 1; + + return return_value; +} + +static Boolean parseVorbisSetup_codebook(LEBitVector& bv) { + if (bv.noMoreBits()) return False; + + unsigned sync = bv.getBits(24); + if (sync != 0x564342) return False; + unsigned codebook_dimensions = bv.getBits(16); + unsigned codebook_entries = bv.getBits(24); + unsigned ordered = bv.getBits(1); +#ifdef DEBUG_SETUP_HEADER + fprintf(stderr, "\t\t\tcodebook_dimensions: %d; codebook_entries: %d, ordered: %d\n", + codebook_dimensions, codebook_entries, ordered); +#endif + unsigned codewordLength; + if (!ordered) { + unsigned sparse = bv.getBits(1); +#ifdef DEBUG_SETUP_HEADER + fprintf(stderr, "\t\t\t!ordered: sparse %d\n", sparse); +#endif + for (unsigned i = 0; i < codebook_entries; ++i) { + if (sparse) { + unsigned flag = bv.getBits(1); + if (flag) { + codewordLength = bv.getBits(5) + 1; + } else { + codewordLength = 0; + } + } else { + codewordLength = bv.getBits(5) + 1; + } +#ifdef DEBUG_SETUP_HEADER + fprintf(stderr, "\t\t\t\tcodeword length[%d]:\t%d\n", i, codewordLength); +#endif + } + } else { // ordered +#ifdef DEBUG_SETUP_HEADER + fprintf(stderr, "\t\t\tordered:\n"); +#endif + unsigned current_entry = 0; + unsigned current_length = bv.getBits(5) + 1; + do { + unsigned number = bv.getBits(ilog(codebook_entries - current_entry)); +#ifdef DEBUG_SETUP_HEADER + fprintf(stderr, "\t\t\t\tcodeword length[%d..%d]:\t%d\n", + current_entry, current_entry + number - 1, current_length); +#endif + current_entry += number; + if (current_entry > codebook_entries) { + fprintf(stderr, "Vorbis codebook parsing error: current_entry %d > codebook_entries %d!\n", current_entry, codebook_entries); + return False; + } + ++current_length; + } while (current_entry < codebook_entries); + } + + unsigned codebook_lookup_type = bv.getBits(4); +#ifdef DEBUG_SETUP_HEADER + fprintf(stderr, "\t\t\tcodebook_lookup_type: %d\n", codebook_lookup_type); +#endif + if (codebook_lookup_type > 2) { + 
fprintf(stderr, "Vorbis codebook parsing error: codebook_lookup_type %d!\n", codebook_lookup_type); + return False; + } else if (codebook_lookup_type > 0) { // 1 or 2 + bv.skipBits(32+32); // "codebook_minimum_value" and "codebook_delta_value" + unsigned codebook_value_bits = bv.getBits(4) + 1; + bv.skipBits(1); // "codebook_lookup_p" + unsigned codebook_lookup_values; + if (codebook_lookup_type == 1) { + codebook_lookup_values = lookup1_values(codebook_entries, codebook_dimensions); + } else { // 2 + codebook_lookup_values = codebook_entries*codebook_dimensions; + } + + bv.skipBits(codebook_lookup_values*codebook_value_bits); // "codebook_multiplicands" + } + + return True; +} + +static Boolean parseVorbisSetup_codebooks(LEBitVector& bv) { + if (bv.noMoreBits()) return False; + + unsigned vorbis_codebook_count = bv.getBits(8) + 1; +#ifdef DEBUG_SETUP_HEADER + fprintf(stderr, "\tCodebooks: vorbis_codebook_count: %d\n", vorbis_codebook_count); +#endif + for (unsigned i = 0; i < vorbis_codebook_count; ++i) { +#ifdef DEBUG_SETUP_HEADER + fprintf(stderr, "\t\tCodebook %d:\n", i); +#endif + if (!parseVorbisSetup_codebook(bv)) return False; + } + + return True; +} + +static Boolean parseVorbisSetup_timeDomainTransforms(LEBitVector& bv) { + if (bv.noMoreBits()) return False; + + unsigned vorbis_time_count = bv.getBits(6) + 1; +#ifdef DEBUG_SETUP_HEADER + fprintf(stderr, "\tTime domain transforms: vorbis_time_count: %d\n", vorbis_time_count); +#endif + for (unsigned i = 0; i < vorbis_time_count; ++i) { + unsigned val = bv.getBits(16); + if (val != 0) { + fprintf(stderr, "Vorbis Time domain transforms, read non-zero value %d\n", val); + return False; + } + } + + return True; +} + +static Boolean parseVorbisSetup_floors(LEBitVector& bv) { + if (bv.noMoreBits()) return False; + + unsigned vorbis_floor_count = bv.getBits(6) + 1; +#ifdef DEBUG_SETUP_HEADER + fprintf(stderr, "\tFloors: vorbis_floor_count: %d\n", vorbis_floor_count); +#endif + for (unsigned i = 0; i < 
vorbis_floor_count; ++i) { + unsigned floorType = bv.getBits(16); + if (floorType == 0) { + bv.skipBits(8+16+16+6+8); + unsigned floor0_number_of_books = bv.getBits(4) + 1; + bv.skipBits(floor0_number_of_books*8); + } else if (floorType == 1) { + unsigned floor1_partitions = bv.getBits(5); + + unsigned* floor1_partition_class_list = new unsigned[floor1_partitions]; + unsigned maximum_class = 0, j; + for (j = 0; j < floor1_partitions; ++j) { + floor1_partition_class_list[j] = bv.getBits(4); + if (floor1_partition_class_list[j] > maximum_class) maximum_class = floor1_partition_class_list[j]; + } + + unsigned* floor1_class_dimensions = new unsigned[maximum_class + 1]; + for (j = 0; j <= maximum_class; ++j) { + floor1_class_dimensions[j] = bv.getBits(3) + 1; + unsigned floor1_class_subclasses = bv.getBits(2); + if (floor1_class_subclasses != 0) { + bv.skipBits(8); // "floor1_class_masterbooks[j]" + } + + unsigned twoExp_floor1_class_subclasses = 1 << floor1_class_subclasses; + bv.skipBits(twoExp_floor1_class_subclasses*8); // "floor1_subclass_books[j][*]" + } + + bv.skipBits(2); // "floor1_multiplier" + unsigned rangebits = bv.getBits(4); + for (j = 0; j < floor1_partitions; ++j) { + unsigned current_class_number = floor1_partition_class_list[j]; + bv.skipBits(floor1_class_dimensions[current_class_number] * rangebits); + } + + delete[] floor1_partition_class_list; + delete[] floor1_class_dimensions; + } else { // floorType > 1 + fprintf(stderr, "Vorbis Floors, read bad floor type %d\n", floorType); + return False; + } + } + + return True; +} + +static Boolean parseVorbisSetup_residues(LEBitVector& bv) { + if (bv.noMoreBits()) return False; + + unsigned vorbis_residue_count = bv.getBits(6) + 1; +#ifdef DEBUG_SETUP_HEADER + fprintf(stderr, "\tResidues: vorbis_residue_count: %d\n", vorbis_residue_count); +#endif + for (unsigned i = 0; i < vorbis_residue_count; ++i) { + unsigned vorbis_residue_type = bv.getBits(16); + if (vorbis_residue_type > 2) { + fprintf(stderr, 
"Vorbis Residues, read bad vorbis_residue_type: %d\n", vorbis_residue_type); + return False; + } else { + bv.skipBits(24+24+24); // "residue_begin", "residue_end", "residue_partition_size" + unsigned residue_classifications = bv.getBits(6) + 1; + bv.skipBits(8); // "residue_classbook" + + u_int8_t* residue_cascade = new u_int8_t[residue_classifications]; + unsigned j; + for (j = 0; j < residue_classifications; ++j) { + u_int8_t high_bits = 0; + u_int8_t low_bits = bv.getBits(3); + unsigned bitflag = bv.getBits(1); + if (bitflag) { + high_bits = bv.getBits(5); + } + + residue_cascade[j] = (high_bits<<3) | low_bits; + } + + for (j = 0; j < residue_classifications; ++j) { + u_int8_t const cascade = residue_cascade[j]; + u_int8_t mask = 0x80; + while (mask != 0) { + if ((cascade&mask) != 0) bv.skipBits(8); // "residue_books[j][*]" + mask >>= 1; + } + } + + delete[] residue_cascade; + } + } + + return True; +} + +static Boolean parseVorbisSetup_mappings(LEBitVector& bv, unsigned audio_channels) { + if (bv.noMoreBits()) return False; + + unsigned vorbis_mapping_count = bv.getBits(6) + 1; +#ifdef DEBUG_SETUP_HEADER + fprintf(stderr, "\tMappings: vorbis_mapping_count: %d\n", vorbis_mapping_count); +#endif + for (unsigned i = 0; i < vorbis_mapping_count; ++i) { + unsigned vorbis_mapping_type = bv.getBits(16); + if (vorbis_mapping_type != 0) { + fprintf(stderr, "Vorbis Mappings, read bad vorbis_mapping_type: %d\n", vorbis_mapping_type); + return False; + } + + unsigned vorbis_mapping_submaps = 1; + if (bv.getBits(1)) vorbis_mapping_submaps = bv.getBits(4) + 1; + + if (bv.getBits(1)) { // "square polar channel mapping is in use" + unsigned vorbis_mapping_coupling_steps = bv.getBits(8) + 1; + + for (unsigned j = 0; j < vorbis_mapping_coupling_steps; ++j) { + unsigned ilog_audio_channels_minus_1 = ilog(audio_channels - 1); + bv.skipBits(2*ilog_audio_channels_minus_1); // "vorbis_mapping_magnitude", "vorbis_mapping_angle" + } + } + + unsigned reserved = bv.getBits(2); + if 
(reserved != 0) { + fprintf(stderr, "Vorbis Mappings, read bad 'reserved' field\n"); + return False; + } + + if (vorbis_mapping_submaps > 1) { + for (unsigned j = 0; j < audio_channels; ++j) { + unsigned vorbis_mapping_mux = bv.getBits(4); + + fprintf(stderr, "\t\t\t\tvorbis_mapping_mux[%d]: %d\n", j, vorbis_mapping_mux); + if (vorbis_mapping_mux >= vorbis_mapping_submaps) { + fprintf(stderr, "Vorbis Mappings, read bad \"vorbis_mapping_mux\" %d (>= \"vorbis_mapping_submaps\" %d)\n", vorbis_mapping_mux, vorbis_mapping_submaps); + return False; + } + } + } + + bv.skipBits(vorbis_mapping_submaps*(8+8+8)); // "the floor and residue numbers" + } + + return True; +} + +static Boolean parseVorbisSetup_modes(LEBitVector& bv, OggTrack* track) { + if (bv.noMoreBits()) return False; + + unsigned vorbis_mode_count = bv.getBits(6) + 1; + unsigned ilog_vorbis_mode_count_minus_1 = ilog(vorbis_mode_count - 1); +#ifdef DEBUG_SETUP_HEADER + fprintf(stderr, "\tModes: vorbis_mode_count: %d (ilog(%d-1):%d)\n", + vorbis_mode_count, vorbis_mode_count, ilog_vorbis_mode_count_minus_1); +#endif + track->vtoHdrs.vorbis_mode_count = vorbis_mode_count; + track->vtoHdrs.ilog_vorbis_mode_count_minus_1 = ilog_vorbis_mode_count_minus_1; + track->vtoHdrs.vorbis_mode_blockflag = new u_int8_t[vorbis_mode_count]; + + for (unsigned i = 0; i < vorbis_mode_count; ++i) { + track->vtoHdrs.vorbis_mode_blockflag[i] = (u_int8_t)bv.getBits(1); +#ifdef DEBUG_SETUP_HEADER + fprintf(stderr, "\t\tMode %d: vorbis_mode_blockflag: %d\n", i, track->vtoHdrs.vorbis_mode_blockflag[i]); +#endif + bv.skipBits(16+16+8); // "vorbis_mode_windowtype", "vorbis_mode_transformtype", "vorbis_mode_mapping" + } + + return True; +} + +static Boolean parseVorbisSetupHeader(OggTrack* track, u_int8_t const* p, unsigned headerSize) { + LEBitVector bv(p, headerSize); + do { + if (!parseVorbisSetup_codebooks(bv)) break; + if (!parseVorbisSetup_timeDomainTransforms(bv)) break; + if (!parseVorbisSetup_floors(bv)) break; + if 
(!parseVorbisSetup_residues(bv)) break; + if (!parseVorbisSetup_mappings(bv, track->numChannels)) break; + if (!parseVorbisSetup_modes(bv, track)) break; + unsigned framingFlag = bv.getBits(1); + if (framingFlag == 0) { + fprintf(stderr, "Vorbis \"setup\" header did not end with a 'framing flag'!\n"); + break; + } + + return True; + } while (0); + + // An error occurred: + return False; +} + +#ifdef DEBUG +#define CHECK_PTR if (p >= pEnd) return False +#define printComment(p, len) do { for (unsigned k = 0; k < len; ++k) { CHECK_PTR; fprintf(stderr, "%c", *p++); } } while (0) +#endif + +static Boolean validateCommentHeader(u_int8_t const *p, unsigned headerSize, + unsigned isOpus = 0) { + if (headerSize < 15+isOpus) { // need 7+isOpus + 4(vendor_length) + 4(user_comment_list_length) + fprintf(stderr, "\"comment\" header is too short (%d bytes)\n", headerSize); + return False; + } + +#ifdef DEBUG + u_int8_t const* pEnd = &p[headerSize]; + p += 7+isOpus; + + u_int32_t vendor_length = (p[3]<<24)|(p[2]<<16)|(p[1]<<8)|p[0]; p += 4; + fprintf(stderr, "\tvendor_string:"); + printComment(p, vendor_length); + fprintf(stderr, "\n"); + + u_int32_t user_comment_list_length = (p[3]<<24)|(p[2]<<16)|(p[1]<<8)|p[0]; p += 4; + for (unsigned i = 0; i < user_comment_list_length; ++i) { + CHECK_PTR; u_int32_t length = (p[3]<<24)|(p[2]<<16)|(p[1]<<8)|p[0]; p += 4; + fprintf(stderr, "\tuser_comment[%d]:", i); + printComment(p, length); + fprintf(stderr, "\n"); + } +#endif + + return True; +} + +static unsigned blocksizeFromExponent(unsigned exponent) { + unsigned result = 1; + for (unsigned i = 0; i < exponent; ++i) result = 2*result; + return result; +} + +Boolean OggFileParser::validateHeader(OggTrack* track, u_int8_t const* p, unsigned headerSize) { + // Assert: headerSize >= 7 (because we've already checked "XXXXXX" or "OpusXXXX") + if (strcmp(track->mimeType, "audio/VORBIS") == 0) { + u_int8_t const firstByte = p[0]; + + if (firstByte == 1) { // "identification" header + if 
(headerSize < 30) { + fprintf(stderr, "Vorbis \"identification\" header is too short (%d bytes)\n", headerSize); + return False; + } else if ((p[29]&0x1) != 1) { + fprintf(stderr, "Vorbis \"identification\" header: 'framing_flag' is not set\n"); + return False; + } + + p += 7; + u_int32_t vorbis_version = (p[3]<<24)|(p[2]<<16)|(p[1]<<8)|p[0]; p += 4; + if (vorbis_version != 0) { + fprintf(stderr, "Vorbis \"identification\" header has a bad 'vorbis_version': 0x%08x\n", vorbis_version); + return False; + } + + u_int8_t audio_channels = *p++; + if (audio_channels == 0) { + fprintf(stderr, "Vorbis \"identification\" header: 'audio_channels' is 0!\n"); + return False; + } + track->numChannels = audio_channels; + + u_int32_t audio_sample_rate = (p[3]<<24)|(p[2]<<16)|(p[1]<<8)|p[0]; p += 4; + if (audio_sample_rate == 0) { + fprintf(stderr, "Vorbis \"identification\" header: 'audio_sample_rate' is 0!\n"); + return False; + } + track->samplingFrequency = audio_sample_rate; + + p += 4; // skip over 'bitrate_maximum' + u_int32_t bitrate_nominal = (p[3]<<24)|(p[2]<<16)|(p[1]<<8)|p[0]; p += 4; + if (bitrate_nominal > 0) track->estBitrate = (bitrate_nominal+500)/1000; // round + + p += 4; // skip over 'bitrate_maximum' + + // Note the two 'block sizes' (samples per packet), and their durations in microseconds: + u_int8_t blocksizeBits = *p++; + unsigned& blocksize_0 = track->vtoHdrs.blocksize[0]; // alias + unsigned& blocksize_1 = track->vtoHdrs.blocksize[1]; // alias + blocksize_0 = blocksizeFromExponent(blocksizeBits&0x0F); + blocksize_1 = blocksizeFromExponent(blocksizeBits>>4); + + double uSecsPerSample = 1000000.0/(track->samplingFrequency*2); + // Why the "2"? 
I don't know, but it seems to be necessary + track->vtoHdrs.uSecsPerPacket[0] = (unsigned)(uSecsPerSample*blocksize_0); + track->vtoHdrs.uSecsPerPacket[1] = (unsigned)(uSecsPerSample*blocksize_1); +#ifdef DEBUG + fprintf(stderr, "\t%u Hz, %u-channel, %u kbps (est), block sizes: %u,%u (%u,%u us)\n", + track->samplingFrequency, track->numChannels, track->estBitrate, + blocksize_0, blocksize_1, + track->vtoHdrs.uSecsPerPacket[0], track->vtoHdrs.uSecsPerPacket[1]); +#endif + // To be valid, "blocksize_0" must be <= "blocksize_1", and both must be in [64,8192]: + if (!(blocksize_0 <= blocksize_1 && blocksize_0 >= 64 && blocksize_1 <= 8192)) { + fprintf(stderr, "Invalid Vorbis \"blocksize_0\" (%d) and/or \"blocksize_1\" (%d)!\n", + blocksize_0, blocksize_1); + return False; + } + } else if (firstByte == 3) { // "comment" header + if (!validateCommentHeader(p, headerSize)) return False; + } else if (firstByte == 5) { // "setup" header + // Parse the "setup" header to get the values that we want: + // "vorbis_mode_count", and "vorbis_mode_blockflag" for each mode. Unfortunately these come + // near the end of the header, so we have to parse lots of other crap first. 
+ p += 7; + if (!parseVorbisSetupHeader(track, p, headerSize)) { + fprintf(stderr, "Failed to parse Vorbis \"setup\" header!\n"); + return False; + } + } + } else if (strcmp(track->mimeType, "video/THEORA") == 0) { + u_int8_t const firstByte = p[0]; + + if (firstByte == 0x80) { // "identification" header + if (headerSize < 42) { + fprintf(stderr, "Theora \"identification\" header is too short (%d bytes)\n", headerSize); + return False; + } else if ((p[41]&0x7) != 0) { + fprintf(stderr, "Theora \"identification\" header: 'res' bits are non-zero\n"); + return False; + } + + track->vtoHdrs.KFGSHIFT = ((p[40]&3)<<3) | (p[41]>>5); + u_int32_t FRN = (p[22]<<24) | (p[23]<<16) | (p[24]<<8) | p[25]; // Frame rate numerator + u_int32_t FRD = (p[26]<<24) | (p[27]<<16) | (p[28]<<8) | p[29]; // Frame rate numerator +#ifdef DEBUG + fprintf(stderr, "\tKFGSHIFT %d, Frame rate numerator %d, Frame rate denominator %d\n", track->vtoHdrs.KFGSHIFT, FRN, FRD); +#endif + if (FRN == 0 || FRD == 0) { + fprintf(stderr, "Theora \"identification\" header: Bad FRN and/or FRD values: %d, %d\n", FRN, FRD); + return False; + } + track->vtoHdrs.uSecsPerFrame = (unsigned)((1000000.0*FRD)/FRN); +#ifdef DEBUG + fprintf(stderr, "\t\t=> %u microseconds per frame\n", track->vtoHdrs.uSecsPerFrame); +#endif + } else if (firstByte == 0x81) { // "comment" header + if (!validateCommentHeader(p, headerSize)) return False; + } else if (firstByte == 0x82) { // "setup" header + // We don't care about the contents of the Theora "setup" header; just assume it's valid + } + } else { // Opus audio + if (strncmp((char const*)p, "OpusHead", 8) == 0) { // "identification" header + // Just check the size, and the 'major' number of the version byte: + if (headerSize < 19 || (p[8]&0xF0) != 0) return False; + } else { // comment header + if (!validateCommentHeader(p, headerSize, 1/*isOpus*/)) return False; + } + } + + return True; +} + +void OggFileParser::parseAndDeliverPages() { +#ifdef DEBUG + fprintf(stderr, "parsing 
and delivering data\n"); +#endif + while (parseAndDeliverPage()) {} +} + +Boolean OggFileParser::parseAndDeliverPage() { + u_int8_t header_type_flag; + u_int32_t bitstream_serial_number; + parseStartOfPage(header_type_flag, bitstream_serial_number); + + OggDemuxedTrack* demuxedTrack = fOurDemux->lookupDemuxedTrack(bitstream_serial_number); + if (demuxedTrack == NULL) { // this track is not being read +#ifdef DEBUG + fprintf(stderr, "\tIgnoring page from unread track; skipping %d remaining packet data bytes\n", + fPacketSizeTable->totSizes); +#endif + skipBytes(fPacketSizeTable->totSizes); + return True; + } else if (fPacketSizeTable->totSizes == 0) { + // This page is empty (has no packets). Skip it and continue +#ifdef DEBUG + fprintf(stderr, "\t[track: %s] Skipping empty page\n", demuxedTrack->MIMEtype()); +#endif + return True; + } + + // Start delivering packets next: + demuxedTrack->fCurrentPageIsContinuation = (header_type_flag&0x01) != 0; + fCurrentTrackNumber = bitstream_serial_number; + fCurrentParseState = DELIVERING_PACKET_WITHIN_PAGE; + saveParserState(); + return False; +} + +Boolean OggFileParser::deliverPacketWithinPage() { + OggDemuxedTrack* demuxedTrack = fOurDemux->lookupDemuxedTrack(fCurrentTrackNumber); + if (demuxedTrack == NULL) return False; // should not happen + + unsigned packetNum = fPacketSizeTable->nextPacketNumToDeliver; + unsigned packetSize = fPacketSizeTable->size[packetNum]; + + if (!demuxedTrack->isCurrentlyAwaitingData()) { + // Someone has been reading this stream, but isn't right now. + // We can't deliver this frame until he asks for it, so punt for now. + // The next time he asks for a frame, he'll get it. +#ifdef DEBUG + fprintf(stderr, "\t[track: %s] Deferring delivery of packet %d (%d bytes%s)\n", + demuxedTrack->MIMEtype(), packetNum, packetSize, + packetNum == fPacketSizeTable->numCompletedPackets ? 
" (incomplete)" : ""); +#endif + return True; + } + + // Deliver the next packet: +#ifdef DEBUG + fprintf(stderr, "\t[track: %s] Delivering packet %d (%d bytes%s)\n", demuxedTrack->MIMEtype(), + packetNum, packetSize, + packetNum == fPacketSizeTable->numCompletedPackets ? " (incomplete)" : ""); +#endif + unsigned numBytesDelivered + = packetSize < demuxedTrack->maxSize() ? packetSize : demuxedTrack->maxSize(); + getBytes(demuxedTrack->to(), numBytesDelivered); + u_int8_t firstByte = numBytesDelivered > 0 ? demuxedTrack->to()[0] : 0x00; + u_int8_t secondByte = numBytesDelivered > 1 ? demuxedTrack->to()[1] : 0x00; + demuxedTrack->to() += numBytesDelivered; + + if (demuxedTrack->fCurrentPageIsContinuation) { // the previous page's read was incomplete + demuxedTrack->frameSize() += numBytesDelivered; + } else { + // This is the first delivery for this "doGetNextFrame()" call. + demuxedTrack->frameSize() = numBytesDelivered; + } + if (packetSize > demuxedTrack->maxSize()) { + demuxedTrack->numTruncatedBytes() += packetSize - demuxedTrack->maxSize(); + } + demuxedTrack->maxSize() -= numBytesDelivered; + + // Figure out the duration and presentation time of this frame. + unsigned durationInMicroseconds; + OggTrack* track = fOurFile.lookup(demuxedTrack->fOurTrackNumber); + + if (strcmp(track->mimeType, "audio/VORBIS") == 0) { + if ((firstByte&0x01) != 0) { // This is a header packet + durationInMicroseconds = 0; + } else { // This is a data packet. + // Parse the first byte to figure out its duration. 
+ // Extract the next "track->vtoHdrs.ilog_vorbis_mode_count_minus_1" bits of the first byte: + u_int8_t const mask = 0xFE<<(track->vtoHdrs.ilog_vorbis_mode_count_minus_1); + u_int8_t const modeNumber = (firstByte&~mask)>>1; + if (modeNumber >= track->vtoHdrs.vorbis_mode_count) { + fprintf(stderr, "Error: Bad mode number %d (>= vorbis_mode_count %d) in Vorbis packet!\n", + modeNumber, track->vtoHdrs.vorbis_mode_count); + durationInMicroseconds = 0; + } else { + unsigned blockNumber = track->vtoHdrs.vorbis_mode_blockflag[modeNumber]; + durationInMicroseconds = track->vtoHdrs.uSecsPerPacket[blockNumber]; + } + } + } else if (strcmp(track->mimeType, "video/THEORA") == 0) { + if ((firstByte&0x80) != 0) { // This is a header packet + durationInMicroseconds = 0; + } else { // This is a data packet. + durationInMicroseconds = track->vtoHdrs.uSecsPerFrame; + } + } else { // "audio/OPUS" + if (firstByte == 0x4F/*'O'*/ && secondByte == 0x70/*'p*/) { // This is a header packet + durationInMicroseconds = 0; + } else { // This is a data packet. + // Parse the first byte to figure out the duration of each frame, and then (if necessary) + // parse the second byte to figure out how many frames are in this packet: + u_int8_t config = firstByte >> 3; + u_int8_t c = firstByte & 0x03; + unsigned const configDuration[32] = { // in microseconds + 10000, 20000, 40000, 60000, // config 0..3 + 10000, 20000, 40000, 60000, // config 4..7 + 10000, 20000, 40000, 60000, // config 8..11 + 10000, 20000, // config 12..13 + 10000, 20000, // config 14..15 + 2500, 5000, 10000, 20000, // config 16..19 + 2500, 5000, 10000, 20000, // config 20..23 + 2500, 5000, 10000, 20000, // config 24..27 + 2500, 5000, 10000, 20000 // config 28..31 + }; + unsigned const numFramesInPacket = c == 0 ? 1 : c == 3 ? 
(secondByte&0x3F) : 2; + durationInMicroseconds = numFramesInPacket*configDuration[config]; + } + } + + if (demuxedTrack->nextPresentationTime().tv_sec == 0 && demuxedTrack->nextPresentationTime().tv_usec == 0) { + // This is the first delivery. Initialize "demuxedTrack->nextPresentationTime()": + gettimeofday(&demuxedTrack->nextPresentationTime(), NULL); + } + demuxedTrack->presentationTime() = demuxedTrack->nextPresentationTime(); + demuxedTrack->durationInMicroseconds() = durationInMicroseconds; + + demuxedTrack->nextPresentationTime().tv_usec += durationInMicroseconds; + while (demuxedTrack->nextPresentationTime().tv_usec >= 1000000) { + ++demuxedTrack->nextPresentationTime().tv_sec; + demuxedTrack->nextPresentationTime().tv_usec -= 1000000; + } + saveParserState(); + + // And check whether there's a next packet in this page: + if (packetNum == fPacketSizeTable->numCompletedPackets) { + // This delivery was for an incomplete packet, at the end of the page. + // Return without completing delivery: + fCurrentParseState = PARSING_AND_DELIVERING_PAGES; + return False; + } + + if (packetNum < fPacketSizeTable->numCompletedPackets-1 + || fPacketSizeTable->lastPacketIsIncomplete) { + // There is at least one more packet (possibly incomplete) left in this packet. 
+ // Deliver it next: + ++fPacketSizeTable->nextPacketNumToDeliver; + } else { + // Start parsing a new page next: + fCurrentParseState = PARSING_AND_DELIVERING_PAGES; + } + + FramedSource::afterGetting(demuxedTrack); // completes delivery + return True; +} + +void OggFileParser::parseStartOfPage(u_int8_t& header_type_flag, + u_int32_t& bitstream_serial_number) { + saveParserState(); + // First, make sure we start with the 'capture_pattern': 0x4F676753 ('OggS'): + while (test4Bytes() != 0x4F676753) { + skipBytes(1); + saveParserState(); // ensures forward progress through the file + } + skipBytes(4); +#ifdef DEBUG + fprintf(stderr, "\nSaw Ogg page header:\n"); +#endif + + u_int8_t stream_structure_version = get1Byte(); + if (stream_structure_version != 0) { + fprintf(stderr, "Saw page with unknown Ogg file version number: 0x%02x\n", stream_structure_version); + } + + header_type_flag = get1Byte(); +#ifdef DEBUG + fprintf(stderr, "\theader_type_flag: 0x%02x (", header_type_flag); + if (header_type_flag&0x01) fprintf(stderr, "continuation "); + if (header_type_flag&0x02) fprintf(stderr, "bos "); + if (header_type_flag&0x04) fprintf(stderr, "eos "); + fprintf(stderr, ")\n"); +#endif + + u_int32_t granule_position1 = byteSwap(get4Bytes()); + u_int32_t granule_position2 = byteSwap(get4Bytes()); + bitstream_serial_number = byteSwap(get4Bytes()); + u_int32_t page_sequence_number = byteSwap(get4Bytes()); + u_int32_t CRC_checksum = byteSwap(get4Bytes()); + u_int8_t number_page_segments = get1Byte(); +#ifdef DEBUG + fprintf(stderr, "\tgranule_position 0x%08x%08x, bitstream_serial_number 0x%08x, page_sequence_number 0x%08x, CRC_checksum 0x%08x, number_page_segments %d\n", granule_position2, granule_position1, bitstream_serial_number, page_sequence_number, CRC_checksum, number_page_segments); +#else + // Dummy statements to prevent 'unused variable' compiler warnings: +#define DUMMY_STATEMENT(x) do {x = x;} while (0) + DUMMY_STATEMENT(granule_position1); + 
DUMMY_STATEMENT(granule_position2); + DUMMY_STATEMENT(page_sequence_number); + DUMMY_STATEMENT(CRC_checksum); +#endif + + // Look at the "segment_table" to count the sizes of the packets in this page: + delete fPacketSizeTable/*if any*/; fPacketSizeTable = new PacketSizeTable(number_page_segments); + u_int8_t lacing_value = 0; +#ifdef DEBUG + fprintf(stderr, "\tsegment_table\n"); +#endif + for (unsigned i = 0; i < number_page_segments; ++i) { + lacing_value = get1Byte(); +#ifdef DEBUG + fprintf(stderr, "\t\t%d:\t%d", i, lacing_value); +#endif + fPacketSizeTable->totSizes += lacing_value; + fPacketSizeTable->size[fPacketSizeTable->numCompletedPackets] += lacing_value; + if (lacing_value < 255) { + // This completes a packet: +#ifdef DEBUG + fprintf(stderr, " (->%d)", fPacketSizeTable->size[fPacketSizeTable->numCompletedPackets]); +#endif + ++fPacketSizeTable->numCompletedPackets; + } +#ifdef DEBUG + fprintf(stderr, "\n"); +#endif + } + + fPacketSizeTable->lastPacketIsIncomplete = lacing_value == 255; +} diff --git a/AnyCore/lib_rtsp/liveMedia/OggFileParser.hh b/AnyCore/lib_rtsp/liveMedia/OggFileParser.hh new file mode 100644 index 0000000..1b1da5c --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/OggFileParser.hh @@ -0,0 +1,91 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A parser for an Ogg file +// C++ header + +#ifndef _OGG_FILE_PARSER_HH + +#ifndef _STREAM_PARSER_HH +#include "StreamParser.hh" +#endif +#ifndef _OGG_FILE_HH +#include "OggFile.hh" +#endif + +// An enum representing the current state of the parser: +enum OggParseState { + PARSING_START_OF_FILE, + PARSING_AND_DELIVERING_PAGES, + DELIVERING_PACKET_WITHIN_PAGE +}; + +// A structure that counts the sizes of 'packets' given by each page's "segment_table": +class PacketSizeTable { +public: + PacketSizeTable(unsigned number_page_segments); + ~PacketSizeTable(); + + unsigned numCompletedPackets; // will be <= "number_page_segments" + unsigned* size; // an array of sizes of each of the packets + unsigned totSizes; + unsigned nextPacketNumToDeliver; + Boolean lastPacketIsIncomplete; // iff the last segment's 'lacing' was 255 +}; + +class OggFileParser: public StreamParser { +public: + OggFileParser(OggFile& ourFile, FramedSource* inputSource, + FramedSource::onCloseFunc* onEndFunc, void* onEndClientData, + OggDemux* ourDemux = NULL); + virtual ~OggFileParser(); + + // StreamParser 'client continue' function: + static void continueParsing(void* clientData, unsigned char* ptr, unsigned size, struct timeval presentationTime); + void continueParsing(); + +private: + Boolean needHeaders() { return fNumUnfulfilledTracks > 0; } + + // Parsing functions: + Boolean parse(); // returns True iff we have finished parsing all BOS pages (on initialization) + + Boolean parseStartOfFile(); + u_int8_t parseInitialPage(); // returns the 'header_type_flag' byte + void parseAndDeliverPages(); + Boolean parseAndDeliverPage(); + Boolean deliverPacketWithinPage(); + void 
parseStartOfPage(u_int8_t& header_type_flag, u_int32_t& bitstream_serial_number); + + Boolean validateHeader(OggTrack* track, u_int8_t const* p, unsigned headerSize); + +private: + // General state for parsing: + OggFile& fOurFile; + FramedSource* fInputSource; + FramedSource::onCloseFunc* fOnEndFunc; + void* fOnEndClientData; + OggDemux* fOurDemux; + OggParseState fCurrentParseState; + + unsigned fNumUnfulfilledTracks; + PacketSizeTable* fPacketSizeTable; + u_int32_t fCurrentTrackNumber; + u_int8_t* fSavedPacket; // used to temporarily save a copy of a 'packet' from a page +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/OggFileServerDemux.cpp b/AnyCore/lib_rtsp/liveMedia/OggFileServerDemux.cpp new file mode 100644 index 0000000..939b4a2 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/OggFileServerDemux.cpp @@ -0,0 +1,109 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A server demultiplexor for a Ogg file +// Implementation + +#include "OggFileServerDemux.hh" +#include "OggFileServerMediaSubsession.hh" + +void OggFileServerDemux +::createNew(UsageEnvironment& env, char const* fileName, + onCreationFunc* onCreation, void* onCreationClientData) { + (void)new OggFileServerDemux(env, fileName, + onCreation, onCreationClientData); +} + +ServerMediaSubsession* OggFileServerDemux::newServerMediaSubsession() { + u_int32_t dummyResultTrackNumber; + return newServerMediaSubsession(dummyResultTrackNumber); +} + +ServerMediaSubsession* OggFileServerDemux +::newServerMediaSubsession(u_int32_t& resultTrackNumber) { + resultTrackNumber = 0; + + OggTrack* nextTrack = fIter->next(); + if (nextTrack == NULL) return NULL; + + return newServerMediaSubsessionByTrackNumber(nextTrack->trackNumber); +} + +ServerMediaSubsession* OggFileServerDemux +::newServerMediaSubsessionByTrackNumber(u_int32_t trackNumber) { + OggTrack* track = fOurOggFile->lookup(trackNumber); + if (track == NULL) return NULL; + + ServerMediaSubsession* result = OggFileServerMediaSubsession::createNew(*this, track); + if (result != NULL) { +#ifdef DEBUG + fprintf(stderr, "Created 'ServerMediaSubsession' object for track #%d: (%s)\n", track->trackNumber, track->mimeType); +#endif + } + + return result; +} + +FramedSource* OggFileServerDemux::newDemuxedTrack(unsigned clientSessionId, u_int32_t trackNumber) { + OggDemux* demuxToUse = NULL; + + if (clientSessionId != 0 && clientSessionId == fLastClientSessionId) { + demuxToUse = fLastCreatedDemux; // use the same demultiplexor as before + // Note: This code relies upon the fact that the creation of streams for different + // client sessions do not overlap - so all demuxed tracks are created for one "OggDemux" at a time. 
+ // Also, the "clientSessionId != 0" test is a hack, because 'session 0' is special; its audio and video streams + // are created and destroyed one-at-a-time, rather than both streams being + // created, and then (later) both streams being destroyed (as is the case + // for other ('real') session ids). Because of this, a separate demultiplexor is used for each 'session 0' track. + } + + if (demuxToUse == NULL) demuxToUse = fOurOggFile->newDemux(); + + fLastClientSessionId = clientSessionId; + fLastCreatedDemux = demuxToUse; + + return demuxToUse->newDemuxedTrackByTrackNumber(trackNumber); +} + +OggFileServerDemux +::OggFileServerDemux(UsageEnvironment& env, char const* fileName, + onCreationFunc* onCreation, void* onCreationClientData) + : Medium(env), + fFileName(fileName), fOnCreation(onCreation), fOnCreationClientData(onCreationClientData), + fIter(NULL/*until the OggFile is created*/), + fLastClientSessionId(0), fLastCreatedDemux(NULL) { + OggFile::createNew(env, fileName, onOggFileCreation, this); +} + +OggFileServerDemux::~OggFileServerDemux() { + Medium::close(fOurOggFile); + + delete fIter; +} + +void OggFileServerDemux::onOggFileCreation(OggFile* newFile, void* clientData) { + ((OggFileServerDemux*)clientData)->onOggFileCreation(newFile); +} + +void OggFileServerDemux::onOggFileCreation(OggFile* newFile) { + fOurOggFile = newFile; + + fIter = new OggTrackTableIterator(fOurOggFile->trackTable()); + + // Now, call our own creation notification function: + if (fOnCreation != NULL) (*fOnCreation)(this, fOnCreationClientData); +} diff --git a/AnyCore/lib_rtsp/liveMedia/OggFileServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/OggFileServerMediaSubsession.cpp new file mode 100644 index 0000000..caeeed3 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/OggFileServerMediaSubsession.cpp @@ -0,0 +1,54 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by 
the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a track within an Ogg file. +// Implementation + +#include "OggFileServerMediaSubsession.hh" +#include "OggDemuxedTrack.hh" +#include "FramedFilter.hh" + +OggFileServerMediaSubsession* OggFileServerMediaSubsession +::createNew(OggFileServerDemux& demux, OggTrack* track) { + return new OggFileServerMediaSubsession(demux, track); +} + +OggFileServerMediaSubsession +::OggFileServerMediaSubsession(OggFileServerDemux& demux, OggTrack* track) + : FileServerMediaSubsession(demux.envir(), demux.fileName(), False), + fOurDemux(demux), fTrack(track), fNumFiltersInFrontOfTrack(0) { +} + +OggFileServerMediaSubsession::~OggFileServerMediaSubsession() { +} + +FramedSource* OggFileServerMediaSubsession +::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate) { + FramedSource* baseSource = fOurDemux.newDemuxedTrack(clientSessionId, fTrack->trackNumber); + if (baseSource == NULL) return NULL; + + return fOurDemux.ourOggFile() + ->createSourceForStreaming(baseSource, fTrack->trackNumber, + estBitrate, fNumFiltersInFrontOfTrack); +} + +RTPSink* OggFileServerMediaSubsession +::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* /*inputSource*/) { + return 
fOurDemux.ourOggFile() + ->createRTPSinkForTrackNumber(fTrack->trackNumber, rtpGroupsock, rtpPayloadTypeIfDynamic); +} diff --git a/AnyCore/lib_rtsp/liveMedia/OggFileServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/OggFileServerMediaSubsession.hh new file mode 100644 index 0000000..7023b55 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/OggFileServerMediaSubsession.hh @@ -0,0 +1,53 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a track within an Ogg file. 
+// C++ header + +#ifndef _OGG_FILE_SERVER_MEDIA_SUBSESSION_HH +#define _OGG_FILE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _FILE_SERVER_MEDIA_SUBSESSION_HH +#include "FileServerMediaSubsession.hh" +#endif +#ifndef _OGG_FILE_SERVER_DEMUX_HH +#include "OggFileServerDemux.hh" +#endif + +class OggFileServerMediaSubsession: public FileServerMediaSubsession { +public: + static OggFileServerMediaSubsession* + createNew(OggFileServerDemux& demux, OggTrack* track); + +protected: + OggFileServerMediaSubsession(OggFileServerDemux& demux, OggTrack* track); + // called only by createNew(), or by subclass constructors + virtual ~OggFileServerMediaSubsession(); + +protected: // redefined virtual functions + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource); + +protected: + OggFileServerDemux& fOurDemux; + OggTrack* fTrack; + unsigned fNumFiltersInFrontOfTrack; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/OggFileSink.cpp b/AnyCore/lib_rtsp/liveMedia/OggFileSink.cpp new file mode 100644 index 0000000..c3b4aef --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/OggFileSink.cpp @@ -0,0 +1,273 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// 'Ogg' File Sink (recording a single media track only) +// Implementation + +#include "OggFileSink.hh" +#include "OutputFile.hh" +#include "VorbisAudioRTPSource.hh" // for "parseVorbisOrTheoraConfigStr()" +#include "MPEG2TransportStreamMultiplexor.hh" // for calculateCRC() +#include "FramedSource.hh" + +OggFileSink* OggFileSink +::createNew(UsageEnvironment& env, char const* fileName, + unsigned samplingFrequency, char const* configStr, + unsigned bufferSize, Boolean oneFilePerFrame) { + do { + FILE* fid; + char const* perFrameFileNamePrefix; + if (oneFilePerFrame) { + // Create the fid for each frame + fid = NULL; + perFrameFileNamePrefix = fileName; + } else { + // Normal case: create the fid once + fid = OpenOutputFile(env, fileName); + if (fid == NULL) break; + perFrameFileNamePrefix = NULL; + } + + return new OggFileSink(env, fid, samplingFrequency, configStr, bufferSize, perFrameFileNamePrefix); + } while (0); + + return NULL; +} + +OggFileSink::OggFileSink(UsageEnvironment& env, FILE* fid, + unsigned samplingFrequency, char const* configStr, + unsigned bufferSize, char const* perFrameFileNamePrefix) + : FileSink(env, fid, bufferSize, perFrameFileNamePrefix), + fSamplingFrequency(samplingFrequency), fConfigStr(configStr), + fHaveWrittenFirstFrame(False), fHaveSeenEOF(False), + fGranulePosition(0), fGranulePositionAdjustment(0), fPageSequenceNumber(0), + fIsTheora(False), fGranuleIncrementPerFrame(1), + fAltFrameSize(0), fAltNumTruncatedBytes(0) { + fAltBuffer = new unsigned char[bufferSize]; + + // Initialize our 'Ogg page header' array with constant values: + u_int8_t* p = fPageHeaderBytes; + *p++=0x4f; *p++=0x67; *p++=0x67; *p++=0x53; // bytes 
0..3: 'capture_pattern': "OggS" + *p++=0; // byte 4: 'stream_structure_version': 0 + *p++=0; // byte 5: 'header_type_flag': set on each write + *p++=0; *p++=0; *p++=0; *p++=0; *p++=0; *p++=0; *p++=0; *p++=0; + // bytes 6..13: 'granule_position': set on each write + *p++=1; *p++=0; *p++=0; *p++=0; // bytes 14..17: 'bitstream_serial_number': 1 + *p++=0; *p++=0; *p++=0; *p++=0; // bytes 18..21: 'page_sequence_number': set on each write + *p++=0; *p++=0; *p++=0; *p++=0; // bytes 22..25: 'CRC_checksum': set on each write + *p=0; // byte 26: 'number_page_segments': set on each write +} + +OggFileSink::~OggFileSink() { + // We still have the previously-arrived frame, so write it to the file before we end: + fHaveSeenEOF = True; + OggFileSink::addData(fAltBuffer, fAltFrameSize, fAltPresentationTime); + + delete[] fAltBuffer; +} + +Boolean OggFileSink::continuePlaying() { + // Identical to "FileSink::continuePlaying()", + // except that we use our own 'on source closure' function: + if (fSource == NULL) return False; + + fSource->getNextFrame(fBuffer, fBufferSize, + FileSink::afterGettingFrame, this, + ourOnSourceClosure, this); + return True; +} + +#define PAGE_DATA_MAX_SIZE (255*255) + +void OggFileSink::addData(unsigned char const* data, unsigned dataSize, + struct timeval presentationTime) { + if (dataSize == 0) return; + + // Set "fGranulePosition" for this frame: + if (fIsTheora) { + // Special case for Theora: "fGranulePosition" is supposed to be made up of a pair: + // (frame count to last key frame) | (frame count since last key frame) + // However, because there appears to be no easy way to figure out which frames are key frames, + // we just assume that all frames are key frames. 
+ if (!(data[0] >= 0x80 && data[0] <= 0x82)) { // for header pages, "fGranulePosition" remains 0 + fGranulePosition += fGranuleIncrementPerFrame; + } + } else { + double ptDiff + = (presentationTime.tv_sec - fFirstPresentationTime.tv_sec) + + (presentationTime.tv_usec - fFirstPresentationTime.tv_usec)/1000000.0; + int64_t newGranulePosition + = (int64_t)(fSamplingFrequency*ptDiff) + fGranulePositionAdjustment; + if (newGranulePosition < fGranulePosition) { + // Update "fGranulePositionAdjustment" so that "fGranulePosition" remains monotonic + fGranulePositionAdjustment += fGranulePosition - newGranulePosition; + } else { + fGranulePosition = newGranulePosition; + } + } + + // Write the frame to the file as a single Ogg 'page' (or perhaps as multiple pages + // if it's too big for a single page). We don't aggregate more than one frame within + // an Ogg page because that's not legal for some headers, and because that would make + // it difficult for us to properly set the 'eos' (end of stream) flag on the last page. 
+ + // First, figure out how many pages to write here + // (a page can contain no more than PAGE_DATA_MAX_SIZE bytes) + unsigned numPagesToWrite = dataSize/PAGE_DATA_MAX_SIZE + 1; + // Note that if "dataSize" is a integral multiple of PAGE_DATA_MAX_SIZE, there will + // be an extra 0-size page at the end + for (unsigned i = 0; i < numPagesToWrite; ++i) { + // First, fill in the changeable parts of our 'page header' array; + u_int8_t header_type_flag = 0x0; + if (!fHaveWrittenFirstFrame && i == 0) { + header_type_flag |= 0x02; // 'bos' + fHaveWrittenFirstFrame = True; // for the future + } + if (i > 0) header_type_flag |= 0x01; // 'continuation' + if (fHaveSeenEOF && i == numPagesToWrite-1) header_type_flag |= 0x04; // 'eos' + fPageHeaderBytes[5] = header_type_flag; + + if (i < numPagesToWrite-1) { + // For pages where the frame does not end, set 'granule_position' in the header to -1: + fPageHeaderBytes[6] = fPageHeaderBytes[7] = fPageHeaderBytes[8] = fPageHeaderBytes[9] = + fPageHeaderBytes[10] = fPageHeaderBytes[11] = fPageHeaderBytes[12] = fPageHeaderBytes[13] + = 0xFF; + } else { + fPageHeaderBytes[6] = (u_int8_t)fGranulePosition; + fPageHeaderBytes[7] = (u_int8_t)(fGranulePosition>>8); + fPageHeaderBytes[8] = (u_int8_t)(fGranulePosition>>16); + fPageHeaderBytes[9] = (u_int8_t)(fGranulePosition>>24); + fPageHeaderBytes[10] = (u_int8_t)(fGranulePosition>>32); + fPageHeaderBytes[11] = (u_int8_t)(fGranulePosition>>40); + fPageHeaderBytes[12] = (u_int8_t)(fGranulePosition>>48); + fPageHeaderBytes[13] = (u_int8_t)(fGranulePosition>>56); + } + + fPageHeaderBytes[18] = (u_int8_t)fPageSequenceNumber; + fPageHeaderBytes[19] = (u_int8_t)(fPageSequenceNumber>>8); + fPageHeaderBytes[20] = (u_int8_t)(fPageSequenceNumber>>16); + fPageHeaderBytes[21] = (u_int8_t)(fPageSequenceNumber>>24); + ++fPageSequenceNumber; + + unsigned pageDataSize; + u_int8_t number_page_segments; + if (dataSize >= PAGE_DATA_MAX_SIZE) { + pageDataSize = PAGE_DATA_MAX_SIZE; + number_page_segments = 
255; + } else { + pageDataSize = dataSize; + number_page_segments = (pageDataSize+255)/255; // so that we don't end with a lacing of 255 + } + fPageHeaderBytes[26] = number_page_segments; + + u_int8_t segment_table[255]; + for (unsigned j = 0; j < (unsigned)(number_page_segments-1); ++j) { + segment_table[j] = 255; + } + segment_table[number_page_segments-1] = pageDataSize%255; + + // Compute the CRC from the 'page header' array, the 'segment_table', and the frame data: + u_int32_t crc = 0; + fPageHeaderBytes[22] = fPageHeaderBytes[23] = fPageHeaderBytes[24] = fPageHeaderBytes[25] = 0; + crc = calculateCRC(fPageHeaderBytes, 27, 0); + crc = calculateCRC(segment_table, number_page_segments, crc); + crc = calculateCRC(data, pageDataSize, crc); + fPageHeaderBytes[22] = (u_int8_t)crc; + fPageHeaderBytes[23] = (u_int8_t)(crc>>8); + fPageHeaderBytes[24] = (u_int8_t)(crc>>16); + fPageHeaderBytes[25] = (u_int8_t)(crc>>24); + + // Then write out the 'page header' array: + FileSink::addData(fPageHeaderBytes, 27, presentationTime); + + // Then write out the 'segment_table': + FileSink::addData(segment_table, number_page_segments, presentationTime); + + // Then add frame data, to complete the page: + FileSink::addData(data, pageDataSize, presentationTime); + data += pageDataSize; + dataSize -= pageDataSize; + } +} + +void OggFileSink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, struct timeval presentationTime) { + if (!fHaveWrittenFirstFrame) { + fFirstPresentationTime = presentationTime; + + // If we have a 'config string' representing 'packed configuration headers' + // ("identification", "comment", "setup"), unpack them and prepend them to the file: + if (fConfigStr != NULL && fConfigStr[0] != '\0') { + u_int8_t* identificationHdr; unsigned identificationHdrSize; + u_int8_t* commentHdr; unsigned commentHdrSize; + u_int8_t* setupHdr; unsigned setupHdrSize; + u_int32_t identField; + parseVorbisOrTheoraConfigStr(fConfigStr, + identificationHdr, 
identificationHdrSize, + commentHdr, commentHdrSize, + setupHdr, setupHdrSize, + identField); + if (identificationHdrSize >= 42 + && strncmp((const char*)&identificationHdr[1], "theora", 6) == 0) { + // Hack for Theora video: Parse the "identification" hdr to get the "KFGSHIFT" parameter: + fIsTheora = True; + u_int8_t const KFGSHIFT = ((identificationHdr[40]&3)<<3) | (identificationHdr[41]>>5); + fGranuleIncrementPerFrame = (u_int64_t)(1 << KFGSHIFT); + } + OggFileSink::addData(identificationHdr, identificationHdrSize, presentationTime); + OggFileSink::addData(commentHdr, commentHdrSize, presentationTime); + + // Hack: Handle the "setup" header as if had arrived in the previous delivery, so it'll get + // written properly below: + if (setupHdrSize > fBufferSize) { + fAltFrameSize = fBufferSize; + fAltNumTruncatedBytes = setupHdrSize - fBufferSize; + } else { + fAltFrameSize = setupHdrSize; + fAltNumTruncatedBytes = 0; + } + memmove(fAltBuffer, setupHdr, fAltFrameSize); + fAltPresentationTime = presentationTime; + + delete[] identificationHdr; + delete[] commentHdr; + delete[] setupHdr; + } + } + + // Save this input frame for next time, and instead write the previous input frame now: + unsigned char* tmpPtr = fBuffer; fBuffer = fAltBuffer; fAltBuffer = tmpPtr; + unsigned prevFrameSize = fAltFrameSize; fAltFrameSize = frameSize; + unsigned prevNumTruncatedBytes = fAltNumTruncatedBytes; fAltNumTruncatedBytes = numTruncatedBytes; + struct timeval prevPresentationTime = fAltPresentationTime; fAltPresentationTime = presentationTime; + + // Call the parent class to complete the normal file write with the (previous) input frame: + FileSink::afterGettingFrame(prevFrameSize, prevNumTruncatedBytes, prevPresentationTime); +} + +void OggFileSink::ourOnSourceClosure(void* clientData) { + ((OggFileSink*)clientData)->ourOnSourceClosure(); +} + +void OggFileSink::ourOnSourceClosure() { + fHaveSeenEOF = True; + + // We still have the previously-arrived frame, so write it to the 
file before we end: + OggFileSink::addData(fAltBuffer, fAltFrameSize, fAltPresentationTime); + + // Handle the closure for real: + onSourceClosure(); +} diff --git a/AnyCore/lib_rtsp/liveMedia/OnDemandServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/OnDemandServerMediaSubsession.cpp new file mode 100644 index 0000000..d2f9fef --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/OnDemandServerMediaSubsession.cpp @@ -0,0 +1,570 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand. 
+// Implementation + +#include "OnDemandServerMediaSubsession.hh" +#include + +OnDemandServerMediaSubsession +::OnDemandServerMediaSubsession(UsageEnvironment& env, + Boolean reuseFirstSource, + portNumBits initialPortNum, + Boolean multiplexRTCPWithRTP) + : ServerMediaSubsession(env), + fSDPLines(NULL), fReuseFirstSource(reuseFirstSource), + fMultiplexRTCPWithRTP(multiplexRTCPWithRTP), fLastStreamToken(NULL) { + fDestinationsHashTable = HashTable::create(ONE_WORD_HASH_KEYS); + if (fMultiplexRTCPWithRTP) { + fInitialPortNum = initialPortNum; + } else { + // Make sure RTP ports are even-numbered: + fInitialPortNum = (initialPortNum+1)&~1; + } + gethostname(fCNAME, sizeof fCNAME); + fCNAME[sizeof fCNAME-1] = '\0'; // just in case +} + +OnDemandServerMediaSubsession::~OnDemandServerMediaSubsession() { + delete[] fSDPLines; + + // Clean out the destinations hash table: + while (1) { + Destinations* destinations + = (Destinations*)(fDestinationsHashTable->RemoveNext()); + if (destinations == NULL) break; + delete destinations; + } + delete fDestinationsHashTable; +} + +char const* +OnDemandServerMediaSubsession::sdpLines() { + if (fSDPLines == NULL) { + // We need to construct a set of SDP lines that describe this + // subsession (as a unicast stream). 
To do so, we first create + // dummy (unused) source and "RTPSink" objects, + // whose parameters we use for the SDP lines: + unsigned estBitrate; + FramedSource* inputSource = createNewStreamSource(0, estBitrate); + if (inputSource == NULL) return NULL; // file not found + + struct in_addr dummyAddr; + dummyAddr.s_addr = 0; + Groupsock dummyGroupsock(envir(), dummyAddr, 0, 0); + unsigned char rtpPayloadType = 96 + trackNumber()-1; // if dynamic + RTPSink* dummyRTPSink + = createNewRTPSink(&dummyGroupsock, rtpPayloadType, inputSource); + if (dummyRTPSink != NULL && dummyRTPSink->estimatedBitrate() > 0) estBitrate = dummyRTPSink->estimatedBitrate(); + + setSDPLinesFromRTPSink(dummyRTPSink, inputSource, estBitrate); + Medium::close(dummyRTPSink); + closeStreamSource(inputSource); + } + + return fSDPLines; +} + +void OnDemandServerMediaSubsession +::getStreamParameters(unsigned clientSessionId, + netAddressBits clientAddress, + Port const& clientRTPPort, + Port const& clientRTCPPort, + int tcpSocketNum, + unsigned char rtpChannelId, + unsigned char rtcpChannelId, + netAddressBits& destinationAddress, + u_int8_t& /*destinationTTL*/, + Boolean& isMulticast, + Port& serverRTPPort, + Port& serverRTCPPort, + void*& streamToken) { + if (destinationAddress == 0) destinationAddress = clientAddress; + struct in_addr destinationAddr; destinationAddr.s_addr = destinationAddress; + isMulticast = False; + + if (fLastStreamToken != NULL && fReuseFirstSource) { + // Special case: Rather than creating a new 'StreamState', + // we reuse the one that we've already created: + serverRTPPort = ((StreamState*)fLastStreamToken)->serverRTPPort(); + serverRTCPPort = ((StreamState*)fLastStreamToken)->serverRTCPPort(); + ++((StreamState*)fLastStreamToken)->referenceCount(); + streamToken = fLastStreamToken; + } else { + // Normal case: Create a new media source: + unsigned streamBitrate; + FramedSource* mediaSource + = createNewStreamSource(clientSessionId, streamBitrate); + + // Create 
'groupsock' and 'sink' objects for the destination, + // using previously unused server port numbers: + RTPSink* rtpSink = NULL; + BasicUDPSink* udpSink = NULL; + Groupsock* rtpGroupsock = NULL; + Groupsock* rtcpGroupsock = NULL; + + if (clientRTPPort.num() != 0 || tcpSocketNum >= 0) { // Normal case: Create destinations + portNumBits serverPortNum; + if (clientRTCPPort.num() == 0) { + // We're streaming raw UDP (not RTP). Create a single groupsock: + NoReuse dummy(envir()); // ensures that we skip over ports that are already in use + for (serverPortNum = fInitialPortNum; ; ++serverPortNum) { + struct in_addr dummyAddr; dummyAddr.s_addr = 0; + + serverRTPPort = serverPortNum; + rtpGroupsock = new Groupsock(envir(), dummyAddr, serverRTPPort, 255); + if (rtpGroupsock->socketNum() >= 0) break; // success + } + + udpSink = BasicUDPSink::createNew(envir(), rtpGroupsock); + } else { + // Normal case: We're streaming RTP (over UDP or TCP). Create a pair of + // groupsocks (RTP and RTCP), with adjacent port numbers (RTP port number even). + // (If we're multiplexing RTCP and RTP over the same port number, it can be odd or even.) 
+ NoReuse dummy(envir()); // ensures that we skip over ports that are already in use + for (portNumBits serverPortNum = fInitialPortNum; ; ++serverPortNum) { + struct in_addr dummyAddr; dummyAddr.s_addr = 0; + + serverRTPPort = serverPortNum; + rtpGroupsock = new Groupsock(envir(), dummyAddr, serverRTPPort, 255); + if (rtpGroupsock->socketNum() < 0) { + delete rtpGroupsock; + continue; // try again + } + + if (fMultiplexRTCPWithRTP) { + // Use the RTP 'groupsock' object for RTCP as well: + serverRTCPPort = serverRTPPort; + rtcpGroupsock = rtpGroupsock; + } else { + // Create a separate 'groupsock' object (with the next (odd) port number) for RTCP: + serverRTCPPort = ++serverPortNum; + rtcpGroupsock = new Groupsock(envir(), dummyAddr, serverRTCPPort, 255); + if (rtcpGroupsock->socketNum() < 0) { + delete rtpGroupsock; + delete rtcpGroupsock; + continue; // try again + } + } + + break; // success + } + + unsigned char rtpPayloadType = 96 + trackNumber()-1; // if dynamic + rtpSink = createNewRTPSink(rtpGroupsock, rtpPayloadType, mediaSource); + if (rtpSink != NULL && rtpSink->estimatedBitrate() > 0) streamBitrate = rtpSink->estimatedBitrate(); + } + + // Turn off the destinations for each groupsock. They'll get set later + // (unless TCP is used instead): + if (rtpGroupsock != NULL) rtpGroupsock->removeAllDestinations(); + if (rtcpGroupsock != NULL) rtcpGroupsock->removeAllDestinations(); + + if (rtpGroupsock != NULL) { + // Try to use a big send buffer for RTP - at least 0.1 second of + // specified bandwidth and at least 50 KB + unsigned rtpBufSize = streamBitrate * 25 / 2; // 1 kbps * 0.1 s = 12.5 bytes + if (rtpBufSize < 50 * 1024) rtpBufSize = 50 * 1024; + increaseSendBufferTo(envir(), rtpGroupsock->socketNum(), rtpBufSize); + } + } + + // Set up the state of the stream. 
The stream will get started later: + streamToken = fLastStreamToken + = new StreamState(*this, serverRTPPort, serverRTCPPort, rtpSink, udpSink, + streamBitrate, mediaSource, + rtpGroupsock, rtcpGroupsock); + } + + // Record these destinations as being for this client session id: + Destinations* destinations; + if (tcpSocketNum < 0) { // UDP + destinations = new Destinations(destinationAddr, clientRTPPort, clientRTCPPort); + } else { // TCP + destinations = new Destinations(tcpSocketNum, rtpChannelId, rtcpChannelId); + } + fDestinationsHashTable->Add((char const*)clientSessionId, destinations); +} + +void OnDemandServerMediaSubsession::startStream(unsigned clientSessionId, + void* streamToken, + TaskFunc* rtcpRRHandler, + void* rtcpRRHandlerClientData, + unsigned short& rtpSeqNum, + unsigned& rtpTimestamp, + ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler, + void* serverRequestAlternativeByteHandlerClientData) { + StreamState* streamState = (StreamState*)streamToken; + Destinations* destinations + = (Destinations*)(fDestinationsHashTable->Lookup((char const*)clientSessionId)); + if (streamState != NULL) { + streamState->startPlaying(destinations, + rtcpRRHandler, rtcpRRHandlerClientData, + serverRequestAlternativeByteHandler, serverRequestAlternativeByteHandlerClientData); + RTPSink* rtpSink = streamState->rtpSink(); // alias + if (rtpSink != NULL) { + rtpSeqNum = rtpSink->currentSeqNo(); + rtpTimestamp = rtpSink->presetNextTimestamp(); + } + } +} + +void OnDemandServerMediaSubsession::pauseStream(unsigned /*clientSessionId*/, + void* streamToken) { + // Pausing isn't allowed if multiple clients are receiving data from + // the same source: + if (fReuseFirstSource) return; + + StreamState* streamState = (StreamState*)streamToken; + if (streamState != NULL) streamState->pause(); +} + +void OnDemandServerMediaSubsession::seekStream(unsigned /*clientSessionId*/, + void* streamToken, double& seekNPT, double streamDuration, u_int64_t& numBytes) { 
+ numBytes = 0; // by default: unknown + + // Seeking isn't allowed if multiple clients are receiving data from the same source: + if (fReuseFirstSource) return; + + StreamState* streamState = (StreamState*)streamToken; + if (streamState != NULL && streamState->mediaSource() != NULL) { + seekStreamSource(streamState->mediaSource(), seekNPT, streamDuration, numBytes); + + streamState->startNPT() = (float)seekNPT; + RTPSink* rtpSink = streamState->rtpSink(); // alias + if (rtpSink != NULL) rtpSink->resetPresentationTimes(); + } +} + +void OnDemandServerMediaSubsession::seekStream(unsigned /*clientSessionId*/, + void* streamToken, char*& absStart, char*& absEnd) { + // Seeking isn't allowed if multiple clients are receiving data from the same source: + if (fReuseFirstSource) return; + + StreamState* streamState = (StreamState*)streamToken; + if (streamState != NULL && streamState->mediaSource() != NULL) { + seekStreamSource(streamState->mediaSource(), absStart, absEnd); + } +} + +void OnDemandServerMediaSubsession::nullSeekStream(unsigned /*clientSessionId*/, void* streamToken, + double streamEndTime, u_int64_t& numBytes) { + numBytes = 0; // by default: unknown + + StreamState* streamState = (StreamState*)streamToken; + if (streamState != NULL && streamState->mediaSource() != NULL) { + // Because we're not seeking here, get the current NPT, and remember it as the new 'start' NPT: + streamState->startNPT() = getCurrentNPT(streamToken); + + double duration = streamEndTime - streamState->startNPT(); + if (duration < 0.0) duration = 0.0; + setStreamSourceDuration(streamState->mediaSource(), duration, numBytes); + + RTPSink* rtpSink = streamState->rtpSink(); // alias + if (rtpSink != NULL) rtpSink->resetPresentationTimes(); + } +} + +void OnDemandServerMediaSubsession::setStreamScale(unsigned /*clientSessionId*/, + void* streamToken, float scale) { + // Changing the scale factor isn't allowed if multiple clients are receiving data + // from the same source: + if 
(fReuseFirstSource) return; + + StreamState* streamState = (StreamState*)streamToken; + if (streamState != NULL && streamState->mediaSource() != NULL) { + setStreamSourceScale(streamState->mediaSource(), scale); + } +} + +float OnDemandServerMediaSubsession::getCurrentNPT(void* streamToken) { + do { + if (streamToken == NULL) break; + + StreamState* streamState = (StreamState*)streamToken; + RTPSink* rtpSink = streamState->rtpSink(); + if (rtpSink == NULL) break; + + return streamState->startNPT() + + (rtpSink->mostRecentPresentationTime().tv_sec - rtpSink->initialPresentationTime().tv_sec) + + (rtpSink->mostRecentPresentationTime().tv_sec - rtpSink->initialPresentationTime().tv_sec)/1000000.0f; + } while (0); + + return 0.0; +} + +FramedSource* OnDemandServerMediaSubsession::getStreamSource(void* streamToken) { + if (streamToken == NULL) return NULL; + + StreamState* streamState = (StreamState*)streamToken; + return streamState->mediaSource(); +} + +void OnDemandServerMediaSubsession::deleteStream(unsigned clientSessionId, + void*& streamToken) { + StreamState* streamState = (StreamState*)streamToken; + + // Look up (and remove) the destinations for this client session: + Destinations* destinations + = (Destinations*)(fDestinationsHashTable->Lookup((char const*)clientSessionId)); + if (destinations != NULL) { + fDestinationsHashTable->Remove((char const*)clientSessionId); + + // Stop streaming to these destinations: + if (streamState != NULL) streamState->endPlaying(destinations); + } + + // Delete the "StreamState" structure if it's no longer being used: + if (streamState != NULL) { + if (streamState->referenceCount() > 0) --streamState->referenceCount(); + if (streamState->referenceCount() == 0) { + delete streamState; + streamToken = NULL; + } + } + + // Finally, delete the destinations themselves: + delete destinations; +} + +char const* OnDemandServerMediaSubsession +::getAuxSDPLine(RTPSink* rtpSink, FramedSource* /*inputSource*/) { + // Default 
implementation: + return rtpSink == NULL ? NULL : rtpSink->auxSDPLine(); +} + +void OnDemandServerMediaSubsession::seekStreamSource(FramedSource* /*inputSource*/, + double& /*seekNPT*/, double /*streamDuration*/, u_int64_t& numBytes) { + // Default implementation: Do nothing + numBytes = 0; +} + +void OnDemandServerMediaSubsession::seekStreamSource(FramedSource* /*inputSource*/, + char*& absStart, char*& absEnd) { + // Default implementation: do nothing (but delete[] and assign "absStart" and "absEnd" to NULL, to show that we don't handle this) + delete[] absStart; absStart = NULL; + delete[] absEnd; absEnd = NULL; +} + +void OnDemandServerMediaSubsession +::setStreamSourceScale(FramedSource* /*inputSource*/, float /*scale*/) { + // Default implementation: Do nothing +} + +void OnDemandServerMediaSubsession +::setStreamSourceDuration(FramedSource* /*inputSource*/, double /*streamDuration*/, u_int64_t& numBytes) { + // Default implementation: Do nothing + numBytes = 0; +} + +void OnDemandServerMediaSubsession::closeStreamSource(FramedSource *inputSource) { + Medium::close(inputSource); +} + +void OnDemandServerMediaSubsession +::setSDPLinesFromRTPSink(RTPSink* rtpSink, FramedSource* inputSource, unsigned estBitrate) { + if (rtpSink == NULL) return; + + char const* mediaType = rtpSink->sdpMediaType(); + unsigned char rtpPayloadType = rtpSink->rtpPayloadType(); + AddressString ipAddressStr(fServerAddressForSDP); + char* rtpmapLine = rtpSink->rtpmapLine(); + char const* rtcpmuxLine = fMultiplexRTCPWithRTP ? 
"a=rtcp-mux\r\n" : ""; + char const* rangeLine = rangeSDPLine(); + char const* auxSDPLine = getAuxSDPLine(rtpSink, inputSource); + if (auxSDPLine == NULL) auxSDPLine = ""; + + char const* const sdpFmt = + "m=%s %u RTP/AVP %d\r\n" + "c=IN IP4 %s\r\n" + "b=AS:%u\r\n" + "%s" + "%s" + "%s" + "%s" + "a=control:%s\r\n"; + unsigned sdpFmtSize = strlen(sdpFmt) + + strlen(mediaType) + 5 /* max short len */ + 3 /* max char len */ + + strlen(ipAddressStr.val()) + + 20 /* max int len */ + + strlen(rtpmapLine) + + strlen(rtcpmuxLine) + + strlen(rangeLine) + + strlen(auxSDPLine) + + strlen(trackId()); + char* sdpLines = new char[sdpFmtSize]; + sprintf(sdpLines, sdpFmt, + mediaType, // m= + fPortNumForSDP, // m= + rtpPayloadType, // m= + ipAddressStr.val(), // c= address + estBitrate, // b=AS: + rtpmapLine, // a=rtpmap:... (if present) + rtcpmuxLine, // a=rtcp-mux:... (if present) + rangeLine, // a=range:... (if present) + auxSDPLine, // optional extra SDP line + trackId()); // a=control: + delete[] (char*)rangeLine; delete[] rtpmapLine; + + fSDPLines = strDup(sdpLines); + delete[] sdpLines; +} + + +////////// StreamState implementation ////////// + +static void afterPlayingStreamState(void* clientData) { + StreamState* streamState = (StreamState*)clientData; + if (streamState->streamDuration() == 0.0) { + // When the input stream ends, tear it down. This will cause a RTCP "BYE" + // to be sent to each client, telling it that the stream has ended. + // (Because the stream didn't have a known duration, there was no other + // way for clients to know when the stream ended.) + streamState->reclaim(); + } + // Otherwise, keep the stream alive, in case a client wants to + // subsequently re-play the stream starting from somewhere other than the end. + // (This can be done only on streams that have a known duration.) 
+} + +StreamState::StreamState(OnDemandServerMediaSubsession& master, + Port const& serverRTPPort, Port const& serverRTCPPort, + RTPSink* rtpSink, BasicUDPSink* udpSink, + unsigned totalBW, FramedSource* mediaSource, + Groupsock* rtpGS, Groupsock* rtcpGS) + : fMaster(master), fAreCurrentlyPlaying(False), fReferenceCount(1), + fServerRTPPort(serverRTPPort), fServerRTCPPort(serverRTCPPort), + fRTPSink(rtpSink), fUDPSink(udpSink), fStreamDuration(master.duration()), + fTotalBW(totalBW), fRTCPInstance(NULL) /* created later */, + fMediaSource(mediaSource), fStartNPT(0.0), fRTPgs(rtpGS), fRTCPgs(rtcpGS) { +} + +StreamState::~StreamState() { + reclaim(); +} + +void StreamState +::startPlaying(Destinations* dests, + TaskFunc* rtcpRRHandler, void* rtcpRRHandlerClientData, + ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler, + void* serverRequestAlternativeByteHandlerClientData) { + if (dests == NULL) return; + + if (fRTCPInstance == NULL && fRTPSink != NULL) { + // Create (and start) a 'RTCP instance' for this RTP sink: + fRTCPInstance + = RTCPInstance::createNew(fRTPSink->envir(), fRTCPgs, + fTotalBW, (unsigned char*)fMaster.fCNAME, + fRTPSink, NULL /* we're a server */); + // Note: This starts RTCP running automatically + } + + if (dests->isTCP) { + // Change RTP and RTCP to use the TCP socket instead of UDP: + if (fRTPSink != NULL) { + fRTPSink->addStreamSocket(dests->tcpSocketNum, dests->rtpChannelId); + RTPInterface + ::setServerRequestAlternativeByteHandler(fRTPSink->envir(), dests->tcpSocketNum, + serverRequestAlternativeByteHandler, serverRequestAlternativeByteHandlerClientData); + // So that we continue to handle RTSP commands from the client + } + if (fRTCPInstance != NULL) { + fRTCPInstance->addStreamSocket(dests->tcpSocketNum, dests->rtcpChannelId); + fRTCPInstance->setSpecificRRHandler(dests->tcpSocketNum, dests->rtcpChannelId, + rtcpRRHandler, rtcpRRHandlerClientData); + } + } else { + // Tell the RTP and RTCP 'groupsocks' about this 
destination + // (in case they don't already have it): + if (fRTPgs != NULL) fRTPgs->addDestination(dests->addr, dests->rtpPort); + if (fRTCPgs != NULL) fRTCPgs->addDestination(dests->addr, dests->rtcpPort); + if (fRTCPInstance != NULL) { + fRTCPInstance->setSpecificRRHandler(dests->addr.s_addr, dests->rtcpPort, + rtcpRRHandler, rtcpRRHandlerClientData); + } + } + + if (fRTCPInstance != NULL) { + // Hack: Send an initial RTCP "SR" packet, before the initial RTP packet, so that receivers will (likely) be able to + // get RTCP-synchronized presentation times immediately: + fRTCPInstance->sendReport(); + } + + if (!fAreCurrentlyPlaying && fMediaSource != NULL) { + if (fRTPSink != NULL) { + fRTPSink->startPlaying(*fMediaSource, afterPlayingStreamState, this); + fAreCurrentlyPlaying = True; + } else if (fUDPSink != NULL) { + fUDPSink->startPlaying(*fMediaSource, afterPlayingStreamState, this); + fAreCurrentlyPlaying = True; + } + } +} + +void StreamState::pause() { + if (fRTPSink != NULL) fRTPSink->stopPlaying(); + if (fUDPSink != NULL) fUDPSink->stopPlaying(); + fAreCurrentlyPlaying = False; +} + +void StreamState::endPlaying(Destinations* dests) { +#if 0 + // The following code is temporarily disabled, because it erroneously sends RTCP "BYE"s to all clients if multiple + // clients are streaming from the same data source (i.e., if "reuseFirstSource" is True), and we don't want that to happen + // if we're being called as a result of a single one of these clients having sent a "TEARDOWN" (rather than the whole stream + // having been closed, for all clients). + // This will be fixed for real later. 
+ if (fRTCPInstance != NULL) { + // Hack: Explicitly send a RTCP "BYE" packet now, because the code below will prevent that from happening later, + // when "fRTCPInstance" gets deleted: + fRTCPInstance->sendBYE(); + } +#endif + + if (dests->isTCP) { + if (fRTPSink != NULL) { + fRTPSink->removeStreamSocket(dests->tcpSocketNum, dests->rtpChannelId); + } + if (fRTCPInstance != NULL) { + fRTCPInstance->removeStreamSocket(dests->tcpSocketNum, dests->rtcpChannelId); + fRTCPInstance->unsetSpecificRRHandler(dests->tcpSocketNum, dests->rtcpChannelId); + } + } else { + // Tell the RTP and RTCP 'groupsocks' to stop using these destinations: + if (fRTPgs != NULL) fRTPgs->removeDestination(dests->addr, dests->rtpPort); + if (fRTCPgs != NULL) fRTCPgs->removeDestination(dests->addr, dests->rtcpPort); + if (fRTCPInstance != NULL) { + fRTCPInstance->unsetSpecificRRHandler(dests->addr.s_addr, dests->rtcpPort); + } + } +} + +void StreamState::reclaim() { + // Delete allocated media objects + Medium::close(fRTCPInstance) /* will send a RTCP BYE */; fRTCPInstance = NULL; + Medium::close(fRTPSink); fRTPSink = NULL; + Medium::close(fUDPSink); fUDPSink = NULL; + + fMaster.closeStreamSource(fMediaSource); fMediaSource = NULL; + if (fMaster.fLastStreamToken == this) fMaster.fLastStreamToken = NULL; + + delete fRTPgs; + if (fRTCPgs != fRTPgs) delete fRTCPgs; + fRTPgs = NULL; fRTCPgs = NULL; +} diff --git a/AnyCore/lib_rtsp/liveMedia/OutputFile.cpp b/AnyCore/lib_rtsp/liveMedia/OutputFile.cpp new file mode 100644 index 0000000..d974367 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/OutputFile.cpp @@ -0,0 +1,60 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Common routines for opening/closing named output files +// Implementation + +#if (defined(__WIN32__) || defined(_WIN32)) && !defined(_WIN32_WCE) +#include <io.h> +#include <fcntl.h> +#endif +#ifndef _WIN32_WCE +#include <sys/stat.h> +#endif +#include <string.h> + +#include "OutputFile.hh" + +FILE* OpenOutputFile(UsageEnvironment& env, char const* fileName) { + FILE* fid; + + // Check for special case 'file names': "stdout" and "stderr" + if (strcmp(fileName, "stdout") == 0) { + fid = stdout; +#if (defined(__WIN32__) || defined(_WIN32)) && !defined(_WIN32_WCE) + _setmode(_fileno(stdout), _O_BINARY); // convert to binary mode +#endif + } else if (strcmp(fileName, "stderr") == 0) { + fid = stderr; +#if (defined(__WIN32__) || defined(_WIN32)) && !defined(_WIN32_WCE) + _setmode(_fileno(stderr), _O_BINARY); // convert to binary mode +#endif + } else { + fid = fopen(fileName, "wb"); + } + + if (fid == NULL) { + env.setResultMsg("unable to open file \"", fileName, "\""); + } + + return fid; +} + +void CloseOutputFile(FILE* fid) { + // Don't close 'stdout' or 'stderr', in case we want to use it again later. 
+ if (fid != NULL && fid != stdout && fid != stderr) fclose(fid); +} diff --git a/AnyCore/lib_rtsp/liveMedia/PassiveServerMediaSubsession.cpp b/AnyCore/lib_rtsp/liveMedia/PassiveServerMediaSubsession.cpp new file mode 100644 index 0000000..7e061ae --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/PassiveServerMediaSubsession.cpp @@ -0,0 +1,221 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that represents an existing +// 'RTPSink', rather than one that creates new 'RTPSink's on demand. 
+// Implementation + +#include "PassiveServerMediaSubsession.hh" +#include + +////////// PassiveServerMediaSubsession ////////// + +PassiveServerMediaSubsession* +PassiveServerMediaSubsession::createNew(RTPSink& rtpSink, + RTCPInstance* rtcpInstance) { + return new PassiveServerMediaSubsession(rtpSink, rtcpInstance); +} + +PassiveServerMediaSubsession +::PassiveServerMediaSubsession(RTPSink& rtpSink, RTCPInstance* rtcpInstance) + : ServerMediaSubsession(rtpSink.envir()), + fSDPLines(NULL), fRTPSink(rtpSink), fRTCPInstance(rtcpInstance) { + fClientRTCPSourceRecords = HashTable::create(ONE_WORD_HASH_KEYS); +} + +class RTCPSourceRecord { +public: + RTCPSourceRecord(netAddressBits addr, Port const& port) + : addr(addr), port(port) { + } + + netAddressBits addr; + Port port; +}; + +PassiveServerMediaSubsession::~PassiveServerMediaSubsession() { + delete[] fSDPLines; + + // Clean out the RTCPSourceRecord table: + while (1) { + RTCPSourceRecord* source = (RTCPSourceRecord*)(fClientRTCPSourceRecords->RemoveNext()); + if (source == NULL) break; + delete source; + } + + delete fClientRTCPSourceRecords; +} + +Boolean PassiveServerMediaSubsession::rtcpIsMuxed() { + if (fRTCPInstance == NULL) return False; + + // Check whether RTP and RTCP use the same "groupsock" object: + return &(fRTPSink.groupsockBeingUsed()) == fRTCPInstance->RTCPgs(); +} + +char const* +PassiveServerMediaSubsession::sdpLines() { + if (fSDPLines == NULL ) { + // Construct a set of SDP lines that describe this subsession: + // Use the components from "rtpSink": + Groupsock const& gs = fRTPSink.groupsockBeingUsed(); + AddressString groupAddressStr(gs.groupAddress()); + unsigned short portNum = ntohs(gs.port().num()); + unsigned char ttl = gs.ttl(); + unsigned char rtpPayloadType = fRTPSink.rtpPayloadType(); + char const* mediaType = fRTPSink.sdpMediaType(); + unsigned estBitrate + = fRTCPInstance == NULL ? 
50 : fRTCPInstance->totSessionBW(); + char* rtpmapLine = fRTPSink.rtpmapLine(); + char const* rtcpmuxLine = rtcpIsMuxed() ? "a=rtcp-mux\r\n" : ""; + char const* rangeLine = rangeSDPLine(); + char const* auxSDPLine = fRTPSink.auxSDPLine(); + if (auxSDPLine == NULL) auxSDPLine = ""; + + char const* const sdpFmt = + "m=%s %d RTP/AVP %d\r\n" + "c=IN IP4 %s/%d\r\n" + "b=AS:%u\r\n" + "%s" + "%s" + "%s" + "%s" + "a=control:%s\r\n"; + unsigned sdpFmtSize = strlen(sdpFmt) + + strlen(mediaType) + 5 /* max short len */ + 3 /* max char len */ + + strlen(groupAddressStr.val()) + 3 /* max char len */ + + 20 /* max int len */ + + strlen(rtpmapLine) + + strlen(rtcpmuxLine) + + strlen(rangeLine) + + strlen(auxSDPLine) + + strlen(trackId()); + char* sdpLines = new char[sdpFmtSize]; + sprintf(sdpLines, sdpFmt, + mediaType, // m= + portNum, // m= + rtpPayloadType, // m= + groupAddressStr.val(), // c= + ttl, // c= TTL + estBitrate, // b=AS: + rtpmapLine, // a=rtpmap:... (if present) + rtcpmuxLine, // a=rtcp-mux:... (if present) + rangeLine, // a=range:... 
(if present) + auxSDPLine, // optional extra SDP line + trackId()); // a=control: + delete[] (char*)rangeLine; delete[] rtpmapLine; + + fSDPLines = strDup(sdpLines); + delete[] sdpLines; + } + + return fSDPLines; +} + +void PassiveServerMediaSubsession +::getStreamParameters(unsigned clientSessionId, + netAddressBits clientAddress, + Port const& /*clientRTPPort*/, + Port const& clientRTCPPort, + int /*tcpSocketNum*/, + unsigned char /*rtpChannelId*/, + unsigned char /*rtcpChannelId*/, + netAddressBits& destinationAddress, + u_int8_t& destinationTTL, + Boolean& isMulticast, + Port& serverRTPPort, + Port& serverRTCPPort, + void*& streamToken) { + isMulticast = True; + Groupsock& gs = fRTPSink.groupsockBeingUsed(); + if (destinationTTL == 255) destinationTTL = gs.ttl(); + if (destinationAddress == 0) { // normal case + destinationAddress = gs.groupAddress().s_addr; + } else { // use the client-specified destination address instead: + struct in_addr destinationAddr; destinationAddr.s_addr = destinationAddress; + gs.changeDestinationParameters(destinationAddr, 0, destinationTTL); + if (fRTCPInstance != NULL) { + Groupsock* rtcpGS = fRTCPInstance->RTCPgs(); + rtcpGS->changeDestinationParameters(destinationAddr, 0, destinationTTL); + } + } + serverRTPPort = gs.port(); + if (fRTCPInstance != NULL) { + Groupsock* rtcpGS = fRTCPInstance->RTCPgs(); + serverRTCPPort = rtcpGS->port(); + } + streamToken = NULL; // not used + + // Make a record of this client's source - for RTCP RR handling: + RTCPSourceRecord* source = new RTCPSourceRecord(clientAddress, clientRTCPPort); + fClientRTCPSourceRecords->Add((char const*)clientSessionId, source); +} + +void PassiveServerMediaSubsession::startStream(unsigned clientSessionId, + void* /*streamToken*/, + TaskFunc* rtcpRRHandler, + void* rtcpRRHandlerClientData, + unsigned short& rtpSeqNum, + unsigned& rtpTimestamp, + ServerRequestAlternativeByteHandler* /*serverRequestAlternativeByteHandler*/, + void* 
/*serverRequestAlternativeByteHandlerClientData*/) { + rtpSeqNum = fRTPSink.currentSeqNo(); + rtpTimestamp = fRTPSink.presetNextTimestamp(); + + // Try to use a big send buffer for RTP - at least 0.1 second of + // specified bandwidth and at least 50 KB + unsigned streamBitrate = fRTCPInstance == NULL ? 50 : fRTCPInstance->totSessionBW(); // in kbps + unsigned rtpBufSize = streamBitrate * 25 / 2; // 1 kbps * 0.1 s = 12.5 bytes + if (rtpBufSize < 50 * 1024) rtpBufSize = 50 * 1024; + increaseSendBufferTo(envir(), fRTPSink.groupsockBeingUsed().socketNum(), rtpBufSize); + + if (fRTCPInstance != NULL) { + // Hack: Send a RTCP "SR" packet now, so that receivers will (likely) be able to + // get RTCP-synchronized presentation times immediately: + fRTCPInstance->sendReport(); + + // Set up the handler for incoming RTCP "RR" packets from this client: + RTCPSourceRecord* source = (RTCPSourceRecord*)(fClientRTCPSourceRecords->Lookup((char const*)clientSessionId)); + if (source != NULL) { + fRTCPInstance->setSpecificRRHandler(source->addr, source->port, + rtcpRRHandler, rtcpRRHandlerClientData); + } + } +} + +float PassiveServerMediaSubsession::getCurrentNPT(void* streamToken) { + // Return the elapsed time between our "RTPSink"s creation time, and the current time: + struct timeval const& creationTime = fRTPSink.creationTime(); // alias + + struct timeval timeNow; + gettimeofday(&timeNow, NULL); + + return (float)(timeNow.tv_sec - creationTime.tv_sec + (timeNow.tv_usec - creationTime.tv_usec)/1000000.0); +} + +void PassiveServerMediaSubsession::deleteStream(unsigned clientSessionId, void*& /*streamToken*/) { + // Lookup and remove the 'RTCPSourceRecord' for this client. 
Also turn off RTCP "RR" handling: + RTCPSourceRecord* source = (RTCPSourceRecord*)(fClientRTCPSourceRecords->Lookup((char const*)clientSessionId)); + if (source != NULL) { + if (fRTCPInstance != NULL) { + fRTCPInstance->unsetSpecificRRHandler(source->addr, source->port); + } + + fClientRTCPSourceRecords->Remove((char const*)clientSessionId); + delete source; + } +} diff --git a/AnyCore/lib_rtsp/liveMedia/ProxyServerMediaSession.cpp b/AnyCore/lib_rtsp/liveMedia/ProxyServerMediaSession.cpp new file mode 100644 index 0000000..0814e06 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/ProxyServerMediaSession.cpp @@ -0,0 +1,793 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A subclass of "ServerMediaSession" that can be used to create a (unicast) RTSP servers that acts as a 'proxy' for +// another (unicast or multicast) RTSP/RTP stream. 
+// Implementation + +#include "liveMedia.hh" +#include "RTSPCommon.hh" +#include "GroupsockHelper.hh" // for "our_random()" + +#ifndef MILLION +#define MILLION 1000000 +#endif + +// A "OnDemandServerMediaSubsession" subclass, used to implement a unicast RTSP server that's proxying another RTSP stream: + +class ProxyServerMediaSubsession: public OnDemandServerMediaSubsession { +public: + ProxyServerMediaSubsession(MediaSubsession& mediaSubsession); + virtual ~ProxyServerMediaSubsession(); + + char const* codecName() const { return fClientMediaSubsession.codecName(); } + +private: // redefined virtual functions + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + virtual void closeStreamSource(FramedSource *inputSource); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource); + +private: + static void subsessionByeHandler(void* clientData); + void subsessionByeHandler(); + + int verbosityLevel() const { return ((ProxyServerMediaSession*)fParentSession)->fVerbosityLevel; } + +private: + friend class ProxyRTSPClient; + MediaSubsession& fClientMediaSubsession; // the 'client' media subsession object that corresponds to this 'server' media subsession + ProxyServerMediaSubsession* fNext; // used when we're part of a queue + Boolean fHaveSetupStream; +}; + + +////////// ProxyServerMediaSession implementation ////////// + +UsageEnvironment& operator<<(UsageEnvironment& env, const ProxyServerMediaSession& psms) { // used for debugging + return env << "ProxyServerMediaSession[\"" << psms.url() << "\"]"; +} + +ProxyRTSPClient* +defaultCreateNewProxyRTSPClientFunc(ProxyServerMediaSession& ourServerMediaSession, + char const* rtspURL, + char const* username, char const* password, + portNumBits tunnelOverHTTPPortNum, int verbosityLevel, + int socketNumToServer) { + return new ProxyRTSPClient(ourServerMediaSession, rtspURL, username, password, + 
tunnelOverHTTPPortNum, verbosityLevel, socketNumToServer); +} + +ProxyServerMediaSession* ProxyServerMediaSession +::createNew(UsageEnvironment& env, RTSPServer* ourRTSPServer, + char const* inputStreamURL, char const* streamName, + char const* username, char const* password, + portNumBits tunnelOverHTTPPortNum, int verbosityLevel, int socketNumToServer) { + return new ProxyServerMediaSession(env, ourRTSPServer, inputStreamURL, streamName, username, password, + tunnelOverHTTPPortNum, verbosityLevel, socketNumToServer); +} + + +ProxyServerMediaSession +::ProxyServerMediaSession(UsageEnvironment& env, RTSPServer* ourRTSPServer, + char const* inputStreamURL, char const* streamName, + char const* username, char const* password, + portNumBits tunnelOverHTTPPortNum, int verbosityLevel, + int socketNumToServer, + createNewProxyRTSPClientFunc* ourCreateNewProxyRTSPClientFunc) + : ServerMediaSession(env, streamName, NULL, NULL, False, NULL), + describeCompletedFlag(0), fOurRTSPServer(ourRTSPServer), fClientMediaSession(NULL), + fVerbosityLevel(verbosityLevel), + fPresentationTimeSessionNormalizer(new PresentationTimeSessionNormalizer(envir())), + fCreateNewProxyRTSPClientFunc(ourCreateNewProxyRTSPClientFunc) { + // Open a RTSP connection to the input stream, and send a "DESCRIBE" command. + // We'll use the SDP description in the response to set ourselves up. + fProxyRTSPClient + = (*fCreateNewProxyRTSPClientFunc)(*this, inputStreamURL, username, password, + tunnelOverHTTPPortNum, + verbosityLevel > 0 ? 
verbosityLevel-1 : verbosityLevel, + socketNumToServer); + ProxyRTSPClient::sendDESCRIBE(fProxyRTSPClient); +} + +ProxyServerMediaSession::~ProxyServerMediaSession() { + if (fVerbosityLevel > 0) { + envir() << *this << "::~ProxyServerMediaSession()\n"; + } + + // Begin by sending a "TEARDOWN" command (without checking for a response): + if (fProxyRTSPClient != NULL) fProxyRTSPClient->sendTeardownCommand(*fClientMediaSession, NULL, fProxyRTSPClient->auth()); + + // Then delete our state: + Medium::close(fClientMediaSession); + Medium::close(fProxyRTSPClient); + delete fPresentationTimeSessionNormalizer; +} + +char const* ProxyServerMediaSession::url() const { + return fProxyRTSPClient == NULL ? NULL : fProxyRTSPClient->url(); +} + +void ProxyServerMediaSession::continueAfterDESCRIBE(char const* sdpDescription) { + describeCompletedFlag = 1; + + // Create a (client) "MediaSession" object from the stream's SDP description ("resultString"), then iterate through its + // "MediaSubsession" objects, to set up corresponding "ServerMediaSubsession" objects that we'll use to serve the stream's tracks. + do { + fClientMediaSession = MediaSession::createNew(envir(), sdpDescription); + if (fClientMediaSession == NULL) break; + + MediaSubsessionIterator iter(*fClientMediaSession); + for (MediaSubsession* mss = iter.next(); mss != NULL; mss = iter.next()) { + ServerMediaSubsession* smss = new ProxyServerMediaSubsession(*mss); + addSubsession(smss); + if (fVerbosityLevel > 0) { + envir() << *this << " added new \"ProxyServerMediaSubsession\" for " + << mss->protocolName() << "/" << mss->mediumName() << "/" << mss->codecName() << " track\n"; + } + } + } while (0); +} + +void ProxyServerMediaSession::resetDESCRIBEState() { + // Delete all of our "ProxyServerMediaSubsession"s; they'll get set up again once we get a response to the new "DESCRIBE". 
+ if (fOurRTSPServer != NULL) { + // First, close any RTSP client connections that may have already been set up: + fOurRTSPServer->closeAllClientSessionsForServerMediaSession(this); + } + deleteAllSubsessions(); + + // Finally, delete the client "MediaSession" object that we had set up after receiving the response to the previous "DESCRIBE": + Medium::close(fClientMediaSession); fClientMediaSession = NULL; +} + +///////// RTSP 'response handlers' ////////// + +static void continueAfterDESCRIBE(RTSPClient* rtspClient, int resultCode, char* resultString) { + char const* res; + + if (resultCode == 0) { + // The "DESCRIBE" command succeeded, so "resultString" should be the stream's SDP description. + res = resultString; + } else { + // The "DESCRIBE" command failed. + res = NULL; + } + ((ProxyRTSPClient*)rtspClient)->continueAfterDESCRIBE(res); + delete[] resultString; +} + +static void continueAfterSETUP(RTSPClient* rtspClient, int resultCode, char* resultString) { + if (resultCode == 0) { + ((ProxyRTSPClient*)rtspClient)->continueAfterSETUP(); + } + delete[] resultString; +} + +static void continueAfterOPTIONS(RTSPClient* rtspClient, int resultCode, char* resultString) { + Boolean serverSupportsGetParameter = False; + if (resultCode == 0) { + // Note whether the server told us that it supports the "GET_PARAMETER" command: + serverSupportsGetParameter = RTSPOptionIsSupported("GET_PARAMETER", resultString); + } + ((ProxyRTSPClient*)rtspClient)->continueAfterLivenessCommand(resultCode, serverSupportsGetParameter); + delete[] resultString; +} + +#ifdef SEND_GET_PARAMETER_IF_SUPPORTED +static void continueAfterGET_PARAMETER(RTSPClient* rtspClient, int resultCode, char* resultString) { + ((ProxyRTSPClient*)rtspClient)->continueAfterLivenessCommand(resultCode, True); + delete[] resultString; +} +#endif + + +////////// "ProxyRTSPClient" implementation ///////// + +UsageEnvironment& operator<<(UsageEnvironment& env, const ProxyRTSPClient& proxyRTSPClient) { // used for 
debugging + return env << "ProxyRTSPClient[\"" << proxyRTSPClient.url() << "\"]"; +} + +ProxyRTSPClient::ProxyRTSPClient(ProxyServerMediaSession& ourServerMediaSession, char const* rtspURL, + char const* username, char const* password, + portNumBits tunnelOverHTTPPortNum, int verbosityLevel, int socketNumToServer) + : RTSPClient(ourServerMediaSession.envir(), rtspURL, verbosityLevel, "ProxyRTSPClient", + tunnelOverHTTPPortNum == (portNumBits)(~0) ? 0 : tunnelOverHTTPPortNum, socketNumToServer), + fOurServerMediaSession(ourServerMediaSession), fOurURL(strDup(rtspURL)), fStreamRTPOverTCP(tunnelOverHTTPPortNum != 0), + fSetupQueueHead(NULL), fSetupQueueTail(NULL), fNumSetupsDone(0), fNextDESCRIBEDelay(1), + fServerSupportsGetParameter(False), fLastCommandWasPLAY(False), + fLivenessCommandTask(NULL), fDESCRIBECommandTask(NULL), fSubsessionTimerTask(NULL) { + if (username != NULL && password != NULL) { + fOurAuthenticator = new Authenticator(username, password); + } else { + fOurAuthenticator = NULL; + } +} + +void ProxyRTSPClient::reset() { + envir().taskScheduler().unscheduleDelayedTask(fLivenessCommandTask); fLivenessCommandTask = NULL; + envir().taskScheduler().unscheduleDelayedTask(fDESCRIBECommandTask); fDESCRIBECommandTask = NULL; + envir().taskScheduler().unscheduleDelayedTask(fSubsessionTimerTask); fSubsessionTimerTask = NULL; + + fSetupQueueHead = fSetupQueueTail = NULL; + fNumSetupsDone = 0; + fNextDESCRIBEDelay = 1; + fLastCommandWasPLAY = False; + + RTSPClient::reset(); +} + +ProxyRTSPClient::~ProxyRTSPClient() { + reset(); + + delete fOurAuthenticator; + delete[] fOurURL; +} + +void ProxyRTSPClient::continueAfterDESCRIBE(char const* sdpDescription) { + if (sdpDescription != NULL) { + fOurServerMediaSession.continueAfterDESCRIBE(sdpDescription); + + // Unlike most RTSP streams, there might be a long delay between this "DESCRIBE" command (to the downstream server) and the + // subsequent "SETUP"/"PLAY" - which doesn't occur until the first time that a client 
requests the stream. + // To prevent the proxied connection (between us and the downstream server) from timing out, we send periodic 'liveness' + // ("OPTIONS" or "GET_PARAMETER") commands. (The usual RTCP liveness mechanism wouldn't work here, because RTCP packets + // don't get sent until after the "PLAY" command.) + scheduleLivenessCommand(); + } else { + // The "DESCRIBE" command failed, most likely because the server or the stream is not yet running. + // Reschedule another "DESCRIBE" command to take place later: + scheduleDESCRIBECommand(); + } +} + +void ProxyRTSPClient::continueAfterLivenessCommand(int resultCode, Boolean serverSupportsGetParameter) { + if (resultCode != 0) { + // The periodic 'liveness' command failed, suggesting that the back-end stream is no longer alive. + // We handle this by resetting our connection state with this server. Any current clients will be closed, but + // subsequent clients will cause new RTSP "SETUP"s and "PLAY"s to get done, restarting the stream. + // Then continue by sending more "DESCRIBE" commands, to try to restore the stream. + + fServerSupportsGetParameter = False; // until we learn otherwise, in response to a future "OPTIONS" command + + if (resultCode < 0) { + // The 'liveness' command failed without getting a response from the server (otherwise "resultCode" would have been > 0). + // This suggests that the RTSP connection itself has failed. Print this error code, in case it's useful for debugging: + if (fVerbosityLevel > 0) { + envir() << *this << ": lost connection to server ('errno': " << -resultCode << "). 
Resetting...\n"; + } + } + + reset(); + fOurServerMediaSession.resetDESCRIBEState(); + + setBaseURL(fOurURL); // because we'll be sending an initial "DESCRIBE" all over again + sendDESCRIBE(this); + return; + } + + fServerSupportsGetParameter = serverSupportsGetParameter; + + // Schedule the next 'liveness' command (i.e., to tell the back-end server that we're still alive): + scheduleLivenessCommand(); +} + +#define SUBSESSION_TIMEOUT_SECONDS 10 // how many seconds to wait for the last track's "SETUP" to be done (note below) + +void ProxyRTSPClient::continueAfterSETUP() { + if (fVerbosityLevel > 0) { + envir() << *this << "::continueAfterSETUP(): head codec: " << fSetupQueueHead->fClientMediaSubsession.codecName() + << "; numSubsessions " << fSetupQueueHead->fParentSession->numSubsessions() << "\n\tqueue:"; + for (ProxyServerMediaSubsession* p = fSetupQueueHead; p != NULL; p = p->fNext) { + envir() << "\t" << p->fClientMediaSubsession.codecName(); + } + envir() << "\n"; + } + envir().taskScheduler().unscheduleDelayedTask(fSubsessionTimerTask); // in case it had been set + + // Dequeue the first "ProxyServerMediaSubsession" from our 'SETUP queue'. It will be the one for which this "SETUP" was done: + ProxyServerMediaSubsession* smss = fSetupQueueHead; // Assert: != NULL + fSetupQueueHead = fSetupQueueHead->fNext; + if (fSetupQueueHead == NULL) fSetupQueueTail = NULL; + + if (fSetupQueueHead != NULL) { + // There are still entries in the queue, for tracks for which we have still to do a "SETUP". + // "SETUP" the first of these now: + sendSetupCommand(fSetupQueueHead->fClientMediaSubsession, ::continueAfterSETUP, + False, fStreamRTPOverTCP, False, fOurAuthenticator); + ++fNumSetupsDone; + fSetupQueueHead->fHaveSetupStream = True; + } else { + if (fNumSetupsDone >= smss->fParentSession->numSubsessions()) { + // We've now finished setting up each of our subsessions (i.e., 'tracks'). 
+ // Continue by sending a "PLAY" command (an 'aggregate' "PLAY" command, on the whole session): + sendPlayCommand(smss->fClientMediaSubsession.parentSession(), NULL, -1.0f, -1.0f, 1.0f, fOurAuthenticator); + // the "-1.0f" "start" parameter causes the "PLAY" to be sent without a "Range:" header, in case we'd already done + // a "PLAY" before (as a result of a 'subsession timeout' (note below)) + fLastCommandWasPLAY = True; + } else { + // Some of this session's subsessions (i.e., 'tracks') remain to be "SETUP". They might get "SETUP" very soon, but it's + // also possible - if the remote client chose to play only some of the session's tracks - that they might not. + // To allow for this possibility, we set a timer. If the timer expires without the remaining subsessions getting "SETUP", + // then we send a "PLAY" command anyway: + fSubsessionTimerTask + = envir().taskScheduler().scheduleDelayedTask(SUBSESSION_TIMEOUT_SECONDS*MILLION, (TaskFunc*)subsessionTimeout, this); + } + } +} + +void ProxyRTSPClient::scheduleLivenessCommand() { + // Delay a random time before sending another 'liveness' command. + unsigned delayMax = sessionTimeoutParameter(); // if the server specified a maximum time between 'liveness' probes, then use that + if (delayMax == 0) { + delayMax = 60; + } + + // Choose a random time from [delayMax/2,delayMax-1) seconds: + unsigned const us_1stPart = delayMax*500000; + unsigned uSecondsToDelay; + if (us_1stPart <= 1000000) { + uSecondsToDelay = us_1stPart; + } else { + unsigned const us_2ndPart = us_1stPart-1000000; + uSecondsToDelay = us_1stPart + (us_2ndPart*our_random())%us_2ndPart; + } + fLivenessCommandTask = envir().taskScheduler().scheduleDelayedTask(uSecondsToDelay, sendLivenessCommand, this); +} + +void ProxyRTSPClient::sendLivenessCommand(void* clientData) { + ProxyRTSPClient* rtspClient = (ProxyRTSPClient*)clientData; + + // Note. 
By default, we do not send "GET_PARAMETER" as our 'liveness notification' command, even if the server previously + // indicated (in its response to our earlier "OPTIONS" command) that it supported "GET_PARAMETER". This is because + // "GET_PARAMETER" crashes some camera servers (even though they claimed to support "GET_PARAMETER"). +#ifdef SEND_GET_PARAMETER_IF_SUPPORTED + MediaSession* sess = rtspClient->fOurServerMediaSession.fClientMediaSession; + + if (rtspClient->fServerSupportsGetParameter && rtspClient->fNumSetupsDone > 0 && sess != NULL) { + rtspClient->sendGetParameterCommand(*sess, ::continueAfterGET_PARAMETER, "", rtspClient->auth()); + } else { +#endif + rtspClient->sendOptionsCommand(::continueAfterOPTIONS, rtspClient->auth()); +#ifdef SEND_GET_PARAMETER_IF_SUPPORTED + } +#endif +} + +void ProxyRTSPClient::scheduleDESCRIBECommand() { + // Delay 1s, 2s, 4s, 8s ... 256s until sending the next "DESCRIBE". Then, keep delaying a random time from [256..511] seconds: + unsigned secondsToDelay; + if (fNextDESCRIBEDelay <= 256) { + secondsToDelay = fNextDESCRIBEDelay; + fNextDESCRIBEDelay *= 2; + } else { + secondsToDelay = 256 + (our_random()&0xFF); // [256..511] seconds + } + + if (fVerbosityLevel > 0) { + envir() << *this << ": RTSP \"DESCRIBE\" command failed; trying again in " << secondsToDelay << " seconds\n"; + } + fDESCRIBECommandTask = envir().taskScheduler().scheduleDelayedTask(secondsToDelay*MILLION, sendDESCRIBE, this); +} + +void ProxyRTSPClient::sendDESCRIBE(void* clientData) { + ProxyRTSPClient* rtspClient = (ProxyRTSPClient*)clientData; + if (rtspClient != NULL) rtspClient->sendDescribeCommand(::continueAfterDESCRIBE, rtspClient->auth()); +} + +void ProxyRTSPClient::subsessionTimeout(void* clientData) { + ((ProxyRTSPClient*)clientData)->handleSubsessionTimeout(); +} + +void ProxyRTSPClient::handleSubsessionTimeout() { + // We still have one or more subsessions ('tracks') left to "SETUP". But we can't wait any longer for them. 
Send a "PLAY" now: + MediaSession* sess = fOurServerMediaSession.fClientMediaSession; + if (sess != NULL) sendPlayCommand(*sess, NULL, -1.0f, -1.0f, 1.0f, fOurAuthenticator); + fLastCommandWasPLAY = True; +} + + +//////// "ProxyServerMediaSubsession" implementation ////////// + +ProxyServerMediaSubsession::ProxyServerMediaSubsession(MediaSubsession& mediaSubsession) + : OnDemandServerMediaSubsession(mediaSubsession.parentSession().envir(), True/*reuseFirstSource*/), + fClientMediaSubsession(mediaSubsession), fNext(NULL), fHaveSetupStream(False) { +} + +UsageEnvironment& operator<<(UsageEnvironment& env, const ProxyServerMediaSubsession& psmss) { // used for debugging + return env << "ProxyServerMediaSubsession[\"" << psmss.codecName() << "\"]"; +} + +ProxyServerMediaSubsession::~ProxyServerMediaSubsession() { + if (verbosityLevel() > 0) { + envir() << *this << "::~ProxyServerMediaSubsession()\n"; + } +} + +FramedSource* ProxyServerMediaSubsession::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate) { + ProxyServerMediaSession* const sms = (ProxyServerMediaSession*)fParentSession; + + if (verbosityLevel() > 0) { + envir() << *this << "::createNewStreamSource(session id " << clientSessionId << ")\n"; + } + + // If we haven't yet created a data source from our 'media subsession' object, initiate() it to do so: + if (fClientMediaSubsession.readSource() == NULL) { + fClientMediaSubsession.receiveRawMP3ADUs(); // hack for MPA-ROBUST streams + fClientMediaSubsession.receiveRawJPEGFrames(); // hack for proxying JPEG/RTP streams. (Don't do this if we're transcoding.) 
+ fClientMediaSubsession.initiate(); + if (verbosityLevel() > 0) { + envir() << "\tInitiated: " << *this << "\n"; + } + + if (fClientMediaSubsession.readSource() != NULL) { + // Add to the front of all data sources a filter that will 'normalize' their frames' presentation times, + // before the frames get re-transmitted by our server: + char const* const codecName = fClientMediaSubsession.codecName(); + FramedFilter* normalizerFilter = sms->fPresentationTimeSessionNormalizer + ->createNewPresentationTimeSubsessionNormalizer(fClientMediaSubsession.readSource(), fClientMediaSubsession.rtpSource(), + codecName); + fClientMediaSubsession.addFilter(normalizerFilter); + + // Some data sources require a 'framer' object to be added, before they can be fed into + // a "RTPSink". Adjust for this now: + if (strcmp(codecName, "H264") == 0) { + fClientMediaSubsession.addFilter(H264VideoStreamDiscreteFramer + ::createNew(envir(), fClientMediaSubsession.readSource())); + } else if (strcmp(codecName, "H265") == 0) { + fClientMediaSubsession.addFilter(H265VideoStreamDiscreteFramer + ::createNew(envir(), fClientMediaSubsession.readSource())); + } else if (strcmp(codecName, "MP4V-ES") == 0) { + fClientMediaSubsession.addFilter(MPEG4VideoStreamDiscreteFramer + ::createNew(envir(), fClientMediaSubsession.readSource(), + True/* leave PTs unmodified*/)); + } else if (strcmp(codecName, "MPV") == 0) { + fClientMediaSubsession.addFilter(MPEG1or2VideoStreamDiscreteFramer + ::createNew(envir(), fClientMediaSubsession.readSource(), + False, 5.0, True/* leave PTs unmodified*/)); + } else if (strcmp(codecName, "DV") == 0) { + fClientMediaSubsession.addFilter(DVVideoStreamFramer + ::createNew(envir(), fClientMediaSubsession.readSource(), + False, True/* leave PTs unmodified*/)); + } + } + + if (fClientMediaSubsession.rtcpInstance() != NULL) { + fClientMediaSubsession.rtcpInstance()->setByeHandler(subsessionByeHandler, this); + } + } + + ProxyRTSPClient* const proxyRTSPClient = 
sms->fProxyRTSPClient; + if (clientSessionId != 0) { + // We're being called as a result of implementing a RTSP "SETUP". + if (!fHaveSetupStream) { + // This is our first "SETUP". Send RTSP "SETUP" and later "PLAY" commands to the proxied server, to start streaming: + // (Before sending "SETUP", enqueue ourselves on the "RTSPClient"s 'SETUP queue', so we'll be able to get the correct + // "ProxyServerMediaSubsession" to handle the response. (Note that responses come back in the same order as requests.)) + Boolean queueWasEmpty = proxyRTSPClient->fSetupQueueHead == NULL; + if (queueWasEmpty) { + proxyRTSPClient->fSetupQueueHead = this; + } else { + proxyRTSPClient->fSetupQueueTail->fNext = this; + } + proxyRTSPClient->fSetupQueueTail = this; + + // Hack: If there's already a pending "SETUP" request (for another track), don't send this track's "SETUP" right away, because + // the server might not properly handle 'pipelined' requests. Instead, wait until after previous "SETUP" responses come back. + if (queueWasEmpty) { + proxyRTSPClient->sendSetupCommand(fClientMediaSubsession, ::continueAfterSETUP, + False, proxyRTSPClient->fStreamRTPOverTCP, False, proxyRTSPClient->auth()); + ++proxyRTSPClient->fNumSetupsDone; + fHaveSetupStream = True; + } + } else { + // This is a "SETUP" from a new client. We know that there are no other currently active clients (otherwise we wouldn't + // have been called here), so we know that the substream was previously "PAUSE"d. 
Send "PLAY" downstream once again, + // to resume the stream: + if (!proxyRTSPClient->fLastCommandWasPLAY) { // so that we send only one "PLAY"; not one for each subsession + proxyRTSPClient->sendPlayCommand(fClientMediaSubsession.parentSession(), NULL, -1.0f/*resume from previous point*/, + -1.0f, 1.0f, proxyRTSPClient->auth()); + proxyRTSPClient->fLastCommandWasPLAY = True; + } + } + } + + estBitrate = fClientMediaSubsession.bandwidth(); + if (estBitrate == 0) estBitrate = 50; // kbps, estimate + return fClientMediaSubsession.readSource(); +} + +void ProxyServerMediaSubsession::closeStreamSource(FramedSource* inputSource) { + if (verbosityLevel() > 0) { + envir() << *this << "::closeStreamSource()\n"; + } + // Because there's only one input source for this 'subsession' (regardless of how many downstream clients are proxying it), + // we don't close the input source here. (Instead, we wait until *this* object gets deleted.) + // However, because (as evidenced by this function having been called) we no longer have any clients accessing the stream, + // then we "PAUSE" the downstream proxied stream, until a new client arrives: + if (fHaveSetupStream) { + ProxyServerMediaSession* const sms = (ProxyServerMediaSession*)fParentSession; + ProxyRTSPClient* const proxyRTSPClient = sms->fProxyRTSPClient; + if (proxyRTSPClient->fLastCommandWasPLAY) { // so that we send only one "PAUSE"; not one for each subsession + proxyRTSPClient->sendPauseCommand(fClientMediaSubsession.parentSession(), NULL, proxyRTSPClient->auth()); + proxyRTSPClient->fLastCommandWasPLAY = False; + } + } +} + +RTPSink* ProxyServerMediaSubsession +::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource) { + if (verbosityLevel() > 0) { + envir() << *this << "::createNewRTPSink()\n"; + } + + // Create (and return) the appropriate "RTPSink" object for our codec: + RTPSink* newSink; + char const* const codecName = fClientMediaSubsession.codecName(); + if 
(strcmp(codecName, "AC3") == 0 || strcmp(codecName, "EAC3") == 0) { + newSink = AC3AudioRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + fClientMediaSubsession.rtpTimestampFrequency()); +#if 0 // This code does not work; do *not* enable it: + } else if (strcmp(codecName, "AMR") == 0 || strcmp(codecName, "AMR-WB") == 0) { + Boolean isWideband = strcmp(codecName, "AMR-WB") == 0; + newSink = AMRAudioRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + isWideband, fClientMediaSubsession.numChannels()); +#endif + } else if (strcmp(codecName, "DV") == 0) { + newSink = DVVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic); + } else if (strcmp(codecName, "GSM") == 0) { + newSink = GSMAudioRTPSink::createNew(envir(), rtpGroupsock); + } else if (strcmp(codecName, "H263-1998") == 0 || strcmp(codecName, "H263-2000") == 0) { + newSink = H263plusVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + fClientMediaSubsession.rtpTimestampFrequency()); + } else if (strcmp(codecName, "H264") == 0) { + newSink = H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + fClientMediaSubsession.fmtp_spropparametersets()); + } else if (strcmp(codecName, "H265") == 0) { + newSink = H265VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + fClientMediaSubsession.fmtp_spropvps(), + fClientMediaSubsession.fmtp_spropsps(), + fClientMediaSubsession.fmtp_sproppps()); + } else if (strcmp(codecName, "JPEG") == 0) { + newSink = SimpleRTPSink::createNew(envir(), rtpGroupsock, 26, 90000, "video", "JPEG", + 1/*numChannels*/, False/*allowMultipleFramesPerPacket*/, False/*doNormalMBitRule*/); + } else if (strcmp(codecName, "MP4A-LATM") == 0) { + newSink = MPEG4LATMAudioRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + fClientMediaSubsession.rtpTimestampFrequency(), + fClientMediaSubsession.fmtp_config(), + fClientMediaSubsession.numChannels()); + } else if 
(strcmp(codecName, "MP4V-ES") == 0) { + newSink = MPEG4ESVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + fClientMediaSubsession.rtpTimestampFrequency(), + fClientMediaSubsession.attrVal_unsigned("profile-level-id"), + fClientMediaSubsession.fmtp_config()); + } else if (strcmp(codecName, "MPA") == 0) { + newSink = MPEG1or2AudioRTPSink::createNew(envir(), rtpGroupsock); + } else if (strcmp(codecName, "MPA-ROBUST") == 0) { + newSink = MP3ADURTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic); + } else if (strcmp(codecName, "MPEG4-GENERIC") == 0) { + newSink = MPEG4GenericRTPSink::createNew(envir(), rtpGroupsock, + rtpPayloadTypeIfDynamic, fClientMediaSubsession.rtpTimestampFrequency(), + fClientMediaSubsession.mediumName(), + fClientMediaSubsession.attrVal_strToLower("mode"), + fClientMediaSubsession.fmtp_config(), fClientMediaSubsession.numChannels()); + } else if (strcmp(codecName, "MPV") == 0) { + newSink = MPEG1or2VideoRTPSink::createNew(envir(), rtpGroupsock); + } else if (strcmp(codecName, "OPUS") == 0) { + newSink = SimpleRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + 48000, "audio", "OPUS", 2, False/*only 1 Opus 'packet' in each RTP packet*/); + } else if (strcmp(codecName, "T140") == 0) { + newSink = T140TextRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic); + } else if (strcmp(codecName, "THEORA") == 0) { + newSink = TheoraVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + fClientMediaSubsession.fmtp_config()); + } else if (strcmp(codecName, "VORBIS") == 0) { + newSink = VorbisAudioRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, + fClientMediaSubsession.rtpTimestampFrequency(), fClientMediaSubsession.numChannels(), + fClientMediaSubsession.fmtp_config()); + } else if (strcmp(codecName, "VP8") == 0) { + newSink = VP8VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic); + } else if (strcmp(codecName, "AMR") == 0 || 
strcmp(codecName, "AMR-WB") == 0) { + // Proxying of these codecs is currently *not* supported, because the data received by the "RTPSource" object is not in a + // form that can be fed directly into a corresponding "RTPSink" object. + if (verbosityLevel() > 0) { + envir() << "\treturns NULL (because we currently don't support the proxying of \"" + << fClientMediaSubsession.mediumName() << "/" << codecName << "\" streams)\n"; + } + return NULL; + } else if (strcmp(codecName, "QCELP") == 0 || + strcmp(codecName, "H261") == 0 || + strcmp(codecName, "H263-1998") == 0 || strcmp(codecName, "H263-2000") == 0 || + strcmp(codecName, "X-QT") == 0 || strcmp(codecName, "X-QUICKTIME") == 0) { + // This codec requires a specialized RTP payload format; however, we don't yet have an appropriate "RTPSink" subclass for it: + if (verbosityLevel() > 0) { + envir() << "\treturns NULL (because we don't have a \"RTPSink\" subclass for this RTP payload format)\n"; + } + return NULL; + } else { + // This codec is assumed to have a simple RTP payload format that can be implemented just with a "SimpleRTPSink": + Boolean allowMultipleFramesPerPacket = True; // by default + Boolean doNormalMBitRule = True; // by default + // Some codecs change the above default parameters: + if (strcmp(codecName, "MP2T") == 0) { + doNormalMBitRule = False; // no RTP 'M' bit + } + newSink = SimpleRTPSink::createNew(envir(), rtpGroupsock, + rtpPayloadTypeIfDynamic, fClientMediaSubsession.rtpTimestampFrequency(), + fClientMediaSubsession.mediumName(), fClientMediaSubsession.codecName(), + fClientMediaSubsession.numChannels(), allowMultipleFramesPerPacket, doNormalMBitRule); + } + + // Because our relayed frames' presentation times are inaccurate until the input frames have been RTCP-synchronized, + // we temporarily disable RTCP "SR" reports for this "RTPSink" object: + newSink->enableRTCPReports() = False; + + // Also tell our "PresentationTimeSubsessionNormalizer" object about the "RTPSink", so it can enable 
RTCP "SR" reports later: + PresentationTimeSubsessionNormalizer* ssNormalizer; + if (strcmp(codecName, "H264") == 0 || + strcmp(codecName, "H265") == 0 || + strcmp(codecName, "MP4V-ES") == 0 || + strcmp(codecName, "MPV") == 0 || + strcmp(codecName, "DV") == 0) { + // There was a separate 'framer' object in front of the "PresentationTimeSubsessionNormalizer", so go back one object to get it: + ssNormalizer = (PresentationTimeSubsessionNormalizer*)(((FramedFilter*)inputSource)->inputSource()); + } else { + ssNormalizer = (PresentationTimeSubsessionNormalizer*)inputSource; + } + ssNormalizer->setRTPSink(newSink); + + return newSink; +} + +void ProxyServerMediaSubsession::subsessionByeHandler(void* clientData) { + ((ProxyServerMediaSubsession*)clientData)->subsessionByeHandler(); +} + +void ProxyServerMediaSubsession::subsessionByeHandler() { + if (verbosityLevel() > 0) { + envir() << *this << ": received RTCP \"BYE\". (The back-end stream has ended.)\n"; + } + + // This "BYE" signals that our input source has (effectively) closed, so pass this onto the front-end clients: + fHaveSetupStream = False; // hack to stop "PAUSE" getting sent by: + fClientMediaSubsession.readSource()->handleClosure(); + + // And then treat this as if we had lost connection to the back-end server, + // and can reestablish streaming from it only by sending another "DESCRIBE": + ProxyServerMediaSession* const sms = (ProxyServerMediaSession*)fParentSession; + ProxyRTSPClient* const proxyRTSPClient = sms->fProxyRTSPClient; + proxyRTSPClient->continueAfterLivenessCommand(1/*hack*/, proxyRTSPClient->fServerSupportsGetParameter); +} + + +////////// PresentationTimeSessionNormalizer and PresentationTimeSubsessionNormalizer implementations ////////// + +// PresentationTimeSessionNormalizer: + +PresentationTimeSessionNormalizer::PresentationTimeSessionNormalizer(UsageEnvironment& env) + : Medium(env), + fSubsessionNormalizers(NULL), fMasterSSNormalizer(NULL) { +} + 
+PresentationTimeSessionNormalizer::~PresentationTimeSessionNormalizer() { + while (fSubsessionNormalizers != NULL) { + delete fSubsessionNormalizers; + } +} + +PresentationTimeSubsessionNormalizer* +PresentationTimeSessionNormalizer::createNewPresentationTimeSubsessionNormalizer(FramedSource* inputSource, RTPSource* rtpSource, + char const* codecName) { + fSubsessionNormalizers + = new PresentationTimeSubsessionNormalizer(*this, inputSource, rtpSource, codecName, fSubsessionNormalizers); + return fSubsessionNormalizers; +} + +void PresentationTimeSessionNormalizer::normalizePresentationTime(PresentationTimeSubsessionNormalizer* ssNormalizer, + struct timeval& toPT, struct timeval const& fromPT) { + Boolean const hasBeenSynced = ssNormalizer->fRTPSource->hasBeenSynchronizedUsingRTCP(); + + if (!hasBeenSynced) { + // If "fromPT" has not yet been RTCP-synchronized, then it was generated by our own receiving code, and thus + // is already aligned with 'wall-clock' time. Just copy it 'as is' to "toPT": + toPT = fromPT; + } else { + if (fMasterSSNormalizer == NULL) { + // Make "ssNormalizer" the 'master' subsession - meaning that its presentation time is adjusted to align with 'wall clock' + // time, and the presentation times of other subsessions (if any) are adjusted to retain their relative separation with + // those of the master: + fMasterSSNormalizer = ssNormalizer; + + struct timeval timeNow; + gettimeofday(&timeNow, NULL); + + // Compute: fPTAdjustment = timeNow - fromPT + fPTAdjustment.tv_sec = timeNow.tv_sec - fromPT.tv_sec; + fPTAdjustment.tv_usec = timeNow.tv_usec - fromPT.tv_usec; + // Note: It's OK if one or both of these fields underflows; the result still works out OK later. 
+ } + + // Compute a normalized presentation time: toPT = fromPT + fPTAdjustment + toPT.tv_sec = fromPT.tv_sec + fPTAdjustment.tv_sec - 1; + toPT.tv_usec = fromPT.tv_usec + fPTAdjustment.tv_usec + MILLION; + while (toPT.tv_usec > MILLION) { ++toPT.tv_sec; toPT.tv_usec -= MILLION; } + + // Because "ssNormalizer"s relayed presentation times are accurate from now on, enable RTCP "SR" reports for its "RTPSink": + RTPSink* const rtpSink = ssNormalizer->fRTPSink; + if (rtpSink != NULL) { // sanity check; should always be true + rtpSink->enableRTCPReports() = True; + } + } +} + +void PresentationTimeSessionNormalizer +::removePresentationTimeSubsessionNormalizer(PresentationTimeSubsessionNormalizer* ssNormalizer) { + // Unlink "ssNormalizer" from the linked list (starting with "fSubsessionNormalizers"): + if (fSubsessionNormalizers == ssNormalizer) { + fSubsessionNormalizers = fSubsessionNormalizers->fNext; + } else { + PresentationTimeSubsessionNormalizer** ssPtrPtr = &(fSubsessionNormalizers->fNext); + while (*ssPtrPtr != ssNormalizer) ssPtrPtr = &((*ssPtrPtr)->fNext); + *ssPtrPtr = (*ssPtrPtr)->fNext; + } +} + +// PresentationTimeSubsessionNormalizer: + +PresentationTimeSubsessionNormalizer +::PresentationTimeSubsessionNormalizer(PresentationTimeSessionNormalizer& parent, FramedSource* inputSource, RTPSource* rtpSource, + char const* codecName, PresentationTimeSubsessionNormalizer* next) + : FramedFilter(parent.envir(), inputSource), + fParent(parent), fRTPSource(rtpSource), fRTPSink(NULL), fCodecName(codecName), fNext(next) { +} + +PresentationTimeSubsessionNormalizer::~PresentationTimeSubsessionNormalizer() { + fParent.removePresentationTimeSubsessionNormalizer(this); +} + +void PresentationTimeSubsessionNormalizer::afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + ((PresentationTimeSubsessionNormalizer*)clientData) + ->afterGettingFrame(frameSize, 
numTruncatedBytes, presentationTime, durationInMicroseconds); +} + +void PresentationTimeSubsessionNormalizer::afterGettingFrame(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds) { + // This filter is implemented by passing all frames through unchanged, except that "fPresentationTime" is changed: + fFrameSize = frameSize; + fNumTruncatedBytes = numTruncatedBytes; + fDurationInMicroseconds = durationInMicroseconds; + + fParent.normalizePresentationTime(this, fPresentationTime, presentationTime); + + // Hack for JPEG/RTP proxying. Because we're proxying JPEG by just copying the raw JPEG/RTP payloads, without interpreting them, + // we need to also 'copy' the RTP 'M' (marker) bit from the "RTPSource" to the "RTPSink": + if (fRTPSource->curPacketMarkerBit() && strcmp(fCodecName, "JPEG") == 0) ((SimpleRTPSink*)fRTPSink)->setMBitOnNextPacket(); + + // Complete delivery: + FramedSource::afterGetting(this); +} + +void PresentationTimeSubsessionNormalizer::doGetNextFrame() { + fInputSource->getNextFrame(fTo, fMaxSize, afterGettingFrame, this, FramedSource::handleClosure, this); +} diff --git a/AnyCore/lib_rtsp/liveMedia/QCELPAudioRTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/QCELPAudioRTPSource.cpp new file mode 100644 index 0000000..051d298 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/QCELPAudioRTPSource.cpp @@ -0,0 +1,504 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Qualcomm "PureVoice" (aka. "QCELP") Audio RTP Sources +// Implementation + +#include "QCELPAudioRTPSource.hh" +#include "MultiFramedRTPSource.hh" +#include "FramedFilter.hh" +#include +#include + +// This source is implemented internally by two separate sources: +// (i) a RTP source for the raw (interleaved) QCELP frames, and +// (ii) a deinterleaving filter that reads from this. +// Define these two new classes here: + +class RawQCELPRTPSource: public MultiFramedRTPSource { +public: + static RawQCELPRTPSource* createNew(UsageEnvironment& env, + Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + + unsigned char interleaveL() const { return fInterleaveL; } + unsigned char interleaveN() const { return fInterleaveN; } + unsigned char& frameIndex() { return fFrameIndex; } // index within pkt + +private: + RawQCELPRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + // called only by createNew() + + virtual ~RawQCELPRTPSource(); + +private: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; + + virtual Boolean hasBeenSynchronizedUsingRTCP(); + +private: + unsigned char fInterleaveL, fInterleaveN, fFrameIndex; + unsigned fNumSuccessiveSyncedPackets; +}; + +class QCELPDeinterleaver: public FramedFilter { +public: + static QCELPDeinterleaver* createNew(UsageEnvironment& env, + RawQCELPRTPSource* inputSource); + +private: + QCELPDeinterleaver(UsageEnvironment& env, + RawQCELPRTPSource* inputSource); + // called only by "createNew()" + + 
virtual ~QCELPDeinterleaver(); + + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize, struct timeval presentationTime); + +private: + // Redefined virtual functions: + void doGetNextFrame(); + virtual void doStopGettingFrames(); + +private: + class QCELPDeinterleavingBuffer* fDeinterleavingBuffer; + Boolean fNeedAFrame; +}; + + +////////// QCELPAudioRTPSource implementation ////////// + +FramedSource* +QCELPAudioRTPSource::createNew(UsageEnvironment& env, + Groupsock* RTPgs, + RTPSource*& resultRTPSource, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) { + RawQCELPRTPSource* rawRTPSource; + resultRTPSource = rawRTPSource + = RawQCELPRTPSource::createNew(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency); + if (resultRTPSource == NULL) return NULL; + + QCELPDeinterleaver* deinterleaver + = QCELPDeinterleaver::createNew(env, rawRTPSource); + if (deinterleaver == NULL) { + Medium::close(resultRTPSource); + resultRTPSource = NULL; + } + + return deinterleaver; +} + + +////////// QCELPBufferedPacket and QCELPBufferedPacketFactory ////////// + +// A subclass of BufferedPacket, used to separate out QCELP frames. 
+ +class QCELPBufferedPacket: public BufferedPacket { +public: + QCELPBufferedPacket(RawQCELPRTPSource& ourSource); + virtual ~QCELPBufferedPacket(); + +private: // redefined virtual functions + virtual unsigned nextEnclosedFrameSize(unsigned char*& framePtr, + unsigned dataSize); +private: + RawQCELPRTPSource& fOurSource; +}; + +class QCELPBufferedPacketFactory: public BufferedPacketFactory { +private: // redefined virtual functions + virtual BufferedPacket* createNewPacket(MultiFramedRTPSource* ourSource); +}; + + +///////// RawQCELPRTPSource implementation //////// + +RawQCELPRTPSource* +RawQCELPRTPSource::createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) { + return new RawQCELPRTPSource(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency); +} + +RawQCELPRTPSource::RawQCELPRTPSource(UsageEnvironment& env, + Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency) + : MultiFramedRTPSource(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency, + new QCELPBufferedPacketFactory), + fInterleaveL(0), fInterleaveN(0), fFrameIndex(0), + fNumSuccessiveSyncedPackets(0) { +} + +RawQCELPRTPSource::~RawQCELPRTPSource() { +} + +Boolean RawQCELPRTPSource +::processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize) { + unsigned char* headerStart = packet->data(); + unsigned packetSize = packet->dataSize(); + + // First, check whether this packet's RTP timestamp is synchronized: + if (RTPSource::hasBeenSynchronizedUsingRTCP()) { + ++fNumSuccessiveSyncedPackets; + } else { + fNumSuccessiveSyncedPackets = 0; + } + + // There's a 1-byte header indicating the interleave parameters + if (packetSize < 1) return False; + + // Get the interleaving parameters from the 1-byte header, + // and check them for validity: + unsigned char const firstByte = headerStart[0]; + unsigned char const interleaveL = (firstByte&0x38)>>3; + unsigned char const interleaveN = 
firstByte&0x07; +#ifdef DEBUG + fprintf(stderr, "packetSize: %d, interleaveL: %d, interleaveN: %d\n", packetSize, interleaveL, interleaveN); +#endif + if (interleaveL > 5 || interleaveN > interleaveL) return False; //invalid + + fInterleaveL = interleaveL; + fInterleaveN = interleaveN; + fFrameIndex = 0; // initially + + resultSpecialHeaderSize = 1; + return True; +} + +char const* RawQCELPRTPSource::MIMEtype() const { + return "audio/QCELP"; +} + +Boolean RawQCELPRTPSource::hasBeenSynchronizedUsingRTCP() { + // Don't report ourselves as being synchronized until we've received + // at least a complete interleave cycle of synchronized packets. + // This ensures that the receiver is currently getting a frame from + // a packet that was synchronized. + if (fNumSuccessiveSyncedPackets > (unsigned)(fInterleaveL+1)) { + fNumSuccessiveSyncedPackets = fInterleaveL+2; // prevents overflow + return True; + } + return False; +} + + +///// QCELPBufferedPacket and QCELPBufferedPacketFactory implementation + +QCELPBufferedPacket::QCELPBufferedPacket(RawQCELPRTPSource& ourSource) + : fOurSource(ourSource) { +} + +QCELPBufferedPacket::~QCELPBufferedPacket() { +} + +unsigned QCELPBufferedPacket:: + nextEnclosedFrameSize(unsigned char*& framePtr, unsigned dataSize) { + // The size of the QCELP frame is determined by the first byte: + if (dataSize == 0) return 0; // sanity check + unsigned char const firstByte = framePtr[0]; + + unsigned frameSize; + switch (firstByte) { + case 0: { frameSize = 1; break; } + case 1: { frameSize = 4; break; } + case 2: { frameSize = 8; break; } + case 3: { frameSize = 17; break; } + case 4: { frameSize = 35; break; } + default: { frameSize = 0; break; } + } + +#ifdef DEBUG + fprintf(stderr, "QCELPBufferedPacket::nextEnclosedFrameSize(): frameSize: %d, dataSize: %d\n", frameSize, dataSize); +#endif + if (dataSize < frameSize) return 0; + + ++fOurSource.frameIndex(); + return frameSize; +} + +BufferedPacket* QCELPBufferedPacketFactory 
+::createNewPacket(MultiFramedRTPSource* ourSource) { + return new QCELPBufferedPacket((RawQCELPRTPSource&)(*ourSource)); +} + +///////// QCELPDeinterleavingBuffer ///////// +// (used to implement QCELPDeinterleaver) + +#define QCELP_MAX_FRAME_SIZE 35 +#define QCELP_MAX_INTERLEAVE_L 5 +#define QCELP_MAX_FRAMES_PER_PACKET 10 +#define QCELP_MAX_INTERLEAVE_GROUP_SIZE \ + ((QCELP_MAX_INTERLEAVE_L+1)*QCELP_MAX_FRAMES_PER_PACKET) + +class QCELPDeinterleavingBuffer { +public: + QCELPDeinterleavingBuffer(); + virtual ~QCELPDeinterleavingBuffer(); + + void deliverIncomingFrame(unsigned frameSize, + unsigned char interleaveL, + unsigned char interleaveN, + unsigned char frameIndex, + unsigned short packetSeqNum, + struct timeval presentationTime); + Boolean retrieveFrame(unsigned char* to, unsigned maxSize, + unsigned& resultFrameSize, unsigned& resultNumTruncatedBytes, + struct timeval& resultPresentationTime); + + unsigned char* inputBuffer() { return fInputBuffer; } + unsigned inputBufferSize() const { return QCELP_MAX_FRAME_SIZE; } + +private: + class FrameDescriptor { + public: + FrameDescriptor(); + virtual ~FrameDescriptor(); + + unsigned frameSize; + unsigned char* frameData; + struct timeval presentationTime; + }; + + // Use two banks of descriptors - one for incoming, one for outgoing + FrameDescriptor fFrames[QCELP_MAX_INTERLEAVE_GROUP_SIZE][2]; + unsigned char fIncomingBankId; // toggles between 0 and 1 + unsigned char fIncomingBinMax; // in the incoming bank + unsigned char fOutgoingBinMax; // in the outgoing bank + unsigned char fNextOutgoingBin; + Boolean fHaveSeenPackets; + u_int16_t fLastPacketSeqNumForGroup; + unsigned char* fInputBuffer; + struct timeval fLastRetrievedPresentationTime; +}; + + +////////// QCELPDeinterleaver implementation ///////// + +QCELPDeinterleaver* +QCELPDeinterleaver::createNew(UsageEnvironment& env, + RawQCELPRTPSource* inputSource) { + return new QCELPDeinterleaver(env, inputSource); +} + 
+QCELPDeinterleaver::QCELPDeinterleaver(UsageEnvironment& env, + RawQCELPRTPSource* inputSource) + : FramedFilter(env, inputSource), + fNeedAFrame(False) { + fDeinterleavingBuffer = new QCELPDeinterleavingBuffer(); +} + +QCELPDeinterleaver::~QCELPDeinterleaver() { + delete fDeinterleavingBuffer; +} + +static unsigned const uSecsPerFrame = 20000; // 20 ms + +void QCELPDeinterleaver::doGetNextFrame() { + // First, try getting a frame from the deinterleaving buffer: + if (fDeinterleavingBuffer->retrieveFrame(fTo, fMaxSize, + fFrameSize, fNumTruncatedBytes, + fPresentationTime)) { + // Success! + fNeedAFrame = False; + + fDurationInMicroseconds = uSecsPerFrame; + + // Call our own 'after getting' function. Because we're not a 'leaf' + // source, we can call this directly, without risking + // infinite recursion + afterGetting(this); + return; + } + + // No luck, so ask our source for help: + fNeedAFrame = True; + if (!fInputSource->isCurrentlyAwaitingData()) { + fInputSource->getNextFrame(fDeinterleavingBuffer->inputBuffer(), + fDeinterleavingBuffer->inputBufferSize(), + afterGettingFrame, this, + FramedSource::handleClosure, this); + } +} + +void QCELPDeinterleaver::doStopGettingFrames() { + fNeedAFrame = False; + fInputSource->stopGettingFrames(); +} + +void QCELPDeinterleaver +::afterGettingFrame(void* clientData, unsigned frameSize, + unsigned /*numTruncatedBytes*/, + struct timeval presentationTime, + unsigned /*durationInMicroseconds*/) { + QCELPDeinterleaver* deinterleaver = (QCELPDeinterleaver*)clientData; + deinterleaver->afterGettingFrame1(frameSize, presentationTime); +} + +void QCELPDeinterleaver +::afterGettingFrame1(unsigned frameSize, struct timeval presentationTime) { + RawQCELPRTPSource* source = (RawQCELPRTPSource*)fInputSource; + + // First, put the frame into our deinterleaving buffer: + fDeinterleavingBuffer + ->deliverIncomingFrame(frameSize, source->interleaveL(), + source->interleaveN(), source->frameIndex(), + source->curPacketRTPSeqNum(), + 
presentationTime); + + // Then, try delivering a frame to the client (if he wants one): + if (fNeedAFrame) doGetNextFrame(); +} + + +////////// QCELPDeinterleavingBuffer implementation ///////// + +QCELPDeinterleavingBuffer::QCELPDeinterleavingBuffer() + : fIncomingBankId(0), fIncomingBinMax(0), + fOutgoingBinMax(0), fNextOutgoingBin(0), + fHaveSeenPackets(False) { + fInputBuffer = new unsigned char[QCELP_MAX_FRAME_SIZE]; +} + +QCELPDeinterleavingBuffer::~QCELPDeinterleavingBuffer() { + delete[] fInputBuffer; +} + +void QCELPDeinterleavingBuffer +::deliverIncomingFrame(unsigned frameSize, + unsigned char interleaveL, + unsigned char interleaveN, + unsigned char frameIndex, + unsigned short packetSeqNum, + struct timeval presentationTime) { + // First perform a sanity check on the parameters: + // (This is overkill, as the source should have already done this.) + if (frameSize > QCELP_MAX_FRAME_SIZE + || interleaveL > QCELP_MAX_INTERLEAVE_L || interleaveN > interleaveL + || frameIndex == 0 || frameIndex > QCELP_MAX_FRAMES_PER_PACKET) { +#ifdef DEBUG + fprintf(stderr, "QCELPDeinterleavingBuffer::deliverIncomingFrame() param sanity check failed (%d,%d,%d,%d)\n", frameSize, interleaveL, interleaveN, frameIndex); +#endif + return; + } + + // The input "presentationTime" was that of the first frame in this + // packet. 
Update it for the current frame: + unsigned uSecIncrement = (frameIndex-1)*(interleaveL+1)*uSecsPerFrame; + presentationTime.tv_usec += uSecIncrement; + presentationTime.tv_sec += presentationTime.tv_usec/1000000; + presentationTime.tv_usec = presentationTime.tv_usec%1000000; + + // Next, check whether this packet is part of a new interleave group + if (!fHaveSeenPackets + || seqNumLT(fLastPacketSeqNumForGroup, packetSeqNum)) { + // We've moved to a new interleave group + fHaveSeenPackets = True; + fLastPacketSeqNumForGroup = packetSeqNum + interleaveL - interleaveN; + + // Switch the incoming and outgoing banks: + fIncomingBankId ^= 1; + unsigned char tmp = fIncomingBinMax; + fIncomingBinMax = fOutgoingBinMax; + fOutgoingBinMax = tmp; + fNextOutgoingBin = 0; + } + + // Now move the incoming frame into the appropriate bin: + unsigned const binNumber + = interleaveN + (frameIndex-1)*(interleaveL+1); + FrameDescriptor& inBin = fFrames[binNumber][fIncomingBankId]; + unsigned char* curBuffer = inBin.frameData; + inBin.frameData = fInputBuffer; + inBin.frameSize = frameSize; + inBin.presentationTime = presentationTime; + + if (curBuffer == NULL) curBuffer = new unsigned char[QCELP_MAX_FRAME_SIZE]; + fInputBuffer = curBuffer; + + if (binNumber >= fIncomingBinMax) { + fIncomingBinMax = binNumber + 1; + } +} + +Boolean QCELPDeinterleavingBuffer +::retrieveFrame(unsigned char* to, unsigned maxSize, + unsigned& resultFrameSize, unsigned& resultNumTruncatedBytes, + struct timeval& resultPresentationTime) { + if (fNextOutgoingBin >= fOutgoingBinMax) return False; // none left + + FrameDescriptor& outBin = fFrames[fNextOutgoingBin][fIncomingBankId^1]; + unsigned char* fromPtr; + unsigned char fromSize = outBin.frameSize; + outBin.frameSize = 0; // for the next time this bin is used + + // Check whether this frame is missing; if so, return an 'erasure' frame: + unsigned char erasure = 14; + if (fromSize == 0) { + fromPtr = &erasure; + fromSize = 1; + + // Compute this erasure 
frame's presentation time via extrapolation: + resultPresentationTime = fLastRetrievedPresentationTime; + resultPresentationTime.tv_usec += uSecsPerFrame; + if (resultPresentationTime.tv_usec >= 1000000) { + ++resultPresentationTime.tv_sec; + resultPresentationTime.tv_usec -= 1000000; + } + } else { + // Normal case - a frame exists: + fromPtr = outBin.frameData; + resultPresentationTime = outBin.presentationTime; + } + + fLastRetrievedPresentationTime = resultPresentationTime; + + if (fromSize > maxSize) { + resultNumTruncatedBytes = fromSize - maxSize; + resultFrameSize = maxSize; + } else { + resultNumTruncatedBytes = 0; + resultFrameSize = fromSize; + } + memmove(to, fromPtr, resultFrameSize); + + ++fNextOutgoingBin; + return True; +} + +QCELPDeinterleavingBuffer::FrameDescriptor::FrameDescriptor() + : frameSize(0), frameData(NULL) { +} + +QCELPDeinterleavingBuffer::FrameDescriptor::~FrameDescriptor() { + delete[] frameData; +} diff --git a/AnyCore/lib_rtsp/liveMedia/QuickTimeFileSink.cpp b/AnyCore/lib_rtsp/liveMedia/QuickTimeFileSink.cpp new file mode 100644 index 0000000..e564ec8 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/QuickTimeFileSink.cpp @@ -0,0 +1,2317 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A sink that generates a QuickTime file from a composite media session +// Implementation + +#include "QuickTimeFileSink.hh" +#include "QuickTimeGenericRTPSource.hh" +#include "GroupsockHelper.hh" +#include "InputFile.hh" +#include "OutputFile.hh" +#include "H263plusVideoRTPSource.hh" // for the special header +#include "MPEG4GenericRTPSource.hh" //for "samplingFrequencyFromAudioSpecificConfig()" +#include "MPEG4LATMAudioRTPSource.hh" // for "parseGeneralConfigStr()" +#include "Base64.hh" + +#include + +#define fourChar(x,y,z,w) ( ((x)<<24)|((y)<<16)|((z)<<8)|(w) ) + +#define H264_IDR_FRAME 0x65 //bit 8 == 0, bits 7-6 (ref) == 3, bits 5-0 (type) == 5 + +////////// SubsessionIOState, ChunkDescriptor /////////// +// A structure used to represent the I/O state of each input 'subsession': + +class ChunkDescriptor { +public: + ChunkDescriptor(int64_t offsetInFile, unsigned size, + unsigned frameSize, unsigned frameDuration, + struct timeval presentationTime); + + ChunkDescriptor* extendChunk(int64_t newOffsetInFile, unsigned newSize, + unsigned newFrameSize, + unsigned newFrameDuration, + struct timeval newPresentationTime); + // this may end up allocating a new chunk instead +public: + ChunkDescriptor* fNextChunk; + int64_t fOffsetInFile; + unsigned fNumFrames; + unsigned fFrameSize; + unsigned fFrameDuration; + struct timeval fPresentationTime; // of the start of the data +}; + +class SubsessionBuffer { +public: + SubsessionBuffer(unsigned bufferSize) + : fBufferSize(bufferSize) { + reset(); + fData = new unsigned char[bufferSize]; + } + virtual ~SubsessionBuffer() { delete[] fData; } + void reset() { fBytesInUse = 0; } + void addBytes(unsigned numBytes) { 
fBytesInUse += numBytes; } + + unsigned char* dataStart() { return &fData[0]; } + unsigned char* dataEnd() { return &fData[fBytesInUse]; } + unsigned bytesInUse() const { return fBytesInUse; } + unsigned bytesAvailable() const { return fBufferSize - fBytesInUse; } + + void setPresentationTime(struct timeval const& presentationTime) { + fPresentationTime = presentationTime; + } + struct timeval const& presentationTime() const {return fPresentationTime;} + +private: + unsigned fBufferSize; + struct timeval fPresentationTime; + unsigned char* fData; + unsigned fBytesInUse; +}; + +class SyncFrame { +public: + SyncFrame(unsigned frameNum); + +public: + class SyncFrame *nextSyncFrame; + unsigned sfFrameNum; +}; + +// A 64-bit counter, used below: +class Count64 { +public: + Count64() + : hi(0), lo(0) { + } + + void operator+=(unsigned arg); + + u_int32_t hi, lo; +}; + +class SubsessionIOState { +public: + SubsessionIOState(QuickTimeFileSink& sink, MediaSubsession& subsession); + virtual ~SubsessionIOState(); + + Boolean setQTstate(); + void setFinalQTstate(); + + void afterGettingFrame(unsigned packetDataSize, + struct timeval presentationTime); + void onSourceClosure(); + + Boolean syncOK(struct timeval presentationTime); + // returns true iff data is usable despite a sync check + + static void setHintTrack(SubsessionIOState* hintedTrack, + SubsessionIOState* hintTrack); + Boolean isHintTrack() const { return fTrackHintedByUs != NULL; } + Boolean hasHintTrack() const { return fHintTrackForUs != NULL; } + + UsageEnvironment& envir() const { return fOurSink.envir(); } + +public: + static unsigned fCurrentTrackNumber; + unsigned fTrackID; + SubsessionIOState* fHintTrackForUs; SubsessionIOState* fTrackHintedByUs; + + SubsessionBuffer *fBuffer, *fPrevBuffer; + QuickTimeFileSink& fOurSink; + MediaSubsession& fOurSubsession; + + unsigned short fLastPacketRTPSeqNum; + Boolean fOurSourceIsActive; + + Boolean fHaveBeenSynced; // used in synchronizing with other streams + struct 
timeval fSyncTime; + + Boolean fQTEnableTrack; + unsigned fQTcomponentSubtype; + char const* fQTcomponentName; + typedef unsigned (QuickTimeFileSink::*atomCreationFunc)(); + atomCreationFunc fQTMediaInformationAtomCreator; + atomCreationFunc fQTMediaDataAtomCreator; + char const* fQTAudioDataType; + unsigned short fQTSoundSampleVersion; + unsigned fQTTimeScale; + unsigned fQTTimeUnitsPerSample; + unsigned fQTBytesPerFrame; + unsigned fQTSamplesPerFrame; + // These next fields are derived from the ones above, + // plus the information from each chunk: + unsigned fQTTotNumSamples; + unsigned fQTDurationM; // in media time units + unsigned fQTDurationT; // in track time units + int64_t fTKHD_durationPosn; + // position of the duration in the output 'tkhd' atom + unsigned fQTInitialOffsetDuration; + // if there's a pause at the beginning + + ChunkDescriptor *fHeadChunk, *fTailChunk; + unsigned fNumChunks; + SyncFrame *fHeadSyncFrame, *fTailSyncFrame; + + // Counters to be used in the hint track's 'udta'/'hinf' atom; + struct hinf { + Count64 trpy; + Count64 nump; + Count64 tpyl; + // Is 'maxr' needed? Computing this would be a PITA. 
##### + Count64 dmed; + Count64 dimm; + // 'drep' is always 0 + // 'tmin' and 'tmax' are always 0 + unsigned pmax; + unsigned dmax; + } fHINF; + +private: + void useFrame(SubsessionBuffer& buffer); + void useFrameForHinting(unsigned frameSize, + struct timeval presentationTime, + unsigned startSampleNumber); + + // used by the above two routines: + unsigned useFrame1(unsigned sourceDataSize, + struct timeval presentationTime, + unsigned frameDuration, int64_t destFileOffset); + // returns the number of samples in this data + +private: + // A structure used for temporarily storing frame state: + struct { + unsigned frameSize; + struct timeval presentationTime; + int64_t destFileOffset; // used for non-hint tracks only + + // The remaining fields are used for hint tracks only: + unsigned startSampleNumber; + unsigned short seqNum; + unsigned rtpHeader; + unsigned char numSpecialHeaders; // used when our RTP source has special headers + unsigned specialHeaderBytesLength; // ditto + unsigned char specialHeaderBytes[SPECIAL_HEADER_BUFFER_SIZE]; // ditto + unsigned packetSizes[256]; + } fPrevFrameState; +}; + + +////////// QuickTimeFileSink implementation ////////// + +QuickTimeFileSink::QuickTimeFileSink(UsageEnvironment& env, + MediaSession& inputSession, + char const* outputFileName, + unsigned bufferSize, + unsigned short movieWidth, + unsigned short movieHeight, + unsigned movieFPS, + Boolean packetLossCompensate, + Boolean syncStreams, + Boolean generateHintTracks, + Boolean generateMP4Format) + : Medium(env), fInputSession(inputSession), + fBufferSize(bufferSize), fPacketLossCompensate(packetLossCompensate), + fSyncStreams(syncStreams), fGenerateMP4Format(generateMP4Format), + fAreCurrentlyBeingPlayed(False), + fLargestRTPtimestampFrequency(0), + fNumSubsessions(0), fNumSyncedSubsessions(0), + fHaveCompletedOutputFile(False), + fMovieWidth(movieWidth), fMovieHeight(movieHeight), + fMovieFPS(movieFPS), fMaxTrackDurationM(0) { + fOutFid = OpenOutputFile(env, 
outputFileName); + if (fOutFid == NULL) return; + + fNewestSyncTime.tv_sec = fNewestSyncTime.tv_usec = 0; + fFirstDataTime.tv_sec = fFirstDataTime.tv_usec = (unsigned)(~0); + + // Set up I/O state for each input subsession: + MediaSubsessionIterator iter(fInputSession); + MediaSubsession* subsession; + while ((subsession = iter.next()) != NULL) { + // Ignore subsessions without a data source: + FramedSource* subsessionSource = subsession->readSource(); + if (subsessionSource == NULL) continue; + + // If "subsession's" SDP description specified screen dimension + // or frame rate parameters, then use these. (Note that this must + // be done before the call to "setQTState()" below.) + if (subsession->videoWidth() != 0) { + fMovieWidth = subsession->videoWidth(); + } + if (subsession->videoHeight() != 0) { + fMovieHeight = subsession->videoHeight(); + } + if (subsession->videoFPS() != 0) { + fMovieFPS = subsession->videoFPS(); + } + + SubsessionIOState* ioState + = new SubsessionIOState(*this, *subsession); + if (ioState == NULL || !ioState->setQTstate()) { + // We're not able to output a QuickTime track for this subsession + delete ioState; ioState = NULL; + continue; + } + subsession->miscPtr = (void*)ioState; + + if (generateHintTracks) { + // Also create a hint track for this track: + SubsessionIOState* hintTrack + = new SubsessionIOState(*this, *subsession); + SubsessionIOState::setHintTrack(ioState, hintTrack); + if (!hintTrack->setQTstate()) { + delete hintTrack; + SubsessionIOState::setHintTrack(ioState, NULL); + } + } + + // Also set a 'BYE' handler for this subsession's RTCP instance: + if (subsession->rtcpInstance() != NULL) { + subsession->rtcpInstance()->setByeHandler(onRTCPBye, ioState); + } + + unsigned rtpTimestampFrequency = subsession->rtpTimestampFrequency(); + if (rtpTimestampFrequency > fLargestRTPtimestampFrequency) { + fLargestRTPtimestampFrequency = rtpTimestampFrequency; + } + + ++fNumSubsessions; + } + + // Use the current time as the file's 
creation and modification + // time. Use Apple's time format: seconds since January 1, 1904 + + gettimeofday(&fStartTime, NULL); + fAppleCreationTime = fStartTime.tv_sec - 0x83dac000; + + // Begin by writing a "mdat" atom at the start of the file. + // (Later, when we've finished copying data to the file, we'll come + // back and fill in its size.) + fMDATposition = TellFile64(fOutFid); + addAtomHeader64("mdat"); + // add 64Bit offset + fMDATposition += 8; +} + +QuickTimeFileSink::~QuickTimeFileSink() { + completeOutputFile(); + + // Then, stop streaming and delete each active "SubsessionIOState": + MediaSubsessionIterator iter(fInputSession); + MediaSubsession* subsession; + while ((subsession = iter.next()) != NULL) { + if (subsession->readSource() != NULL) subsession->readSource()->stopGettingFrames(); + + SubsessionIOState* ioState + = (SubsessionIOState*)(subsession->miscPtr); + if (ioState == NULL) continue; + + delete ioState->fHintTrackForUs; // if any + delete ioState; + } + + // Finally, close our output file: + CloseOutputFile(fOutFid); +} + +QuickTimeFileSink* +QuickTimeFileSink::createNew(UsageEnvironment& env, + MediaSession& inputSession, + char const* outputFileName, + unsigned bufferSize, + unsigned short movieWidth, + unsigned short movieHeight, + unsigned movieFPS, + Boolean packetLossCompensate, + Boolean syncStreams, + Boolean generateHintTracks, + Boolean generateMP4Format) { + QuickTimeFileSink* newSink = + new QuickTimeFileSink(env, inputSession, outputFileName, bufferSize, movieWidth, movieHeight, movieFPS, + packetLossCompensate, syncStreams, generateHintTracks, generateMP4Format); + if (newSink == NULL || newSink->fOutFid == NULL) { + Medium::close(newSink); + return NULL; + } + + return newSink; +} + +Boolean QuickTimeFileSink::startPlaying(afterPlayingFunc* afterFunc, + void* afterClientData) { + // Make sure we're not already being played: + if (fAreCurrentlyBeingPlayed) { + envir().setResultMsg("This sink has already been played"); + 
return False; + } + + fAreCurrentlyBeingPlayed = True; + fAfterFunc = afterFunc; + fAfterClientData = afterClientData; + + return continuePlaying(); +} + +Boolean QuickTimeFileSink::continuePlaying() { + // Run through each of our input session's 'subsessions', + // asking for a frame from each one: + Boolean haveActiveSubsessions = False; + MediaSubsessionIterator iter(fInputSession); + MediaSubsession* subsession; + while ((subsession = iter.next()) != NULL) { + FramedSource* subsessionSource = subsession->readSource(); + if (subsessionSource == NULL) continue; + + if (subsessionSource->isCurrentlyAwaitingData()) continue; + + SubsessionIOState* ioState + = (SubsessionIOState*)(subsession->miscPtr); + if (ioState == NULL) continue; + + haveActiveSubsessions = True; + unsigned char* toPtr = ioState->fBuffer->dataEnd(); + unsigned toSize = ioState->fBuffer->bytesAvailable(); + subsessionSource->getNextFrame(toPtr, toSize, + afterGettingFrame, ioState, + onSourceClosure, ioState); + } + if (!haveActiveSubsessions) { + envir().setResultMsg("No subsessions are currently active"); + return False; + } + + return True; +} + +void QuickTimeFileSink +::afterGettingFrame(void* clientData, unsigned packetDataSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned /*durationInMicroseconds*/) { + SubsessionIOState* ioState = (SubsessionIOState*)clientData; + if (!ioState->syncOK(presentationTime)) { + // Ignore this data: + ioState->fOurSink.continuePlaying(); + return; + } + if (numTruncatedBytes > 0) { + ioState->envir() << "QuickTimeFileSink::afterGettingFrame(): The input frame data was too large for our buffer. " + << numTruncatedBytes + << " bytes of trailing data was dropped! 
Correct this by increasing the \"bufferSize\" parameter in the \"createNew()\" call.\n"; + } + ioState->afterGettingFrame(packetDataSize, presentationTime); +} + +void QuickTimeFileSink::onSourceClosure(void* clientData) { + SubsessionIOState* ioState = (SubsessionIOState*)clientData; + ioState->onSourceClosure(); +} + +void QuickTimeFileSink::onSourceClosure1() { + // Check whether *all* of the subsession sources have closed. + // If not, do nothing for now: + MediaSubsessionIterator iter(fInputSession); + MediaSubsession* subsession; + while ((subsession = iter.next()) != NULL) { + SubsessionIOState* ioState + = (SubsessionIOState*)(subsession->miscPtr); + if (ioState == NULL) continue; + + if (ioState->fOurSourceIsActive) return; // this source hasn't closed + } + + completeOutputFile(); + + // Call our specified 'after' function: + if (fAfterFunc != NULL) { + (*fAfterFunc)(fAfterClientData); + } +} + +void QuickTimeFileSink::onRTCPBye(void* clientData) { + SubsessionIOState* ioState = (SubsessionIOState*)clientData; + + struct timeval timeNow; + gettimeofday(&timeNow, NULL); + unsigned secsDiff + = timeNow.tv_sec - ioState->fOurSink.fStartTime.tv_sec; + + MediaSubsession& subsession = ioState->fOurSubsession; + ioState->envir() << "Received RTCP \"BYE\" on \"" + << subsession.mediumName() + << "/" << subsession.codecName() + << "\" subsession (after " + << secsDiff << " seconds)\n"; + + // Handle the reception of a RTCP "BYE" as if the source had closed: + ioState->onSourceClosure(); +} + +static Boolean timevalGE(struct timeval const& tv1, + struct timeval const& tv2) { + return (unsigned)tv1.tv_sec > (unsigned)tv2.tv_sec + || (tv1.tv_sec == tv2.tv_sec + && (unsigned)tv1.tv_usec >= (unsigned)tv2.tv_usec); +} + +void QuickTimeFileSink::completeOutputFile() { + if (fHaveCompletedOutputFile || fOutFid == NULL) return; + + // Begin by filling in the initial "mdat" atom with the current + // file size: + int64_t curFileSize = TellFile64(fOutFid); + 
setWord64(fMDATposition, (u_int64_t)curFileSize); + + // Then, note the time of the first received data: + MediaSubsessionIterator iter(fInputSession); + MediaSubsession* subsession; + while ((subsession = iter.next()) != NULL) { + SubsessionIOState* ioState + = (SubsessionIOState*)(subsession->miscPtr); + if (ioState == NULL) continue; + + ChunkDescriptor* const headChunk = ioState->fHeadChunk; + if (headChunk != NULL + && timevalGE(fFirstDataTime, headChunk->fPresentationTime)) { + fFirstDataTime = headChunk->fPresentationTime; + } + } + + // Then, update the QuickTime-specific state for each active track: + iter.reset(); + while ((subsession = iter.next()) != NULL) { + SubsessionIOState* ioState + = (SubsessionIOState*)(subsession->miscPtr); + if (ioState == NULL) continue; + + ioState->setFinalQTstate(); + // Do the same for a hint track (if any): + if (ioState->hasHintTrack()) { + ioState->fHintTrackForUs->setFinalQTstate(); + } + } + + if (fGenerateMP4Format) { + // Begin with a "ftyp" atom: + addAtom_ftyp(); + } + + // Then, add a "moov" atom for the file metadata: + addAtom_moov(); + + // We're done: + fHaveCompletedOutputFile = True; +} + + +////////// SubsessionIOState, ChunkDescriptor implementation /////////// + +unsigned SubsessionIOState::fCurrentTrackNumber = 0; + +SubsessionIOState::SubsessionIOState(QuickTimeFileSink& sink, + MediaSubsession& subsession) + : fHintTrackForUs(NULL), fTrackHintedByUs(NULL), + fOurSink(sink), fOurSubsession(subsession), + fLastPacketRTPSeqNum(0), fHaveBeenSynced(False), fQTTotNumSamples(0), + fHeadChunk(NULL), fTailChunk(NULL), fNumChunks(0), + fHeadSyncFrame(NULL), fTailSyncFrame(NULL) { + fTrackID = ++fCurrentTrackNumber; + + fBuffer = new SubsessionBuffer(fOurSink.fBufferSize); + fPrevBuffer = sink.fPacketLossCompensate + ? 
new SubsessionBuffer(fOurSink.fBufferSize) : NULL; + + FramedSource* subsessionSource = subsession.readSource(); + fOurSourceIsActive = subsessionSource != NULL; + + fPrevFrameState.presentationTime.tv_sec = 0; + fPrevFrameState.presentationTime.tv_usec = 0; + fPrevFrameState.seqNum = 0; +} + +SubsessionIOState::~SubsessionIOState() { + delete fBuffer; delete fPrevBuffer; + + // Delete the list of chunk descriptors: + ChunkDescriptor* chunk = fHeadChunk; + while (chunk != NULL) { + ChunkDescriptor* next = chunk->fNextChunk; + delete chunk; + chunk = next; + } + + // Delete the list of sync frames: + SyncFrame* syncFrame = fHeadSyncFrame; + while (syncFrame != NULL) { + SyncFrame* next = syncFrame->nextSyncFrame; + delete syncFrame; + syncFrame = next; + } +} + +Boolean SubsessionIOState::setQTstate() { + char const* noCodecWarning1 = "Warning: We don't implement a QuickTime "; + char const* noCodecWarning2 = " Media Data Type for the \""; + char const* noCodecWarning3 = "\" track, so we'll insert a dummy \"????\" Media Data Atom instead. 
A separate, codec-specific editing pass will be needed before this track can be played.\n"; + + do { + fQTEnableTrack = True; // enable this track in the movie by default + fQTTimeScale = fOurSubsession.rtpTimestampFrequency(); // by default + fQTTimeUnitsPerSample = 1; // by default + fQTBytesPerFrame = 0; + // by default - indicates that the whole packet data is a frame + fQTSamplesPerFrame = 1; // by default + + // Make sure our subsession's medium is one that we know how to + // represent in a QuickTime file: + if (isHintTrack()) { + // Hint tracks are treated specially + fQTEnableTrack = False; // hint tracks are marked as inactive + fQTcomponentSubtype = fourChar('h','i','n','t'); + fQTcomponentName = "hint media handler"; + fQTMediaInformationAtomCreator = &QuickTimeFileSink::addAtom_gmhd; + fQTMediaDataAtomCreator = &QuickTimeFileSink::addAtom_rtp; + } else if (strcmp(fOurSubsession.mediumName(), "audio") == 0) { + fQTcomponentSubtype = fourChar('s','o','u','n'); + fQTcomponentName = "Apple Sound Media Handler"; + fQTMediaInformationAtomCreator = &QuickTimeFileSink::addAtom_smhd; + fQTMediaDataAtomCreator + = &QuickTimeFileSink::addAtom_soundMediaGeneral; // by default + fQTSoundSampleVersion = 0; // by default + + // Make sure that our subsession's codec is one that we can handle: + if (strcmp(fOurSubsession.codecName(), "X-QT") == 0 || + strcmp(fOurSubsession.codecName(), "X-QUICKTIME") == 0) { + fQTMediaDataAtomCreator = &QuickTimeFileSink::addAtom_genericMedia; + } else if (strcmp(fOurSubsession.codecName(), "PCMU") == 0) { + fQTAudioDataType = "ulaw"; + fQTBytesPerFrame = 1; + } else if (strcmp(fOurSubsession.codecName(), "GSM") == 0) { + fQTAudioDataType = "agsm"; + fQTBytesPerFrame = 33; + fQTSamplesPerFrame = 160; + } else if (strcmp(fOurSubsession.codecName(), "PCMA") == 0) { + fQTAudioDataType = "alaw"; + fQTBytesPerFrame = 1; + } else if (strcmp(fOurSubsession.codecName(), "QCELP") == 0) { + fQTMediaDataAtomCreator = 
&QuickTimeFileSink::addAtom_Qclp; + fQTSamplesPerFrame = 160; + } else if (strcmp(fOurSubsession.codecName(), "MPEG4-GENERIC") == 0 || + strcmp(fOurSubsession.codecName(), "MP4A-LATM") == 0) { + fQTMediaDataAtomCreator = &QuickTimeFileSink::addAtom_mp4a; + fQTTimeUnitsPerSample = 1024; // QT considers each frame to be a 'sample' + // The time scale (frequency) comes from the 'config' information. + // It might be different from the RTP timestamp frequency (e.g., aacPlus). + unsigned frequencyFromConfig + = samplingFrequencyFromAudioSpecificConfig(fOurSubsession.fmtp_config()); + if (frequencyFromConfig != 0) fQTTimeScale = frequencyFromConfig; + } else { + envir() << noCodecWarning1 << "Audio" << noCodecWarning2 + << fOurSubsession.codecName() << noCodecWarning3; + fQTMediaDataAtomCreator = &QuickTimeFileSink::addAtom_dummy; + fQTEnableTrack = False; // disable this track in the movie + } + } else if (strcmp(fOurSubsession.mediumName(), "video") == 0) { + fQTcomponentSubtype = fourChar('v','i','d','e'); + fQTcomponentName = "Apple Video Media Handler"; + fQTMediaInformationAtomCreator = &QuickTimeFileSink::addAtom_vmhd; + + // Make sure that our subsession's codec is one that we can handle: + if (strcmp(fOurSubsession.codecName(), "X-QT") == 0 || + strcmp(fOurSubsession.codecName(), "X-QUICKTIME") == 0) { + fQTMediaDataAtomCreator = &QuickTimeFileSink::addAtom_genericMedia; + } else if (strcmp(fOurSubsession.codecName(), "H263-1998") == 0 || + strcmp(fOurSubsession.codecName(), "H263-2000") == 0) { + fQTMediaDataAtomCreator = &QuickTimeFileSink::addAtom_h263; + fQTTimeScale = 600; + fQTTimeUnitsPerSample = fQTTimeScale/fOurSink.fMovieFPS; + } else if (strcmp(fOurSubsession.codecName(), "H264") == 0) { + fQTMediaDataAtomCreator = &QuickTimeFileSink::addAtom_avc1; + fQTTimeScale = 600; + fQTTimeUnitsPerSample = fQTTimeScale/fOurSink.fMovieFPS; + } else if (strcmp(fOurSubsession.codecName(), "MP4V-ES") == 0) { + fQTMediaDataAtomCreator = 
&QuickTimeFileSink::addAtom_mp4v; + fQTTimeScale = 600; + fQTTimeUnitsPerSample = fQTTimeScale/fOurSink.fMovieFPS; + } else { + envir() << noCodecWarning1 << "Video" << noCodecWarning2 + << fOurSubsession.codecName() << noCodecWarning3; + fQTMediaDataAtomCreator = &QuickTimeFileSink::addAtom_dummy; + fQTEnableTrack = False; // disable this track in the movie + } + } else { + envir() << "Warning: We don't implement a QuickTime Media Handler for media type \"" + << fOurSubsession.mediumName() << "\""; + break; + } + +#ifdef QT_SUPPORT_PARTIALLY_ONLY + envir() << "Warning: We don't have sufficient codec-specific information (e.g., sample sizes) to fully generate the \"" + << fOurSubsession.mediumName() << "/" << fOurSubsession.codecName() + << "\" track, so we'll disable this track in the movie. A separate, codec-specific editing pass will be needed before this track can be played\n"; + fQTEnableTrack = False; // disable this track in the movie +#endif + + return True; + } while (0); + + envir() << ", so a track for the \"" << fOurSubsession.mediumName() + << "/" << fOurSubsession.codecName() + << "\" subsession will not be included in the output QuickTime file\n"; + return False; +} + +void SubsessionIOState::setFinalQTstate() { + // Compute derived parameters, by running through the list of chunks: + fQTDurationT = 0; + + ChunkDescriptor* chunk = fHeadChunk; + while (chunk != NULL) { + unsigned const numFrames = chunk->fNumFrames; + unsigned const dur = numFrames*chunk->fFrameDuration; + fQTDurationT += dur; + + chunk = chunk->fNextChunk; + } + + // Convert this duration from track to movie time scale: + double scaleFactor = fOurSink.movieTimeScale()/(double)fQTTimeScale; + fQTDurationM = (unsigned)(fQTDurationT*scaleFactor); + + if (fQTDurationM > fOurSink.fMaxTrackDurationM) { + fOurSink.fMaxTrackDurationM = fQTDurationM; + } +} + +void SubsessionIOState::afterGettingFrame(unsigned packetDataSize, + struct timeval presentationTime) { + // Begin by checking whether 
there was a gap in the RTP stream. + // If so, try to compensate for this (if desired): + unsigned short rtpSeqNum + = fOurSubsession.rtpSource()->curPacketRTPSeqNum(); + if (fOurSink.fPacketLossCompensate && fPrevBuffer->bytesInUse() > 0) { + short seqNumGap = rtpSeqNum - fLastPacketRTPSeqNum; + for (short i = 1; i < seqNumGap; ++i) { + // Insert a copy of the previous frame, to compensate for the loss: + useFrame(*fPrevBuffer); + } + } + fLastPacketRTPSeqNum = rtpSeqNum; + + // Now, continue working with the frame that we just got + if (fBuffer->bytesInUse() == 0) { + fBuffer->setPresentationTime(presentationTime); + } + fBuffer->addBytes(packetDataSize); + + // If our RTP source is a "QuickTimeGenericRTPSource", then + // use its 'qtState' to set some parameters that we need: + if (fQTMediaDataAtomCreator == &QuickTimeFileSink::addAtom_genericMedia){ + QuickTimeGenericRTPSource* rtpSource + = (QuickTimeGenericRTPSource*)fOurSubsession.rtpSource(); + QuickTimeGenericRTPSource::QTState& qtState = rtpSource->qtState; + fQTTimeScale = qtState.timescale; + if (qtState.width != 0) { + fOurSink.fMovieWidth = qtState.width; + } + if (qtState.height != 0) { + fOurSink.fMovieHeight = qtState.height; + } + + // Also, if the media type in the "sdAtom" is one that we recognize + // to have a special parameters, then fix this here: + if (qtState.sdAtomSize >= 8) { + char const* atom = qtState.sdAtom; + unsigned mediaType = fourChar(atom[4],atom[5],atom[6],atom[7]); + switch (mediaType) { + case fourChar('a','g','s','m'): { + fQTBytesPerFrame = 33; + fQTSamplesPerFrame = 160; + break; + } + case fourChar('Q','c','l','p'): { + fQTBytesPerFrame = 35; + fQTSamplesPerFrame = 160; + break; + } + case fourChar('H','c','l','p'): { + fQTBytesPerFrame = 17; + fQTSamplesPerFrame = 160; + break; + } + case fourChar('h','2','6','3'): { + fQTTimeUnitsPerSample = fQTTimeScale/fOurSink.fMovieFPS; + break; + } + } + } + } else if (fQTMediaDataAtomCreator == &QuickTimeFileSink::addAtom_Qclp) { 
+ // For QCELP data, make a note of the frame size (even though it's the + // same as the packet data size), because it varies depending on the + // 'rate' of the stream, and this size gets used later when setting up + // the 'Qclp' QuickTime atom: + fQTBytesPerFrame = packetDataSize; + } + + useFrame(*fBuffer); + if (fOurSink.fPacketLossCompensate) { + // Save this frame, in case we need it for recovery: + SubsessionBuffer* tmp = fPrevBuffer; // assert: != NULL + fPrevBuffer = fBuffer; + fBuffer = tmp; + } + fBuffer->reset(); // for the next input + + // Now, try getting more frames: + fOurSink.continuePlaying(); +} + +void SubsessionIOState::useFrame(SubsessionBuffer& buffer) { + unsigned char* const frameSource = buffer.dataStart(); + unsigned const frameSize = buffer.bytesInUse(); + struct timeval const& presentationTime = buffer.presentationTime(); + int64_t const destFileOffset = TellFile64(fOurSink.fOutFid); + unsigned sampleNumberOfFrameStart = fQTTotNumSamples + 1; + Boolean avcHack = fQTMediaDataAtomCreator == &QuickTimeFileSink::addAtom_avc1; + + // If we're not syncing streams, or this subsession is not video, then + // just give this frame a fixed duration: + if (!fOurSink.fSyncStreams + || fQTcomponentSubtype != fourChar('v','i','d','e')) { + unsigned const frameDuration = fQTTimeUnitsPerSample*fQTSamplesPerFrame; + unsigned frameSizeToUse = frameSize; + if (avcHack) frameSizeToUse += 4; // H.264/AVC gets the frame size prefix + + fQTTotNumSamples += useFrame1(frameSizeToUse, presentationTime, frameDuration, destFileOffset); + } else { + // For synced video streams, we use the difference between successive + // frames' presentation times as the 'frame duration'. So, record + // information about the *previous* frame: + struct timeval const& ppt = fPrevFrameState.presentationTime; //abbrev + if (ppt.tv_sec != 0 || ppt.tv_usec != 0) { + // There has been a previous frame. 
+ double duration = (presentationTime.tv_sec - ppt.tv_sec) + + (presentationTime.tv_usec - ppt.tv_usec)/1000000.0; + if (duration < 0.0) duration = 0.0; + unsigned frameDuration + = (unsigned)((2*duration*fQTTimeScale+1)/2); // round + unsigned frameSizeToUse = fPrevFrameState.frameSize; + if (avcHack) frameSizeToUse += 4; // H.264/AVC gets the frame size prefix + + unsigned numSamples + = useFrame1(frameSizeToUse, ppt, frameDuration, fPrevFrameState.destFileOffset); + fQTTotNumSamples += numSamples; + sampleNumberOfFrameStart = fQTTotNumSamples + 1; + } + + if (avcHack && (*frameSource == H264_IDR_FRAME)) { + SyncFrame* newSyncFrame = new SyncFrame(fQTTotNumSamples + 1); + if (fTailSyncFrame == NULL) { + fHeadSyncFrame = newSyncFrame; + } else { + fTailSyncFrame->nextSyncFrame = newSyncFrame; + } + fTailSyncFrame = newSyncFrame; + } + + // Remember the current frame for next time: + fPrevFrameState.frameSize = frameSize; + fPrevFrameState.presentationTime = presentationTime; + fPrevFrameState.destFileOffset = destFileOffset; + } + + if (avcHack) fOurSink.addWord(frameSize); + + // Write the data into the file: + fwrite(frameSource, 1, frameSize, fOurSink.fOutFid); + + // If we have a hint track, then write to it also: + if (hasHintTrack()) { + // Because presentation times are used for RTP packet timestamps, + // we don't starting writing to the hint track until we've been synced: + if (!fHaveBeenSynced) { + fHaveBeenSynced + = fOurSubsession.rtpSource()->hasBeenSynchronizedUsingRTCP(); + } + if (fHaveBeenSynced) { + fHintTrackForUs->useFrameForHinting(frameSize, presentationTime, + sampleNumberOfFrameStart); + } + } +} + +void SubsessionIOState::useFrameForHinting(unsigned frameSize, + struct timeval presentationTime, + unsigned startSampleNumber) { + // At this point, we have a single, combined frame - not individual packets. + // For the hint track, we need to split the frame back up into separate packets. 
+ // However, for some RTP sources, then we also need to reuse the special + // header bytes that were at the start of each of the RTP packets. + Boolean hack263 = strcmp(fOurSubsession.codecName(), "H263-1998") == 0; + Boolean hackm4a_generic = strcmp(fOurSubsession.mediumName(), "audio") == 0 + && strcmp(fOurSubsession.codecName(), "MPEG4-GENERIC") == 0; + Boolean hackm4a_latm = strcmp(fOurSubsession.mediumName(), "audio") == 0 + && strcmp(fOurSubsession.codecName(), "MP4A-LATM") == 0; + Boolean hackm4a = hackm4a_generic || hackm4a_latm; + Boolean haveSpecialHeaders = (hack263 || hackm4a_generic); + + // If there has been a previous frame, then output a 'hint sample' for it. + // (We use the current frame's presentation time to compute the previous + // hint sample's duration.) + RTPSource* const rs = fOurSubsession.rtpSource(); // abbrev + struct timeval const& ppt = fPrevFrameState.presentationTime; //abbrev + if (ppt.tv_sec != 0 || ppt.tv_usec != 0) { + double duration = (presentationTime.tv_sec - ppt.tv_sec) + + (presentationTime.tv_usec - ppt.tv_usec)/1000000.0; + if (duration < 0.0) duration = 0.0; + unsigned msDuration = (unsigned)(duration*1000); // milliseconds + if (msDuration > fHINF.dmax) fHINF.dmax = msDuration; + unsigned hintSampleDuration + = (unsigned)((2*duration*fQTTimeScale+1)/2); // round + if (hackm4a) { + // Because multiple AAC frames can appear in a RTP packet, the presentation + // times of the second and subsequent frames will not be accurate. 
+ // So, use the known "hintSampleDuration" instead: + hintSampleDuration = fTrackHintedByUs->fQTTimeUnitsPerSample; + + // Also, if the 'time scale' was different from the RTP timestamp frequency, + // (as can happen with aacPlus), then we need to scale "hintSampleDuration" + // accordingly: + if (fTrackHintedByUs->fQTTimeScale != fOurSubsession.rtpTimestampFrequency()) { + unsigned const scalingFactor + = fOurSubsession.rtpTimestampFrequency()/fTrackHintedByUs->fQTTimeScale ; + hintSampleDuration *= scalingFactor; + } + } + + int64_t const hintSampleDestFileOffset = TellFile64(fOurSink.fOutFid); + + unsigned const maxPacketSize = 1450; + unsigned short numPTEntries + = (fPrevFrameState.frameSize + (maxPacketSize-1))/maxPacketSize; // normal case + unsigned char* immediateDataPtr = NULL; + unsigned immediateDataBytesRemaining = 0; + if (haveSpecialHeaders) { // special case + numPTEntries = fPrevFrameState.numSpecialHeaders; + immediateDataPtr = fPrevFrameState.specialHeaderBytes; + immediateDataBytesRemaining + = fPrevFrameState.specialHeaderBytesLength; + } + unsigned hintSampleSize + = fOurSink.addHalfWord(numPTEntries);// Entry count + hintSampleSize += fOurSink.addHalfWord(0x0000); // Reserved + + unsigned offsetWithinSample = 0; + for (unsigned i = 0; i < numPTEntries; ++i) { + // Output a Packet Table entry (representing a single RTP packet): + unsigned short numDTEntries = 1; + unsigned short seqNum = fPrevFrameState.seqNum++; + // Note: This assumes that the input stream had no packets lost ##### + unsigned rtpHeader = fPrevFrameState.rtpHeader; + if (i+1 < numPTEntries) { + // This is not the last RTP packet, so clear the marker bit: + rtpHeader &=~ (1<<23); + } + unsigned dataFrameSize = (i+1 < numPTEntries) + ? 
maxPacketSize : fPrevFrameState.frameSize - i*maxPacketSize; // normal case + unsigned sampleNumber = fPrevFrameState.startSampleNumber; + + unsigned char immediateDataLen = 0; + if (haveSpecialHeaders) { // special case + ++numDTEntries; // to include a Data Table entry for the special hdr + if (immediateDataBytesRemaining > 0) { + if (hack263) { + immediateDataLen = *immediateDataPtr++; + --immediateDataBytesRemaining; + if (immediateDataLen > immediateDataBytesRemaining) { + // shouldn't happen (length byte was bad) + immediateDataLen = immediateDataBytesRemaining; + } + } else { + immediateDataLen = fPrevFrameState.specialHeaderBytesLength; + } + } + dataFrameSize = fPrevFrameState.packetSizes[i] - immediateDataLen; + + if (hack263) { + Boolean PbitSet + = immediateDataLen >= 1 && (immediateDataPtr[0]&0x4) != 0; + if (PbitSet) { + offsetWithinSample += 2; // to omit the two leading 0 bytes + } + } + } + + // Output the Packet Table: + hintSampleSize += fOurSink.addWord(0); // Relative transmission time + hintSampleSize += fOurSink.addWord(rtpHeader|seqNum); + // RTP header info + RTP sequence number + hintSampleSize += fOurSink.addHalfWord(0x0000); // Flags + hintSampleSize += fOurSink.addHalfWord(numDTEntries); // Entry count + unsigned totalPacketSize = 0; + + // Output the Data Table: + if (haveSpecialHeaders) { + // use the "Immediate Data" format (1): + hintSampleSize += fOurSink.addByte(1); // Source + unsigned char len = immediateDataLen > 14 ? 
14 : immediateDataLen; + hintSampleSize += fOurSink.addByte(len); // Length + totalPacketSize += len; fHINF.dimm += len; + unsigned char j; + for (j = 0; j < len; ++j) { + hintSampleSize += fOurSink.addByte(immediateDataPtr[j]); // Data + } + for (j = len; j < 14; ++j) { + hintSampleSize += fOurSink.addByte(0); // Data (padding) + } + + immediateDataPtr += immediateDataLen; + immediateDataBytesRemaining -= immediateDataLen; + } + // use the "Sample Data" format (2): + hintSampleSize += fOurSink.addByte(2); // Source + hintSampleSize += fOurSink.addByte(0); // Track ref index + hintSampleSize += fOurSink.addHalfWord(dataFrameSize); // Length + totalPacketSize += dataFrameSize; fHINF.dmed += dataFrameSize; + hintSampleSize += fOurSink.addWord(sampleNumber); // Sample number + hintSampleSize += fOurSink.addWord(offsetWithinSample); // Offset + // Get "bytes|samples per compression block" from the hinted track: + unsigned short const bytesPerCompressionBlock + = fTrackHintedByUs->fQTBytesPerFrame; + unsigned short const samplesPerCompressionBlock + = fTrackHintedByUs->fQTSamplesPerFrame; + hintSampleSize += fOurSink.addHalfWord(bytesPerCompressionBlock); + hintSampleSize += fOurSink.addHalfWord(samplesPerCompressionBlock); + + offsetWithinSample += dataFrameSize;// for the next iteration (if any) + + // Tally statistics for this packet: + fHINF.nump += 1; + fHINF.tpyl += totalPacketSize; + totalPacketSize += 12; // add in the size of the RTP header + fHINF.trpy += totalPacketSize; + if (totalPacketSize > fHINF.pmax) fHINF.pmax = totalPacketSize; + } + + // Make note of this completed hint sample frame: + fQTTotNumSamples += useFrame1(hintSampleSize, ppt, hintSampleDuration, + hintSampleDestFileOffset); + } + + // Remember this frame for next time: + fPrevFrameState.frameSize = frameSize; + fPrevFrameState.presentationTime = presentationTime; + fPrevFrameState.startSampleNumber = startSampleNumber; + fPrevFrameState.rtpHeader + = rs->curPacketMarkerBit()<<23 + | 
(rs->rtpPayloadFormat()&0x7F)<<16; + if (hack263) { + H263plusVideoRTPSource* rs_263 = (H263plusVideoRTPSource*)rs; + fPrevFrameState.numSpecialHeaders = rs_263->fNumSpecialHeaders; + fPrevFrameState.specialHeaderBytesLength = rs_263->fSpecialHeaderBytesLength; + unsigned i; + for (i = 0; i < rs_263->fSpecialHeaderBytesLength; ++i) { + fPrevFrameState.specialHeaderBytes[i] = rs_263->fSpecialHeaderBytes[i]; + } + for (i = 0; i < rs_263->fNumSpecialHeaders; ++i) { + fPrevFrameState.packetSizes[i] = rs_263->fPacketSizes[i]; + } + } else if (hackm4a_generic) { + // Synthesize a special header, so that this frame can be in its own RTP packet. + unsigned const sizeLength = fOurSubsession.attrVal_unsigned("sizelength"); + unsigned const indexLength = fOurSubsession.attrVal_unsigned("indexlength"); + if (sizeLength + indexLength != 16) { + envir() << "Warning: unexpected 'sizeLength' " << sizeLength + << " and 'indexLength' " << indexLength + << "seen when creating hint track\n"; + } + fPrevFrameState.numSpecialHeaders = 1; + fPrevFrameState.specialHeaderBytesLength = 4; + fPrevFrameState.specialHeaderBytes[0] = 0; // AU_headers_length (high byte) + fPrevFrameState.specialHeaderBytes[1] = 16; // AU_headers_length (low byte) + fPrevFrameState.specialHeaderBytes[2] = ((frameSize<>8; + fPrevFrameState.specialHeaderBytes[3] = (frameSize<extendChunk(destFileOffset, sourceDataSize, + frameSize, frameDuration, + presentationTime); + } + if (newTailChunk != fTailChunk) { + // This data created a new chunk, rather than extending the old one + ++fNumChunks; + fTailChunk = newTailChunk; + } + + return numSamples; +} + +void SubsessionIOState::onSourceClosure() { + fOurSourceIsActive = False; + fOurSink.onSourceClosure1(); +} + +Boolean SubsessionIOState::syncOK(struct timeval presentationTime) { + QuickTimeFileSink& s = fOurSink; // abbreviation + if (!s.fSyncStreams) return True; // we don't care + + if (s.fNumSyncedSubsessions < s.fNumSubsessions) { + // Not all subsessions have 
yet been synced. Check whether ours was + // one of the unsynced ones, and, if so, whether it is now synced: + if (!fHaveBeenSynced) { + // We weren't synchronized before + if (fOurSubsession.rtpSource()->hasBeenSynchronizedUsingRTCP()) { + // H264 ? + if (fQTMediaDataAtomCreator == &QuickTimeFileSink::addAtom_avc1) { + // special case: audio + H264 video: wait until audio is in sync + if ((s.fNumSubsessions == 2) && (s.fNumSyncedSubsessions < (s.fNumSubsessions - 1))) return False; + + // if audio is in sync, wait for the next IDR frame to start + unsigned char* const frameSource = fBuffer->dataStart(); + if (*frameSource != H264_IDR_FRAME) return False; + } + // But now we are + fHaveBeenSynced = True; + fSyncTime = presentationTime; + ++s.fNumSyncedSubsessions; + + if (timevalGE(fSyncTime, s.fNewestSyncTime)) { + s.fNewestSyncTime = fSyncTime; + } + } + } + } + + // Check again whether all subsessions have been synced: + if (s.fNumSyncedSubsessions < s.fNumSubsessions) return False; + + // Allow this data if it is more recent than the newest sync time: + return timevalGE(presentationTime, s.fNewestSyncTime); +} + +void SubsessionIOState::setHintTrack(SubsessionIOState* hintedTrack, + SubsessionIOState* hintTrack) { + if (hintedTrack != NULL) hintedTrack->fHintTrackForUs = hintTrack; + if (hintTrack != NULL) hintTrack->fTrackHintedByUs = hintedTrack; +} + +SyncFrame::SyncFrame(unsigned frameNum) + : nextSyncFrame(NULL), sfFrameNum(frameNum) { +} + +void Count64::operator+=(unsigned arg) { + unsigned newLo = lo + arg; + if (newLo < lo) { // lo has overflowed + ++hi; + } + lo = newLo; +} + +ChunkDescriptor +::ChunkDescriptor(int64_t offsetInFile, unsigned size, + unsigned frameSize, unsigned frameDuration, + struct timeval presentationTime) + : fNextChunk(NULL), fOffsetInFile(offsetInFile), + fNumFrames(size/frameSize), + fFrameSize(frameSize), fFrameDuration(frameDuration), + fPresentationTime(presentationTime) { +} + +ChunkDescriptor* ChunkDescriptor 
+::extendChunk(int64_t newOffsetInFile, unsigned newSize, + unsigned newFrameSize, unsigned newFrameDuration, + struct timeval newPresentationTime) { + // First, check whether the new space is just at the end of this + // existing chunk: + if (newOffsetInFile == fOffsetInFile + fNumFrames*fFrameSize) { + // We can extend this existing chunk, provided that the frame size + // and frame duration have not changed: + if (newFrameSize == fFrameSize && newFrameDuration == fFrameDuration) { + fNumFrames += newSize/fFrameSize; + return this; + } + } + + // We'll allocate a new ChunkDescriptor, and link it to the end of us: + ChunkDescriptor* newDescriptor + = new ChunkDescriptor(newOffsetInFile, newSize, + newFrameSize, newFrameDuration, + newPresentationTime); + + fNextChunk = newDescriptor; + + return newDescriptor; +} + + +////////// QuickTime-specific implementation ////////// + +unsigned QuickTimeFileSink::addWord64(u_int64_t word) { + addByte((unsigned char)(word>>56)); addByte((unsigned char)(word>>48)); + addByte((unsigned char)(word>>40)); addByte((unsigned char)(word>>32)); + addByte((unsigned char)(word>>24)); addByte((unsigned char)(word>>16)); + addByte((unsigned char)(word>>8)); addByte((unsigned char)(word)); + + return 8; +} + +unsigned QuickTimeFileSink::addWord(unsigned word) { + addByte(word>>24); addByte(word>>16); + addByte(word>>8); addByte(word); + + return 4; +} + +unsigned QuickTimeFileSink::addHalfWord(unsigned short halfWord) { + addByte((unsigned char)(halfWord>>8)); addByte((unsigned char)halfWord); + + return 2; +} + +unsigned QuickTimeFileSink::addZeroWords(unsigned numWords) { + for (unsigned i = 0; i < numWords; ++i) { + addWord(0); + } + + return numWords*4; +} + +unsigned QuickTimeFileSink::add4ByteString(char const* str) { + addByte(str[0]); addByte(str[1]); addByte(str[2]); addByte(str[3]); + + return 4; +} + +unsigned QuickTimeFileSink::addArbitraryString(char const* str, + Boolean oneByteLength) { + unsigned size = 0; + if 
(oneByteLength) { + // Begin with a byte containing the string length: + unsigned strLength = strlen(str); + if (strLength >= 256) { + envir() << "QuickTimeFileSink::addArbitraryString(\"" + << str << "\") saw string longer than we know how to handle (" + << strLength << "\n"; + } + size += addByte((unsigned char)strLength); + } + + while (*str != '\0') { + size += addByte(*str++); + } + + return size; +} + +unsigned QuickTimeFileSink::addAtomHeader(char const* atomName) { + // Output a placeholder for the 4-byte size: + addWord(0); + + // Output the 4-byte atom name: + add4ByteString(atomName); + + return 8; +} + +unsigned QuickTimeFileSink::addAtomHeader64(char const* atomName) { + // Output 64Bit size marker + addWord(1); + + // Output the 4-byte atom name: + add4ByteString(atomName); + + addWord64(0); + + return 16; +} + +void QuickTimeFileSink::setWord(int64_t filePosn, unsigned size) { + do { + if (SeekFile64(fOutFid, filePosn, SEEK_SET) < 0) break; + addWord(size); + if (SeekFile64(fOutFid, 0, SEEK_END) < 0) break; // go back to where we were + + return; + } while (0); + + // One of the SeekFile64()s failed, probable because we're not a seekable file + envir() << "QuickTimeFileSink::setWord(): SeekFile64 failed (err " + << envir().getErrno() << ")\n"; +} + +void QuickTimeFileSink::setWord64(int64_t filePosn, u_int64_t size) { + do { + if (SeekFile64(fOutFid, filePosn, SEEK_SET) < 0) break; + addWord64(size); + if (SeekFile64(fOutFid, 0, SEEK_END) < 0) break; // go back to where we were + + return; + } while (0); + + // One of the SeekFile64()s failed, probable because we're not a seekable file + envir() << "QuickTimeFileSink::setWord64(): SeekFile64 failed (err " + << envir().getErrno() << ")\n"; +} + +// Methods for writing particular atoms. 
Note the following macros: + +#define addAtom(name) \ + unsigned QuickTimeFileSink::addAtom_##name() { \ + int64_t initFilePosn = TellFile64(fOutFid); \ + unsigned size = addAtomHeader("" #name "") + +#define addAtomEnd \ + setWord(initFilePosn, size); \ + return size; \ +} + +addAtom(ftyp); + size += add4ByteString("mp42"); + size += addWord(0x00000000); + size += add4ByteString("mp42"); + size += add4ByteString("isom"); +addAtomEnd; + +addAtom(moov); + size += addAtom_mvhd(); + + if (fGenerateMP4Format) { + size += addAtom_iods(); + } + + // Add a 'trak' atom for each subsession: + // (For some unknown reason, QuickTime Player (5.0 at least) + // doesn't display the movie correctly unless the audio track + // (if present) appears before the video track. So ensure this here.) + MediaSubsessionIterator iter(fInputSession); + MediaSubsession* subsession; + while ((subsession = iter.next()) != NULL) { + fCurrentIOState = (SubsessionIOState*)(subsession->miscPtr); + if (fCurrentIOState == NULL) continue; + if (strcmp(subsession->mediumName(), "audio") != 0) continue; + + size += addAtom_trak(); + + if (fCurrentIOState->hasHintTrack()) { + // This track has a hint track; output it also: + fCurrentIOState = fCurrentIOState->fHintTrackForUs; + size += addAtom_trak(); + } + } + iter.reset(); + while ((subsession = iter.next()) != NULL) { + fCurrentIOState = (SubsessionIOState*)(subsession->miscPtr); + if (fCurrentIOState == NULL) continue; + if (strcmp(subsession->mediumName(), "audio") == 0) continue; + + size += addAtom_trak(); + + if (fCurrentIOState->hasHintTrack()) { + // This track has a hint track; output it also: + fCurrentIOState = fCurrentIOState->fHintTrackForUs; + size += addAtom_trak(); + } + } +addAtomEnd; + +addAtom(mvhd); + size += addWord(0x00000000); // Version + Flags + size += addWord(fAppleCreationTime); // Creation time + size += addWord(fAppleCreationTime); // Modification time + + // For the "Time scale" field, use the largest RTP timestamp 
frequency + // that we saw in any of the subsessions. + size += addWord(movieTimeScale()); // Time scale + + unsigned const duration = fMaxTrackDurationM; + fMVHD_durationPosn = TellFile64(fOutFid); + size += addWord(duration); // Duration + + size += addWord(0x00010000); // Preferred rate + size += addWord(0x01000000); // Preferred volume + Reserved[0] + size += addZeroWords(2); // Reserved[1-2] + size += addWord(0x00010000); // matrix top left corner + size += addZeroWords(3); // matrix + size += addWord(0x00010000); // matrix center + size += addZeroWords(3); // matrix + size += addWord(0x40000000); // matrix bottom right corner + size += addZeroWords(6); // various time fields + size += addWord(SubsessionIOState::fCurrentTrackNumber+1);// Next track ID +addAtomEnd; + +addAtom(iods); + size += addWord(0x00000000); // Version + Flags + size += addWord(0x10808080); + size += addWord(0x07004FFF); + size += addWord(0xFF0FFFFF); +addAtomEnd; + +addAtom(trak); + size += addAtom_tkhd(); + + // If we're synchronizing the media streams (or are a hint track), + // add an edit list that helps do this: + if (fCurrentIOState->fHeadChunk != NULL + && (fSyncStreams || fCurrentIOState->isHintTrack())) { + size += addAtom_edts(); + } + + // If we're generating a hint track, add a 'tref' atom: + if (fCurrentIOState->isHintTrack()) size += addAtom_tref(); + + size += addAtom_mdia(); + + // If we're generating a hint track, add a 'udta' atom: + if (fCurrentIOState->isHintTrack()) size += addAtom_udta(); +addAtomEnd; + +addAtom(tkhd); + if (fCurrentIOState->fQTEnableTrack) { + size += addWord(0x0000000F); // Version + Flags + } else { + // Disable this track in the movie: + size += addWord(0x00000000); // Version + Flags + } + size += addWord(fAppleCreationTime); // Creation time + size += addWord(fAppleCreationTime); // Modification time + size += addWord(fCurrentIOState->fTrackID); // Track ID + size += addWord(0x00000000); // Reserved + + unsigned const duration = 
fCurrentIOState->fQTDurationM; // movie units + fCurrentIOState->fTKHD_durationPosn = TellFile64(fOutFid); + size += addWord(duration); // Duration + size += addZeroWords(3); // Reserved+Layer+Alternate grp + size += addWord(0x01000000); // Volume + Reserved + size += addWord(0x00010000); // matrix top left corner + size += addZeroWords(3); // matrix + size += addWord(0x00010000); // matrix center + size += addZeroWords(3); // matrix + size += addWord(0x40000000); // matrix bottom right corner + if (strcmp(fCurrentIOState->fOurSubsession.mediumName(), "video") == 0) { + size += addWord(fMovieWidth<<16); // Track width + size += addWord(fMovieHeight<<16); // Track height + } else { + size += addZeroWords(2); // not video: leave width and height fields zero + } +addAtomEnd; + +addAtom(edts); + size += addAtom_elst(); +addAtomEnd; + +#define addEdit1(duration,trackPosition) do { \ + unsigned trackDuration \ + = (unsigned) ((2*(duration)*movieTimeScale()+1)/2); \ + /* in movie time units */ \ + size += addWord(trackDuration); /* Track duration */ \ + totalDurationOfEdits += trackDuration; \ + size += addWord(trackPosition); /* Media time */ \ + size += addWord(0x00010000); /* Media rate (1x) */ \ + ++numEdits; \ +} while (0) +#define addEdit(duration) addEdit1((duration),editTrackPosition) +#define addEmptyEdit(duration) addEdit1((duration),(~0)) + +addAtom(elst); + size += addWord(0x00000000); // Version + Flags + + // Add a dummy "Number of entries" field + // (and remember its position). We'll fill this field in later: + int64_t numEntriesPosition = TellFile64(fOutFid); + size += addWord(0); // dummy for "Number of entries" + unsigned numEdits = 0; + unsigned totalDurationOfEdits = 0; // in movie time units + + // Run through our chunks, looking at their presentation times. + // From these, figure out the edits that need to be made to keep + // the track media data in sync with the presentation times. 
+ + double const syncThreshold = 0.1; // 100 ms + // don't allow the track to get out of sync by more than this + + struct timeval editStartTime = fFirstDataTime; + unsigned editTrackPosition = 0; + unsigned currentTrackPosition = 0; + double trackDurationOfEdit = 0.0; + unsigned chunkDuration = 0; + + ChunkDescriptor* chunk = fCurrentIOState->fHeadChunk; + while (chunk != NULL) { + struct timeval const& chunkStartTime = chunk->fPresentationTime; + double movieDurationOfEdit + = (chunkStartTime.tv_sec - editStartTime.tv_sec) + + (chunkStartTime.tv_usec - editStartTime.tv_usec)/1000000.0; + trackDurationOfEdit = (currentTrackPosition-editTrackPosition) + / (double)(fCurrentIOState->fQTTimeScale); + + double outOfSync = movieDurationOfEdit - trackDurationOfEdit; + + if (outOfSync > syncThreshold) { + // The track's data is too short, so end this edit, add a new + // 'empty' edit after it, and start a new edit + // (at the current track posn.): + if (trackDurationOfEdit > 0.0) addEdit(trackDurationOfEdit); + addEmptyEdit(outOfSync); + + editStartTime = chunkStartTime; + editTrackPosition = currentTrackPosition; + } else if (outOfSync < -syncThreshold) { + // The track's data is too long, so end this edit, and start + // a new edit (pointing at the current track posn.): + if (movieDurationOfEdit > 0.0) addEdit(movieDurationOfEdit); + + editStartTime = chunkStartTime; + editTrackPosition = currentTrackPosition; + } + + // Note the duration of this chunk: + unsigned numChannels = fCurrentIOState->fOurSubsession.numChannels(); + chunkDuration = chunk->fNumFrames*chunk->fFrameDuration/numChannels; + currentTrackPosition += chunkDuration; + + chunk = chunk->fNextChunk; + } + + // Write out the final edit + trackDurationOfEdit + += (double)chunkDuration/fCurrentIOState->fQTTimeScale; + if (trackDurationOfEdit > 0.0) addEdit(trackDurationOfEdit); + + // Now go back and fill in the "Number of entries" field: + setWord(numEntriesPosition, numEdits); + + // Also, if the sum of 
all of the edit durations exceeds the + // track duration that we already computed (from sample durations), + // then reset the track duration to this new value: + if (totalDurationOfEdits > fCurrentIOState->fQTDurationM) { + fCurrentIOState->fQTDurationM = totalDurationOfEdits; + setWord(fCurrentIOState->fTKHD_durationPosn, totalDurationOfEdits); + + // Also, check whether the overall movie duration needs to change: + if (totalDurationOfEdits > fMaxTrackDurationM) { + fMaxTrackDurationM = totalDurationOfEdits; + setWord(fMVHD_durationPosn, totalDurationOfEdits); + } + + // Also, convert to track time scale: + double scaleFactor + = fCurrentIOState->fQTTimeScale/(double)movieTimeScale(); + fCurrentIOState->fQTDurationT + = (unsigned)(totalDurationOfEdits*scaleFactor); + } +addAtomEnd; + +addAtom(tref); + size += addAtom_hint(); +addAtomEnd; + +addAtom(hint); + SubsessionIOState* hintedTrack = fCurrentIOState->fTrackHintedByUs; + // Assert: hintedTrack != NULL + size += addWord(hintedTrack->fTrackID); +addAtomEnd; + +addAtom(mdia); + size += addAtom_mdhd(); + size += addAtom_hdlr(); + size += addAtom_minf(); +addAtomEnd; + +addAtom(mdhd); + size += addWord(0x00000000); // Version + Flags + size += addWord(fAppleCreationTime); // Creation time + size += addWord(fAppleCreationTime); // Modification time + + unsigned const timeScale = fCurrentIOState->fQTTimeScale; + size += addWord(timeScale); // Time scale + + unsigned const duration = fCurrentIOState->fQTDurationT; // track units + size += addWord(duration); // Duration + + size += addWord(0x00000000); // Language+Quality +addAtomEnd; + +addAtom(hdlr); + size += addWord(0x00000000); // Version + Flags + size += add4ByteString("mhlr"); // Component type + size += addWord(fCurrentIOState->fQTcomponentSubtype); + // Component subtype + size += add4ByteString("appl"); // Component manufacturer + size += addWord(0x00000000); // Component flags + size += addWord(0x00000000); // Component flags mask + size += 
addArbitraryString(fCurrentIOState->fQTcomponentName); + // Component name +addAtomEnd; + +addAtom(minf); + SubsessionIOState::atomCreationFunc mediaInformationAtomCreator + = fCurrentIOState->fQTMediaInformationAtomCreator; + size += (this->*mediaInformationAtomCreator)(); + size += addAtom_hdlr2(); + size += addAtom_dinf(); + size += addAtom_stbl(); +addAtomEnd; + +addAtom(smhd); + size += addZeroWords(2); // Version+Flags+Balance+Reserved +addAtomEnd; + +addAtom(vmhd); + size += addWord(0x00000001); // Version + Flags + size += addWord(0x00408000); // Graphics mode + Opcolor[red] + size += addWord(0x80008000); // Opcolor[green} + Opcolor[blue] +addAtomEnd; + +addAtom(gmhd); + size += addAtom_gmin(); +addAtomEnd; + +addAtom(gmin); + size += addWord(0x00000000); // Version + Flags + // The following fields probably aren't used for hint tracks, so just + // use values that I've seen in other files: + size += addWord(0x00408000); // Graphics mode + Opcolor (1st 2 bytes) + size += addWord(0x80008000); // Opcolor (last 4 bytes) + size += addWord(0x00000000); // Balance + Reserved +addAtomEnd; + +unsigned QuickTimeFileSink::addAtom_hdlr2() { + int64_t initFilePosn = TellFile64(fOutFid); + unsigned size = addAtomHeader("hdlr"); + size += addWord(0x00000000); // Version + Flags + size += add4ByteString("dhlr"); // Component type + size += add4ByteString("alis"); // Component subtype + size += add4ByteString("appl"); // Component manufacturer + size += addZeroWords(2); // Component flags+Component flags mask + size += addArbitraryString("Apple Alias Data Handler"); // Component name +addAtomEnd; + +addAtom(dinf); + size += addAtom_dref(); +addAtomEnd; + +addAtom(dref); + size += addWord(0x00000000); // Version + Flags + size += addWord(0x00000001); // Number of entries + size += addAtom_alis(); +addAtomEnd; + +addAtom(alis); + size += addWord(0x00000001); // Version + Flags +addAtomEnd; + +addAtom(stbl); + size += addAtom_stsd(); + size += addAtom_stts(); + if 
(fCurrentIOState->fQTcomponentSubtype == fourChar('v','i','d','e')) { + size += addAtom_stss(); // only for video streams + } + size += addAtom_stsc(); + size += addAtom_stsz(); + size += addAtom_co64(); +addAtomEnd; + +addAtom(stsd); + size += addWord(0x00000000); // Version+Flags + size += addWord(0x00000001); // Number of entries + SubsessionIOState::atomCreationFunc mediaDataAtomCreator + = fCurrentIOState->fQTMediaDataAtomCreator; + size += (this->*mediaDataAtomCreator)(); +addAtomEnd; + +unsigned QuickTimeFileSink::addAtom_genericMedia() { + int64_t initFilePosn = TellFile64(fOutFid); + + // Our source is assumed to be a "QuickTimeGenericRTPSource" + // Use its "sdAtom" state for our contents: + QuickTimeGenericRTPSource* rtpSource = (QuickTimeGenericRTPSource*) + fCurrentIOState->fOurSubsession.rtpSource(); + QuickTimeGenericRTPSource::QTState& qtState = rtpSource->qtState; + char const* from = qtState.sdAtom; + unsigned size = qtState.sdAtomSize; + for (unsigned i = 0; i < size; ++i) addByte(from[i]); +addAtomEnd; + +unsigned QuickTimeFileSink::addAtom_soundMediaGeneral() { + int64_t initFilePosn = TellFile64(fOutFid); + unsigned size = addAtomHeader(fCurrentIOState->fQTAudioDataType); + +// General sample description fields: + size += addWord(0x00000000); // Reserved + size += addWord(0x00000001); // Reserved+Data reference index +// Sound sample description fields: + unsigned short const version = fCurrentIOState->fQTSoundSampleVersion; + size += addWord(version<<16); // Version+Revision level + size += addWord(0x00000000); // Vendor + unsigned short numChannels + = (unsigned short)(fCurrentIOState->fOurSubsession.numChannels()); + size += addHalfWord(numChannels); // Number of channels + size += addHalfWord(0x0010); // Sample size + // size += addWord(0x00000000); // Compression ID+Packet size + size += addWord(0xfffe0000); // Compression ID+Packet size ##### + + unsigned const sampleRateFixedPoint = fCurrentIOState->fQTTimeScale << 16; + size += 
addWord(sampleRateFixedPoint); // Sample rate +addAtomEnd; + +unsigned QuickTimeFileSink::addAtom_Qclp() { + // The beginning of this atom looks just like a general Sound Media atom, + // except with a version field of 1: + int64_t initFilePosn = TellFile64(fOutFid); + fCurrentIOState->fQTAudioDataType = "Qclp"; + fCurrentIOState->fQTSoundSampleVersion = 1; + unsigned size = addAtom_soundMediaGeneral(); + + // Next, add the four fields that are particular to version 1: + // (Later, parameterize these #####) + size += addWord(0x000000a0); // samples per packet + size += addWord(0x00000000); // ??? + size += addWord(0x00000000); // ??? + size += addWord(0x00000002); // bytes per sample (uncompressed) + + // Other special fields are in a 'wave' atom that follows: + size += addAtom_wave(); +addAtomEnd; + +addAtom(wave); + size += addAtom_frma(); + if (strcmp(fCurrentIOState->fQTAudioDataType, "Qclp") == 0) { + size += addWord(0x00000014); // ??? + size += add4ByteString("Qclp"); // ??? + if (fCurrentIOState->fQTBytesPerFrame == 35) { + size += addAtom_Fclp(); // full-rate QCELP + } else { + size += addAtom_Hclp(); // half-rate QCELP + } // what about other QCELP 'rates'??? ##### + size += addWord(0x00000008); // ??? + size += addWord(0x00000000); // ??? + size += addWord(0x00000000); // ??? + size += addWord(0x00000008); // ??? + } else if (strcmp(fCurrentIOState->fQTAudioDataType, "mp4a") == 0) { + size += addWord(0x0000000c); // ??? + size += add4ByteString("mp4a"); // ??? + size += addWord(0x00000000); // ??? + size += addAtom_esds(); // ESDescriptor + size += addWord(0x00000008); // ??? + size += addWord(0x00000000); // ??? + } +addAtomEnd; + +addAtom(frma); + size += add4ByteString(fCurrentIOState->fQTAudioDataType); // ??? +addAtomEnd; + +addAtom(Fclp); + size += addWord(0x00000000); // ??? +addAtomEnd; + +addAtom(Hclp); + size += addWord(0x00000000); // ??? 
+addAtomEnd; + +unsigned QuickTimeFileSink::addAtom_mp4a() { + unsigned size = 0; + // The beginning of this atom looks just like a general Sound Media atom, + // except with a version field of 1: + int64_t initFilePosn = TellFile64(fOutFid); + fCurrentIOState->fQTAudioDataType = "mp4a"; + + if (fGenerateMP4Format) { + fCurrentIOState->fQTSoundSampleVersion = 0; + size = addAtom_soundMediaGeneral(); + size += addAtom_esds(); + } else { + fCurrentIOState->fQTSoundSampleVersion = 1; + size = addAtom_soundMediaGeneral(); + + // Next, add the four fields that are particular to version 1: + // (Later, parameterize these #####) + size += addWord(fCurrentIOState->fQTTimeUnitsPerSample); + size += addWord(0x00000001); // ??? + size += addWord(0x00000001); // ??? + size += addWord(0x00000002); // bytes per sample (uncompressed) + + // Other special fields are in a 'wave' atom that follows: + size += addAtom_wave(); + } +addAtomEnd; + +addAtom(esds); + //##### + MediaSubsession& subsession = fCurrentIOState->fOurSubsession; + if (strcmp(subsession.mediumName(), "audio") == 0) { + // MPEG-4 audio + size += addWord(0x00000000); // ??? + size += addWord(0x03808080); // ??? + size += addWord(0x2a000000); // ??? + size += addWord(0x04808080); // ??? + size += addWord(0x1c401500); // ??? + size += addWord(0x18000000); // ??? + size += addWord(0x6d600000); // ??? + size += addWord(0x6d600580); // ??? + size += addByte(0x80); size += addByte(0x80); // ??? + } else if (strcmp(subsession.mediumName(), "video") == 0) { + // MPEG-4 video + size += addWord(0x00000000); // ??? + size += addWord(0x03330000); // ??? + size += addWord(0x1f042b20); // ??? + size += addWord(0x1104fd46); // ??? + size += addWord(0x000d4e10); // ??? + size += addWord(0x000d4e10); // ??? + size += addByte(0x05); // ??? 
+ } + + // Add the source's 'config' information: + unsigned configSize; + unsigned char* config + = parseGeneralConfigStr(subsession.fmtp_config(), configSize); + size += addByte(configSize); + for (unsigned i = 0; i < configSize; ++i) { + size += addByte(config[i]); + } + delete[] config; + + if (strcmp(subsession.mediumName(), "audio") == 0) { + // MPEG-4 audio + size += addWord(0x06808080); // ??? + size += addHalfWord(0x0102); // ??? + } else { + // MPEG-4 video + size += addHalfWord(0x0601); // ??? + size += addByte(0x02); // ??? + } + //##### +addAtomEnd; + +addAtom(srcq); + //##### + size += addWord(0x00000040); // ??? + //##### +addAtomEnd; + +addAtom(h263); +// General sample description fields: + size += addWord(0x00000000); // Reserved + size += addWord(0x00000001); // Reserved+Data reference index +// Video sample description fields: + size += addWord(0x00020001); // Version+Revision level + size += add4ByteString("appl"); // Vendor + size += addWord(0x00000000); // Temporal quality + size += addWord(0x000002fc); // Spatial quality + unsigned const widthAndHeight = (fMovieWidth<<16)|fMovieHeight; + size += addWord(widthAndHeight); // Width+height + size += addWord(0x00480000); // Horizontal resolution + size += addWord(0x00480000); // Vertical resolution + size += addWord(0x00000000); // Data size + size += addWord(0x00010548); // Frame count+Compressor name (start) + // "H.263" + size += addWord(0x2e323633); // Compressor name (continued) + size += addZeroWords(6); // Compressor name (continued - zero) + size += addWord(0x00000018); // Compressor name (final)+Depth + size += addHalfWord(0xffff); // Color table id +addAtomEnd; + +addAtom(avc1); +// General sample description fields: + size += addWord(0x00000000); // Reserved + size += addWord(0x00000001); // Reserved+Data reference index +// Video sample description fields: + size += addWord(0x00000000); // Version+Revision level + size += add4ByteString("appl"); // Vendor + size += 
addWord(0x00000000); // Temporal quality + size += addWord(0x00000000); // Spatial quality + unsigned const widthAndHeight = (fMovieWidth<<16)|fMovieHeight; + size += addWord(widthAndHeight); // Width+height + size += addWord(0x00480000); // Horizontal resolution + size += addWord(0x00480000); // Vertical resolution + size += addWord(0x00000000); // Data size + size += addWord(0x00010548); // Frame count+Compressor name (start) + // "H.264" + size += addWord(0x2e323634); // Compressor name (continued) + size += addZeroWords(6); // Compressor name (continued - zero) + size += addWord(0x00000018); // Compressor name (final)+Depth + size += addHalfWord(0xffff); // Color table id + size += addAtom_avcC(); +addAtomEnd; + +addAtom(avcC); +// Begin by Base-64 decoding the "sprop" parameter sets strings: + char* psets = strDup(fCurrentIOState->fOurSubsession.fmtp_spropparametersets()); + if (psets == NULL) return 0; + + size_t comma_pos = strcspn(psets, ","); + psets[comma_pos] = '\0'; + char const* sps_b64 = psets; + char const* pps_b64 = &psets[comma_pos+1]; + unsigned sps_count; + unsigned char* sps_data = base64Decode(sps_b64, sps_count, false); + unsigned pps_count; + unsigned char* pps_data = base64Decode(pps_b64, pps_count, false); + +// Then add the decoded data: + size += addByte(0x01); // configuration version + size += addByte(sps_data[1]); // profile + size += addByte(sps_data[2]); // profile compat + size += addByte(sps_data[3]); // level + size += addByte(0xff); /* 0b11111100 | lengthsize = 0x11 */ + size += addByte(0xe0 | (sps_count > 0 ? 1 : 0) ); + if (sps_count > 0) { + size += addHalfWord(sps_count); + for (unsigned i = 0; i < sps_count; i++) { + size += addByte(sps_data[i]); + } + } + size += addByte(pps_count > 0 ? 
1 : 0); + if (pps_count > 0) { + size += addHalfWord(pps_count); + for (unsigned i = 0; i < pps_count; i++) { + size += addByte(pps_data[i]); + } + } + +// Finally, delete the data that we allocated: + delete[] pps_data; delete[] sps_data; + delete[] psets; +addAtomEnd; + +addAtom(mp4v); +// General sample description fields: + size += addWord(0x00000000); // Reserved + size += addWord(0x00000001); // Reserved+Data reference index +// Video sample description fields: + size += addWord(0x00020001); // Version+Revision level + size += add4ByteString("appl"); // Vendor + size += addWord(0x00000200); // Temporal quality + size += addWord(0x00000400); // Spatial quality + unsigned const widthAndHeight = (fMovieWidth<<16)|fMovieHeight; + size += addWord(widthAndHeight); // Width+height + size += addWord(0x00480000); // Horizontal resolution + size += addWord(0x00480000); // Vertical resolution + size += addWord(0x00000000); // Data size + size += addWord(0x00010c4d); // Frame count+Compressor name (start) + // "MPEG-4 Video" + size += addWord(0x5045472d); // Compressor name (continued) + size += addWord(0x34205669); // Compressor name (continued) + size += addWord(0x64656f00); // Compressor name (continued) + size += addZeroWords(4); // Compressor name (continued - zero) + size += addWord(0x00000018); // Compressor name (final)+Depth + size += addHalfWord(0xffff); // Color table id + size += addAtom_esds(); // ESDescriptor + size += addWord(0x00000000); // ??? 
+addAtomEnd; + +unsigned QuickTimeFileSink::addAtom_rtp() { + int64_t initFilePosn = TellFile64(fOutFid); + unsigned size = addAtomHeader("rtp "); + + size += addWord(0x00000000); // Reserved (1st 4 bytes) + size += addWord(0x00000001); // Reserved (last 2 bytes) + Data ref index + size += addWord(0x00010001); // Hint track version + Last compat htv + size += addWord(1450); // Max packet size + + size += addAtom_tims(); +addAtomEnd; + +addAtom(tims); + size += addWord(fCurrentIOState->fOurSubsession.rtpTimestampFrequency()); +addAtomEnd; + +addAtom(stts); // Time-to-Sample + size += addWord(0x00000000); // Version+flags + + // First, add a dummy "Number of entries" field + // (and remember its position). We'll fill this field in later: + int64_t numEntriesPosition = TellFile64(fOutFid); + size += addWord(0); // dummy for "Number of entries" + + // Then, run through the chunk descriptors, and enter the entries + // in this (compressed) Time-to-Sample table: + unsigned numEntries = 0, numSamplesSoFar = 0; + unsigned prevSampleDuration = 0; + unsigned const samplesPerFrame = fCurrentIOState->fQTSamplesPerFrame; + ChunkDescriptor* chunk = fCurrentIOState->fHeadChunk; + while (chunk != NULL) { + unsigned const sampleDuration = chunk->fFrameDuration/samplesPerFrame; + if (sampleDuration != prevSampleDuration) { + // This chunk will start a new table entry, + // so write out the old one (if any): + if (chunk != fCurrentIOState->fHeadChunk) { + ++numEntries; + size += addWord(numSamplesSoFar); // Sample count + size += addWord(prevSampleDuration); // Sample duration + numSamplesSoFar = 0; + } + } + + unsigned const numSamples = chunk->fNumFrames*samplesPerFrame; + numSamplesSoFar += numSamples; + prevSampleDuration = sampleDuration; + chunk = chunk->fNextChunk; + } + + // Then, write out the last entry: + ++numEntries; + size += addWord(numSamplesSoFar); // Sample count + size += addWord(prevSampleDuration); // Sample duration + + // Now go back and fill in the "Number of 
entries" field: + setWord(numEntriesPosition, numEntries); +addAtomEnd; + +addAtom(stss); // Sync-Sample + size += addWord(0x00000000); // Version+flags + + // First, add a dummy "Number of entries" field + // (and remember its position). We'll fill this field in later: + int64_t numEntriesPosition = TellFile64(fOutFid); + size += addWord(0); // dummy for "Number of entries" + + unsigned numEntries = 0, numSamplesSoFar = 0; + if (fCurrentIOState->fHeadSyncFrame != NULL) { + SyncFrame* currentSyncFrame = fCurrentIOState->fHeadSyncFrame; + while(currentSyncFrame != NULL) { + ++numEntries; + size += addWord(currentSyncFrame->sfFrameNum); + currentSyncFrame = currentSyncFrame->nextSyncFrame; + } + } else { + // Then, run through the chunk descriptors, counting up the total nuber of samples: + unsigned const samplesPerFrame = fCurrentIOState->fQTSamplesPerFrame; + ChunkDescriptor* chunk = fCurrentIOState->fHeadChunk; + while (chunk != NULL) { + unsigned const numSamples = chunk->fNumFrames*samplesPerFrame; + numSamplesSoFar += numSamples; + chunk = chunk->fNextChunk; + } + + // Then, write out the sample numbers that we deem correspond to 'sync samples': + unsigned i; + for (i = 0; i < numSamplesSoFar; i += 12) { + // For an explanation of the constant "12", see http://lists.live555.com/pipermail/live-devel/2009-July/010969.html + // (Perhaps we should really try to keep track of which 'samples' ('frames' for video) really are 'key frames'?) + size += addWord(i+1); + ++numEntries; + } + + // Then, write out the last entry (if we haven't already done so): + if (i != (numSamplesSoFar - 1)) { + size += addWord(numSamplesSoFar); + ++numEntries; + } + } + + // Now go back and fill in the "Number of entries" field: + setWord(numEntriesPosition, numEntries); +addAtomEnd; + +addAtom(stsc); // Sample-to-Chunk + size += addWord(0x00000000); // Version+flags + + // First, add a dummy "Number of entries" field + // (and remember its position). 
We'll fill this field in later: + int64_t numEntriesPosition = TellFile64(fOutFid); + size += addWord(0); // dummy for "Number of entries" + + // Then, run through the chunk descriptors, and enter the entries + // in this (compressed) Sample-to-Chunk table: + unsigned numEntries = 0, chunkNumber = 0; + unsigned prevSamplesPerChunk = ~0; + unsigned const samplesPerFrame = fCurrentIOState->fQTSamplesPerFrame; + ChunkDescriptor* chunk = fCurrentIOState->fHeadChunk; + while (chunk != NULL) { + ++chunkNumber; + unsigned const samplesPerChunk = chunk->fNumFrames*samplesPerFrame; + if (samplesPerChunk != prevSamplesPerChunk) { + // This chunk will be a new table entry: + ++numEntries; + size += addWord(chunkNumber); // Chunk number + size += addWord(samplesPerChunk); // Samples per chunk + size += addWord(0x00000001); // Sample description ID + + prevSamplesPerChunk = samplesPerChunk; + } + chunk = chunk->fNextChunk; + } + + // Now go back and fill in the "Number of entries" field: + setWord(numEntriesPosition, numEntries); +addAtomEnd; + +addAtom(stsz); // Sample Size + size += addWord(0x00000000); // Version+flags + + // Begin by checking whether our chunks all have the same + // 'bytes-per-sample'. This determines whether this atom's table + // has just a single entry, or multiple entries. + Boolean haveSingleEntryTable = True; + double firstBPS = 0.0; + ChunkDescriptor* chunk = fCurrentIOState->fHeadChunk; + while (chunk != NULL) { + double bps + = (double)(chunk->fFrameSize)/(fCurrentIOState->fQTSamplesPerFrame); + if (bps < 1.0) { + // I don't think a multiple-entry table would make sense in + // this case, so assume a single entry table ??? 
##### + break; + } + + if (firstBPS == 0.0) { + firstBPS = bps; + } else if (bps != firstBPS) { + haveSingleEntryTable = False; + break; + } + + chunk = chunk->fNextChunk; + } + + unsigned sampleSize; + if (haveSingleEntryTable) { + if (fCurrentIOState->isHintTrack() + && fCurrentIOState->fHeadChunk != NULL) { + sampleSize = fCurrentIOState->fHeadChunk->fFrameSize + / fCurrentIOState->fQTSamplesPerFrame; + } else { + // The following doesn't seem right, but seems to do the right thing: + sampleSize = fCurrentIOState->fQTTimeUnitsPerSample; //??? + } + } else { + sampleSize = 0; // indicates a multiple-entry table + } + size += addWord(sampleSize); // Sample size + unsigned const totNumSamples = fCurrentIOState->fQTTotNumSamples; + size += addWord(totNumSamples); // Number of entries + + if (!haveSingleEntryTable) { + // Multiple-entry table: + // Run through the chunk descriptors, entering the sample sizes: + ChunkDescriptor* chunk = fCurrentIOState->fHeadChunk; + while (chunk != NULL) { + unsigned numSamples + = chunk->fNumFrames*(fCurrentIOState->fQTSamplesPerFrame); + unsigned sampleSize + = chunk->fFrameSize/(fCurrentIOState->fQTSamplesPerFrame); + for (unsigned i = 0; i < numSamples; ++i) { + size += addWord(sampleSize); + } + + chunk = chunk->fNextChunk; + } + } +addAtomEnd; + +addAtom(co64); // Chunk Offset + size += addWord(0x00000000); // Version+flags + size += addWord(fCurrentIOState->fNumChunks); // Number of entries + + // Run through the chunk descriptors, entering the file offsets: + ChunkDescriptor* chunk = fCurrentIOState->fHeadChunk; + while (chunk != NULL) { + size += addWord64(chunk->fOffsetInFile); + + chunk = chunk->fNextChunk; + } +addAtomEnd; + +addAtom(udta); + size += addAtom_name(); + size += addAtom_hnti(); + size += addAtom_hinf(); +addAtomEnd; + +addAtom(name); + char description[100]; + sprintf(description, "Hinted %s track", + fCurrentIOState->fOurSubsession.mediumName()); + size += addArbitraryString(description, False); // name of 
object +addAtomEnd; + +addAtom(hnti); + size += addAtom_sdp(); +addAtomEnd; + +unsigned QuickTimeFileSink::addAtom_sdp() { + int64_t initFilePosn = TellFile64(fOutFid); + unsigned size = addAtomHeader("sdp "); + + // Add this subsession's SDP lines: + char const* sdpLines = fCurrentIOState->fOurSubsession.savedSDPLines(); + // We need to change any "a=control:trackID=" values to be this + // track's actual track id: + char* newSDPLines = new char[strlen(sdpLines)+100/*overkill*/]; + char const* searchStr = "a=control:trackid="; + Boolean foundSearchString = False; + char const *p1, *p2, *p3; + for (p1 = sdpLines; *p1 != '\0'; ++p1) { + for (p2 = p1,p3 = searchStr; tolower(*p2) == *p3; ++p2,++p3) {} + if (*p3 == '\0') { + // We found the end of the search string, at p2. + int beforeTrackNumPosn = p2-sdpLines; + // Look for the subsequent track number, and skip over it: + int trackNumLength; + if (sscanf(p2, " %*d%n", &trackNumLength) < 0) break; + int afterTrackNumPosn = beforeTrackNumPosn + trackNumLength; + + // Replace the old track number with the correct one: + int i; + for (i = 0; i < beforeTrackNumPosn; ++i) newSDPLines[i] = sdpLines[i]; + sprintf(&newSDPLines[i], "%d", fCurrentIOState->fTrackID); + i = afterTrackNumPosn; + int j = i + strlen(&newSDPLines[i]); + while (1) { + if ((newSDPLines[j] = sdpLines[i]) == '\0') break; + ++i; ++j; + } + + foundSearchString = True; + break; + } + } + + if (!foundSearchString) { + // Because we didn't find a "a=control:trackID=" line, + // add one of our own: + sprintf(newSDPLines, "%s%s%d\r\n", + sdpLines, searchStr, fCurrentIOState->fTrackID); + } + + size += addArbitraryString(newSDPLines, False); + delete[] newSDPLines; +addAtomEnd; + +addAtom(hinf); + size += addAtom_totl(); + size += addAtom_npck(); + size += addAtom_tpay(); + size += addAtom_trpy(); + size += addAtom_nump(); + size += addAtom_tpyl(); + // Is 'maxr' required? 
##### + size += addAtom_dmed(); + size += addAtom_dimm(); + size += addAtom_drep(); + size += addAtom_tmin(); + size += addAtom_tmax(); + size += addAtom_pmax(); + size += addAtom_dmax(); + size += addAtom_payt(); +addAtomEnd; + +addAtom(totl); + size += addWord(fCurrentIOState->fHINF.trpy.lo); +addAtomEnd; + +addAtom(npck); + size += addWord(fCurrentIOState->fHINF.nump.lo); +addAtomEnd; + +addAtom(tpay); + size += addWord(fCurrentIOState->fHINF.tpyl.lo); +addAtomEnd; + +addAtom(trpy); + size += addWord(fCurrentIOState->fHINF.trpy.hi); + size += addWord(fCurrentIOState->fHINF.trpy.lo); +addAtomEnd; + +addAtom(nump); + size += addWord(fCurrentIOState->fHINF.nump.hi); + size += addWord(fCurrentIOState->fHINF.nump.lo); +addAtomEnd; + +addAtom(tpyl); + size += addWord(fCurrentIOState->fHINF.tpyl.hi); + size += addWord(fCurrentIOState->fHINF.tpyl.lo); +addAtomEnd; + +addAtom(dmed); + size += addWord(fCurrentIOState->fHINF.dmed.hi); + size += addWord(fCurrentIOState->fHINF.dmed.lo); +addAtomEnd; + +addAtom(dimm); + size += addWord(fCurrentIOState->fHINF.dimm.hi); + size += addWord(fCurrentIOState->fHINF.dimm.lo); +addAtomEnd; + +addAtom(drep); + size += addWord(0); + size += addWord(0); +addAtomEnd; + +addAtom(tmin); + size += addWord(0); +addAtomEnd; + +addAtom(tmax); + size += addWord(0); +addAtomEnd; + +addAtom(pmax); + size += addWord(fCurrentIOState->fHINF.pmax); +addAtomEnd; + +addAtom(dmax); + size += addWord(fCurrentIOState->fHINF.dmax); +addAtomEnd; + +addAtom(payt); + MediaSubsession& ourSubsession = fCurrentIOState->fOurSubsession; + RTPSource* rtpSource = ourSubsession.rtpSource(); + size += addWord(rtpSource->rtpPayloadFormat()); + + // Also, add a 'rtpmap' string: / + unsigned rtpmapStringLength = strlen(ourSubsession.codecName()) + 20; + char* rtpmapString = new char[rtpmapStringLength]; + sprintf(rtpmapString, "%s/%d", + ourSubsession.codecName(), rtpSource->timestampFrequency()); + size += addArbitraryString(rtpmapString); + delete[] rtpmapString; 
+addAtomEnd; + +// A dummy atom (with name "????"): +unsigned QuickTimeFileSink::addAtom_dummy() { + int64_t initFilePosn = TellFile64(fOutFid); + unsigned size = addAtomHeader("????"); +addAtomEnd; diff --git a/AnyCore/lib_rtsp/liveMedia/QuickTimeGenericRTPSource.cpp b/AnyCore/lib_rtsp/liveMedia/QuickTimeGenericRTPSource.cpp new file mode 100644 index 0000000..01a2b42 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/QuickTimeGenericRTPSource.cpp @@ -0,0 +1,274 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP Sources containing generic QuickTime stream data, as defined in +// +// Implementation + +#include "QuickTimeGenericRTPSource.hh" + +///// QTGenericBufferedPacket and QTGenericBufferedPacketFactory ///// + +// A subclass of BufferedPacket, used to separate out +// individual frames (when PCK == 2) + +class QTGenericBufferedPacket: public BufferedPacket { +public: + QTGenericBufferedPacket(QuickTimeGenericRTPSource& ourSource); + virtual ~QTGenericBufferedPacket(); + +private: // redefined virtual functions + virtual unsigned nextEnclosedFrameSize(unsigned char*& framePtr, + unsigned dataSize); +private: + QuickTimeGenericRTPSource& fOurSource; +}; + +class QTGenericBufferedPacketFactory: public BufferedPacketFactory { +private: // redefined virtual functions + virtual BufferedPacket* createNewPacket(MultiFramedRTPSource* ourSource); +}; + + +////////// QuickTimeGenericRTPSource ////////// + +QuickTimeGenericRTPSource* +QuickTimeGenericRTPSource::createNew(UsageEnvironment& env, + Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency, + char const* mimeTypeString) { + return new QuickTimeGenericRTPSource(env, RTPgs, rtpPayloadFormat, + rtpTimestampFrequency, + mimeTypeString); +} + +QuickTimeGenericRTPSource +::QuickTimeGenericRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency, + char const* mimeTypeString) + : MultiFramedRTPSource(env, RTPgs, + rtpPayloadFormat, rtpTimestampFrequency, + new QTGenericBufferedPacketFactory), + fMIMEtypeString(strDup(mimeTypeString)) { + qtState.PCK = 0; + qtState.timescale = 0; + qtState.sdAtom = NULL; + qtState.sdAtomSize = qtState.width = qtState.height = 0; +} + +QuickTimeGenericRTPSource::~QuickTimeGenericRTPSource() { + delete[] qtState.sdAtom; + delete[] (char*)fMIMEtypeString; +} + +Boolean QuickTimeGenericRTPSource +::processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize) { + unsigned char* 
headerStart = packet->data(); + unsigned packetSize = packet->dataSize(); + + // The "QuickTime Header" must be at least 4 bytes in size: + // Extract the known fields from the first 4 bytes: + unsigned expectedHeaderSize = 4; + if (packetSize < expectedHeaderSize) return False; + + unsigned char VER = (headerStart[0]&0xF0)>>4; + if (VER > 1) return False; // unknown header version + qtState.PCK = (headerStart[0]&0x0C)>>2; +#ifdef DEBUG + Boolean S = (headerStart[0]&0x02) != 0; +#endif + Boolean Q = (headerStart[0]&0x01) != 0; + + Boolean L = (headerStart[1]&0x80) != 0; + +#ifdef DEBUG + Boolean D = (headerStart[2]&0x80) != 0; + unsigned short payloadId = ((headerStart[2]&0x7F)<<8)|headerStart[3]; +#endif + headerStart += 4; + +#ifdef DEBUG + fprintf(stderr, "PCK: %d, S: %d, Q: %d, L: %d, D: %d, payloadId: %d\n", qtState.PCK, S, Q, L, D, payloadId); +#endif + + if (Q) { // A "QuickTime Payload Description" follows + expectedHeaderSize += 4; + if (packetSize < expectedHeaderSize) return False; + +#ifdef DEBUG + Boolean K = (headerStart[0]&0x80) != 0; + Boolean F = (headerStart[0]&0x40) != 0; + Boolean A = (headerStart[0]&0x20) != 0; + Boolean Z = (headerStart[0]&0x10) != 0; +#endif + unsigned payloadDescriptionLength = (headerStart[2]<<8)|headerStart[3]; + headerStart += 4; + +#ifdef DEBUG + fprintf(stderr, "\tK: %d, F: %d, A: %d, Z: %d, payloadDescriptionLength: %d\n", K, F, A, Z, payloadDescriptionLength); +#endif + // Make sure "payloadDescriptionLength" is valid + if (payloadDescriptionLength < 12) return False; + expectedHeaderSize += (payloadDescriptionLength - 4); + unsigned nonPaddedSize = expectedHeaderSize; + expectedHeaderSize += 3; + expectedHeaderSize -= expectedHeaderSize%4; // adds padding + if (packetSize < expectedHeaderSize) return False; + unsigned char padding = expectedHeaderSize - nonPaddedSize; + +#ifdef DEBUG + unsigned mediaType = (headerStart[0]<<24)|(headerStart[1]<<16) + |(headerStart[2]<<8)|headerStart[3]; +#endif + qtState.timescale = 
(headerStart[4]<<24)|(headerStart[5]<<16) + |(headerStart[6]<<8)|headerStart[7]; + headerStart += 8; + + payloadDescriptionLength -= 12; +#ifdef DEBUG + fprintf(stderr, "\tmediaType: '%c%c%c%c', timescale: %d, %d bytes of TLVs left\n", mediaType>>24, (mediaType&0xFF0000)>>16, (mediaType&0xFF00)>>8, mediaType&0xFF, qtState.timescale, payloadDescriptionLength); +#endif + + while (payloadDescriptionLength > 3) { + unsigned short tlvLength = (headerStart[0]<<8)|headerStart[1]; + unsigned short tlvType = (headerStart[2]<<8)|headerStart[3]; + payloadDescriptionLength -= 4; + if (tlvLength > payloadDescriptionLength) return False; // bad TLV + headerStart += 4; +#ifdef DEBUG + fprintf(stderr, "\t\tTLV '%c%c', length %d, leaving %d remaining bytes\n", tlvType>>8, tlvType&0xFF, tlvLength, payloadDescriptionLength - tlvLength); + for (int i = 0; i < tlvLength; ++i) fprintf(stderr, "%02x:", headerStart[i]); fprintf(stderr, "\n"); +#endif + + // Check for 'TLV's that we can use for our 'qtState' + switch (tlvType) { + case ('s'<<8|'d'): { // session description atom + // Sanity check: the first 4 bytes of this must equal "tlvLength": + unsigned atomLength = (headerStart[0]<<24)|(headerStart[1]<<16) + |(headerStart[2]<<8)|(headerStart[3]); + if (atomLength != (unsigned)tlvLength) break; + + delete[] qtState.sdAtom; qtState.sdAtom = new char[tlvLength]; + memmove(qtState.sdAtom, headerStart, tlvLength); + qtState.sdAtomSize = tlvLength; + break; + } + case ('t'<<8|'w'): { // track width + qtState.width = (headerStart[0]<<8)|headerStart[1]; + break; + } + case ('t'<<8|'h'): { // track height + qtState.height = (headerStart[0]<<8)|headerStart[1]; + break; + } + } + + payloadDescriptionLength -= tlvLength; + headerStart += tlvLength; + } + if (payloadDescriptionLength > 0) return False; // malformed TLV data + headerStart += padding; + } + + if (L) { // Sample-Specific info follows + expectedHeaderSize += 4; + if (packetSize < expectedHeaderSize) return False; + + unsigned 
ssInfoLength = (headerStart[2]<<8)|headerStart[3]; + headerStart += 4; + +#ifdef DEBUG + fprintf(stderr, "\tssInfoLength: %d\n", ssInfoLength); +#endif + // Make sure "ssInfoLength" is valid + if (ssInfoLength < 4) return False; + expectedHeaderSize += (ssInfoLength - 4); + unsigned nonPaddedSize = expectedHeaderSize; + expectedHeaderSize += 3; + expectedHeaderSize -= expectedHeaderSize%4; // adds padding + if (packetSize < expectedHeaderSize) return False; + unsigned char padding = expectedHeaderSize - nonPaddedSize; + + ssInfoLength -= 4; + while (ssInfoLength > 3) { + unsigned short tlvLength = (headerStart[0]<<8)|headerStart[1]; +#ifdef DEBUG + unsigned short tlvType = (headerStart[2]<<8)|headerStart[3]; +#endif + ssInfoLength -= 4; + if (tlvLength > ssInfoLength) return False; // bad TLV +#ifdef DEBUG + fprintf(stderr, "\t\tTLV '%c%c', length %d, leaving %d remaining bytes\n", tlvType>>8, tlvType&0xFF, tlvLength, ssInfoLength - tlvLength); + for (int i = 0; i < tlvLength; ++i) fprintf(stderr, "%02x:", headerStart[4+i]); fprintf(stderr, "\n"); +#endif + ssInfoLength -= tlvLength; + headerStart += 4 + tlvLength; + } + if (ssInfoLength > 0) return False; // malformed TLV data + headerStart += padding; + } + + fCurrentPacketBeginsFrame = fCurrentPacketCompletesFrame; + // whether the *previous* packet ended a frame + fCurrentPacketCompletesFrame = packet->rtpMarkerBit(); + + resultSpecialHeaderSize = expectedHeaderSize; +#ifdef DEBUG + fprintf(stderr, "Result special header size: %d\n", resultSpecialHeaderSize); +#endif + return True; +} + +char const* QuickTimeGenericRTPSource::MIMEtype() const { + if (fMIMEtypeString == NULL) return MultiFramedRTPSource::MIMEtype(); + + return fMIMEtypeString; +} + + +////////// QTGenericBufferedPacket and QTGenericBufferedPacketFactory impl + +QTGenericBufferedPacket +::QTGenericBufferedPacket(QuickTimeGenericRTPSource& ourSource) + : fOurSource(ourSource) { +} + +QTGenericBufferedPacket::~QTGenericBufferedPacket() { +} + 
+unsigned QTGenericBufferedPacket:: + nextEnclosedFrameSize(unsigned char*& framePtr, unsigned dataSize) { + // We use the entire packet for a frame, unless "PCK" == 2 + if (fOurSource.qtState.PCK != 2) return dataSize; + + if (dataSize < 8) return 0; // sanity check + + unsigned short sampleLength = (framePtr[2]<<8)|framePtr[3]; + // later, extract and use the "timestamp" field ##### + framePtr += 8; + dataSize -= 8; + + return sampleLength < dataSize ? sampleLength : dataSize; +} + +BufferedPacket* QTGenericBufferedPacketFactory +::createNewPacket(MultiFramedRTPSource* ourSource) { + return new QTGenericBufferedPacket((QuickTimeGenericRTPSource&)(*ourSource)); +} diff --git a/AnyCore/lib_rtsp/liveMedia/RTCP.cpp b/AnyCore/lib_rtsp/liveMedia/RTCP.cpp new file mode 100644 index 0000000..5cb001d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/RTCP.cpp @@ -0,0 +1,1073 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTCP +// Implementation + +#include "RTCP.hh" +#include "GroupsockHelper.hh" +#include "rtcp_from_spec.h" + +////////// RTCPMemberDatabase ////////// + +class RTCPMemberDatabase { +public: + RTCPMemberDatabase(RTCPInstance& ourRTCPInstance) + : fOurRTCPInstance(ourRTCPInstance), fNumMembers(1 /*ourself*/), + fTable(HashTable::create(ONE_WORD_HASH_KEYS)) { + } + + virtual ~RTCPMemberDatabase() { + delete fTable; + } + + Boolean isMember(unsigned ssrc) const { + return fTable->Lookup((char*)(long)ssrc) != NULL; + } + + Boolean noteMembership(unsigned ssrc, unsigned curTimeCount) { + Boolean isNew = !isMember(ssrc); + + if (isNew) { + ++fNumMembers; + } + + // Record the current time, so we can age stale members + fTable->Add((char*)(long)ssrc, (void*)(long)curTimeCount); + + return isNew; + } + + Boolean remove(unsigned ssrc) { + Boolean wasPresent = fTable->Remove((char*)(long)ssrc); + if (wasPresent) { + --fNumMembers; + } + return wasPresent; + } + + unsigned numMembers() const { + return fNumMembers; + } + + void reapOldMembers(unsigned threshold); + +private: + RTCPInstance& fOurRTCPInstance; + unsigned fNumMembers; + HashTable* fTable; +}; + +void RTCPMemberDatabase::reapOldMembers(unsigned threshold) { + Boolean foundOldMember; + u_int32_t oldSSRC = 0; + + do { + foundOldMember = False; + + HashTable::Iterator* iter + = HashTable::Iterator::create(*fTable); + uintptr_t timeCount; + char const* key; + while ((timeCount = (uintptr_t)(iter->next(key))) != 0) { +#ifdef DEBUG + fprintf(stderr, "reap: checking SSRC 0x%lx: %ld (threshold %d)\n", (unsigned long)key, timeCount, threshold); +#endif + if (timeCount < (uintptr_t)threshold) { // this SSRC is old + uintptr_t ssrc = (uintptr_t)key; + oldSSRC = (u_int32_t)ssrc; + foundOldMember = True; + } + } + delete iter; + + if (foundOldMember) { +#ifdef DEBUG + fprintf(stderr, "reap: removing SSRC 0x%x\n", oldSSRC); +#endif + fOurRTCPInstance.removeSSRC(oldSSRC, True); + } + } while (foundOldMember); +} + + 
////////// RTCPInstance //////////

// Returns the current wall-clock time as a floating-point number of seconds
// (with microsecond resolution), for the 'rtcp_from_spec' timing code.
static double dTimeNow() {
  struct timeval timeNow;
  gettimeofday(&timeNow, NULL);
  return (double) (timeNow.tv_sec + timeNow.tv_usec/1000000.0);
}

static unsigned const maxRTCPPacketSize = 1450;
	// bytes (1500, minus some allowance for IP, UDP, UMTP headers)
static unsigned const preferredRTCPPacketSize = 1000; // bytes

RTCPInstance::RTCPInstance(UsageEnvironment& env, Groupsock* RTCPgs,
			   unsigned totSessionBW,
			   unsigned char const* cname,
			   RTPSink* sink, RTPSource* source,
			   Boolean isSSMSource)
  : Medium(env), fRTCPInterface(this, RTCPgs), fTotSessionBW(totSessionBW),
    fSink(sink), fSource(source), fIsSSMSource(isSSMSource),
    fCNAME(RTCP_SDES_CNAME, cname), fOutgoingReportCount(1),
    fAveRTCPSize(0), fIsInitial(1), fPrevNumMembers(0),
    fLastSentSize(0), fLastReceivedSize(0), fLastReceivedSSRC(0),
    fTypeOfEvent(EVENT_UNKNOWN), fTypeOfPacket(PACKET_UNKNOWN_TYPE),
    fHaveJustSentPacket(False), fLastPacketSentSize(0),
    fByeHandlerTask(NULL), fByeHandlerClientData(NULL),
    fSRHandlerTask(NULL), fSRHandlerClientData(NULL),
    fRRHandlerTask(NULL), fRRHandlerClientData(NULL),
    fSpecificRRHandlerTable(NULL) {
#ifdef DEBUG
  fprintf(stderr, "RTCPInstance[%p]::RTCPInstance()\n", this);
#endif
  if (fTotSessionBW == 0) { // not allowed!
    env << "RTCPInstance::RTCPInstance error: totSessionBW parameter should not be zero!\n";
    fTotSessionBW = 1;
  }

  if (isSSMSource) RTCPgs->multicastSendOnly(); // don't receive multicast

  double timeNow = dTimeNow();
  fPrevReportTime = fNextReportTime = timeNow;

  fKnownMembers = new RTCPMemberDatabase(*this);
  fInBuf = new unsigned char[maxRTCPPacketSize];
  if (fKnownMembers == NULL || fInBuf == NULL) return;
  fNumBytesAlreadyRead = 0;

  fOutBuf = new OutPacketBuffer(preferredRTCPPacketSize, maxRTCPPacketSize, maxRTCPPacketSize);
  if (fOutBuf == NULL) return;

  if (fSource != NULL && fSource->RTPgs() == RTCPgs) {
    // We're receiving RTCP reports that are multiplexed with RTP, so ask the RTP source
    // to give them to us:
    fSource->registerForMultiplexedRTCPPackets(this);
  } else {
    // Arrange to handle incoming reports from the network:
    TaskScheduler::BackgroundHandlerProc* handler
      = (TaskScheduler::BackgroundHandlerProc*)&incomingReportHandler;
    fRTCPInterface.startNetworkReading(handler);
  }

  // Send our first report.
  fTypeOfEvent = EVENT_REPORT;
  onExpire(this);
}

// A (handler, client-data) pair, stored per (address, port) in
// "fSpecificRRHandlerTable".
struct RRHandlerRecord {
  TaskFunc* rrHandlerTask;
  void* rrHandlerClientData;
};

RTCPInstance::~RTCPInstance() {
#ifdef DEBUG
  fprintf(stderr, "RTCPInstance[%p]::~RTCPInstance()\n", this);
#endif
  if (fSource != NULL) fSource->deregisterForMultiplexedRTCPPackets();

  // Begin by sending a BYE.  We have to do this immediately, without
  // 'reconsideration', because "this" is going away.
  fTypeOfEvent = EVENT_BYE; // not used, but...
  sendBYE();

  if (fSpecificRRHandlerTable != NULL) {
    AddressPortLookupTable::Iterator iter(*fSpecificRRHandlerTable);
    RRHandlerRecord* rrHandler;
    while ((rrHandler = (RRHandlerRecord*)iter.next()) != NULL) {
      delete rrHandler;
    }
    delete fSpecificRRHandlerTable;
  }

  delete fKnownMembers;
  delete fOutBuf;
  delete[] fInBuf;
}

// Factory function; the constructor itself is protected.
RTCPInstance* RTCPInstance::createNew(UsageEnvironment& env, Groupsock* RTCPgs,
				      unsigned totSessionBW,
				      unsigned char const* cname,
				      RTPSink* sink, RTPSource* source,
				      Boolean isSSMSource) {
  return new RTCPInstance(env, RTCPgs, totSessionBW, cname, sink, source,
			  isSSMSource);
}

// Looks up a previously-created "RTCPInstance" by its Medium name.
// Returns False (with "resultInstance" NULL) if not found, or if the named
// medium is not an RTCP instance.
Boolean RTCPInstance::lookupByName(UsageEnvironment& env,
				   char const* instanceName,
				   RTCPInstance*& resultInstance) {
  resultInstance = NULL; // unless we succeed

  Medium* medium;
  if (!Medium::lookupByName(env, instanceName, medium)) return False;

  if (!medium->isRTCPInstance()) {
    env.setResultMsg(instanceName, " is not a RTCP instance");
    return False;
  }

  resultInstance = (RTCPInstance*)medium;
  return True;
}

Boolean RTCPInstance::isRTCPInstance() const {
  return True;
}

// The current estimate of the number of session members (including ourself).
unsigned RTCPInstance::numMembers() const {
  if (fKnownMembers == NULL) return 0;

  return fKnownMembers->numMembers();
}

// Registers a handler to be called when a RTCP "BYE" arrives.  If
// "handleActiveParticipantsOnly", the handler fires only for SSRCs that
// appear in our reception or transmission stats.
void RTCPInstance::setByeHandler(TaskFunc* handlerTask, void* clientData,
				 Boolean handleActiveParticipantsOnly) {
  fByeHandlerTask = handlerTask;
  fByeHandlerClientData = clientData;
  fByeHandleActiveParticipantsOnly = handleActiveParticipantsOnly;
}

// Registers a handler to be called when a RTCP "SR" arrives.
void RTCPInstance::setSRHandler(TaskFunc* handlerTask, void* clientData) {
  fSRHandlerTask = handlerTask;
  fSRHandlerClientData = clientData;
}

// Registers a handler to be called when a RTCP "RR" arrives.
void RTCPInstance::setRRHandler(TaskFunc* handlerTask, void* clientData) {
  fRRHandlerTask = handlerTask;
  fRRHandlerClientData = clientData;
}

// Registers a "RR" handler that fires only for reports arriving from the
// specified (address, port).  Passing NULL for both "handlerTask" and
// "clientData" unregisters any existing handler for that endpoint.
void RTCPInstance
::setSpecificRRHandler(netAddressBits fromAddress, Port fromPort,
		       TaskFunc* handlerTask, void* clientData) {
  if (handlerTask == NULL && clientData == NULL) {
    unsetSpecificRRHandler(fromAddress, fromPort);
    return;
  }

  RRHandlerRecord* rrHandler = new RRHandlerRecord;
  rrHandler->rrHandlerTask = handlerTask;
  rrHandler->rrHandlerClientData = clientData;
  if (fSpecificRRHandlerTable == NULL) {
    fSpecificRRHandlerTable = new AddressPortLookupTable;
  }
  RRHandlerRecord* existingRecord = (RRHandlerRecord*)fSpecificRRHandlerTable->Add(fromAddress, (~0), fromPort, rrHandler);
  delete existingRecord; // if any

}

void RTCPInstance
::unsetSpecificRRHandler(netAddressBits fromAddress, Port fromPort) {
  if (fSpecificRRHandlerTable == NULL) return;

  RRHandlerRecord* rrHandler
    = (RRHandlerRecord*)(fSpecificRRHandlerTable->Lookup(fromAddress, (~0), fromPort));
  if (rrHandler != NULL) {
    fSpecificRRHandlerTable->Remove(fromAddress, (~0), fromPort);
    delete rrHandler;
  }
}

// Switches this instance from UDP to RTCP-over-TCP on the given socket
// (RTSP 'interleaved' mode), restarting background reading afterwards.
void RTCPInstance::setStreamSocket(int sockNum,
				   unsigned char streamChannelId) {
  // Turn off background read handling:
  fRTCPInterface.stopNetworkReading();

  // Switch to RTCP-over-TCP:
  fRTCPInterface.setStreamSocket(sockNum, streamChannelId);

  // Turn background reading back on:
  TaskScheduler::BackgroundHandlerProc* handler
    = (TaskScheduler::BackgroundHandlerProc*)&incomingReportHandler;
  fRTCPInterface.startNetworkReading(handler);
}

// Adds an additional RTCP-over-TCP interface (without removing the others).
void RTCPInstance::addStreamSocket(int sockNum,
				   unsigned char streamChannelId) {
  // First, turn off background read handling for the default (UDP) socket:
  envir().taskScheduler().turnOffBackgroundReadHandling(fRTCPInterface.gs()->socketNum());

  // Add the RTCP-over-TCP interface:
  fRTCPInterface.addStreamSocket(sockNum, streamChannelId);

  // Turn on background reading for this socket (in case it's not on already):
  TaskScheduler::BackgroundHandlerProc* handler
    = (TaskScheduler::BackgroundHandlerProc*)&incomingReportHandler;
  fRTCPInterface.startNetworkReading(handler);
}

// Feeds an externally-obtained RTCP packet into this instance, as if it had
// been read from the network (truncating it to "maxRTCPPacketSize" if needed).
void RTCPInstance
::injectReport(u_int8_t const* packet, unsigned packetSize, struct sockaddr_in const& fromAddress) {
  if (packetSize > maxRTCPPacketSize) packetSize = maxRTCPPacketSize;
  memmove(fInBuf, packet, packetSize);

  processIncomingReport(packetSize, fromAddress, -1, 0xFF); // assume report received over UDP
}

static unsigned const IP_UDP_HDR_SIZE = 28;
	// overhead (bytes) of IP and UDP hdrs

// Advances the parse pointer and decrements the remaining byte count in step.
// NOTE: callers must check sizes before ADVANCE-ing; "packetSize" is unsigned.
#define ADVANCE(n) pkt += (n); packetSize -= (n)

// Static trampoline for the task scheduler's background-read callback.
void RTCPInstance::incomingReportHandler(RTCPInstance* instance,
					 int /*mask*/) {
  instance->incomingReportHandler1();
}

// Reads one incoming RTCP packet (possibly accumulated over several TCP
// reads), filters out packets looped back from ourself, optionally
// 'reflects' unicast RRs to the multicast group (SSM), then parses it.
void RTCPInstance::incomingReportHandler1() {
  do {
    if (fNumBytesAlreadyRead >= maxRTCPPacketSize) {
      envir() << "RTCPInstance error: Hit limit when reading incoming packet over TCP. Increase \"maxRTCPPacketSize\"\n";
      break;
    }

    unsigned numBytesRead;
    struct sockaddr_in fromAddress;
    int tcpSocketNum;
    unsigned char tcpStreamChannelId;
    Boolean packetReadWasIncomplete;
    Boolean readResult
      = fRTCPInterface.handleRead(&fInBuf[fNumBytesAlreadyRead], maxRTCPPacketSize - fNumBytesAlreadyRead,
				  numBytesRead, fromAddress,
				  tcpSocketNum, tcpStreamChannelId,
				  packetReadWasIncomplete);

    unsigned packetSize = 0;
    if (packetReadWasIncomplete) {
      fNumBytesAlreadyRead += numBytesRead;
      return; // more reads are needed to get the entire packet
    } else { // normal case: We've read the entire packet
      packetSize = fNumBytesAlreadyRead + numBytesRead;
      fNumBytesAlreadyRead = 0; // for next time
    }
    if (!readResult) break;

    // Ignore the packet if it was looped-back from ourself:
    Boolean packetWasFromOurHost = False;
    if (RTCPgs()->wasLoopedBackFromUs(envir(), fromAddress)) {
      packetWasFromOurHost = True;
      // However, we still want to handle incoming RTCP packets from
      // *other processes* on the same machine.  To distinguish this
      // case from a true loop-back, check whether we've just sent a
      // packet of the same size.  (This check isn't perfect, but it seems
      // to be the best we can do.)
      if (fHaveJustSentPacket && fLastPacketSentSize == packetSize) {
	// This is a true loop-back:
	fHaveJustSentPacket = False;
	break; // ignore this packet
      }
    }

    if (fIsSSMSource && !packetWasFromOurHost) {
      // This packet is assumed to have been received via unicast (because we're a SSM source,
      // and SSM receivers send back RTCP "RR" packets via unicast).
      // 'Reflect' the packet by resending it to the multicast group, so that any other receivers
      // can also get to see it.

      // NOTE: Denial-of-service attacks are possible here.
      // Users of this software may wish to add their own,
      // application-specific mechanism for 'authenticating' the
      // validity of this packet before reflecting it.

      // NOTE: The test for "!packetWasFromOurHost" means that we won't reflect RTCP packets
      // that come from other processes on the same host as us.  The reason for this is that the
      // 'packet size' test above is not 100% reliable; some packets that were truly looped back
      // from us might not be detected as such, and this might lead to infinite
      // forwarding/receiving of some packets.  To avoid this possibility, we reflect only
      // RTCP packets that we know for sure originated elsewhere.
      // (Note, though, that if we ever re-enable the code in "Groupsock::multicastSendOnly()",
      // then we could remove the test for "!packetWasFromOurHost".)
      fRTCPInterface.sendPacket(fInBuf, packetSize);
      fHaveJustSentPacket = True;
      fLastPacketSentSize = packetSize;
    }

    processIncomingReport(packetSize, fromAddress, tcpSocketNum, tcpStreamChannelId);
  } while (0);
}

// Validates and parses one (possibly compound) RTCP packet that is sitting in
// "fInBuf", updating reception/transmission stats and invoking any registered
// SR/RR/BYE handlers.  "tcpSocketNum" is -1 when the packet arrived over UDP.
// NOTE(review): the BYE handler is deliberately deferred to the very end,
// because invoking it may cause "this" to be deleted.
void RTCPInstance
::processIncomingReport(unsigned packetSize, struct sockaddr_in const& fromAddress,
			int tcpSocketNum, unsigned char tcpStreamChannelId) {
  do {
    Boolean callByeHandler = False;
    unsigned char* pkt = fInBuf;

#ifdef DEBUG
    fprintf(stderr, "[%p]saw incoming RTCP packet (from ", this);
    if (tcpSocketNum < 0) {
      // Note that "fromAddress" is valid only if we're receiving over UDP (not over TCP):
      fprintf(stderr, "address %s, port %d", AddressString(fromAddress).val(), ntohs(fromAddress.sin_port));
    } else {
      fprintf(stderr, "TCP socket #%d, stream channel id %d", tcpSocketNum, tcpStreamChannelId);
    }
    fprintf(stderr, ")\n");
    for (unsigned i = 0; i < packetSize; ++i) {
      if (i%4 == 0) fprintf(stderr, " ");
      fprintf(stderr, "%02x", pkt[i]);
    }
    fprintf(stderr, "\n");
#endif
    int totPacketSize = IP_UDP_HDR_SIZE + packetSize;

    // Check the RTCP packet for validity:
    // It must at least contain a header (4 bytes), and this header
    // must be version=2, with no padding bit, and a payload type of
    // SR (200) or RR (201):
    if (packetSize < 4) break;
    unsigned rtcpHdr = ntohl(*(u_int32_t*)pkt);
    // (The 0xFE in the mask below clears the low bit of the payload-type
    //  byte, so this comparison accepts both SR (200) and RR (201).)
    if ((rtcpHdr & 0xE0FE0000) != (0x80000000 | (RTCP_PT_SR<<16))) {
#ifdef DEBUG
      fprintf(stderr, "rejected bad RTCP packet: header 0x%08x\n", rtcpHdr);
#endif
      break;
    }

    // Process each of the individual RTCP 'subpackets' in (what may be)
    // a compound RTCP packet.
    int typeOfPacket = PACKET_UNKNOWN_TYPE;
    unsigned reportSenderSSRC = 0;
    Boolean packetOK = False;
    while (1) {
      unsigned rc = (rtcpHdr>>24)&0x1F;
      unsigned pt = (rtcpHdr>>16)&0xFF;
      unsigned length = 4*(rtcpHdr&0xFFFF); // doesn't count hdr
      ADVANCE(4); // skip over the header
      if (length > packetSize) break;

      // Assume that each RTCP subpacket begins with a 4-byte SSRC:
      if (length < 4) break; length -= 4;
      reportSenderSSRC = ntohl(*(u_int32_t*)pkt); ADVANCE(4);

      Boolean subPacketOK = False;
      switch (pt) {
        case RTCP_PT_SR: {
#ifdef DEBUG
	  fprintf(stderr, "SR\n");
#endif
	  if (length < 20) break; length -= 20;

	  // Extract the NTP timestamp, and note this:
	  unsigned NTPmsw = ntohl(*(u_int32_t*)pkt); ADVANCE(4);
	  unsigned NTPlsw = ntohl(*(u_int32_t*)pkt); ADVANCE(4);
	  unsigned rtpTimestamp = ntohl(*(u_int32_t*)pkt); ADVANCE(4);
	  if (fSource != NULL) {
	    RTPReceptionStatsDB& receptionStats
	      = fSource->receptionStatsDB();
	    receptionStats.noteIncomingSR(reportSenderSSRC,
					  NTPmsw, NTPlsw, rtpTimestamp);
	  }
	  ADVANCE(8); // skip over packet count, octet count

	  // If a 'SR handler' was set, call it now:
	  if (fSRHandlerTask != NULL) (*fSRHandlerTask)(fSRHandlerClientData);

	  // The rest of the SR is handled like a RR (so, no "break;" here)
	}
        case RTCP_PT_RR: {
#ifdef DEBUG
	  fprintf(stderr, "RR\n");
#endif
	  unsigned reportBlocksSize = rc*(6*4);
	  if (length < reportBlocksSize) break;
	  length -= reportBlocksSize;

	  if (fSink != NULL) {
	    // Use this information to update stats about our transmissions:
	    RTPTransmissionStatsDB& transmissionStats = fSink->transmissionStatsDB();
	    for (unsigned i = 0; i < rc; ++i) {
	      unsigned senderSSRC = ntohl(*(u_int32_t*)pkt); ADVANCE(4);
	      // We care only about reports about our own transmission, not others'
	      if (senderSSRC == fSink->SSRC()) {
		unsigned lossStats = ntohl(*(u_int32_t*)pkt); ADVANCE(4);
		unsigned highestReceived = ntohl(*(u_int32_t*)pkt); ADVANCE(4);
		unsigned jitter = ntohl(*(u_int32_t*)pkt); ADVANCE(4);
		unsigned timeLastSR = ntohl(*(u_int32_t*)pkt); ADVANCE(4);
		unsigned timeSinceLastSR = ntohl(*(u_int32_t*)pkt); ADVANCE(4);
		transmissionStats.noteIncomingRR(reportSenderSSRC, fromAddress,
						 lossStats,
						 highestReceived, jitter,
						 timeLastSR, timeSinceLastSR);
	      } else {
		ADVANCE(4*5);
	      }
	    }
	  } else {
	    ADVANCE(reportBlocksSize);
	  }

	  if (pt == RTCP_PT_RR) { // i.e., we didn't fall through from 'SR'
	    // If a 'RR handler' was set, call it now:

	    // Specific RR handler:
	    if (fSpecificRRHandlerTable != NULL) {
	      netAddressBits fromAddr;
	      portNumBits fromPortNum;
	      if (tcpSocketNum < 0) {
		// Normal case: We read the RTCP packet over UDP
		fromAddr = fromAddress.sin_addr.s_addr;
		fromPortNum = ntohs(fromAddress.sin_port);
	      } else {
		// Special case: We read the RTCP packet over TCP (interleaved)
		// Hack: Use the TCP socket and channel id to look up the handler
		fromAddr = tcpSocketNum;
		fromPortNum = tcpStreamChannelId;
	      }
	      Port fromPort(fromPortNum);
	      RRHandlerRecord* rrHandler
		= (RRHandlerRecord*)(fSpecificRRHandlerTable->Lookup(fromAddr, (~0), fromPort));
	      if (rrHandler != NULL) {
		if (rrHandler->rrHandlerTask != NULL) {
		  (*(rrHandler->rrHandlerTask))(rrHandler->rrHandlerClientData);
		}
	      }
	    }

	    // General RR handler:
	    if (fRRHandlerTask != NULL) (*fRRHandlerTask)(fRRHandlerClientData);
	  }

	  subPacketOK = True;
	  typeOfPacket = PACKET_RTCP_REPORT;
	  break;
	}
        case RTCP_PT_BYE: {
#ifdef DEBUG
	  fprintf(stderr, "BYE\n");
#endif
	  // If a 'BYE handler' was set, arrange for it to be called at the end of this routine.
	  // (Note: We don't call it immediately, in case it happens to cause "this" to be deleted.)
	  if (fByeHandlerTask != NULL
	      && (!fByeHandleActiveParticipantsOnly
		  || (fSource != NULL
		      && fSource->receptionStatsDB().lookup(reportSenderSSRC) != NULL)
		  || (fSink != NULL
		      && fSink->transmissionStatsDB().lookup(reportSenderSSRC) != NULL))) {
	    callByeHandler = True;
	  }

	  // We should really check for & handle >1 SSRCs being present #####

	  subPacketOK = True;
	  typeOfPacket = PACKET_BYE;
	  break;
	}
	// Later handle SDES, APP, and compound RTCP packets #####
        default:
#ifdef DEBUG
	  fprintf(stderr, "UNSUPPORTED TYPE(0x%x)\n", pt);
#endif
	  subPacketOK = True;
	  break;
      }
      if (!subPacketOK) break;

      // need to check for (& handle) SSRC collision! #####

#ifdef DEBUG
      fprintf(stderr, "validated RTCP subpacket (type %d): %d, %d, %d, 0x%08x\n", typeOfPacket, rc, pt, length, reportSenderSSRC);
#endif

      // Skip over any remaining bytes in this subpacket:
      ADVANCE(length);

      // Check whether another RTCP 'subpacket' follows:
      if (packetSize == 0) {
	packetOK = True;
	break;
      } else if (packetSize < 4) {
#ifdef DEBUG
	fprintf(stderr, "extraneous %d bytes at end of RTCP packet!\n", packetSize);
#endif
	break;
      }
      rtcpHdr = ntohl(*(u_int32_t*)pkt);
      if ((rtcpHdr & 0xC0000000) != 0x80000000) {
#ifdef DEBUG
	fprintf(stderr, "bad RTCP subpacket: header 0x%08x\n", rtcpHdr);
#endif
	break;
      }
    }

    if (!packetOK) {
#ifdef DEBUG
      fprintf(stderr, "rejected bad RTCP subpacket: header 0x%08x\n", rtcpHdr);
#endif
      break;
    } else {
#ifdef DEBUG
      fprintf(stderr, "validated entire RTCP packet\n");
#endif
    }

    onReceive(typeOfPacket, totPacketSize, reportSenderSSRC);

    // Finally, if we need to call a "BYE" handler, do so now (in case it causes "this" to get deleted):
    if (callByeHandler && fByeHandlerTask != NULL/*sanity check*/) {
      TaskFunc* byeHandler = fByeHandlerTask;
      fByeHandlerTask = NULL; // because we call the handler only once, by default
      (*byeHandler)(fByeHandlerClientData);
    }
  } while (0);
}

// Records the just-received packet's type/size/SSRC, then lets the
// 'rtcp_from_spec' reference code update the group-size and bandwidth state.
void RTCPInstance::onReceive(int typeOfPacket, int totPacketSize,
			     unsigned ssrc) {
  fTypeOfPacket = typeOfPacket;
  fLastReceivedSize = totPacketSize;
  fLastReceivedSSRC = ssrc;

  int members = (int)numMembers();
  int senders = (fSink != NULL) ? 1 : 0;

  OnReceive(this, // p
	    this, // e
	    &members, // members
	    &fPrevNumMembers, // pmembers
	    &senders, // senders
	    &fAveRTCPSize, // avg_rtcp_size
	    &fPrevReportTime, // tp
	    dTimeNow(), // tc
	    fNextReportTime);
}

// Builds and sends one outgoing compound report (SR/RR + SDES), and
// periodically reaps members that haven't been heard from recently.
void RTCPInstance::sendReport() {
#ifdef DEBUG
  fprintf(stderr, "sending REPORT\n");
#endif
  // Begin by including a SR and/or RR report:
  if (!addReport()) return;

  // Then, include a SDES:
  addSDES();

  // Send the report:
  sendBuiltPacket();

  // Periodically clean out old members from our SSRC membership database:
  const unsigned membershipReapPeriod = 5;
  if ((++fOutgoingReportCount) % membershipReapPeriod == 0) {
    unsigned threshold = fOutgoingReportCount - membershipReapPeriod;
    fKnownMembers->reapOldMembers(threshold);
  }
}

// Builds and sends a compound report ending in a "BYE" subpacket.
void RTCPInstance::sendBYE() {
#ifdef DEBUG
  fprintf(stderr, "sending BYE\n");
#endif
  // The packet must begin with a SR and/or RR report:
  (void)addReport(True);

  addBYE();
  sendBuiltPacket();
}

// Transmits whatever has been queued in "fOutBuf", then records its size so
// that looped-back copies of it can be recognized and ignored.
void RTCPInstance::sendBuiltPacket() {
#ifdef DEBUG
  fprintf(stderr, "sending RTCP packet\n");
  unsigned char* p = fOutBuf->packet();
  for (unsigned i = 0; i < fOutBuf->curPacketSize(); ++i) {
    if (i%4 == 0) fprintf(stderr," ");
    fprintf(stderr, "%02x", p[i]);
  }
  fprintf(stderr, "\n");
#endif
  unsigned reportSize = fOutBuf->curPacketSize();
  fRTCPInterface.sendPacket(fOutBuf->packet(), reportSize);
  fOutBuf->resetOffset();

  fLastSentSize = IP_UDP_HDR_SIZE + reportSize;
  fHaveJustSentPacket = True;
  fLastPacketSentSize = reportSize;
}

// Adds the most-recently-received SSRC to the member database; returns
// nonzero iff it was a new member.  (Called as "NewMember()" from the
// 'rtcp_from_spec' C code.)
int RTCPInstance::checkNewSSRC() {
  return fKnownMembers->noteMembership(fLastReceivedSSRC,
				       fOutgoingReportCount);
}

void RTCPInstance::removeLastReceivedSSRC() {
  removeSSRC(fLastReceivedSSRC, False/*keep stats around*/);
}

// Removes "ssrc" from the member database, optionally also purging its
// reception/transmission statistics records.
void RTCPInstance::removeSSRC(u_int32_t ssrc, Boolean alsoRemoveStats) {
  fKnownMembers->remove(ssrc);

  if (alsoRemoveStats) {
    // Also, remove records of this SSRC from any reception or transmission stats
    if (fSource != NULL) fSource->receptionStatsDB().removeRecord(ssrc);
    if (fSink != NULL) fSink->transmissionStatsDB().removeRecord(ssrc);
  }
}

// Static trampoline for the delayed-task callback that fires when the next
// report interval expires.
void RTCPInstance::onExpire(RTCPInstance* instance) {
  instance->onExpire1();
}

// Member functions to build specific kinds of report:

// Adds a SR (if we have a sink) or RR (if we have a source) to "fOutBuf".
// Returns False if reports are currently suppressed (unless "alwaysAdd").
Boolean RTCPInstance::addReport(Boolean alwaysAdd) {
  // Include a SR or a RR, depending on whether we have an associated sink or source:
  if (fSink != NULL) {
    if (!alwaysAdd) {
      if (!fSink->enableRTCPReports()) return False;

      // Hack: Don't send a SR during those (brief) times when the timestamp of the
      // next outgoing RTP packet has been preset, to ensure that that timestamp gets
      // used for that outgoing packet. (David Bertrand, 2006.07.18)
      if (fSink->nextTimestampHasBeenPreset()) return False;
    }

    addSR();
  } else if (fSource != NULL) {
    if (!alwaysAdd) {
      if (!fSource->enableRTCPReports()) return False;
    }

    addRR();
  }

  return True;
}

void RTCPInstance::addSR() {
  // ASSERT: fSink != NULL

  enqueueCommonReportPrefix(RTCP_PT_SR, fSink->SSRC(),
			    5 /* extra words in a SR */);

  // Now, add the 'sender info' for our sink

  // Insert the NTP and RTP timestamps for the 'wallclock time':
  struct timeval timeNow;
  gettimeofday(&timeNow, NULL);
  fOutBuf->enqueueWord(timeNow.tv_sec + 0x83AA7E80);
	// NTP timestamp most-significant word (1970 epoch -> 1900 epoch)
  double fractionalPart = (timeNow.tv_usec/15625.0)*0x04000000; // 2^32/10^6
  fOutBuf->enqueueWord((unsigned)(fractionalPart+0.5));
	// NTP timestamp least-significant word
  unsigned rtpTimestamp = fSink->convertToRTPTimestamp(timeNow);
  fOutBuf->enqueueWord(rtpTimestamp); // RTP ts

  // Insert the packet and byte counts:
  fOutBuf->enqueueWord(fSink->packetCount());
  fOutBuf->enqueueWord(fSink->octetCount());

  enqueueCommonReportSuffix();
}

void RTCPInstance::addRR() {
  // ASSERT: fSource != NULL

  enqueueCommonReportPrefix(RTCP_PT_RR, fSource->SSRC());
  enqueueCommonReportSuffix();
}

// Enqueues the common RTCP report header (V/RC/PT/length) and sender SSRC.
// "numExtraWords" accounts for the SR 'sender info' (5 words); it is 0 for RR.
void RTCPInstance::enqueueCommonReportPrefix(unsigned char packetType,
					     unsigned SSRC,
					     unsigned numExtraWords) {
  unsigned numReportingSources;
  if (fSource == NULL) {
    numReportingSources = 0; // we don't receive anything
  } else {
    RTPReceptionStatsDB& allReceptionStats
      = fSource->receptionStatsDB();
    numReportingSources = allReceptionStats.numActiveSourcesSinceLastReset();
    // This must be <32, to fit in 5 bits:
    // NOTE(review): clamping to 32 (0b100000) does not fit in the 5-bit RC
    // field; (32<<24) would spill into the padding bit.  This looks like it
    // should be 31 - confirm against the RTCP header layout.
    if (numReportingSources >= 32) { numReportingSources = 32; }
    // Later: support adding more reports to handle >32 sources (unlikely)#####
  }

  unsigned rtcpHdr = 0x80000000; // version 2, no padding
  rtcpHdr |= (numReportingSources<<24);
  rtcpHdr |= (packetType<<16);
  rtcpHdr |= (1 + numExtraWords + 6*numReportingSources);
	// each report block is 6 32-bit words long
  fOutBuf->enqueueWord(rtcpHdr);

  fOutBuf->enqueueWord(SSRC);
}

void RTCPInstance::enqueueCommonReportSuffix() {
  // Output the report blocks for each source:
  if (fSource != NULL) {
    RTPReceptionStatsDB& allReceptionStats
      = fSource->receptionStatsDB();

    RTPReceptionStatsDB::Iterator iterator(allReceptionStats);
    while (1) {
      RTPReceptionStats* receptionStats = iterator.next();
      if (receptionStats == NULL) break;
      enqueueReportBlock(receptionStats);
    }

    allReceptionStats.reset(); // because we have just generated a report
  }
}

// Enqueues one 6-word RTCP report block (loss, highest seq num, jitter,
// LSR, DLSR) for the given source's reception stats.
void
RTCPInstance::enqueueReportBlock(RTPReceptionStats* stats) {
  fOutBuf->enqueueWord(stats->SSRC());

  unsigned highestExtSeqNumReceived = stats->highestExtSeqNumReceived();

  unsigned totNumExpected
    = highestExtSeqNumReceived - stats->baseExtSeqNumReceived();
  int totNumLost = totNumExpected - stats->totNumPacketsReceived();
  // 'Clamp' this loss number to a 24-bit signed value:
  if (totNumLost > 0x007FFFFF) {
    totNumLost = 0x007FFFFF;
  } else if (totNumLost < 0) {
    if (totNumLost < -0x00800000) totNumLost = 0x00800000; // unlikely, but...
    totNumLost &= 0x00FFFFFF;
  }

  unsigned numExpectedSinceLastReset
    = highestExtSeqNumReceived - stats->lastResetExtSeqNumReceived();
  int numLostSinceLastReset
    = numExpectedSinceLastReset - stats->numPacketsReceivedSinceLastReset();
  unsigned char lossFraction;
  if (numExpectedSinceLastReset == 0 || numLostSinceLastReset < 0) {
    lossFraction = 0;
  } else {
    lossFraction = (unsigned char)
      ((numLostSinceLastReset << 8) / numExpectedSinceLastReset);
  }

  fOutBuf->enqueueWord((lossFraction<<24) | totNumLost);
  fOutBuf->enqueueWord(highestExtSeqNumReceived);

  fOutBuf->enqueueWord(stats->jitter());

  unsigned NTPmsw = stats->lastReceivedSR_NTPmsw();
  unsigned NTPlsw = stats->lastReceivedSR_NTPlsw();
  unsigned LSR = ((NTPmsw&0xFFFF)<<16)|(NTPlsw>>16); // middle 32 bits
  fOutBuf->enqueueWord(LSR);

  // Figure out how long has elapsed since the last SR rcvd from this src:
  struct timeval const& LSRtime = stats->lastReceivedSR_time(); // "last SR"
  struct timeval timeNow, timeSinceLSR;
  gettimeofday(&timeNow, NULL);
  if (timeNow.tv_usec < LSRtime.tv_usec) {
    timeNow.tv_usec += 1000000;
    timeNow.tv_sec -= 1;
  }
  timeSinceLSR.tv_sec = timeNow.tv_sec - LSRtime.tv_sec;
  timeSinceLSR.tv_usec = timeNow.tv_usec - LSRtime.tv_usec;
  // The enqueued time is in units of 1/65536 seconds.
  // (Note that 65536/1000000 == 1024/15625)
  unsigned DLSR;
  if (LSR == 0) {
    DLSR = 0;
  } else {
    DLSR = (timeSinceLSR.tv_sec<<16)
	 | ( (((timeSinceLSR.tv_usec<<11)+15625)/31250) & 0xFFFF);
  }
  fOutBuf->enqueueWord(DLSR);
}

void RTCPInstance::addSDES() {
  // For now we support only the CNAME item; later support more #####

  // Begin by figuring out the size of the entire SDES report:
  unsigned numBytes = 4;
      // counts the SSRC, but not the header; it'll get subtracted out
  numBytes += fCNAME.totalSize(); // includes id and length
  numBytes += 1; // the special END item

  unsigned num4ByteWords = (numBytes + 3)/4;

  unsigned rtcpHdr = 0x81000000; // version 2, no padding, 1 SSRC chunk
  rtcpHdr |= (RTCP_PT_SDES<<16);
  rtcpHdr |= num4ByteWords;
  fOutBuf->enqueueWord(rtcpHdr);

  if (fSource != NULL) {
    fOutBuf->enqueueWord(fSource->SSRC());
  } else if (fSink != NULL) {
    fOutBuf->enqueueWord(fSink->SSRC());
  }

  // Add the CNAME:
  fOutBuf->enqueue(fCNAME.data(), fCNAME.totalSize());

  // Add the 'END' item (i.e., a zero byte), plus any more needed to pad:
  // (this always appends between 1 and 4 zero bytes)
  unsigned numPaddingBytesNeeded = 4 - (fOutBuf->curPacketSize() % 4);
  unsigned char const zero = '\0';
  while (numPaddingBytesNeeded-- > 0) fOutBuf->enqueue(&zero, 1);
}

void RTCPInstance::addBYE() {
  unsigned rtcpHdr = 0x81000000; // version 2, no padding, 1 SSRC
  rtcpHdr |= (RTCP_PT_BYE<<16);
  rtcpHdr |= 1; // 2 32-bit words total (i.e., with 1 SSRC)
  fOutBuf->enqueueWord(rtcpHdr);

  if (fSource != NULL) {
    fOutBuf->enqueueWord(fSource->SSRC());
  } else if (fSink != NULL) {
    fOutBuf->enqueueWord(fSink->SSRC());
  }
}

// Schedules "onExpire()" to run at absolute time "nextTime" (in the same
// seconds-as-double timebase as "dTimeNow()").
void RTCPInstance::schedule(double nextTime) {
  fNextReportTime = nextTime;

  double secondsToDelay = nextTime - dTimeNow();
  if (secondsToDelay < 0) secondsToDelay = 0;
#ifdef DEBUG
  fprintf(stderr, "schedule(%f->%f)\n", secondsToDelay, nextTime);
#endif
  int64_t usToGo = (int64_t)(secondsToDelay * 1000000);
  nextTask() = envir().taskScheduler().scheduleDelayedTask(usToGo,
				(TaskFunc*)RTCPInstance::onExpire, this);
}

// Cancels any pending "onExpire" task, then reschedules it for "nextTime".
void RTCPInstance::reschedule(double nextTime) {
  envir().taskScheduler().unscheduleDelayedTask(nextTask());
  schedule(nextTime);
}

// Called when the report timer fires; hands our current state to the
// 'rtcp_from_spec' interval-computation code, which will in turn call back
// into "Schedule()"/"SendRTCPReport()" etc.
void RTCPInstance::onExpire1() {
  // Note: fTotSessionBW is kbits per second
  double rtcpBW = 0.05*fTotSessionBW*1024/8; // -> bytes per second

  OnExpire(this, // event
	   numMembers(), // members
	   (fSink != NULL) ? 1 : 0, // senders
	   rtcpBW, // rtcp_bw
	   (fSink != NULL) ? 1 : 0, // we_sent
	   &fAveRTCPSize, // ave_rtcp_size
	   &fIsInitial, // initial
	   dTimeNow(), // tc
	   &fPrevReportTime, // tp
	   &fPrevNumMembers // pmembers
	   );
}

////////// SDESItem //////////

// Builds one SDES item: a 1-byte tag, a 1-byte length, then up to 255 bytes
// of data (longer values are silently truncated to 255 bytes).
SDESItem::SDESItem(unsigned char tag, unsigned char const* value) {
  unsigned length = strlen((char const*)value);
  if (length > 0xFF) length = 0xFF; // maximum data length for a SDES item

  fData[0] = tag;
  fData[1] = (unsigned char)length;
  memmove(&fData[2], value, length);
}

unsigned SDESItem::totalSize() const {
  return 2 + (unsigned)fData[1];
}


////////// Implementation of routines imported by the "rtcp_from_spec" C code
// (Each of these thin wrappers recovers the "RTCPInstance" from the opaque
//  'event'/'packet' handle and forwards to the corresponding member function.)

extern "C" void Schedule(double nextTime, event e) {
  RTCPInstance* instance = (RTCPInstance*)e;
  if (instance == NULL) return;

  instance->schedule(nextTime);
}

extern "C" void Reschedule(double nextTime, event e) {
  RTCPInstance* instance = (RTCPInstance*)e;
  if (instance == NULL) return;

  instance->reschedule(nextTime);
}

extern "C" void SendRTCPReport(event e) {
  RTCPInstance* instance = (RTCPInstance*)e;
  if (instance == NULL) return;

  instance->sendReport();
}

extern "C" void SendBYEPacket(event e) {
  RTCPInstance* instance = (RTCPInstance*)e;
  if (instance == NULL) return;

  instance->sendBYE();
}

extern "C" int TypeOfEvent(event e) {
  RTCPInstance* instance = (RTCPInstance*)e;
  if (instance == NULL) return EVENT_UNKNOWN;

  return instance->typeOfEvent();
}

extern "C" int SentPacketSize(event e) {
  RTCPInstance* instance = (RTCPInstance*)e;
  if (instance == NULL) return 0;

  return instance->sentPacketSize();
}

extern "C" int PacketType(packet p) {
  RTCPInstance* instance = (RTCPInstance*)p;
  if (instance == NULL) return PACKET_UNKNOWN_TYPE;

  return instance->packetType();
}

extern "C" int ReceivedPacketSize(packet p) {
  RTCPInstance* instance = (RTCPInstance*)p;
  if (instance == NULL) return 0;

  return instance->receivedPacketSize();
}

extern "C" int NewMember(packet p) {
  RTCPInstance* instance = (RTCPInstance*)p;
  if (instance == NULL) return 0;

  return instance->checkNewSSRC();
}

extern "C" int NewSender(packet /*p*/) {
  return 0; // we don't yet recognize senders other than ourselves #####
}

extern "C" void AddMember(packet /*p*/) {
  // Do nothing; all of the real work was done when NewMember() was called
}

extern "C" void AddSender(packet /*p*/) {
  // we don't yet recognize senders other than ourselves #####
}

extern "C" void RemoveMember(packet p) {
  RTCPInstance* instance = (RTCPInstance*)p;
  if (instance == NULL) return;

  instance->removeLastReceivedSSRC();
}

extern "C" void RemoveSender(packet /*p*/) {
  // we don't yet recognize senders other than ourselves #####
}

// Returns a uniformly-distributed pseudo-random double in [0,1), built from
// a random 30-bit integer.
extern "C" double drand30() {
  unsigned tmp = our_random()&0x3FFFFFFF; // a random 30-bit integer
  return tmp/(double)(1024*1024*1024);
}
diff --git a/AnyCore/lib_rtsp/liveMedia/include/AC3AudioFileServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/AC3AudioFileServerMediaSubsession.hh
new file mode 100644
index 0000000..ecb0b0e
--- /dev/null
+++ b/AnyCore/lib_rtsp/liveMedia/include/AC3AudioFileServerMediaSubsession.hh
@@ -0,0 +1,48 @@
+/**********
+This library is free software; you can redistribute it and/or modify it under
+the terms of the GNU Lesser General Public License as published by the
+Free Software Foundation; either version 2.1 of the License, or (at your
+option) any
later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from an AC3 audio file. +// C++ header + +#ifndef _AC3_AUDIO_FILE_SERVER_MEDIA_SUBSESSION_HH +#define _AC3_AUDIO_FILE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _FILE_SERVER_MEDIA_SUBSESSION_HH +#include "FileServerMediaSubsession.hh" +#endif + +class AC3AudioFileServerMediaSubsession: public FileServerMediaSubsession{ +public: + static AC3AudioFileServerMediaSubsession* + createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource); + +private: + AC3AudioFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, Boolean reuseFirstSource); + // called only by createNew(); + virtual ~AC3AudioFileServerMediaSubsession(); + +private: // redefined virtual functions + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource); +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/AC3AudioRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/AC3AudioRTPSink.hh new file mode 100644 index 0000000..ba2ba5a --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/AC3AudioRTPSink.hh @@ -0,0 +1,57 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser 
General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for AC3 audio +// C++ header + +#ifndef _AC3_AUDIO_RTP_SINK_HH +#define _AC3_AUDIO_RTP_SINK_HH + +#ifndef _AUDIO_RTP_SINK_HH +#include "AudioRTPSink.hh" +#endif + +class AC3AudioRTPSink: public AudioRTPSink { +public: + static AC3AudioRTPSink* createNew(UsageEnvironment& env, + Groupsock* RTPgs, + u_int8_t rtpPayloadFormat, + u_int32_t rtpTimestampFrequency); + +protected: + AC3AudioRTPSink(UsageEnvironment& env, Groupsock* RTPgs, + u_int8_t rtpPayloadFormat, + u_int32_t rtpTimestampFrequency); + // called only by createNew() + + virtual ~AC3AudioRTPSink(); + +private: // redefined virtual functions: + virtual Boolean frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const; + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual unsigned specialHeaderSize() const; + +private: + unsigned char fTotNumFragmentsUsed; // used only if a frame gets fragmented across multiple packets +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/AC3AudioRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/AC3AudioRTPSource.hh new file mode 100644 index 0000000..4525094 --- 
/dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/AC3AudioRTPSource.hh @@ -0,0 +1,51 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// AC3 Audio RTP Sources +// C++ header + +#ifndef _AC3_AUDIO_RTP_SOURCE_HH +#define _AC3_AUDIO_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +class AC3AudioRTPSource: public MultiFramedRTPSource { +public: + static AC3AudioRTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + +protected: + virtual ~AC3AudioRTPSource(); + +private: + AC3AudioRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + // called only by createNew() + +private: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/AC3AudioStreamFramer.hh b/AnyCore/lib_rtsp/liveMedia/include/AC3AudioStreamFramer.hh new file mode 100644 index 0000000..9b87797 --- /dev/null +++ 
b/AnyCore/lib_rtsp/liveMedia/include/AC3AudioStreamFramer.hh @@ -0,0 +1,70 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that breaks up an AC3 audio elementary stream into frames +// C++ header + +#ifndef _AC3_AUDIO_STREAM_FRAMER_HH +#define _AC3_AUDIO_STREAM_FRAMER_HH + +#ifndef _FRAMED_FILTER_HH +#include "FramedFilter.hh" +#endif + +class AC3AudioStreamFramer: public FramedFilter { +public: + static AC3AudioStreamFramer* + createNew(UsageEnvironment& env, FramedSource* inputSource, + unsigned char streamCode = 0); + // If "streamCode" != 0, then we assume that there's a 1-byte code at the beginning of each chunk of data that we read from + // our source. If that code is not the value we want, we discard the chunk of data. + // However, if "streamCode" == 0 (the default), then we don't expect this 1-byte code. 
+ + unsigned samplingRate(); + + void flushInput(); // called if there is a discontinuity (seeking) in the input + +private: + AC3AudioStreamFramer(UsageEnvironment& env, FramedSource* inputSource, + unsigned char streamCode); + // called only by createNew() + virtual ~AC3AudioStreamFramer(); + + static void handleNewData(void* clientData, + unsigned char* ptr, unsigned size, + struct timeval presentationTime); + void handleNewData(unsigned char* ptr, unsigned size); + + void parseNextFrame(); + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + +private: + struct timeval currentFramePlayTime() const; + +private: + struct timeval fNextFramePresentationTime; + +private: // parsing state + class AC3AudioStreamParser* fParser; + unsigned char fOurStreamCode; + friend class AC3AudioStreamParser; // hack +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/ADTSAudioFileServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/ADTSAudioFileServerMediaSubsession.hh new file mode 100644 index 0000000..68f136d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/ADTSAudioFileServerMediaSubsession.hh @@ -0,0 +1,48 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. 
All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from an AAC audio file in ADTS format +// C++ header + +#ifndef _ADTS_AUDIO_FILE_SERVER_MEDIA_SUBSESSION_HH +#define _ADTS_AUDIO_FILE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _FILE_SERVER_MEDIA_SUBSESSION_HH +#include "FileServerMediaSubsession.hh" +#endif + +class ADTSAudioFileServerMediaSubsession: public FileServerMediaSubsession{ +public: + static ADTSAudioFileServerMediaSubsession* + createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource); + +protected: + ADTSAudioFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, Boolean reuseFirstSource); + // called only by createNew(); + virtual ~ADTSAudioFileServerMediaSubsession(); + +protected: // redefined virtual functions + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource); +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/ADTSAudioFileSource.hh b/AnyCore/lib_rtsp/liveMedia/include/ADTSAudioFileSource.hh new file mode 100644 index 0000000..c49876a --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/ADTSAudioFileSource.hh @@ -0,0 +1,56 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A source object for AAC audio files in ADTS format +// C++ header + +#ifndef _ADTS_AUDIO_FILE_SOURCE_HH +#define _ADTS_AUDIO_FILE_SOURCE_HH + +#ifndef _FRAMED_FILE_SOURCE_HH +#include "FramedFileSource.hh" +#endif + +class ADTSAudioFileSource: public FramedFileSource { +public: + static ADTSAudioFileSource* createNew(UsageEnvironment& env, + char const* fileName); + + unsigned samplingFrequency() const { return fSamplingFrequency; } + unsigned numChannels() const { return fNumChannels; } + char const* configStr() const { return fConfigStr; } + // returns the 'AudioSpecificConfig' for this stream (in ASCII form) + +private: + ADTSAudioFileSource(UsageEnvironment& env, FILE* fid, u_int8_t profile, + u_int8_t samplingFrequencyIndex, u_int8_t channelConfiguration); + // called only by createNew() + + virtual ~ADTSAudioFileSource(); + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + +private: + unsigned fSamplingFrequency; + unsigned fNumChannels; + unsigned fuSecsPerFrame; + char fConfigStr[5]; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/AMRAudioFileServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/AMRAudioFileServerMediaSubsession.hh new file mode 100644 index 0000000..9a6fc5d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/AMRAudioFileServerMediaSubsession.hh @@ -0,0 +1,48 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from an AMR audio file. +// C++ header + +#ifndef _AMR_AUDIO_FILE_SERVER_MEDIA_SUBSESSION_HH +#define _AMR_AUDIO_FILE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _FILE_SERVER_MEDIA_SUBSESSION_HH +#include "FileServerMediaSubsession.hh" +#endif + +class AMRAudioFileServerMediaSubsession: public FileServerMediaSubsession{ +public: + static AMRAudioFileServerMediaSubsession* + createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource); + +private: + AMRAudioFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, Boolean reuseFirstSource); + // called only by createNew(); + virtual ~AMRAudioFileServerMediaSubsession(); + +private: // redefined virtual functions + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource); +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/AMRAudioFileSink.hh b/AnyCore/lib_rtsp/liveMedia/include/AMRAudioFileSink.hh new file mode 100644 index 0000000..6d24417 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/AMRAudioFileSink.hh @@ -0,0 +1,51 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as 
published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// AMR Audio File Sinks +// C++ header + +#ifndef _AMR_AUDIO_FILE_SINK_HH +#define _AMR_AUDIO_FILE_SINK_HH + +#ifndef _FILE_SINK_HH +#include "FileSink.hh" +#endif + +class AMRAudioFileSink: public FileSink { +public: + static AMRAudioFileSink* createNew(UsageEnvironment& env, char const* fileName, + unsigned bufferSize = 10000, + Boolean oneFilePerFrame = False); + // (See "FileSink.hh" for a description of these parameters.) 
+ +protected: + AMRAudioFileSink(UsageEnvironment& env, FILE* fid, unsigned bufferSize, + char const* perFrameFileNamePrefix); + // called only by createNew() + virtual ~AMRAudioFileSink(); + +protected: // redefined virtual functions: + virtual Boolean sourceIsCompatibleWithUs(MediaSource& source); + virtual void afterGettingFrame(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime); + +protected: + Boolean fHaveWrittenHeader; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/AMRAudioFileSource.hh b/AnyCore/lib_rtsp/liveMedia/include/AMRAudioFileSource.hh new file mode 100644 index 0000000..468ace2 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/AMRAudioFileSource.hh @@ -0,0 +1,48 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A source object for AMR audio files (as defined in RFC 4867, section 5) +// C++ header + +#ifndef _AMR_AUDIO_FILE_SOURCE_HH +#define _AMR_AUDIO_FILE_SOURCE_HH + +#ifndef _AMR_AUDIO_SOURCE_HH +#include "AMRAudioSource.hh" +#endif + +class AMRAudioFileSource: public AMRAudioSource { +public: + static AMRAudioFileSource* createNew(UsageEnvironment& env, + char const* fileName); + +private: + AMRAudioFileSource(UsageEnvironment& env, FILE* fid, + Boolean isWideband, unsigned numChannels); + // called only by createNew() + + virtual ~AMRAudioFileSource(); + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + +private: + FILE* fFid; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/AMRAudioRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/AMRAudioRTPSink.hh new file mode 100644 index 0000000..82ea9b3 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/AMRAudioRTPSink.hh @@ -0,0 +1,65 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP sink for AMR audio (RFC 4867) +// C++ header + +#ifndef _AMR_AUDIO_RTP_SINK_HH +#define _AMR_AUDIO_RTP_SINK_HH + +#ifndef _AUDIO_RTP_SINK_HH +#include "AudioRTPSink.hh" +#endif + +class AMRAudioRTPSink: public AudioRTPSink { +public: + static AMRAudioRTPSink* createNew(UsageEnvironment& env, + Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + Boolean sourceIsWideband = False, + unsigned numChannelsInSource = 1); + + Boolean sourceIsWideband() const { return fSourceIsWideband; } + +protected: + AMRAudioRTPSink(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + Boolean sourceIsWideband, unsigned numChannelsInSource); + // called only by createNew() + + virtual ~AMRAudioRTPSink(); + +private: // redefined virtual functions: + virtual Boolean sourceIsCompatibleWithUs(MediaSource& source); + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual Boolean + frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const; + + virtual unsigned specialHeaderSize() const; + virtual char const* auxSDPLine(); + +private: + Boolean fSourceIsWideband; + char* fFmtpSDPLine; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/AMRAudioRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/AMRAudioRTPSource.hh new file mode 100644 index 0000000..dd3868e --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/AMRAudioRTPSource.hh @@ -0,0 +1,53 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// AMR Audio RTP Sources (RFC 4867) +// C++ header + +#ifndef _AMR_AUDIO_RTP_SOURCE_HH +#define _AMR_AUDIO_RTP_SOURCE_HH + +#ifndef _RTP_SOURCE_HH +#include "RTPSource.hh" +#endif +#ifndef _AMR_AUDIO_SOURCE_HH +#include "AMRAudioSource.hh" +#endif + +class AMRAudioRTPSource { +public: + static AMRAudioSource* createNew(UsageEnvironment& env, + Groupsock* RTPgs, + RTPSource*& resultRTPSource, + unsigned char rtpPayloadFormat, + Boolean isWideband = False, + unsigned numChannels = 1, + Boolean isOctetAligned = True, + unsigned interleaving = 0, + // relevant only if "isOctetAligned" + // The maximum # of frame-blocks in a group + // 0 means: no interleaving + Boolean robustSortingOrder = False, + // relevant only if "isOctetAligned" + Boolean CRCsArePresent = False + // relevant only if "isOctetAligned" + ); + // This returns a source to read from, but "resultRTPSource" will + // point to RTP-related state. 
+}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/AMRAudioSource.hh b/AnyCore/lib_rtsp/liveMedia/include/AMRAudioSource.hh new file mode 100644 index 0000000..f7d7f87 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/AMRAudioSource.hh @@ -0,0 +1,52 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A source object for AMR audio sources +// C++ header + +#ifndef _AMR_AUDIO_SOURCE_HH +#define _AMR_AUDIO_SOURCE_HH + +#ifndef _FRAMED_SOURCE_HH +#include "FramedSource.hh" +#endif + +class AMRAudioSource: public FramedSource { +public: + Boolean isWideband() const { return fIsWideband; } + unsigned numChannels() const { return fNumChannels; } + + u_int8_t lastFrameHeader() const { return fLastFrameHeader; } + // The frame header for the most recently read frame (RFC 4867, sec. 
5.3) + +protected: + AMRAudioSource(UsageEnvironment& env, Boolean isWideband, unsigned numChannels); + // virtual base class + virtual ~AMRAudioSource(); + +private: + // redefined virtual functions: + virtual char const* MIMEtype() const; + virtual Boolean isAMRAudioSource() const; + +protected: + Boolean fIsWideband; + unsigned fNumChannels; + u_int8_t fLastFrameHeader; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/AVIFileSink.hh b/AnyCore/lib_rtsp/liveMedia/include/AVIFileSink.hh new file mode 100644 index 0000000..5c319c9 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/AVIFileSink.hh @@ -0,0 +1,115 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A sink that generates an AVI file from a composite media session +// C++ header + +#ifndef _AVI_FILE_SINK_HH +#define _AVI_FILE_SINK_HH + +#ifndef _MEDIA_SESSION_HH +#include "MediaSession.hh" +#endif + +class AVIFileSink: public Medium { +public: + static AVIFileSink* createNew(UsageEnvironment& env, + MediaSession& inputSession, + char const* outputFileName, + unsigned bufferSize = 20000, + unsigned short movieWidth = 240, + unsigned short movieHeight = 180, + unsigned movieFPS = 15, + Boolean packetLossCompensate = False); + + typedef void (afterPlayingFunc)(void* clientData); + Boolean startPlaying(afterPlayingFunc* afterFunc, + void* afterClientData); + + unsigned numActiveSubsessions() const { return fNumSubsessions; } + +private: + AVIFileSink(UsageEnvironment& env, MediaSession& inputSession, + char const* outputFileName, unsigned bufferSize, + unsigned short movieWidth, unsigned short movieHeight, + unsigned movieFPS, Boolean packetLossCompensate); + // called only by createNew() + virtual ~AVIFileSink(); + + Boolean continuePlaying(); + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + static void onSourceClosure(void* clientData); + void onSourceClosure1(); + static void onRTCPBye(void* clientData); + void addIndexRecord(class AVIIndexRecord* newIndexRecord); + void completeOutputFile(); + +private: + friend class AVISubsessionIOState; + MediaSession& fInputSession; + FILE* fOutFid; + class AVIIndexRecord *fIndexRecordsHead, *fIndexRecordsTail; + unsigned fNumIndexRecords; + unsigned fBufferSize; + Boolean fPacketLossCompensate; + Boolean fAreCurrentlyBeingPlayed; + afterPlayingFunc* fAfterFunc; + void* fAfterClientData; + unsigned fNumSubsessions; + unsigned fNumBytesWritten; + struct timeval fStartTime; + Boolean fHaveCompletedOutputFile; + +private: + ///// Definitions specific to the AVI file format: + + unsigned 
addWord(unsigned word); // outputs "word" in little-endian order + unsigned addHalfWord(unsigned short halfWord); + unsigned addByte(unsigned char byte) { + putc(byte, fOutFid); + return 1; + } + unsigned addZeroWords(unsigned numWords); + unsigned add4ByteString(char const* str); + void setWord(unsigned filePosn, unsigned size); + + // Define member functions for outputting various types of file header: +#define _header(name) unsigned addFileHeader_##name() + _header(AVI); + _header(hdrl); + _header(avih); + _header(strl); + _header(strh); + _header(strf); + _header(JUNK); +// _header(JUNK); + _header(movi); +private: + unsigned short fMovieWidth, fMovieHeight; + unsigned fMovieFPS; + unsigned fRIFFSizePosition, fRIFFSizeValue; + unsigned fAVIHMaxBytesPerSecondPosition; + unsigned fAVIHFrameCountPosition; + unsigned fMoviSizePosition, fMoviSizeValue; + class AVISubsessionIOState* fCurrentIOState; + unsigned fJunkNumber; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/AudioInputDevice.hh b/AnyCore/lib_rtsp/liveMedia/include/AudioInputDevice.hh new file mode 100644 index 0000000..e0a1a3f --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/AudioInputDevice.hh @@ -0,0 +1,71 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Generic audio input device (such as a microphone, or an input sound card) +// C++ header + +#ifndef _AUDIO_INPUT_DEVICE_HH +#define _AUDIO_INPUT_DEVICE_HH + +#ifndef _FRAMED_SOURCE_HH +#include "FramedSource.hh" +#endif + +class AudioPortNames { +public: + AudioPortNames(); + virtual ~AudioPortNames(); + + unsigned numPorts; + char** portName; +}; + +class AudioInputDevice: public FramedSource { +public: + unsigned char bitsPerSample() const { return fBitsPerSample; } + unsigned char numChannels() const { return fNumChannels; } + unsigned samplingFrequency() const { return fSamplingFrequency; } + + virtual Boolean setInputPort(int portIndex) = 0; + virtual double getAverageLevel() const = 0; + + static AudioInputDevice* + createNew(UsageEnvironment& env, int inputPortNumber, + unsigned char bitsPerSample, unsigned char numChannels, + unsigned samplingFrequency, unsigned granularityInMS = 20); + static AudioPortNames* getPortNames(); + + static char** allowedDeviceNames; + // If this is set to non-NULL, then it's a NULL-terminated array of strings + // of device names that we are allowed to access. 
+ +protected: + AudioInputDevice(UsageEnvironment& env, + unsigned char bitsPerSample, + unsigned char numChannels, + unsigned samplingFrequency, + unsigned granularityInMS); + // we're an abstract base class + + virtual ~AudioInputDevice(); + +protected: + unsigned char fBitsPerSample, fNumChannels; + unsigned fSamplingFrequency; + unsigned fGranularityInMS; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/AudioRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/AudioRTPSink.hh new file mode 100644 index 0000000..f00f865 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/AudioRTPSink.hh @@ -0,0 +1,42 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A generic RTP sink for audio codecs (abstract base class) +// C++ header + +#ifndef _AUDIO_RTP_SINK_HH +#define _AUDIO_RTP_SINK_HH + +#ifndef _MULTI_FRAMED_RTP_SINK_HH +#include "MultiFramedRTPSink.hh" +#endif + +class AudioRTPSink: public MultiFramedRTPSink { +protected: + AudioRTPSink(UsageEnvironment& env, + Groupsock* rtpgs, unsigned char rtpPayloadType, + unsigned rtpTimestampFrequency, + char const* rtpPayloadFormatName, + unsigned numChannels = 1); + // (we're an abstract base class) + virtual ~AudioRTPSink(); + +private: // redefined virtual functions: + virtual char const* sdpMediaType() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/Base64.hh b/AnyCore/lib_rtsp/liveMedia/include/Base64.hh new file mode 100644 index 0000000..feda65d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/Base64.hh @@ -0,0 +1,43 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Base64 encoding and decoding +// C++ header + +#ifndef _BASE64_HH +#define _BASE64_HH + +#ifndef _BOOLEAN_HH +#include "Boolean.hh" +#endif + +unsigned char* base64Decode(char const* in, unsigned& resultSize, + Boolean trimTrailingZeros = True); + // returns a newly allocated array - of size "resultSize" - that + // the caller is responsible for delete[]ing. + +unsigned char* base64Decode(char const* in, unsigned inSize, + unsigned& resultSize, + Boolean trimTrailingZeros = True); + // As above, but includes the size of the input string (i.e., the number of bytes to decode) as a parameter. + // This saves an extra call to "strlen()" if we already know the length of the input string. + +char* base64Encode(char const* orig, unsigned origLength); + // returns a 0-terminated string that + // the caller is responsible for delete[]ing. + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/BasicUDPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/BasicUDPSink.hh new file mode 100644 index 0000000..14f6e60 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/BasicUDPSink.hh @@ -0,0 +1,62 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A simple UDP sink (i.e., without RTP or other headers added); one frame per packet +// C++ header + +#ifndef _BASIC_UDP_SINK_HH +#define _BASIC_UDP_SINK_HH + +#ifndef _MEDIA_SINK_HH +#include "MediaSink.hh" +#endif +#ifndef _GROUPSOCK_HH +#include +#endif + +class BasicUDPSink: public MediaSink { +public: + static BasicUDPSink* createNew(UsageEnvironment& env, Groupsock* gs, + unsigned maxPayloadSize = 1450); +protected: + BasicUDPSink(UsageEnvironment& env, Groupsock* gs, unsigned maxPayloadSize); + // called only by createNew() + virtual ~BasicUDPSink(); + +private: // redefined virtual functions: + virtual Boolean continuePlaying(); + +private: + void continuePlaying1(); + + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize, unsigned numTruncatedBytes, + unsigned durationInMicroseconds); + + static void sendNext(void* firstArg); + +private: + Groupsock* fGS; + unsigned fMaxPayloadSize; + unsigned char* fOutputBuffer; + struct timeval fNextSendTime; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/BasicUDPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/BasicUDPSource.hh new file mode 100644 index 0000000..f823122 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/BasicUDPSource.hh @@ -0,0 +1,55 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A simple UDP source, where every UDP payload is a complete frame +// C++ header + +#ifndef _BASIC_UDP_SOURCE_HH +#define _BASIC_UDP_SOURCE_HH + +#ifndef _FRAMED_SOURCE_HH +#include "FramedSource.hh" +#endif +#ifndef _GROUPSOCK_HH +#include "Groupsock.hh" +#endif + +class BasicUDPSource: public FramedSource { +public: + static BasicUDPSource* createNew(UsageEnvironment& env, Groupsock* inputGS); + + virtual ~BasicUDPSource(); + + Groupsock* gs() const { return fInputGS; } + +private: + BasicUDPSource(UsageEnvironment& env, Groupsock* inputGS); + // called only by createNew() + + static void incomingPacketHandler(BasicUDPSource* source, int mask); + void incomingPacketHandler1(); + +private: // redefined virtual functions: + virtual void doGetNextFrame(); + virtual void doStopGettingFrames(); + +private: + Groupsock* fInputGS; + Boolean fHaveStartedReading; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/BitVector.hh b/AnyCore/lib_rtsp/liveMedia/include/BitVector.hh new file mode 100644 index 0000000..477c3e4 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/BitVector.hh @@ -0,0 +1,66 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Bit Vector data structure +// C++ header + +#ifndef _BIT_VECTOR_HH +#define _BIT_VECTOR_HH + +#ifndef _BOOLEAN_HH +#include "Boolean.hh" +#endif + +class BitVector { +public: + BitVector(unsigned char* baseBytePtr, + unsigned baseBitOffset, + unsigned totNumBits); + + void setup(unsigned char* baseBytePtr, + unsigned baseBitOffset, + unsigned totNumBits); + + void putBits(unsigned from, unsigned numBits); // "numBits" <= 32 + void put1Bit(unsigned bit); + + unsigned getBits(unsigned numBits); // "numBits" <= 32 + unsigned get1Bit(); + Boolean get1BitBoolean() { return get1Bit() != 0; } + + void skipBits(unsigned numBits); + + unsigned curBitIndex() const { return fCurBitIndex; } + unsigned totNumBits() const { return fTotNumBits; } + unsigned numBitsRemaining() const { return fTotNumBits - fCurBitIndex; } + + unsigned get_expGolomb(); + // Returns the value of the next bits, assuming that they were encoded using an exponential-Golomb code of order 0 + +private: + unsigned char* fBaseBytePtr; + unsigned fBaseBitOffset; + unsigned fTotNumBits; + unsigned fCurBitIndex; +}; + +// A general bit copy operation: +void shiftBits(unsigned char* toBasePtr, unsigned toBitOffset, + unsigned char const* fromBasePtr, unsigned fromBitOffset, + unsigned numBits); + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/ByteStreamFileSource.hh b/AnyCore/lib_rtsp/liveMedia/include/ByteStreamFileSource.hh new file mode 100644 index 0000000..eb93a1e --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/ByteStreamFileSource.hh @@ -0,0 +1,82 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General 
Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A file source that is a plain byte stream (rather than frames) +// C++ header + +#ifndef _BYTE_STREAM_FILE_SOURCE_HH +#define _BYTE_STREAM_FILE_SOURCE_HH + +#ifndef _FRAMED_FILE_SOURCE_HH +#include "FramedFileSource.hh" +#endif + +class ByteStreamFileSource: public FramedFileSource { +public: + static ByteStreamFileSource* createNew(UsageEnvironment& env, + char const* fileName, + unsigned preferredFrameSize = 0, + unsigned playTimePerFrame = 0); + // "preferredFrameSize" == 0 means 'no preference' + // "playTimePerFrame" is in microseconds + + static ByteStreamFileSource* createNew(UsageEnvironment& env, + FILE* fid, + unsigned preferredFrameSize = 0, + unsigned playTimePerFrame = 0); + // an alternative version of "createNew()" that's used if you already have + // an open file. 
+ + u_int64_t fileSize() const { return fFileSize; } + // 0 means zero-length, unbounded, or unknown + + void seekToByteAbsolute(u_int64_t byteNumber, u_int64_t numBytesToStream = 0); + // if "numBytesToStream" is >0, then we limit the stream to that number of bytes, before treating it as EOF + void seekToByteRelative(int64_t offset, u_int64_t numBytesToStream = 0); + void seekToEnd(); // to force EOF handling on the next read + +protected: + ByteStreamFileSource(UsageEnvironment& env, + FILE* fid, + unsigned preferredFrameSize, + unsigned playTimePerFrame); + // called only by createNew() + + virtual ~ByteStreamFileSource(); + + static void fileReadableHandler(ByteStreamFileSource* source, int mask); + void doReadFromFile(); + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + virtual void doStopGettingFrames(); + +protected: + u_int64_t fFileSize; + +private: + unsigned fPreferredFrameSize; + unsigned fPlayTimePerFrame; + Boolean fFidIsSeekable; + unsigned fLastPlayTime; + Boolean fHaveStartedReading; + Boolean fLimitNumBytesToStream; + u_int64_t fNumBytesToStream; // used iff "fLimitNumBytesToStream" is True +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/ByteStreamMemoryBufferSource.hh b/AnyCore/lib_rtsp/liveMedia/include/ByteStreamMemoryBufferSource.hh new file mode 100644 index 0000000..c70b24e --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/ByteStreamMemoryBufferSource.hh @@ -0,0 +1,70 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A class for streaming data from a (static) memory buffer, as if it were a file. +// C++ header + +#ifndef _BYTE_STREAM_MEMORY_BUFFER_SOURCE_HH +#define _BYTE_STREAM_MEMORY_BUFFER_SOURCE_HH + +#ifndef _FRAMED_SOURCE_HH +#include "FramedSource.hh" +#endif + +class ByteStreamMemoryBufferSource: public FramedSource { +public: + static ByteStreamMemoryBufferSource* createNew(UsageEnvironment& env, + u_int8_t* buffer, u_int64_t bufferSize, + Boolean deleteBufferOnClose = True, + unsigned preferredFrameSize = 0, + unsigned playTimePerFrame = 0); + // "preferredFrameSize" == 0 means 'no preference' + // "playTimePerFrame" is in microseconds + + u_int64_t bufferSize() const { return fBufferSize; } + + void seekToByteAbsolute(u_int64_t byteNumber, u_int64_t numBytesToStream = 0); + // if "numBytesToStream" is >0, then we limit the stream to that number of bytes, before treating it as EOF + void seekToByteRelative(int64_t offset, u_int64_t numBytesToStream = 0); + +protected: + ByteStreamMemoryBufferSource(UsageEnvironment& env, + u_int8_t* buffer, u_int64_t bufferSize, + Boolean deleteBufferOnClose, + unsigned preferredFrameSize, + unsigned playTimePerFrame); + // called only by createNew() + + virtual ~ByteStreamMemoryBufferSource(); + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + +private: + u_int8_t* fBuffer; + u_int64_t fBufferSize; + u_int64_t fCurIndex; + Boolean fDeleteBufferOnClose; + unsigned fPreferredFrameSize; + unsigned fPlayTimePerFrame; + unsigned fLastPlayTime; + Boolean fLimitNumBytesToStream; + u_int64_t fNumBytesToStream; // used iff "fLimitNumBytesToStream" is True +}; + +#endif diff --git 
a/AnyCore/lib_rtsp/liveMedia/include/ByteStreamMultiFileSource.hh b/AnyCore/lib_rtsp/liveMedia/include/ByteStreamMultiFileSource.hh new file mode 100644 index 0000000..00bf85d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/ByteStreamMultiFileSource.hh @@ -0,0 +1,67 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A source that consists of multiple byte-stream files, read sequentially +// C++ header + +#ifndef _BYTE_STREAM_MULTI_FILE_SOURCE_HH +#define _BYTE_STREAM_MULTI_FILE_SOURCE_HH + +#ifndef _BYTE_STREAM_FILE_SOURCE_HH +#include "ByteStreamFileSource.hh" +#endif + +class ByteStreamMultiFileSource: public FramedSource { +public: + static ByteStreamMultiFileSource* + createNew(UsageEnvironment& env, char const** fileNameArray, + unsigned preferredFrameSize = 0, unsigned playTimePerFrame = 0); + // A 'filename' of NULL indicates the end of the array + + Boolean haveStartedNewFile() const { return fHaveStartedNewFile; } + // True iff the most recently delivered frame was the first from a newly-opened file + +protected: + ByteStreamMultiFileSource(UsageEnvironment& env, char const** fileNameArray, + unsigned preferredFrameSize, unsigned playTimePerFrame); + // called only by createNew() + + virtual ~ByteStreamMultiFileSource(); + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + +private: + static void onSourceClosure(void* clientData); + void onSourceClosure1(); + static void afterGettingFrame(void* clientData, + unsigned frameSize, unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + +private: + unsigned fPreferredFrameSize; + unsigned fPlayTimePerFrame; + unsigned fNumSources; + unsigned fCurrentlyReadSourceNumber; + Boolean fHaveStartedNewFile; + char const** fFileNameArray; + ByteStreamFileSource** fSourceArray; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/DVVideoFileServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/DVVideoFileServerMediaSubsession.hh new file mode 100644 index 0000000..9ed02b5 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/DVVideoFileServerMediaSubsession.hh @@ -0,0 +1,51 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the 
+Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a DV video file. +// C++ header + +#ifndef _DV_VIDEO_FILE_SERVER_MEDIA_SUBSESSION_HH +#define _DV_VIDEO_FILE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _FILE_SERVER_MEDIA_SUBSESSION_HH +#include "FileServerMediaSubsession.hh" +#endif + +class DVVideoFileServerMediaSubsession: public FileServerMediaSubsession{ +public: + static DVVideoFileServerMediaSubsession* + createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource); + +private: + DVVideoFileServerMediaSubsession(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource); + // called only by createNew(); + virtual ~DVVideoFileServerMediaSubsession(); + +private: // redefined virtual functions + virtual char const* getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource); + virtual void seekStreamSource(FramedSource* inputSource, double& seekNPT, double streamDuration, u_int64_t& numBytes); + virtual void setStreamSourceDuration(FramedSource* inputSource, double streamDuration, u_int64_t& numBytes); + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource); + virtual 
float duration() const; + +private: + float fFileDuration; // in seconds +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/DVVideoRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/DVVideoRTPSink.hh new file mode 100644 index 0000000..f07b204 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/DVVideoRTPSink.hh @@ -0,0 +1,57 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for DV video (RFC 3189) +// (Thanks to Ben Hutchings for prototyping this.) 
+// C++ header + +#ifndef _DV_VIDEO_RTP_SINK_HH +#define _DV_VIDEO_RTP_SINK_HH + +#ifndef _VIDEO_RTP_SINK_HH +#include "VideoRTPSink.hh" +#endif +#ifndef _DV_VIDEO_STREAM_FRAMER_HH +#include "DVVideoStreamFramer.hh" +#endif + +class DVVideoRTPSink: public VideoRTPSink { +public: + static DVVideoRTPSink* createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat); + char const* auxSDPLineFromFramer(DVVideoStreamFramer* framerSource); + +protected: + DVVideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat); + // called only by createNew() + + virtual ~DVVideoRTPSink(); + +private: // redefined virtual functions: + virtual Boolean sourceIsCompatibleWithUs(MediaSource& source); + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual unsigned computeOverflowForNewFrame(unsigned newFrameSize) const; + virtual char const* auxSDPLine(); + +private: + char* fFmtpSDPLine; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/DVVideoRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/DVVideoRTPSource.hh new file mode 100644 index 0000000..ce395d6 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/DVVideoRTPSource.hh @@ -0,0 +1,51 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// DV Video RTP Sources +// C++ header + +#ifndef _DV_VIDEO_RTP_SOURCE_HH +#define _DV_VIDEO_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +class DVVideoRTPSource: public MultiFramedRTPSource { +public: + static DVVideoRTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + +protected: + virtual ~DVVideoRTPSource(); + +private: + DVVideoRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + // called only by createNew() + +private: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/DVVideoStreamFramer.hh b/AnyCore/lib_rtsp/liveMedia/include/DVVideoStreamFramer.hh new file mode 100644 index 0000000..3a0ccdb --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/DVVideoStreamFramer.hh @@ -0,0 +1,72 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that parses a DV input stream into DV frames to deliver to the downstream object +// C++ header + +#ifndef _DV_VIDEO_STREAM_FRAMER_HH +#define _DV_VIDEO_STREAM_FRAMER_HH + +#ifndef _FRAMED_FILTER_HH +#include "FramedFilter.hh" +#endif + +#define DV_DIF_BLOCK_SIZE 80 +#define DV_NUM_BLOCKS_PER_SEQUENCE 150 +#define DV_SAVED_INITIAL_BLOCKS_SIZE ((DV_NUM_BLOCKS_PER_SEQUENCE+6-1)*DV_DIF_BLOCK_SIZE) + /* enough data to ensure that it contains an intact 6-block header (which occurs at the start of a 150-block sequence) */ + +class DVVideoStreamFramer: public FramedFilter { +public: + static DVVideoStreamFramer* + createNew(UsageEnvironment& env, FramedSource* inputSource, + Boolean sourceIsSeekable = False, Boolean leavePresentationTimesUnmodified = False); + // Set "sourceIsSeekable" to True if the input source is a seekable object (e.g. a file), and the server that uses us + // does a seek-to-zero on the source before reading from it. (Our RTSP server implementation does this.) 
+ char const* profileName(); + Boolean getFrameParameters(unsigned& frameSize/*bytes*/, double& frameDuration/*microseconds*/); + +protected: + DVVideoStreamFramer(UsageEnvironment& env, FramedSource* inputSource, + Boolean sourceIsSeekable, Boolean leavePresentationTimesUnmodified); + // called only by createNew(), or by subclass constructors + virtual ~DVVideoStreamFramer(); + +protected: + // redefined virtual functions: + virtual Boolean isDVVideoStreamFramer() const; + virtual void doGetNextFrame(); + +protected: + void getAndDeliverData(); // used to implement "doGetNextFrame()" + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, struct timeval presentationTime); + void getProfile(); + +protected: + Boolean fLeavePresentationTimesUnmodified; + void const* fOurProfile; + struct timeval fNextFramePresentationTime; + unsigned char fSavedInitialBlocks[DV_SAVED_INITIAL_BLOCKS_SIZE]; + char fInitialBlocksPresent; + Boolean fSourceIsSeekable; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/DarwinInjector.hh b/AnyCore/lib_rtsp/liveMedia/include/DarwinInjector.hh new file mode 100644 index 0000000..56027fc --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/DarwinInjector.hh @@ -0,0 +1,106 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// An object that redirects one or more RTP/RTCP streams - forming a single +// multimedia session - into a 'Darwin Streaming Server' (for subsequent +// reflection to potentially arbitrarily many remote RTSP clients). +// C++ header + +#ifndef _DARWIN_INJECTOR_HH +#define _DARWIN_INJECTOR_HH + +#ifndef _RTSP_CLIENT_HH +#include +#endif + +#ifndef _RTCP_HH +#include +#endif + +/* +To use a "DarwinInjector": + 1/ Create RTP sinks and RTCP instances for each audio or video subsession. + Note: These can use 0.0.0.0 for the address, and 0 for the port number, + of each 'groupsock') + 2/ Call "addStream()" for each. + 3/ Call "setDestination()" to specify the remote Darwin Streaming Server. + Note: You must have 'write' permission on the Darwin Streaming Server. + This can be set up using a "qtaccess" file in the server's 'movies' + directory. For example, the following "qtaccess" file allows anyone to + play streams from the server, but allows only valid users to + inject streams *into* the server: + + require valid-user + + require any-user + Use the "remoteUserName" and "remotePassword" parameters to + "setDestination()", as appropriate. + 4/ Call "startPlaying" on each RTP sink (from the corresponding 'source'). 
+*/ + +class SubstreamDescriptor; // forward + +class DarwinInjector: public Medium { +public: + static DarwinInjector* createNew(UsageEnvironment& env, + char const* applicationName = "DarwinInjector", + int verbosityLevel = 0); + + static Boolean lookupByName(UsageEnvironment& env, char const* name, + DarwinInjector*& result); + + void addStream(RTPSink* rtpSink, RTCPInstance* rtcpInstance); + + Boolean setDestination(char const* remoteRTSPServerNameOrAddress, + char const* remoteFileName, + char const* sessionName = "", + char const* sessionInfo = "", + portNumBits remoteRTSPServerPortNumber = 554, + char const* remoteUserName = "", + char const* remotePassword = "", + char const* sessionAuthor = "", + char const* sessionCopyright = "", + int timeout = -1); + +private: // redefined virtual functions + virtual Boolean isDarwinInjector() const; + +private: + DarwinInjector(UsageEnvironment& env, + char const* applicationName, int verbosityLevel); + // called only by createNew() + + virtual ~DarwinInjector(); + + static void genericResponseHandler(RTSPClient* rtspClient, int responseCode, char* responseString); + void genericResponseHandler1(int responseCode, char* responseString); + +private: + char const* fApplicationName; + int fVerbosityLevel; + RTSPClient* fRTSPClient; + unsigned fSubstreamSDPSizes; + SubstreamDescriptor* fHeadSubstream; + SubstreamDescriptor* fTailSubstream; + MediaSession* fSession; + unsigned fLastTrackId; + char fWatchVariable; + int fResultCode; + char* fResultString; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/DeviceSource.hh b/AnyCore/lib_rtsp/liveMedia/include/DeviceSource.hh new file mode 100644 index 0000000..adc937f --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/DeviceSource.hh @@ -0,0 +1,66 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the 
License, or (at your +option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A template for a MediaSource encapsulating an audio/video input device +// +// NOTE: Sections of this code labeled "%%% TO BE WRITTEN %%%" are incomplete, and need to be written by the programmer +// (depending on the features of the particular device). +// C++ header + +#ifndef _DEVICE_SOURCE_HH +#define _DEVICE_SOURCE_HH + +#ifndef _FRAMED_SOURCE_HH +#include "FramedSource.hh" +#endif + +// The following class can be used to define specific encoder parameters +class DeviceParameters { + //%%% TO BE WRITTEN %%% +}; + +class DeviceSource: public FramedSource { +public: + static DeviceSource* createNew(UsageEnvironment& env, + DeviceParameters params); + +public: + static EventTriggerId eventTriggerId; + // Note that this is defined here to be a static class variable, because this code is intended to illustrate how to + // encapsulate a *single* device - not a set of devices. + // You can, however, redefine this to be a non-static member variable. 
+ +protected: + DeviceSource(UsageEnvironment& env, DeviceParameters params); + // called only by createNew(), or by subclass constructors + virtual ~DeviceSource(); + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + //virtual void doStopGettingFrames(); // optional + +private: + static void deliverFrame0(void* clientData); + void deliverFrame(); + +private: + static unsigned referenceCount; // used to count how many instances of this class currently exist + DeviceParameters fParams; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/DigestAuthentication.hh b/AnyCore/lib_rtsp/liveMedia/include/DigestAuthentication.hh new file mode 100644 index 0000000..a3656c0 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/DigestAuthentication.hh @@ -0,0 +1,74 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A class used for digest authentication. +// C++ header + +#ifndef _DIGEST_AUTHENTICATION_HH +#define _DIGEST_AUTHENTICATION_HH + +#ifndef _BOOLEAN_HH +#include "Boolean.hh" +#endif + +// A class used for digest authentication. +// The "realm", and "nonce" fields are supplied by the server +// (in a "401 Unauthorized" response). 
+// The "username" and "password" fields are supplied by the client. +class Authenticator { +public: + Authenticator(); + Authenticator(char const* username, char const* password, Boolean passwordIsMD5 = False); + // If "passwordIsMD5" is True, then "password" is actually the value computed + // by md5(<username>:<realm>:<password>) + Authenticator(const Authenticator& orig); + Authenticator& operator=(const Authenticator& rightSide); + virtual ~Authenticator(); + + void reset(); + void setRealmAndNonce(char const* realm, char const* nonce); + void setRealmAndRandomNonce(char const* realm); + // as above, except that the nonce is created randomly. + // (This is used by servers.) + void setUsernameAndPassword(char const* username, char const* password, Boolean passwordIsMD5 = False); + // If "passwordIsMD5" is True, then "password" is actually the value computed + // by md5(<username>:<realm>:<password>) + + char const* realm() const { return fRealm; } + char const* nonce() const { return fNonce; } + char const* username() const { return fUsername; } + char const* password() const { return fPassword; } + + char const* computeDigestResponse(char const* cmd, char const* url) const; + // The returned string from this function must later be freed by calling: + void reclaimDigestResponse(char const* responseStr) const; + +private: + void resetRealmAndNonce(); + void resetUsernameAndPassword(); + void assignRealmAndNonce(char const* realm, char const* nonce); + void assignUsernameAndPassword(char const* username, char const* password, Boolean passwordIsMD5); + void assign(char const* realm, char const* nonce, + char const* username, char const* password, Boolean passwordIsMD5); + +private: + char* fRealm; char* fNonce; + char* fUsername; char* fPassword; + Boolean fPasswordIsMD5; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/FileServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/FileServerMediaSubsession.hh new file mode 100644 index 0000000..42d1756 --- /dev/null +++ 
b/AnyCore/lib_rtsp/liveMedia/include/FileServerMediaSubsession.hh @@ -0,0 +1,40 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a file. 
+// C++ header + +#ifndef _FILE_SERVER_MEDIA_SUBSESSION_HH +#define _FILE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _ON_DEMAND_SERVER_MEDIA_SUBSESSION_HH +#include "OnDemandServerMediaSubsession.hh" +#endif + +class FileServerMediaSubsession: public OnDemandServerMediaSubsession { +protected: // we're a virtual base class + FileServerMediaSubsession(UsageEnvironment& env, char const* fileName, + Boolean reuseFirstSource); + virtual ~FileServerMediaSubsession(); + +protected: + char const* fFileName; + u_int64_t fFileSize; // if known +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/FileSink.hh b/AnyCore/lib_rtsp/liveMedia/include/FileSink.hh new file mode 100644 index 0000000..ff72863 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/FileSink.hh @@ -0,0 +1,71 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// File Sinks +// C++ header + +#ifndef _FILE_SINK_HH +#define _FILE_SINK_HH + +#ifndef _MEDIA_SINK_HH +#include "MediaSink.hh" +#endif + +class FileSink: public MediaSink { +public: + static FileSink* createNew(UsageEnvironment& env, char const* fileName, + unsigned bufferSize = 20000, + Boolean oneFilePerFrame = False); + // "bufferSize" should be at least as large as the largest expected + // input frame. + // "oneFilePerFrame" - if True - specifies that each input frame will + // be written to a separate file (using the presentation time as a + // file name suffix). The default behavior ("oneFilePerFrame" == False) + // is to output all incoming data into a single file. + + virtual void addData(unsigned char const* data, unsigned dataSize, + struct timeval presentationTime); + // (Available in case a client wants to add extra data to the output file) + +protected: + FileSink(UsageEnvironment& env, FILE* fid, unsigned bufferSize, + char const* perFrameFileNamePrefix); + // called only by createNew() + virtual ~FileSink(); + +protected: // redefined virtual functions: + virtual Boolean continuePlaying(); + +protected: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + virtual void afterGettingFrame(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime); + + FILE* fOutFid; + unsigned char* fBuffer; + unsigned fBufferSize; + char* fPerFrameFileNamePrefix; // used if "oneFilePerFrame" is True + char* fPerFrameFileNameBuffer; // used if "oneFilePerFrame" is True + struct timeval fPrevPresentationTime; + unsigned fSamePresentationTimeCounter; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/FramedFileSource.hh b/AnyCore/lib_rtsp/liveMedia/include/FramedFileSource.hh new file mode 100644 index 0000000..8ad76d8 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/FramedFileSource.hh @@ -0,0 +1,37 @@ 
+/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Framed File Sources +// C++ header + +#ifndef _FRAMED_FILE_SOURCE_HH +#define _FRAMED_FILE_SOURCE_HH + +#ifndef _FRAMED_SOURCE_HH +#include "FramedSource.hh" +#endif + +class FramedFileSource: public FramedSource { +protected: + FramedFileSource(UsageEnvironment& env, FILE* fid); // abstract base class + virtual ~FramedFileSource(); + +protected: + FILE* fFid; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/FramedFilter.hh b/AnyCore/lib_rtsp/liveMedia/include/FramedFilter.hh new file mode 100644 index 0000000..fe74fa7 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/FramedFilter.hh @@ -0,0 +1,52 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Framed Filters +// C++ header + +#ifndef _FRAMED_FILTER_HH +#define _FRAMED_FILTER_HH + +#ifndef _FRAMED_SOURCE_HH +#include "FramedSource.hh" +#endif + +class FramedFilter: public FramedSource { +public: + FramedSource* inputSource() const { return fInputSource; } + + void reassignInputSource(FramedSource* newInputSource) { fInputSource = newInputSource; } + + // Call before destruction if you want to prevent the destructor from closing the input source + void detachInputSource(); + +protected: + FramedFilter(UsageEnvironment& env, FramedSource* inputSource); + // abstract base class + virtual ~FramedFilter(); + +protected: + // Redefined virtual functions (with default 'null' implementations): + virtual char const* MIMEtype() const; + virtual void getAttributes() const; + virtual void doStopGettingFrames(); + +protected: + FramedSource* fInputSource; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/FramedSource.hh b/AnyCore/lib_rtsp/liveMedia/include/FramedSource.hh new file mode 100644 index 0000000..63865ba --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/FramedSource.hh @@ -0,0 +1,95 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Framed Sources +// C++ header + +#ifndef _FRAMED_SOURCE_HH +#define _FRAMED_SOURCE_HH + +#ifndef _NET_COMMON_H +#include "NetCommon.h" +#endif +#ifndef _MEDIA_SOURCE_HH +#include "MediaSource.hh" +#endif + +class FramedSource: public MediaSource { +public: + static Boolean lookupByName(UsageEnvironment& env, char const* sourceName, + FramedSource*& resultSource); + + typedef void (afterGettingFunc)(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + typedef void (onCloseFunc)(void* clientData); + void getNextFrame(unsigned char* to, unsigned maxSize, + afterGettingFunc* afterGettingFunc, + void* afterGettingClientData, + onCloseFunc* onCloseFunc, + void* onCloseClientData); + + static void handleClosure(void* clientData); + void handleClosure(); + // This should be called (on ourself) if the source is discovered + // to be closed (i.e., no longer readable) + + void stopGettingFrames(); + + virtual unsigned maxFrameSize() const; + // size of the largest possible frame that we may serve, or 0 + // if no such maximum is known (default) + + virtual void doGetNextFrame() = 0; + // called by getNextFrame() + + Boolean isCurrentlyAwaitingData() const {return fIsCurrentlyAwaitingData;} + + static void afterGetting(FramedSource* source); + // doGetNextFrame() should arrange for this to be called after the + // frame has been read (*iff* it is read successfully) + +protected: + FramedSource(UsageEnvironment& env); // abstract base class + virtual ~FramedSource(); + + virtual void doStopGettingFrames(); + +protected: + // The following variables are typically accessed/set by 
doGetNextFrame() + unsigned char* fTo; // in + unsigned fMaxSize; // in + unsigned fFrameSize; // out + unsigned fNumTruncatedBytes; // out + struct timeval fPresentationTime; // out + unsigned fDurationInMicroseconds; // out + +private: + // redefined virtual functions: + virtual Boolean isFramedSource() const; + +private: + afterGettingFunc* fAfterGettingFunc; + void* fAfterGettingClientData; + onCloseFunc* fOnCloseFunc; + void* fOnCloseClientData; + + Boolean fIsCurrentlyAwaitingData; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/GSMAudioRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/GSMAudioRTPSink.hh new file mode 100644 index 0000000..9267a8c --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/GSMAudioRTPSink.hh @@ -0,0 +1,44 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP sink for GSM audio +// C++ header + +#ifndef _GSM_AUDIO_RTP_SINK_HH +#define _GSM_AUDIO_RTP_SINK_HH + +#ifndef _AUDIO_RTP_SINK_HH +#include "AudioRTPSink.hh" +#endif + +class GSMAudioRTPSink: public AudioRTPSink { +public: + static GSMAudioRTPSink* createNew(UsageEnvironment& env, Groupsock* RTPgs); + +protected: + GSMAudioRTPSink(UsageEnvironment& env, Groupsock* RTPgs); + // called only by createNew() + + virtual ~GSMAudioRTPSink(); + +private: // redefined virtual functions: + virtual + Boolean frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H261VideoRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/H261VideoRTPSource.hh new file mode 100644 index 0000000..f66151c --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H261VideoRTPSource.hh @@ -0,0 +1,56 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// H.261 Video RTP Sources +// C++ header + +#ifndef _H261_VIDEO_RTP_SOURCE_HH +#define _H261_VIDEO_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +class H261VideoRTPSource: public MultiFramedRTPSource { +public: + static H261VideoRTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat = 31, + unsigned rtpTimestampFrequency = 90000); + + u_int32_t lastSpecialHeader() const {return fLastSpecialHeader;} + +protected: + virtual ~H261VideoRTPSource(); + +private: + H261VideoRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + // called only by createNew() + +private: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; + +private: + u_int32_t fLastSpecialHeader; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H263plusVideoFileServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/H263plusVideoFileServerMediaSubsession.hh new file mode 100644 index 0000000..d1df495 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H263plusVideoFileServerMediaSubsession.hh @@ -0,0 +1,48 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a H.263 video file. +// C++ header + +#ifndef _H263PLUS_VIDEO_FILE_SERVER_MEDIA_SUBSESSION_HH +#define _H263PLUS_VIDEO_FILE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _FILE_SERVER_MEDIA_SUBSESSION_HH +#include "FileServerMediaSubsession.hh" +#endif + +class H263plusVideoFileServerMediaSubsession: public FileServerMediaSubsession{ +public: + static H263plusVideoFileServerMediaSubsession* + createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource); + +private: + H263plusVideoFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, Boolean reuseFirstSource); + // called only by createNew(); + virtual ~H263plusVideoFileServerMediaSubsession(); + +private: // redefined virtual functions + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource); +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H263plusVideoRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/H263plusVideoRTPSink.hh new file mode 100644 index 0000000..3b49b65 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H263plusVideoRTPSink.hh @@ -0,0 +1,54 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for H.263+ video (RFC 4629) +// C++ header + +#ifndef _H263_PLUS_VIDEO_RTP_SINK_HH +#define _H263_PLUS_VIDEO_RTP_SINK_HH + +#ifndef _VIDEO_RTP_SINK_HH +#include "VideoRTPSink.hh" +#endif + +class H263plusVideoRTPSink: public VideoRTPSink { +public: + static H263plusVideoRTPSink* createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + u_int32_t rtpTimestampFrequency = 90000); + +protected: + H263plusVideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + u_int32_t rtpTimestampFrequency); + // called only by createNew() + + virtual ~H263plusVideoRTPSink(); + +private: // redefined virtual functions: + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual + Boolean frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const; + virtual unsigned specialHeaderSize() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H263plusVideoRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/H263plusVideoRTPSource.hh new file mode 100644 index 0000000..d49ad00 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H263plusVideoRTPSource.hh @@ -0,0 +1,60 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms 
of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// H.263+ Video RTP Sources +// C++ header + +#ifndef _H263_PLUS_VIDEO_RTP_SOURCE_HH +#define _H263_PLUS_VIDEO_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +#define SPECIAL_HEADER_BUFFER_SIZE 1000 + +class H263plusVideoRTPSource: public MultiFramedRTPSource { +public: + static H263plusVideoRTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency = 90000); + + // A data structure that stores copies of the special header bytes + // from the most recent frame's RTP packets: + unsigned char fNumSpecialHeaders; + unsigned fSpecialHeaderBytesLength; + unsigned char fSpecialHeaderBytes[SPECIAL_HEADER_BUFFER_SIZE]; + unsigned fPacketSizes[256]; + +protected: + virtual ~H263plusVideoRTPSource(); + +private: + H263plusVideoRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + // called only by createNew() + +private: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; +}; + +#endif diff --git 
a/AnyCore/lib_rtsp/liveMedia/include/H263plusVideoStreamFramer.hh b/AnyCore/lib_rtsp/liveMedia/include/H263plusVideoStreamFramer.hh new file mode 100644 index 0000000..05fb3d3 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H263plusVideoStreamFramer.hh @@ -0,0 +1,64 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that breaks up an H263 video elementary stream into frames. 
+// Author Benhard Feiten + +#ifndef _H263PLUS_VIDEO_STREAM_FRAMER_HH +#define _H263PLUS_VIDEO_STREAM_FRAMER_HH + +#ifndef _FRAMED_FILTER_HH +#include "FramedFilter.hh" +#endif + + +class H263plusVideoStreamFramer: public FramedFilter { +public: + + static H263plusVideoStreamFramer* createNew(UsageEnvironment& env, FramedSource* inputSource); + + Boolean& pictureEndMarker() { return fPictureEndMarker; } // a hack for implementing the RTP 'M' bit + +protected: + // Constructor called only by createNew(), or by subclass constructors + H263plusVideoStreamFramer(UsageEnvironment& env, + FramedSource* inputSource, + Boolean createParser = True); + virtual ~H263plusVideoStreamFramer(); + + +public: + static void continueReadProcessing(void* clientData, + unsigned char* ptr, unsigned size, + struct timeval presentationTime); + void continueReadProcessing(); + +private: + virtual void doGetNextFrame(); + virtual Boolean isH263plusVideoStreamFramer() const; + +protected: + double fFrameRate; + unsigned fPictureCount; // hack used to implement doGetNextFrame() ?? + Boolean fPictureEndMarker; + +private: + class H263plusVideoStreamParser* fParser; + struct timeval fPresentationTimeBase; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H264VideoFileServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/H264VideoFileServerMediaSubsession.hh new file mode 100644 index 0000000..465ae96 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H264VideoFileServerMediaSubsession.hh @@ -0,0 +1,61 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a H264 Elementary Stream video file. +// C++ header + +#ifndef _H264_VIDEO_FILE_SERVER_MEDIA_SUBSESSION_HH +#define _H264_VIDEO_FILE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _FILE_SERVER_MEDIA_SUBSESSION_HH +#include "FileServerMediaSubsession.hh" +#endif + +class H264VideoFileServerMediaSubsession: public FileServerMediaSubsession { +public: + static H264VideoFileServerMediaSubsession* + createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource); + + // Used to implement "getAuxSDPLine()": + void checkForAuxSDPLine1(); + void afterPlayingDummy1(); + +protected: + H264VideoFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, Boolean reuseFirstSource); + // called only by createNew(); + virtual ~H264VideoFileServerMediaSubsession(); + + void setDoneFlag() { fDoneFlag = ~0; } + +protected: // redefined virtual functions + virtual char const* getAuxSDPLine(RTPSink* rtpSink, + FramedSource* inputSource); + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource); + +private: + char* fAuxSDPLine; + char fDoneFlag; // used when setting up "fAuxSDPLine" + RTPSink* fDummyRTPSink; // ditto +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H264VideoFileSink.hh b/AnyCore/lib_rtsp/liveMedia/include/H264VideoFileSink.hh new file mode 100644 index 0000000..881fc89 --- /dev/null +++ 
b/AnyCore/lib_rtsp/liveMedia/include/H264VideoFileSink.hh @@ -0,0 +1,47 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// H.264 Video File Sinks +// C++ header + +#ifndef _H264_VIDEO_FILE_SINK_HH +#define _H264_VIDEO_FILE_SINK_HH + +#ifndef _H264_OR_5_VIDEO_FILE_SINK_HH +#include "H264or5VideoFileSink.hh" +#endif + +class H264VideoFileSink: public H264or5VideoFileSink { +public: + static H264VideoFileSink* createNew(UsageEnvironment& env, char const* fileName, + char const* sPropParameterSetsStr = NULL, + // "sPropParameterSetsStr" is an optional 'SDP format' string + // (comma-separated Base64-encoded) representing SPS and/or PPS NAL-units + // to prepend to the output + unsigned bufferSize = 100000, + Boolean oneFilePerFrame = False); + // See "FileSink.hh" for a description of these parameters. 
+ +protected: + H264VideoFileSink(UsageEnvironment& env, FILE* fid, + char const* sPropParameterSetsStr, + unsigned bufferSize, char const* perFrameFileNamePrefix); + // called only by createNew() + virtual ~H264VideoFileSink(); +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H264VideoRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/H264VideoRTPSink.hh new file mode 100644 index 0000000..649fa96 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H264VideoRTPSink.hh @@ -0,0 +1,59 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP sink for H.264 video (RFC 3984) +// C++ header + +#ifndef _H264_VIDEO_RTP_SINK_HH +#define _H264_VIDEO_RTP_SINK_HH + +#ifndef _H264_OR_5_VIDEO_RTP_SINK_HH +#include "H264or5VideoRTPSink.hh" +#endif + +class H264VideoRTPSink: public H264or5VideoRTPSink { +public: + static H264VideoRTPSink* + createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat); + static H264VideoRTPSink* + createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, + u_int8_t const* sps, unsigned spsSize, u_int8_t const* pps, unsigned ppsSize); + // an optional variant of "createNew()", useful if we know, in advance, + // the stream's SPS and PPS NAL units. + // This avoids us having to 'pre-read' from the input source in order to get these values. + static H264VideoRTPSink* + createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, + char const* sPropParameterSetsStr); + // an optional variant of "createNew()", useful if we know, in advance, + // the stream's SPS and PPS NAL units. + // This avoids us having to 'pre-read' from the input source in order to get these values. 
+ +protected: + H264VideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, + u_int8_t const* sps = NULL, unsigned spsSize = 0, + u_int8_t const* pps = NULL, unsigned ppsSize = 0); + // called only by createNew() + virtual ~H264VideoRTPSink(); + +protected: // redefined virtual functions: + virtual char const* auxSDPLine(); + +private: // redefined virtual functions: + virtual Boolean sourceIsCompatibleWithUs(MediaSource& source); +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H264VideoRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/H264VideoRTPSource.hh new file mode 100644 index 0000000..b7a6212 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H264VideoRTPSource.hh @@ -0,0 +1,70 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// H.264 Video RTP Sources +// C++ header + +#ifndef _H264_VIDEO_RTP_SOURCE_HH +#define _H264_VIDEO_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +class H264VideoRTPSource: public MultiFramedRTPSource { +public: + static H264VideoRTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency = 90000); + +protected: + H264VideoRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + // called only by createNew() + + virtual ~H264VideoRTPSource(); + +protected: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; + +private: + friend class H264BufferedPacket; + unsigned char fCurPacketNALUnitType; +}; + +class SPropRecord { +public: + ~SPropRecord() { delete[] sPropBytes; } + + unsigned sPropLength; // in bytes + unsigned char* sPropBytes; +}; + +SPropRecord* parseSPropParameterSets(char const* sPropParameterSetsStr, + // result parameter: + unsigned& numSPropRecords); + // Returns the binary value of each 'parameter set' specified in a + // "sprop-parameter-sets" string (in the SDP description for a H.264/RTP stream). + // The value is returned as an array (length "numSPropRecords") of "SPropRecord"s. + // This array is dynamically allocated by this routine, and must be delete[]d by the caller. 
+ +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H264VideoStreamDiscreteFramer.hh b/AnyCore/lib_rtsp/liveMedia/include/H264VideoStreamDiscreteFramer.hh new file mode 100644 index 0000000..ffbac92 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H264VideoStreamDiscreteFramer.hh @@ -0,0 +1,46 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A simplified version of "H264VideoStreamFramer" that takes only complete, +// discrete frames (rather than an arbitrary byte stream) as input. +// This avoids the parsing and data copying overhead of the full +// "H264VideoStreamFramer". 
+// C++ header + +#ifndef _H264_VIDEO_STREAM_DISCRETE_FRAMER_HH +#define _H264_VIDEO_STREAM_DISCRETE_FRAMER_HH + +#ifndef _H264_OR_5_VIDEO_STREAM_DISCRETE_FRAMER_HH +#include "H264or5VideoStreamDiscreteFramer.hh" +#endif + +class H264VideoStreamDiscreteFramer: public H264or5VideoStreamDiscreteFramer { +public: + static H264VideoStreamDiscreteFramer* + createNew(UsageEnvironment& env, FramedSource* inputSource); + +protected: + H264VideoStreamDiscreteFramer(UsageEnvironment& env, FramedSource* inputSource); + // called only by createNew() + virtual ~H264VideoStreamDiscreteFramer(); + +private: + // redefined virtual functions: + virtual Boolean isH264VideoStreamFramer() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H264VideoStreamFramer.hh b/AnyCore/lib_rtsp/liveMedia/include/H264VideoStreamFramer.hh new file mode 100644 index 0000000..deca92a --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H264VideoStreamFramer.hh @@ -0,0 +1,43 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that breaks up a H.264 Video Elementary Stream into NAL units. 
+// C++ header + +#ifndef _H264_VIDEO_STREAM_FRAMER_HH +#define _H264_VIDEO_STREAM_FRAMER_HH + +#ifndef _H264_OR_5_VIDEO_STREAM_FRAMER_HH +#include "H264or5VideoStreamFramer.hh" +#endif + +class H264VideoStreamFramer: public H264or5VideoStreamFramer { +public: + static H264VideoStreamFramer* createNew(UsageEnvironment& env, FramedSource* inputSource, + Boolean includeStartCodeInOutput = False); + +protected: + H264VideoStreamFramer(UsageEnvironment& env, FramedSource* inputSource, + Boolean createParser, Boolean includeStartCodeInOutput); + // called only by "createNew()" + virtual ~H264VideoStreamFramer(); + + // redefined virtual functions: + virtual Boolean isH264VideoStreamFramer() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H264or5VideoFileSink.hh b/AnyCore/lib_rtsp/liveMedia/include/H264or5VideoFileSink.hh new file mode 100644 index 0000000..b93bc4b --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H264or5VideoFileSink.hh @@ -0,0 +1,46 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// H.264 or H.265 Video File Sinks +// C++ header + +#ifndef _H264_OR_5_VIDEO_FILE_SINK_HH +#define _H264_OR_5_VIDEO_FILE_SINK_HH + +#ifndef _FILE_SINK_HH +#include "FileSink.hh" +#endif + +class H264or5VideoFileSink: public FileSink { +protected: + H264or5VideoFileSink(UsageEnvironment& env, FILE* fid, + unsigned bufferSize, char const* perFrameFileNamePrefix, + char const* sPropParameterSetsStr1, + char const* sPropParameterSetsStr2 = NULL, + char const* sPropParameterSetsStr3 = NULL); + // we're an abstract base class + virtual ~H264or5VideoFileSink(); + +protected: // redefined virtual functions: + virtual void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, struct timeval presentationTime); + +private: + char const* fSPropParameterSetsStr[3]; + Boolean fHaveWrittenFirstFrame; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H264or5VideoRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/H264or5VideoRTPSink.hh new file mode 100644 index 0000000..00ca921 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H264or5VideoRTPSink.hh @@ -0,0 +1,60 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP sink for H.264 or H.265 video +// C++ header + +#ifndef _H264_OR_5_VIDEO_RTP_SINK_HH +#define _H264_OR_5_VIDEO_RTP_SINK_HH + +#ifndef _VIDEO_RTP_SINK_HH +#include "VideoRTPSink.hh" +#endif +#ifndef _FRAMED_FILTER_HH +#include "FramedFilter.hh" +#endif + +class H264or5VideoRTPSink: public VideoRTPSink { +protected: + H264or5VideoRTPSink(int hNumber, // 264 or 265 + UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, + u_int8_t const* vps = NULL, unsigned vpsSize = 0, + u_int8_t const* sps = NULL, unsigned spsSize = 0, + u_int8_t const* pps = NULL, unsigned ppsSize = 0); + // we're an abstrace base class + virtual ~H264or5VideoRTPSink(); + +private: // redefined virtual functions: + virtual Boolean continuePlaying(); + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual Boolean frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const; + +protected: + int fHNumber; + FramedFilter* fOurFragmenter; + char* fFmtpSDPLine; + u_int8_t* fVPS; unsigned fVPSSize; + u_int8_t* fSPS; unsigned fSPSSize; + u_int8_t* fPPS; unsigned fPPSSize; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H264or5VideoStreamDiscreteFramer.hh b/AnyCore/lib_rtsp/liveMedia/include/H264or5VideoStreamDiscreteFramer.hh new file mode 100644 index 0000000..dc83567 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H264or5VideoStreamDiscreteFramer.hh @@ -0,0 +1,52 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A simplified version of "H264or5VideoStreamFramer" that takes only complete, +// discrete frames (rather than an arbitrary byte stream) as input. +// This avoids the parsing and data copying overhead of the full +// "H264or5VideoStreamFramer". +// C++ header + +#ifndef _H264_OR_5_VIDEO_STREAM_DISCRETE_FRAMER_HH +#define _H264_OR_5_VIDEO_STREAM_DISCRETE_FRAMER_HH + +#ifndef _H264_OR_5_VIDEO_STREAM_FRAMER_HH +#include "H264or5VideoStreamFramer.hh" +#endif + +class H264or5VideoStreamDiscreteFramer: public H264or5VideoStreamFramer { +protected: + H264or5VideoStreamDiscreteFramer(int hNumber, UsageEnvironment& env, FramedSource* inputSource); + // we're an abstract base class + virtual ~H264or5VideoStreamDiscreteFramer(); + +protected: + // redefined virtual functions: + virtual void doGetNextFrame(); + +protected: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H264or5VideoStreamFramer.hh b/AnyCore/lib_rtsp/liveMedia/include/H264or5VideoStreamFramer.hh new file mode 100644 index 0000000..9fb2847 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H264or5VideoStreamFramer.hh @@ -0,0 +1,87 
@@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that breaks up a H.264 or H.265 Video Elementary Stream into NAL units. +// C++ header + +#ifndef _H264_OR_5_VIDEO_STREAM_FRAMER_HH +#define _H264_OR_5_VIDEO_STREAM_FRAMER_HH + +#ifndef _MPEG_VIDEO_STREAM_FRAMER_HH +#include "MPEGVideoStreamFramer.hh" +#endif + +class H264or5VideoStreamFramer: public MPEGVideoStreamFramer { +public: + void getVPSandSPSandPPS(u_int8_t*& vps, unsigned& vpsSize, + u_int8_t*& sps, unsigned& spsSize, + u_int8_t*& pps, unsigned& ppsSize) const { + // Returns pointers to copies of the most recently seen VPS (video parameter set) + // SPS (sequence parameter set) and PPS (picture parameter set) NAL units. + // (NULL pointers are returned if the NAL units have not yet been seen.) + vps = fLastSeenVPS; vpsSize = fLastSeenVPSSize; + sps = fLastSeenSPS; spsSize = fLastSeenSPSSize; + pps = fLastSeenPPS; ppsSize = fLastSeenPPSSize; + } + + void setVPSandSPSandPPS(u_int8_t* vps, unsigned vpsSize, + u_int8_t* sps, unsigned spsSize, + u_int8_t* pps, unsigned ppsSize) { + // Assigns copies of the VPS, SPS and PPS NAL units. 
If this function is not called, + // then these NAL units are assigned only if/when they appear in the input stream. + saveCopyOfVPS(vps, vpsSize); + saveCopyOfSPS(sps, spsSize); + saveCopyOfPPS(pps, ppsSize); + } + +protected: + H264or5VideoStreamFramer(int hNumber, // 264 or 265 + UsageEnvironment& env, FramedSource* inputSource, + Boolean createParser, Boolean includeStartCodeInOutput); + // We're an abstract base class. + virtual ~H264or5VideoStreamFramer(); + + void saveCopyOfVPS(u_int8_t* from, unsigned size); + void saveCopyOfSPS(u_int8_t* from, unsigned size); + void saveCopyOfPPS(u_int8_t* from, unsigned size); + + void setPresentationTime() { fPresentationTime = fNextPresentationTime; } + + Boolean isVPS(u_int8_t nal_unit_type); + Boolean isSPS(u_int8_t nal_unit_type); + Boolean isPPS(u_int8_t nal_unit_type); + Boolean isVCL(u_int8_t nal_unit_type); + +protected: + int fHNumber; + u_int8_t* fLastSeenVPS; + unsigned fLastSeenVPSSize; + u_int8_t* fLastSeenSPS; + unsigned fLastSeenSPSSize; + u_int8_t* fLastSeenPPS; + unsigned fLastSeenPPSSize; + struct timeval fNextPresentationTime; // the presentation time to be used for the next NAL unit to be parsed/delivered after this + friend class H264or5VideoStreamParser; // hack +}; + +// A general routine for making a copy of a (H.264 or H.265) NAL unit, +// removing 'emulation' bytes from the copy: +unsigned removeH264or5EmulationBytes(u_int8_t* to, unsigned toMaxSize, + u_int8_t* from, unsigned fromSize); + // returns the size of the copy; it will be <= min(toMaxSize,fromSize) + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H265VideoFileServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/H265VideoFileServerMediaSubsession.hh new file mode 100644 index 0000000..f34a1eb --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H265VideoFileServerMediaSubsession.hh @@ -0,0 +1,61 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU 
Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a H265 Elementary Stream video file. +// C++ header + +#ifndef _H265_VIDEO_FILE_SERVER_MEDIA_SUBSESSION_HH +#define _H265_VIDEO_FILE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _FILE_SERVER_MEDIA_SUBSESSION_HH +#include "FileServerMediaSubsession.hh" +#endif + +class H265VideoFileServerMediaSubsession: public FileServerMediaSubsession { +public: + static H265VideoFileServerMediaSubsession* + createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource); + + // Used to implement "getAuxSDPLine()": + void checkForAuxSDPLine1(); + void afterPlayingDummy1(); + +protected: + H265VideoFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, Boolean reuseFirstSource); + // called only by createNew(); + virtual ~H265VideoFileServerMediaSubsession(); + + void setDoneFlag() { fDoneFlag = ~0; } + +protected: // redefined virtual functions + virtual char const* getAuxSDPLine(RTPSink* rtpSink, + FramedSource* inputSource); + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource); + 
+private: + char* fAuxSDPLine; + char fDoneFlag; // used when setting up "fAuxSDPLine" + RTPSink* fDummyRTPSink; // ditto +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H265VideoFileSink.hh b/AnyCore/lib_rtsp/liveMedia/include/H265VideoFileSink.hh new file mode 100644 index 0000000..1f77bfe --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H265VideoFileSink.hh @@ -0,0 +1,51 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// H.265 Video File Sinks +// C++ header + +#ifndef _H265_VIDEO_FILE_SINK_HH +#define _H265_VIDEO_FILE_SINK_HH + +#ifndef _H264_OR_5_VIDEO_FILE_SINK_HH +#include "H264or5VideoFileSink.hh" +#endif + +class H265VideoFileSink: public H264or5VideoFileSink { +public: + static H265VideoFileSink* createNew(UsageEnvironment& env, char const* fileName, + char const* sPropVPSStr = NULL, + char const* sPropSPSStr = NULL, + char const* sPropPPSStr = NULL, + // The "sProp*Str" parameters are optional 'SDP format' strings + // (comma-separated Base64-encoded) representing VPS, SPS, and/or PPS NAL-units + // to prepend to the output + unsigned bufferSize = 100000, + Boolean oneFilePerFrame = False); + // See "FileSink.hh" for a description of these parameters. 
+ +protected: + H265VideoFileSink(UsageEnvironment& env, FILE* fid, + char const* sPropVPSStr, + char const* sPropSPSStr, + char const* sPropPPSStr, + unsigned bufferSize, char const* perFrameFileNamePrefix); + // called only by createNew() + virtual ~H265VideoFileSink(); +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H265VideoRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/H265VideoRTPSink.hh new file mode 100644 index 0000000..3c6f433 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H265VideoRTPSink.hh @@ -0,0 +1,62 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP sink for H.265 video +// C++ header + +#ifndef _H265_VIDEO_RTP_SINK_HH +#define _H265_VIDEO_RTP_SINK_HH + +#ifndef _H264_OR_5_VIDEO_RTP_SINK_HH +#include "H264or5VideoRTPSink.hh" +#endif + +class H265VideoRTPSink: public H264or5VideoRTPSink { +public: + static H265VideoRTPSink* + createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat); + static H265VideoRTPSink* + createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, + u_int8_t const* vps, unsigned vpsSize, + u_int8_t const* sps, unsigned spsSize, + u_int8_t const* pps, unsigned ppsSize); + // an optional variant of "createNew()", useful if we know, in advance, + // the stream's VPS, SPS and PPS NAL units. + // This avoids us having to 'pre-read' from the input source in order to get these values. + static H265VideoRTPSink* + createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, + char const* sPropVPSStr, char const* sPropSPSStr, char const* sPropPPSStr); + // an optional variant of "createNew()", useful if we know, in advance, + // the stream's VPS, SPS and PPS NAL units. + // This avoids us having to 'pre-read' from the input source in order to get these values. 
+ +protected: + H265VideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, + u_int8_t const* vps = NULL, unsigned vpsSize = 0, + u_int8_t const* sps = NULL, unsigned spsSize = 0, + u_int8_t const* pps = NULL, unsigned ppsSize = 0); + // called only by createNew() + virtual ~H265VideoRTPSink(); + +protected: // redefined virtual functions: + virtual char const* auxSDPLine(); + +private: // redefined virtual functions: + virtual Boolean sourceIsCompatibleWithUs(MediaSource& source); +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H265VideoRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/H265VideoRTPSource.hh new file mode 100644 index 0000000..38b7162 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H265VideoRTPSource.hh @@ -0,0 +1,67 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// H.265 Video RTP Sources +// C++ header + +#ifndef _H265_VIDEO_RTP_SOURCE_HH +#define _H265_VIDEO_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +class H265VideoRTPSource: public MultiFramedRTPSource { +public: + static H265VideoRTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + Boolean expectDONFields = False, + unsigned rtpTimestampFrequency = 90000); + // "expectDONFields" is True iff we expect incoming H.265/RTP packets to contain + // DONL and DOND fields. I.e., if "tx-mode == "MST" or sprop-depack-buf-nalus > 0". + + u_int64_t currentNALUnitAbsDon() const { return fCurrentNALUnitAbsDon; } + // the 'absolute decoding order number (AbsDon)' for the most-recently delivered NAL unit + +protected: + H265VideoRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + Boolean expectDONFields, + unsigned rtpTimestampFrequency); + // called only by createNew() + + virtual ~H265VideoRTPSource(); + +protected: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; + +private: + void computeAbsDonFromDON(u_int16_t DON); + +private: + friend class H265BufferedPacket; + Boolean fExpectDONFields; + unsigned char fCurPacketNALUnitType; + u_int16_t fPreviousNALUnitDON; + u_int64_t fCurrentNALUnitAbsDon; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H265VideoStreamDiscreteFramer.hh b/AnyCore/lib_rtsp/liveMedia/include/H265VideoStreamDiscreteFramer.hh new file mode 100644 index 0000000..9234773 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H265VideoStreamDiscreteFramer.hh @@ -0,0 +1,46 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the 
License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A simplified version of "H265VideoStreamFramer" that takes only complete, +// discrete frames (rather than an arbitrary byte stream) as input. +// This avoids the parsing and data copying overhead of the full +// "H265VideoStreamFramer". +// C++ header + +#ifndef _H265_VIDEO_STREAM_DISCRETE_FRAMER_HH +#define _H265_VIDEO_STREAM_DISCRETE_FRAMER_HH + +#ifndef _H264_OR_5_VIDEO_STREAM_DISCRETE_FRAMER_HH +#include "H264or5VideoStreamDiscreteFramer.hh" +#endif + +class H265VideoStreamDiscreteFramer: public H264or5VideoStreamDiscreteFramer { +public: + static H265VideoStreamDiscreteFramer* + createNew(UsageEnvironment& env, FramedSource* inputSource); + +protected: + H265VideoStreamDiscreteFramer(UsageEnvironment& env, FramedSource* inputSource); + // called only by createNew() + virtual ~H265VideoStreamDiscreteFramer(); + +private: + // redefined virtual functions: + virtual Boolean isH265VideoStreamFramer() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/H265VideoStreamFramer.hh b/AnyCore/lib_rtsp/liveMedia/include/H265VideoStreamFramer.hh new file mode 100644 index 0000000..5d6940b --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/H265VideoStreamFramer.hh @@ -0,0 +1,42 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the 
+Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that breaks up a H.265 Video Elementary Stream into NAL units. +// C++ header + +#ifndef _H265_VIDEO_STREAM_FRAMER_HH +#define _H265_VIDEO_STREAM_FRAMER_HH + +#ifndef _H264_OR_5_VIDEO_STREAM_FRAMER_HH +#include "H264or5VideoStreamFramer.hh" +#endif + +class H265VideoStreamFramer: public H264or5VideoStreamFramer { +public: + static H265VideoStreamFramer* createNew(UsageEnvironment& env, FramedSource* inputSource, + Boolean includeStartCodeInOutput = False); + +protected: + H265VideoStreamFramer(UsageEnvironment& env, FramedSource* inputSource, Boolean createParser, Boolean includeStartCodeInOutput); + // called only by "createNew()" + virtual ~H265VideoStreamFramer(); + + // redefined virtual functions: + virtual Boolean isH265VideoStreamFramer() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/InputFile.hh b/AnyCore/lib_rtsp/liveMedia/include/InputFile.hh new file mode 100644 index 0000000..0b5d9ee --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/InputFile.hh @@ -0,0 +1,66 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Common routines for opening/closing named input files +// C++ header + +#ifndef _INPUT_FILE_HH +#define _INPUT_FILE_HH + +#include +#include + +#if (defined(__WIN32__) || defined(_WIN32) || defined(_WIN32_WCE)) +#ifndef _WIN32_WCE +// Include header files that might be needed by Windows (in code that uses this header file): +#include +#include +#endif + +#define READ_FROM_FILES_SYNCHRONOUSLY 1 + // Because Windows is a silly toy operating system that doesn't (reliably) treat + // open files as being readable sockets (which can be handled within the default + // "BasicTaskScheduler" event loop, using "select()"), we implement file reading + // in Windows using synchronous, rather than asynchronous, I/O. This can severely + // limit the scalability of servers using this code that run on Windows. + // If this is a problem for you, then either use a better operating system, + // or else write your own Windows-specific event loop ("TaskScheduler" subclass) + // that can handle readable data in Windows open files as an event. 
+#endif + +#ifndef _WIN32_WCE +#include +#endif + +FILE* OpenInputFile(UsageEnvironment& env, char const* fileName); + +void CloseInputFile(FILE* fid); + +u_int64_t GetFileSize(char const* fileName, FILE* fid); + // 0 means zero-length, unbounded, or unknown + +int64_t SeekFile64(FILE *fid, int64_t offset, int whence); + // A platform-independent routine for seeking within (possibly) large files + +int64_t TellFile64(FILE *fid); + // A platform-independent routine for reporting the position within + // (possibly) large files + +Boolean FileIsSeekable(FILE *fid); + // Tests whether "fid" is seekable, by trying to seek within it. + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/JPEGVideoRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/JPEGVideoRTPSink.hh new file mode 100644 index 0000000..ae2579e --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/JPEGVideoRTPSink.hh @@ -0,0 +1,52 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP sink for JPEG video (RFC 2435) +// C++ header + +#ifndef _JPEG_VIDEO_RTP_SINK_HH +#define _JPEG_VIDEO_RTP_SINK_HH + +#ifndef _VIDEO_RTP_SINK_HH +#include "VideoRTPSink.hh" +#endif + +class JPEGVideoRTPSink: public VideoRTPSink { +public: + static JPEGVideoRTPSink* createNew(UsageEnvironment& env, Groupsock* RTPgs); + +protected: + JPEGVideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs); + // called only by createNew() + + virtual ~JPEGVideoRTPSink(); + +private: // redefined virtual functions: + virtual Boolean sourceIsCompatibleWithUs(MediaSource& source); + + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual + Boolean frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const; + virtual unsigned specialHeaderSize() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/JPEGVideoRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/JPEGVideoRTPSource.hh new file mode 100644 index 0000000..7703560 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/JPEGVideoRTPSource.hh @@ -0,0 +1,59 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// JPEG Video (RFC 2435) RTP Sources +// C++ header + +#ifndef _JPEG_VIDEO_RTP_SOURCE_HH +#define _JPEG_VIDEO_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +#define MAX_JPEG_HEADER_SIZE 1024 + +class JPEGVideoRTPSource: public MultiFramedRTPSource { +public: + static JPEGVideoRTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat = 26, + unsigned rtpPayloadFrequency = 90000, + unsigned defaultWidth = 0, unsigned defaultHeight = 0); + +protected: + virtual ~JPEGVideoRTPSource(); + +private: + JPEGVideoRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency, + unsigned defaultWidth, unsigned defaultHeight); + // called only by createNew() + + // Image dimensions from the SDP description, if any + unsigned fDefaultWidth, fDefaultHeight; + +private: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + + virtual char const* MIMEtype() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/JPEGVideoSource.hh b/AnyCore/lib_rtsp/liveMedia/include/JPEGVideoSource.hh new file mode 100644 index 0000000..46b5e10 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/JPEGVideoSource.hh @@ -0,0 +1,55 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// JPEG video sources +// C++ header + +#ifndef _JPEG_VIDEO_SOURCE_HH +#define _JPEG_VIDEO_SOURCE_HH + +#ifndef _FRAMED_SOURCE_HH +#include "FramedSource.hh" +#endif + +class JPEGVideoSource: public FramedSource { +public: + virtual u_int8_t type() = 0; + virtual u_int8_t qFactor() = 0; + virtual u_int8_t width() = 0; // # pixels/8 (or 0 for 2048 pixels) + virtual u_int8_t height() = 0; // # pixels/8 (or 0 for 2048 pixels) + + virtual u_int8_t const* quantizationTables(u_int8_t& precision, + u_int16_t& length); + // If "qFactor()" returns a value >= 128, then this function is called + // to tell us the quantization tables that are being used. + // (The default implementation of this function just returns NULL.) + // "precision" and "length" are as defined in RFC 2435, section 3.1.8. + + virtual u_int16_t restartInterval(); + // If restart intervals are being used (i.e., 64 <= type() <= 127), then this function must be + // redefined - by a subclass - to return a non-zero value. 
+ +protected: + JPEGVideoSource(UsageEnvironment& env); // abstract base class + virtual ~JPEGVideoSource(); + +private: + // redefined virtual functions: + virtual Boolean isJPEGVideoSource() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/Locale.hh b/AnyCore/lib_rtsp/liveMedia/include/Locale.hh new file mode 100644 index 0000000..392d696 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/Locale.hh @@ -0,0 +1,70 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Support for temporarily setting the locale (e.g., to "C" or "POSIX") for (e.g.) parsing or printing +// floating-point numbers in protocol headers, or calling toupper()/tolower() on human-input strings. +// C++ header + +#ifndef _LOCALE_HH +#define _LOCALE_HH + +// If you're on a system that (for whatever reason) doesn't have either the "setlocale()" or the "newlocale()" function, then +// add "-DLOCALE_NOT_USED" to your "config.*" file. + +// If you're on a system that (for whatever reason) has "setlocale()" but not "newlocale()", then +// add "-DXLOCALE_NOT_USED" to your "config.*" file. +// (Note that -DLOCALE_NOT_USED implies -DXLOCALE_NOT_USED; you do not need both.) 
+// Also, for Windows systems, we define "XLOCALE_NOT_USED" by default, because at least some Windows systems +// (or their development environments) don't have "newlocale()". If, however, your Windows system *does* have "newlocale()", +// then you can override this by defining "XLOCALE_USED" before #including this file. + +#ifdef XLOCALE_USED +#undef LOCALE_NOT_USED +#undef XLOCALE_NOT_USED +#else +#if defined(__WIN32__) || defined(_WIN32) +#define XLOCALE_NOT_USED 1 +#endif +#endif + +#ifndef LOCALE_NOT_USED +#include +#ifndef XLOCALE_NOT_USED +#include // because, on some systems, doesn't include ; this makes sure that we get both +#endif +#endif + + +enum LocaleCategory { All, Numeric }; // define and implement more categories later, as needed + +class Locale { +public: + Locale(char const* newLocale, LocaleCategory category = All); + virtual ~Locale(); + +private: +#ifndef LOCALE_NOT_USED +#ifndef XLOCALE_NOT_USED + locale_t fLocale, fPrevLocale; +#else + int fCategoryNum; + char* fPrevLocale; +#endif +#endif +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MP3ADU.hh b/AnyCore/lib_rtsp/liveMedia/include/MP3ADU.hh new file mode 100644 index 0000000..3223481 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MP3ADU.hh @@ -0,0 +1,97 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// 'ADU' MP3 streams (for improved loss-tolerance) +// C++ header + +#ifndef _MP3_ADU_HH +#define _MP3_ADU_HH + +#ifndef _FRAMED_FILTER_HH +#include "FramedFilter.hh" +#endif + +class ADUFromMP3Source: public FramedFilter { +public: + static ADUFromMP3Source* createNew(UsageEnvironment& env, + FramedSource* inputSource, + Boolean includeADUdescriptors = True); + + void resetInput(); + // This is called whenever there's a discontinuity in the input MP3 source + // (e.g., due to seeking within the source). It causes any still-unprocessed + // MP3 frame data within our queue to be discarded, so that it does not + // erroneously get used by backpointers from the new MP3 frames. + + Boolean setScaleFactor(int scale); + +protected: + ADUFromMP3Source(UsageEnvironment& env, + FramedSource* inputSource, + Boolean includeADUdescriptors); + // called only by createNew() + virtual ~ADUFromMP3Source(); + +private: + // Redefined virtual functions: + virtual void doGetNextFrame(); + virtual char const* MIMEtype() const; + +private: + Boolean doGetNextFrame1(); + +private: + Boolean fAreEnqueueingMP3Frame; + class SegmentQueue* fSegments; + Boolean fIncludeADUdescriptors; + unsigned fTotalDataSizeBeforePreviousRead; + int fScale; + unsigned fFrameCounter; +}; + +class MP3FromADUSource: public FramedFilter { +public: + static MP3FromADUSource* createNew(UsageEnvironment& env, + FramedSource* inputSource, + Boolean includeADUdescriptors = True); + +protected: + MP3FromADUSource(UsageEnvironment& env, + FramedSource* inputSource, + Boolean includeADUdescriptors); + // called only by createNew() + virtual ~MP3FromADUSource(); + +private: + // Redefined virtual functions: + 
virtual void doGetNextFrame(); + virtual char const* MIMEtype() const; + +private: + Boolean needToGetAnADU(); + void insertDummyADUsIfNecessary(); + Boolean generateFrameFromHeadADU(); + +private: + Boolean fAreEnqueueingADU; + class SegmentQueue* fSegments; +}; + +// Definitions of external C functions that implement various MP3 operations: +extern "C" int mp3ZeroOutSideInfo(unsigned char*, unsigned, unsigned); + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MP3ADURTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/MP3ADURTPSink.hh new file mode 100644 index 0000000..aa04f11 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MP3ADURTPSink.hh @@ -0,0 +1,55 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP sink for 'ADUized' MP3 frames ("mpa-robust") +// C++ header + +#ifndef _MP3_ADU_RTP_SINK_HH +#define _MP3_ADU_RTP_SINK_HH + +#ifndef _AUDIO_RTP_SINK_HH +#include "AudioRTPSink.hh" +#endif + +class MP3ADURTPSink: public AudioRTPSink { +public: + static MP3ADURTPSink* createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char RTPPayloadType); + +protected: + virtual ~MP3ADURTPSink(); + +private: + MP3ADURTPSink(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char RTPPayloadType); + // called only by createNew() + + +private: + // Redefined virtual functions: + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual unsigned specialHeaderSize() const; + +private: + unsigned fCurADUSize; // used when fragmenting over multiple RTP packets +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MP3ADURTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/MP3ADURTPSource.hh new file mode 100644 index 0000000..348b23e --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MP3ADURTPSource.hh @@ -0,0 +1,49 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. 
All rights reserved. +// RTP source for 'ADUized' MP3 frames ("mpa-robust") +// C++ header + +#ifndef _MP3_ADU_SOURCE_HH +#define _MP3_ADU_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +class MP3ADURTPSource: public MultiFramedRTPSource { +public: + static MP3ADURTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency = 90000); + +protected: + virtual ~MP3ADURTPSource(); + +private: + MP3ADURTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + // called only by createNew() + +private: + // redefined virtual functions: + virtual char const* MIMEtype() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MP3ADUTranscoder.hh b/AnyCore/lib_rtsp/liveMedia/include/MP3ADUTranscoder.hh new file mode 100644 index 0000000..74784ed --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MP3ADUTranscoder.hh @@ -0,0 +1,64 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Transcoder for ADUized MP3 frames +// C++ header + +#ifndef _MP3_ADU_TRANSCODER_HH +#define _MP3_ADU_TRANSCODER_HH + +#ifndef _FRAMED_FILTER_HH +#include "FramedFilter.hh" +#endif + +class MP3ADUTranscoder: public FramedFilter { +public: + static MP3ADUTranscoder* createNew(UsageEnvironment& env, + unsigned outBitrate /* in kbps */, + FramedSource* inputSource); + + unsigned outBitrate() const { return fOutBitrate; } +protected: + MP3ADUTranscoder(UsageEnvironment& env, + unsigned outBitrate /* in kbps */, + FramedSource* inputSource); + // called only by createNew() + virtual ~MP3ADUTranscoder(); + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + virtual void getAttributes() const; + +private: + static void afterGettingFrame(void* clientData, + unsigned numBytesRead, unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned numBytesRead, unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + +private: + unsigned fOutBitrate; // in kbps + unsigned fAvailableBytesForBackpointer; + + unsigned char* fOrigADU; + // used to store incoming ADU prior to transcoding +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MP3ADUinterleaving.hh b/AnyCore/lib_rtsp/liveMedia/include/MP3ADUinterleaving.hh new file mode 100644 index 0000000..b1ed288 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MP3ADUinterleaving.hh @@ -0,0 +1,129 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Interleaving of MP3 ADUs +// C++ header + +#ifndef _MP3_ADU_INTERLEAVING_HH +#define _MP3_ADU_INTERLEAVING_HH + +#ifndef _FRAMED_FILTER_HH +#include "FramedFilter.hh" +#endif + +// A data structure used to represent an interleaving +#define MAX_CYCLE_SIZE 256 +class Interleaving { +public: + Interleaving(unsigned cycleSize, unsigned char const* cycleArray); + virtual ~Interleaving(); + + unsigned cycleSize() const {return fCycleSize;} + unsigned char lookupInverseCycle(unsigned char index) const { + return fInverseCycle[index]; + } + +private: + unsigned fCycleSize; + unsigned char fInverseCycle[MAX_CYCLE_SIZE]; +}; + +// This class is used only as a base for the following two: + +class MP3ADUinterleaverBase: public FramedFilter { +protected: + MP3ADUinterleaverBase(UsageEnvironment& env, + FramedSource* inputSource); + // abstract base class + virtual ~MP3ADUinterleaverBase(); + + static FramedSource* getInputSource(UsageEnvironment& env, + char const* inputSourceName); + static void afterGettingFrame(void* clientData, + unsigned numBytesRead, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + virtual void afterGettingFrame(unsigned numBytesRead, + struct timeval presentationTime, + unsigned durationInMicroseconds) = 0; +}; + +// This class is used to convert an ADU sequence from non-interleaved +// to interleaved form: + +class MP3ADUinterleaver: public MP3ADUinterleaverBase { +public: + static MP3ADUinterleaver* createNew(UsageEnvironment& env, + Interleaving const& interleaving, + FramedSource* inputSource); + +protected: + 
MP3ADUinterleaver(UsageEnvironment& env, + Interleaving const& interleaving, + FramedSource* inputSource); + // called only by createNew() + virtual ~MP3ADUinterleaver(); + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + virtual void afterGettingFrame(unsigned numBytesRead, + struct timeval presentationTime, + unsigned durationInMicroseconds); + +private: + void releaseOutgoingFrame(); + +private: + Interleaving const fInterleaving; + class InterleavingFrames* fFrames; + unsigned char fPositionOfNextIncomingFrame; + unsigned fII, fICC; +}; + +// This class is used to convert an ADU sequence from interleaved +// to non-interleaved form: + +class MP3ADUdeinterleaver: public MP3ADUinterleaverBase { +public: + static MP3ADUdeinterleaver* createNew(UsageEnvironment& env, + FramedSource* inputSource); + +protected: + MP3ADUdeinterleaver(UsageEnvironment& env, + FramedSource* inputSource); + // called only by createNew() + virtual ~MP3ADUdeinterleaver(); + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + virtual void afterGettingFrame(unsigned numBytesRead, + struct timeval presentationTime, + unsigned durationInMicroseconds); + +private: + void releaseOutgoingFrame(); + +private: + class DeinterleavingFrames* fFrames; + unsigned fIIlastSeen, fICClastSeen; +}; + +#endif + diff --git a/AnyCore/lib_rtsp/liveMedia/include/MP3AudioFileServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/MP3AudioFileServerMediaSubsession.hh new file mode 100644 index 0000000..4cf914d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MP3AudioFileServerMediaSubsession.hh @@ -0,0 +1,73 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from an MP3 audio file. +// (Actually, any MPEG-1 or MPEG-2 audio file should work.) +// C++ header + +#ifndef _MP3_AUDIO_FILE_SERVER_MEDIA_SUBSESSION_HH +#define _MP3_AUDIO_FILE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _FILE_SERVER_MEDIA_SUBSESSION_HH +#include "FileServerMediaSubsession.hh" +#endif +#ifndef _MP3_ADU_INTERLEAVING_HH +#include "MP3ADUinterleaving.hh" +#endif +#ifndef _MP3_ADU_HH +#include "MP3ADU.hh" +#endif + +class MP3AudioFileServerMediaSubsession: public FileServerMediaSubsession{ +public: + static MP3AudioFileServerMediaSubsession* + createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource, + Boolean generateADUs, Interleaving* interleaving); + // Note: "interleaving" is used only if "generateADUs" is True, + // (and a value of NULL means 'no interleaving') + +protected: + MP3AudioFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, Boolean reuseFirstSource, + Boolean generateADUs, + Interleaving* interleaving); + // called only by createNew(); + virtual ~MP3AudioFileServerMediaSubsession(); + + FramedSource* createNewStreamSourceCommon(FramedSource* baseMP3Source, unsigned mp3NumBytes, unsigned& estBitrate); + void getBaseStreams(FramedSource* frontStream, + FramedSource*& sourceMP3Stream, ADUFromMP3Source*& aduStream/*if any*/); + +protected: // redefined virtual 
functions + virtual void seekStreamSource(FramedSource* inputSource, double& seekNPT, double streamDuration, u_int64_t& numBytes); + virtual void setStreamSourceScale(FramedSource* inputSource, float scale); + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource); + virtual void testScaleFactor(float& scale); + virtual float duration() const; + +protected: + Boolean fGenerateADUs; + Interleaving* fInterleaving; + float fFileDuration; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MP3FileSource.hh b/AnyCore/lib_rtsp/liveMedia/include/MP3FileSource.hh new file mode 100644 index 0000000..5148842 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MP3FileSource.hh @@ -0,0 +1,69 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// MP3 File Sources +// C++ header + +#ifndef _MP3_FILE_SOURCE_HH +#define _MP3_FILE_SOURCE_HH + +#ifndef _FRAMED_FILE_SOURCE_HH +#include "FramedFileSource.hh" +#endif + +class MP3StreamState; // forward + +class MP3FileSource: public FramedFileSource { +public: + static MP3FileSource* createNew(UsageEnvironment& env, char const* fileName); + + float filePlayTime() const; + unsigned fileSize() const; + void setPresentationTimeScale(unsigned scale); + void seekWithinFile(double seekNPT, double streamDuration); + // if "streamDuration" is >0.0, then we limit the stream to that duration, before treating it as EOF + +protected: + MP3FileSource(UsageEnvironment& env, FILE* fid); + // called only by createNew() + + virtual ~MP3FileSource(); + +protected: + void assignStream(FILE* fid, unsigned filesize); + Boolean initializeStream(); + + MP3StreamState* streamState() {return fStreamState;} + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + virtual char const* MIMEtype() const; + virtual void getAttributes() const; + +private: + virtual Boolean doGetNextFrame1(); + +private: + MP3StreamState* fStreamState; + Boolean fHaveJustInitialized; + struct timeval fFirstFramePresentationTime; // set on stream init + Boolean fLimitNumBytesToStream; + unsigned fNumBytesToStream; // used iff "fLimitNumBytesToStream" is True +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MP3Transcoder.hh b/AnyCore/lib_rtsp/liveMedia/include/MP3Transcoder.hh new file mode 100644 index 0000000..4c04596 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MP3Transcoder.hh @@ -0,0 +1,44 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// MP3 Transcoder +// C++ header + +#ifndef _MP3_TRANSCODER_HH +#define _MP3_TRANSCODER_HH + +#ifndef _MP3_ADU_HH +#include "MP3ADU.hh" +#endif +#ifndef _MP3_ADU_TRANSCODER_HH +#include "MP3ADUTranscoder.hh" +#endif + +class MP3Transcoder: public MP3FromADUSource { +public: + static MP3Transcoder* createNew(UsageEnvironment& env, + unsigned outBitrate /* in kbps */, + FramedSource* inputSource); + +protected: + MP3Transcoder(UsageEnvironment& env, + MP3ADUTranscoder* aduTranscoder); + // called only by createNew() + virtual ~MP3Transcoder(); +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2AudioRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2AudioRTPSink.hh new file mode 100644 index 0000000..16d0675 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2AudioRTPSink.hh @@ -0,0 +1,48 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for MPEG audio (RFC 2250) +// C++ header + +#ifndef _MPEG_1OR2_AUDIO_RTP_SINK_HH +#define _MPEG_1OR2_AUDIO_RTP_SINK_HH + +#ifndef _AUDIO_RTP_SINK_HH +#include "AudioRTPSink.hh" +#endif + +class MPEG1or2AudioRTPSink: public AudioRTPSink { +public: + static MPEG1or2AudioRTPSink* createNew(UsageEnvironment& env, + Groupsock* RTPgs); + +protected: + MPEG1or2AudioRTPSink(UsageEnvironment& env, Groupsock* RTPgs); + // called only by createNew() + + virtual ~MPEG1or2AudioRTPSink(); + +private: // redefined virtual functions: + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual unsigned specialHeaderSize() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2AudioRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2AudioRTPSource.hh new file mode 100644 index 0000000..e0c8d3b --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2AudioRTPSource.hh @@ -0,0 +1,51 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// MPEG-1 or MPEG-2 Audio RTP Sources +// C++ header + +#ifndef _MPEG_1OR2_AUDIO_RTP_SOURCE_HH +#define _MPEG_1OR2_AUDIO_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +class MPEG1or2AudioRTPSource: public MultiFramedRTPSource { +public: + static MPEG1or2AudioRTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat = 14, + unsigned rtpTimestampFrequency = 90000); + +protected: + virtual ~MPEG1or2AudioRTPSource(); + +private: + MPEG1or2AudioRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + // called only by createNew() + +private: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2AudioStreamFramer.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2AudioStreamFramer.hh new file mode 100644 index 0000000..85b4923 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2AudioStreamFramer.hh @@ -0,0 +1,70 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that breaks up an MPEG (1,2) audio elementary stream into frames +// C++ header + +#ifndef _MPEG_1OR2_AUDIO_STREAM_FRAMER_HH +#define _MPEG_1OR2_AUDIO_STREAM_FRAMER_HH + +#ifndef _FRAMED_FILTER_HH +#include "FramedFilter.hh" +#endif + +class MPEG1or2AudioStreamFramer: public FramedFilter { +public: + static MPEG1or2AudioStreamFramer* + createNew(UsageEnvironment& env, FramedSource* inputSource, + Boolean syncWithInputSource = False); + // If "syncWithInputSource" is True, the stream's presentation time + // will be reset to that of the input source, whenever new data + // is read from it. + + void flushInput(); // called if there is a discontinuity (seeking) in the input + +private: + MPEG1or2AudioStreamFramer(UsageEnvironment& env, FramedSource* inputSource, + Boolean syncWithInputSource); + // called only by createNew() + virtual ~MPEG1or2AudioStreamFramer(); + + static void continueReadProcessing(void* clientData, + unsigned char* ptr, unsigned size, + struct timeval presentationTime); + void continueReadProcessing(); + + void resetPresentationTime(struct timeval newPresentationTime); + // useful if we're being synced with a separate (e.g., video) stream + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + +private: + void reset(); + struct timeval currentFramePlayTime() const; + +private: + Boolean fSyncWithInputSource; + struct timeval fNextFramePresentationTime; + +private: // parsing state + class MPEG1or2AudioStreamParser* fParser; + friend class MPEG1or2AudioStreamParser; // hack +}; + +#endif diff --git 
a/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2Demux.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2Demux.hh new file mode 100644 index 0000000..53a76c5 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2Demux.hh @@ -0,0 +1,150 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Demultiplexer for a MPEG 1 or 2 Program Stream +// C++ header + +#ifndef _MPEG_1OR2_DEMUX_HH +#define _MPEG_1OR2_DEMUX_HH + +#ifndef _FRAMED_SOURCE_HH +#include "FramedSource.hh" +#endif + +class MPEG1or2DemuxedElementaryStream; // forward + +class MPEG1or2Demux: public Medium { +public: + static MPEG1or2Demux* createNew(UsageEnvironment& env, + FramedSource* inputSource, + Boolean reclaimWhenLastESDies = False); + // If "reclaimWhenLastESDies" is True, the the demux is deleted when + // all "MPEG1or2DemuxedElementaryStream"s that we created get deleted. 
+ + MPEG1or2DemuxedElementaryStream* newElementaryStream(u_int8_t streamIdTag); + + // Specialized versions of the above for audio and video: + MPEG1or2DemuxedElementaryStream* newAudioStream(); + MPEG1or2DemuxedElementaryStream* newVideoStream(); + + // A hack for getting raw, undemuxed PES packets from the Program Stream: + MPEG1or2DemuxedElementaryStream* newRawPESStream(); + + void getNextFrame(u_int8_t streamIdTag, + unsigned char* to, unsigned maxSize, + FramedSource::afterGettingFunc* afterGettingFunc, + void* afterGettingClientData, + FramedSource::onCloseFunc* onCloseFunc, + void* onCloseClientData); + // similar to FramedSource::getNextFrame(), except that it also + // takes a stream id tag as parameter. + + void stopGettingFrames(u_int8_t streamIdTag); + // similar to FramedSource::stopGettingFrames(), except that it also + // takes a stream id tag as parameter. + + static void handleClosure(void* clientData); + // This should be called (on ourself) if the source is discovered + // to be closed (i.e., no longer readable) + + FramedSource* inputSource() const { return fInputSource; } + + class SCR { + public: + SCR(); + + u_int8_t highBit; + u_int32_t remainingBits; + u_int16_t extension; + + Boolean isValid; + }; + SCR& lastSeenSCR() { return fLastSeenSCR; } + + unsigned char mpegVersion() const { return fMPEGversion; } + + void flushInput(); // should be called before any 'seek' on the underlying source + +private: + MPEG1or2Demux(UsageEnvironment& env, + FramedSource* inputSource, Boolean reclaimWhenLastESDies); + // called only by createNew() + virtual ~MPEG1or2Demux(); + + void registerReadInterest(u_int8_t streamIdTag, + unsigned char* to, unsigned maxSize, + FramedSource::afterGettingFunc* afterGettingFunc, + void* afterGettingClientData, + FramedSource::onCloseFunc* onCloseFunc, + void* onCloseClientData); + + Boolean useSavedData(u_int8_t streamIdTag, + unsigned char* to, unsigned maxSize, + FramedSource::afterGettingFunc* afterGettingFunc, + 
void* afterGettingClientData); + + static void continueReadProcessing(void* clientData, + unsigned char* ptr, unsigned size, + struct timeval presentationTime); + void continueReadProcessing(); + +private: + friend class MPEG1or2DemuxedElementaryStream; + void noteElementaryStreamDeletion(MPEG1or2DemuxedElementaryStream* es); + +private: + FramedSource* fInputSource; + SCR fLastSeenSCR; + unsigned char fMPEGversion; + + unsigned char fNextAudioStreamNumber; + unsigned char fNextVideoStreamNumber; + Boolean fReclaimWhenLastESDies; + unsigned fNumOutstandingESs; + + // A descriptor for each possible stream id tag: + typedef struct OutputDescriptor { + // input parameters + unsigned char* to; unsigned maxSize; + FramedSource::afterGettingFunc* fAfterGettingFunc; + void* afterGettingClientData; + FramedSource::onCloseFunc* fOnCloseFunc; + void* onCloseClientData; + + // output parameters + unsigned frameSize; struct timeval presentationTime; + class SavedData; // forward + SavedData* savedDataHead; + SavedData* savedDataTail; + unsigned savedDataTotalSize; + + // status parameters + Boolean isPotentiallyReadable; + Boolean isCurrentlyActive; + Boolean isCurrentlyAwaitingData; + } OutputDescriptor_t; + OutputDescriptor_t fOutput[256]; + + unsigned fNumPendingReads; + Boolean fHaveUndeliveredData; + +private: // parsing state + class MPEGProgramStreamParser* fParser; + friend class MPEGProgramStreamParser; // hack +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2DemuxedElementaryStream.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2DemuxedElementaryStream.hh new file mode 100644 index 0000000..523b8b2 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2DemuxedElementaryStream.hh @@ -0,0 +1,69 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) 
any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A MPEG 1 or 2 Elementary Stream, demultiplexed from a Program Stream +// C++ header + +#ifndef _MPEG_1OR2_DEMUXED_ELEMENTARY_STREAM_HH +#define _MPEG_1OR2_DEMUXED_ELEMENTARY_STREAM_HH + +#ifndef _MPEG_1OR2_DEMUX_HH +#include "MPEG1or2Demux.hh" +#endif + +class MPEG1or2DemuxedElementaryStream: public FramedSource { +public: + MPEG1or2Demux::SCR lastSeenSCR() const { return fLastSeenSCR; } + + unsigned char mpegVersion() const { return fMPEGversion; } + + MPEG1or2Demux& sourceDemux() const { return fOurSourceDemux; } + +private: // We are created only by a MPEG1or2Demux (a friend) + MPEG1or2DemuxedElementaryStream(UsageEnvironment& env, + u_int8_t streamIdTag, + MPEG1or2Demux& sourceDemux); + virtual ~MPEG1or2DemuxedElementaryStream(); + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + virtual void doStopGettingFrames(); + virtual char const* MIMEtype() const; + virtual unsigned maxFrameSize() const; + +private: + static void afterGettingFrame(void* clientData, + unsigned frameSize, unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + + void afterGettingFrame1(unsigned frameSize, unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + +private: + u_int8_t fOurStreamIdTag; + MPEG1or2Demux& fOurSourceDemux; + char const* fMIMEtype; + MPEG1or2Demux::SCR fLastSeenSCR; + 
unsigned char fMPEGversion; + + friend class MPEG1or2Demux; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2DemuxedServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2DemuxedServerMediaSubsession.hh new file mode 100644 index 0000000..7117ff6 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2DemuxedServerMediaSubsession.hh @@ -0,0 +1,63 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a MPEG-1 or 2 demuxer. 
+// C++ header + +#ifndef _MPEG_1OR2_DEMUXED_SERVER_MEDIA_SUBSESSION_HH +#define _MPEG_1OR2_DEMUXED_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _ON_DEMAND_SERVER_MEDIA_SUBSESSION_HH +#include "OnDemandServerMediaSubsession.hh" +#endif +#ifndef _MPEG_1OR2_FILE_SERVER_DEMUX_HH +#include "MPEG1or2FileServerDemux.hh" +#endif + +class MPEG1or2DemuxedServerMediaSubsession: public OnDemandServerMediaSubsession{ +public: + static MPEG1or2DemuxedServerMediaSubsession* + createNew(MPEG1or2FileServerDemux& demux, u_int8_t streamIdTag, + Boolean reuseFirstSource, + Boolean iFramesOnly = False, double vshPeriod = 5.0); + // The last two parameters are relevant for video streams only + +private: + MPEG1or2DemuxedServerMediaSubsession(MPEG1or2FileServerDemux& demux, + u_int8_t streamIdTag, Boolean reuseFirstSource, + Boolean iFramesOnly, double vshPeriod); + // called only by createNew(); + virtual ~MPEG1or2DemuxedServerMediaSubsession(); + +private: // redefined virtual functions + virtual void seekStreamSource(FramedSource* inputSource, double& seekNPT, double streamDuration, u_int64_t& numBytes); + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource); + virtual float duration() const; + +private: + MPEG1or2FileServerDemux& fOurDemux; + u_int8_t fStreamIdTag; + Boolean fIFramesOnly; // for video streams + double fVSHPeriod; // for video streams +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2FileServerDemux.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2FileServerDemux.hh new file mode 100644 index 0000000..359ba06 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2FileServerDemux.hh @@ -0,0 +1,67 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software 
Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A server demultiplexer for a MPEG 1 or 2 Program Stream +// C++ header + +#ifndef _MPEG_1OR2_FILE_SERVER_DEMUX_HH +#define _MPEG_1OR2_FILE_SERVER_DEMUX_HH + +#ifndef _SERVER_MEDIA_SESSION_HH +#include "ServerMediaSession.hh" +#endif +#ifndef _MPEG_1OR2_DEMUXED_ELEMENTARY_STREAM_HH +#include "MPEG1or2DemuxedElementaryStream.hh" +#endif + +class MPEG1or2FileServerDemux: public Medium { +public: + static MPEG1or2FileServerDemux* + createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource); + + ServerMediaSubsession* newAudioServerMediaSubsession(); // MPEG-1 or 2 audio + ServerMediaSubsession* newVideoServerMediaSubsession(Boolean iFramesOnly = False, + double vshPeriod = 5.0 + /* how often (in seconds) to inject a Video_Sequence_Header, + if one doesn't already appear in the stream */); + ServerMediaSubsession* newAC3AudioServerMediaSubsession(); // AC-3 audio (from VOB) + + unsigned fileSize() const { return fFileSize; } + float fileDuration() const { return fFileDuration; } + +private: + MPEG1or2FileServerDemux(UsageEnvironment& env, char const* fileName, + Boolean reuseFirstSource); + // called only by createNew(); + virtual ~MPEG1or2FileServerDemux(); + +private: + friend class MPEG1or2DemuxedServerMediaSubsession; + MPEG1or2DemuxedElementaryStream* newElementaryStream(unsigned clientSessionId, + 
u_int8_t streamIdTag); + +private: + char const* fFileName; + unsigned fFileSize; + float fFileDuration; + Boolean fReuseFirstSource; + MPEG1or2Demux* fSession0Demux; + MPEG1or2Demux* fLastCreatedDemux; + unsigned fLastClientSessionId; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoFileServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoFileServerMediaSubsession.hh new file mode 100644 index 0000000..93a46a9 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoFileServerMediaSubsession.hh @@ -0,0 +1,59 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a MPEG-1 or 2 Elementary Stream video file. 
+// C++ header + +#ifndef _MPEG_1OR2_VIDEO_FILE_SERVER_MEDIA_SUBSESSION_HH +#define _MPEG_1OR2_VIDEO_FILE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _FILE_SERVER_MEDIA_SUBSESSION_HH +#include "FileServerMediaSubsession.hh" +#endif + +class MPEG1or2VideoFileServerMediaSubsession: public FileServerMediaSubsession{ +public: + static MPEG1or2VideoFileServerMediaSubsession* + createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource, + Boolean iFramesOnly = False, + double vshPeriod = 5.0 + /* how often (in seconds) to inject a Video_Sequence_Header, + if one doesn't already appear in the stream */); + +private: + MPEG1or2VideoFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, + Boolean reuseFirstSource, + Boolean iFramesOnly, + double vshPeriod); + // called only by createNew(); + virtual ~MPEG1or2VideoFileServerMediaSubsession(); + +private: // redefined virtual functions + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource); + +private: + Boolean fIFramesOnly; + double fVSHPeriod; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoRTPSink.hh new file mode 100644 index 0000000..2e91e90 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoRTPSink.hh @@ -0,0 +1,69 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for MPEG video (RFC 2250) +// C++ header + +#ifndef _MPEG_1OR2_VIDEO_RTP_SINK_HH +#define _MPEG_1OR2_VIDEO_RTP_SINK_HH + +#ifndef _VIDEO_RTP_SINK_HH +#include "VideoRTPSink.hh" +#endif + +class MPEG1or2VideoRTPSink: public VideoRTPSink { +public: + static MPEG1or2VideoRTPSink* createNew(UsageEnvironment& env, Groupsock* RTPgs); + +protected: + MPEG1or2VideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs); + // called only by createNew() + + virtual ~MPEG1or2VideoRTPSink(); + +private: // redefined virtual functions: + virtual Boolean sourceIsCompatibleWithUs(MediaSource& source); + + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual Boolean allowFragmentationAfterStart() const; + virtual + Boolean frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const; + virtual unsigned specialHeaderSize() const; + +private: + // MPEG video-specific state, used to decide how to fill out the + // video-specific header, and when to include multiple 'frames' in a + // single outgoing RTP packet. Eventually we should somehow get this + // state from the source (MPEG1or2VideoStreamFramer) instead, as the source + // already has this info itself. + struct { + unsigned temporal_reference; + unsigned char picture_coding_type; + unsigned char vector_code_bits; // FBV,BFC,FFV,FFC from RFC 2250, sec. 
3.4 + } fPictureState; + Boolean fPreviousFrameWasSlice; + // used to implement frameCanAppearAfterPacketStart() + Boolean fSequenceHeaderPresent; + Boolean fPacketBeginsSlice, fPacketEndsSlice; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoRTPSource.hh new file mode 100644 index 0000000..20d58c9 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoRTPSource.hh @@ -0,0 +1,53 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// MPEG-1 or MPEG-2 Video RTP Sources +// C++ header + +#ifndef _MPEG_1OR2_VIDEO_RTP_SOURCE_HH +#define _MPEG_1OR2_VIDEO_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +class MPEG1or2VideoRTPSource: public MultiFramedRTPSource { +public: + static MPEG1or2VideoRTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat = 32, + unsigned rtpPayloadFrequency = 90000); + +protected: + virtual ~MPEG1or2VideoRTPSource(); + +private: + MPEG1or2VideoRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + // called only by createNew() + +private: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual Boolean packetIsUsableInJitterCalculation(unsigned char* packet, + unsigned packetSize); + virtual char const* MIMEtype() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoStreamDiscreteFramer.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoStreamDiscreteFramer.hh new file mode 100644 index 0000000..ac77367 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoStreamDiscreteFramer.hh @@ -0,0 +1,76 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A simplified version of "MPEG1or2VideoStreamFramer" that takes only +// complete, discrete frames (rather than an arbitrary byte stream) as input. +// This avoids the parsing and data copying overhead of the full +// "MPEG1or2VideoStreamFramer". +// C++ header + +#ifndef _MPEG1or2_VIDEO_STREAM_DISCRETE_FRAMER_HH +#define _MPEG1or2_VIDEO_STREAM_DISCRETE_FRAMER_HH + +#ifndef _MPEG1or2_VIDEO_STREAM_FRAMER_HH +#include "MPEG1or2VideoStreamFramer.hh" +#endif + +#define VSH_MAX_SIZE 1000 + +class MPEG1or2VideoStreamDiscreteFramer: public MPEG1or2VideoStreamFramer { +public: + static MPEG1or2VideoStreamDiscreteFramer* + createNew(UsageEnvironment& env, FramedSource* inputSource, + Boolean iFramesOnly = False, // see MPEG1or2VideoStreamFramer.hh + double vshPeriod = 5.0, // see MPEG1or2VideoStreamFramer.hh + Boolean leavePresentationTimesUnmodified = False); + +protected: + MPEG1or2VideoStreamDiscreteFramer(UsageEnvironment& env, + FramedSource* inputSource, + Boolean iFramesOnly, double vshPeriod, Boolean leavePresentationTimesUnmodified); + // called only by createNew() + virtual ~MPEG1or2VideoStreamDiscreteFramer(); + +protected: + // redefined virtual functions: + virtual void doGetNextFrame(); + +protected: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + +protected: + Boolean fLeavePresentationTimesUnmodified; + struct timeval fLastNonBFramePresentationTime; + unsigned 
fLastNonBFrameTemporal_reference; + + // A saved copy of the most recently seen 'video_sequence_header', + // in case we need to insert it into the stream periodically: + unsigned char fSavedVSHBuffer[VSH_MAX_SIZE]; + unsigned fSavedVSHSize; + double fSavedVSHTimestamp; + Boolean fIFramesOnly; + double fVSHPeriod; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoStreamFramer.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoStreamFramer.hh new file mode 100644 index 0000000..7d8516d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG1or2VideoStreamFramer.hh @@ -0,0 +1,56 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A filter that breaks up an MPEG 1 or 2 video elementary stream into +// frames for: Video_Sequence_Header, GOP_Header, Picture_Header +// C++ header + +#ifndef _MPEG_1OR2_VIDEO_STREAM_FRAMER_HH +#define _MPEG_1OR2_VIDEO_STREAM_FRAMER_HH + +#ifndef _MPEG_VIDEO_STREAM_FRAMER_HH +#include "MPEGVideoStreamFramer.hh" +#endif + +class MPEG1or2VideoStreamFramer: public MPEGVideoStreamFramer { +public: + static MPEG1or2VideoStreamFramer* + createNew(UsageEnvironment& env, FramedSource* inputSource, + Boolean iFramesOnly = False, + double vshPeriod = 5.0 + /* how often (in seconds) to inject a Video_Sequence_Header, + if one doesn't already appear in the stream */); + +protected: + MPEG1or2VideoStreamFramer(UsageEnvironment& env, + FramedSource* inputSource, + Boolean iFramesOnly, double vshPeriod, + Boolean createParser = True); + // called only by createNew(), or by subclass constructors + virtual ~MPEG1or2VideoStreamFramer(); + +private: + // redefined virtual functions: + virtual Boolean isMPEG1or2VideoStreamFramer() const; + +private: + double getCurrentPTS() const; + + friend class MPEG1or2VideoStreamParser; // hack +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG2IndexFromTransportStream.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG2IndexFromTransportStream.hh new file mode 100644 index 0000000..d3ef8e8 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG2IndexFromTransportStream.hh @@ -0,0 +1,95 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that produces a sequence of I-frame indices from a MPEG-2 Transport Stream +// C++ header + +#ifndef _MPEG2_IFRAME_INDEX_FROM_TRANSPORT_STREAM_HH +#define _MPEG2_IFRAME_INDEX_FROM_TRANSPORT_STREAM_HH + +#ifndef _FRAMED_FILTER_HH +#include "FramedFilter.hh" +#endif + +#ifndef TRANSPORT_PACKET_SIZE +#define TRANSPORT_PACKET_SIZE 188 +#endif + +#ifndef MAX_PES_PACKET_SIZE +#define MAX_PES_PACKET_SIZE 65536 +#endif + +class IndexRecord; // forward + +class MPEG2IFrameIndexFromTransportStream: public FramedFilter { +public: + static MPEG2IFrameIndexFromTransportStream* + createNew(UsageEnvironment& env, FramedSource* inputSource); + +protected: + MPEG2IFrameIndexFromTransportStream(UsageEnvironment& env, + FramedSource* inputSource); + // called only by createNew() + virtual ~MPEG2IFrameIndexFromTransportStream(); + +private: + // Redefined virtual functions: + virtual void doGetNextFrame(); + +private: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + + static void handleInputClosure(void* clientData); + void handleInputClosure1(); + + void analyzePAT(unsigned char* pkt, unsigned size); + void analyzePMT(unsigned char* pkt, unsigned size); + + Boolean deliverIndexRecord(); + Boolean parseFrame(); + Boolean parseToNextCode(unsigned char& nextCode); + void compactParseBuffer(); + void addToTail(IndexRecord* newIndexRecord); + +private: + Boolean fIsH264; // True iff the video is H.264 
(encapsulated in a Transport Stream) + Boolean fIsH265; // True iff the video is H.265 (encapsulated in a Transport Stream) + unsigned long fInputTransportPacketCounter; + unsigned fClosureNumber; + u_int8_t fLastContinuityCounter; + float fFirstPCR, fLastPCR; + Boolean fHaveSeenFirstPCR; + u_int16_t fPMT_PID, fVideo_PID; + // Note: We assume: 1 program per Transport Stream; 1 video stream per program + unsigned char fInputBuffer[TRANSPORT_PACKET_SIZE]; + unsigned char* fParseBuffer; + unsigned fParseBufferSize; + unsigned fParseBufferFrameStart; + unsigned fParseBufferParseEnd; + unsigned fParseBufferDataEnd; + IndexRecord* fHeadIndexRecord; + IndexRecord* fTailIndexRecord; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportFileServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportFileServerMediaSubsession.hh new file mode 100644 index 0000000..71596ec --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportFileServerMediaSubsession.hh @@ -0,0 +1,131 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a MPEG-2 Transport Stream file. 
+// C++ header + +#ifndef _MPEG2_TRANSPORT_FILE_SERVER_MEDIA_SUBSESSION_HH +#define _MPEG2_TRANSPORT_FILE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _FILE_SERVER_MEDIA_SUBSESSION_HH +#include "FileServerMediaSubsession.hh" +#endif +#ifndef _MPEG2_TRANSPORT_STREAM_FRAMER_HH +#include "MPEG2TransportStreamFramer.hh" +#endif +#ifndef _BYTE_STREAM_FILE_SOURCE_HH +#include "ByteStreamFileSource.hh" +#endif +#ifndef _MPEG2_TRANSPORT_STREAM_TRICK_MODE_FILTER_HH +#include "MPEG2TransportStreamTrickModeFilter.hh" +#endif +#ifndef _MPEG2_TRANSPORT_STREAM_FROM_ES_SOURCE_HH +#include "MPEG2TransportStreamFromESSource.hh" +#endif + +class ClientTrickPlayState; // forward + +class MPEG2TransportFileServerMediaSubsession: public FileServerMediaSubsession{ +public: + static MPEG2TransportFileServerMediaSubsession* + createNew(UsageEnvironment& env, + char const* dataFileName, char const* indexFileName, + Boolean reuseFirstSource); + +protected: + MPEG2TransportFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, + MPEG2TransportStreamIndexFile* indexFile, + Boolean reuseFirstSource); + // called only by createNew(); + virtual ~MPEG2TransportFileServerMediaSubsession(); + + virtual ClientTrickPlayState* newClientTrickPlayState(); + +private: // redefined virtual functions + // Note that because - to implement 'trick play' operations - we're operating on + // more than just the input source, we reimplement some functions that are + // already implemented in "OnDemandServerMediaSubsession", rather than + // reimplementing "seekStreamSource()" and "setStreamSourceScale()": + virtual void startStream(unsigned clientSessionId, void* streamToken, + TaskFunc* rtcpRRHandler, + void* rtcpRRHandlerClientData, + unsigned short& rtpSeqNum, + unsigned& rtpTimestamp, + ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler, + void* serverRequestAlternativeByteHandlerClientData); + virtual void pauseStream(unsigned clientSessionId, void* streamToken); + virtual 
void seekStream(unsigned clientSessionId, void* streamToken, double& seekNPT, double streamDuration, u_int64_t& numBytes); + virtual void setStreamScale(unsigned clientSessionId, void* streamToken, float scale); + virtual void deleteStream(unsigned clientSessionId, void*& streamToken); + + // The virtual functions thare are usually implemented by "ServerMediaSubsession"s: + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource); + + virtual void testScaleFactor(float& scale); + virtual float duration() const; + +private: + ClientTrickPlayState* lookupClient(unsigned clientSessionId); + +private: + MPEG2TransportStreamIndexFile* fIndexFile; + float fDuration; + HashTable* fClientSessionHashTable; // indexed by client session id +}; + + +// This class encapsulates the 'trick play' state for each current client (for +// a given "MPEG2TransportFileServerMediaSubsession" - i.e., Transport Stream file). +// It is used only within the implementation of "MPEG2TransportFileServerMediaSubsession", but is included here, +// in case subclasses of "MPEG2TransportFileServerMediaSubsession" want to use it. 
+ +class ClientTrickPlayState { +public: + ClientTrickPlayState(MPEG2TransportStreamIndexFile* indexFile); + + // Functions to bring "fNPT", "fTSRecordNum" and "fIxRecordNum" in sync: + unsigned long updateStateFromNPT(double npt, double seekDuration); + void updateStateOnScaleChange(); + void updateStateOnPlayChange(Boolean reverseToPreviousVSH); + + void handleStreamDeletion(); + void setSource(MPEG2TransportStreamFramer* framer); + + void setNextScale(float nextScale) { fNextScale = nextScale; } + Boolean areChangingScale() const { return fNextScale != fScale; } + +protected: + void updateTSRecordNum(); + void reseekOriginalTransportStreamSource(); + +protected: + MPEG2TransportStreamIndexFile* fIndexFile; + ByteStreamFileSource* fOriginalTransportStreamSource; + MPEG2TransportStreamTrickModeFilter* fTrickModeFilter; + MPEG2TransportStreamFromESSource* fTrickPlaySource; + MPEG2TransportStreamFramer* fFramer; + float fScale, fNextScale, fNPT; + unsigned long fTSRecordNum, fIxRecordNum; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamFramer.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamFramer.hh new file mode 100644 index 0000000..a9e2dff --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamFramer.hh @@ -0,0 +1,78 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter that passes through (unchanged) chunks that contain an integral number +// of MPEG-2 Transport Stream packets, but returning (in "fDurationInMicroseconds") +// an updated estimate of the time gap between chunks. +// C++ header + +#ifndef _MPEG2_TRANSPORT_STREAM_FRAMER_HH +#define _MPEG2_TRANSPORT_STREAM_FRAMER_HH + +#ifndef _FRAMED_FILTER_HH +#include "FramedFilter.hh" +#endif + +#ifndef _HASH_TABLE_HH +#include "HashTable.hh" +#endif + +class MPEG2TransportStreamFramer: public FramedFilter { +public: + static MPEG2TransportStreamFramer* + createNew(UsageEnvironment& env, FramedSource* inputSource); + + u_int64_t tsPacketCount() const { return fTSPacketCount; } + + void changeInputSource(FramedSource* newInputSource) { fInputSource = newInputSource; } + + void clearPIDStatusTable(); + void setNumTSPacketsToStream(unsigned long numTSRecordsToStream); + void setPCRLimit(float pcrLimit); + +protected: + MPEG2TransportStreamFramer(UsageEnvironment& env, FramedSource* inputSource); + // called only by createNew() + virtual ~MPEG2TransportStreamFramer(); + +private: + // Redefined virtual functions: + virtual void doGetNextFrame(); + virtual void doStopGettingFrames(); + +private: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize, + struct timeval presentationTime); + + Boolean updateTSPacketDurationEstimate(unsigned char* pkt, double timeNow); + +private: + u_int64_t fTSPacketCount; + double fTSPacketDurationEstimate; + HashTable* fPIDStatusTable; + u_int64_t fTSPCRCount; + Boolean 
fLimitNumTSPacketsToStream; + unsigned long fNumTSPacketsToStream; // used iff "fLimitNumTSPacketsToStream" is True + Boolean fLimitTSPacketsToStreamByPCR; + float fPCRLimit; // used iff "fLimitTSPacketsToStreamByPCR" is True +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamFromESSource.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamFromESSource.hh new file mode 100644 index 0000000..e62288d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamFromESSource.hh @@ -0,0 +1,60 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A filter for converting one or more MPEG Elementary Streams +// to a MPEG-2 Transport Stream +// C++ header + +#ifndef _MPEG2_TRANSPORT_STREAM_FROM_ES_SOURCE_HH +#define _MPEG2_TRANSPORT_STREAM_FROM_ES_SOURCE_HH + +#ifndef _MPEG2_TRANSPORT_STREAM_MULTIPLEXOR_HH +#include "MPEG2TransportStreamMultiplexor.hh" +#endif + +class MPEG2TransportStreamFromESSource: public MPEG2TransportStreamMultiplexor { +public: + static MPEG2TransportStreamFromESSource* createNew(UsageEnvironment& env); + + void addNewVideoSource(FramedSource* inputSource, int mpegVersion, int16_t PID = -1); + // Note: For MPEG-4 video, set "mpegVersion" to 4; for H.264 video, set "mpegVersion" to 5. + void addNewAudioSource(FramedSource* inputSource, int mpegVersion, int16_t PID = -1); + // Note: In these functions, if "PID" is not -1, then it (currently, just the low 8 bits) + // is used as the stream's PID. Otherwise (if "PID" is -1) the 'stream_id' is used as + // the PID. + +protected: + MPEG2TransportStreamFromESSource(UsageEnvironment& env); + // called only by createNew() + virtual ~MPEG2TransportStreamFromESSource(); + + void addNewInputSource(FramedSource* inputSource, + u_int8_t streamId, int mpegVersion, int16_t PID = -1); + // used to implement addNew*Source() above + +private: + // Redefined virtual functions: + virtual void doStopGettingFrames(); + virtual void awaitNewBuffer(unsigned char* oldBuffer); + +private: + friend class InputESSourceRecord; + class InputESSourceRecord* fInputSources; + unsigned fVideoSourceCounter, fAudioSourceCounter; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamFromPESSource.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamFromPESSource.hh new file mode 100644 index 0000000..769dcf0 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamFromPESSource.hh @@ -0,0 +1,62 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser 
General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A filter for converting a stream of MPEG PES packets to a MPEG-2 Transport Stream +// C++ header + +#ifndef _MPEG2_TRANSPORT_STREAM_FROM_PES_SOURCE_HH +#define _MPEG2_TRANSPORT_STREAM_FROM_PES_SOURCE_HH + +#ifndef _MPEG2_TRANSPORT_STREAM_MULTIPLEXOR_HH +#include "MPEG2TransportStreamMultiplexor.hh" +#endif +#ifndef _MPEG_1OR2_DEMUXED_ELEMENTARY_STREAM_HH +#include "MPEG1or2DemuxedElementaryStream.hh" +#endif + +class MPEG2TransportStreamFromPESSource: public MPEG2TransportStreamMultiplexor { +public: + static MPEG2TransportStreamFromPESSource* + createNew(UsageEnvironment& env, MPEG1or2DemuxedElementaryStream* inputSource); + +protected: + MPEG2TransportStreamFromPESSource(UsageEnvironment& env, + MPEG1or2DemuxedElementaryStream* inputSource); + // called only by createNew() + virtual ~MPEG2TransportStreamFromPESSource(); + +private: + // Redefined virtual functions: + virtual void doStopGettingFrames(); + virtual void awaitNewBuffer(unsigned char* oldBuffer); + +private: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned 
durationInMicroseconds); + +private: + MPEG1or2DemuxedElementaryStream* fInputSource; + unsigned char* fInputBuffer; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamIndexFile.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamIndexFile.hh new file mode 100644 index 0000000..c483575 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamIndexFile.hh @@ -0,0 +1,96 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A class that encapsulates MPEG-2 Transport Stream 'index files'/ +// These index files are used to implement 'trick play' operations +// (seek-by-time, fast forward, reverse play) on Transport Stream files. 
+// +// C++ header + +#ifndef _MPEG2_TRANSPORT_STREAM_INDEX_FILE_HH +#define _MPEG2_TRANSPORT_STREAM_INDEX_FILE_HH + +#ifndef _MEDIA_HH +#include "Media.hh" +#endif + +#define INDEX_RECORD_SIZE 11 + +class MPEG2TransportStreamIndexFile: public Medium { +public: + static MPEG2TransportStreamIndexFile* createNew(UsageEnvironment& env, + char const* indexFileName); + + virtual ~MPEG2TransportStreamIndexFile(); + + // Functions that map between a playing time and a Transport packet number + // in the original Transport Stream file: + + void lookupTSPacketNumFromNPT(float& npt, unsigned long& tsPacketNumber, + unsigned long& indexRecordNumber); + // Looks up the Transport Stream Packet number corresponding to "npt". + // (This may modify "npt" to a more exact value.) + // (We also return the index record number that we looked up.) + + void lookupPCRFromTSPacketNum(unsigned long& tsPacketNumber, Boolean reverseToPreviousCleanPoint, + float& pcr, unsigned long& indexRecordNumber); + // Looks up the PCR timestamp for the transport packet "tsPacketNumber". + // (Adjust "tsPacketNumber" only if "reverseToPreviousCleanPoint" is True.) + // (We also return the index record number that we looked up.) + + // Miscellaneous functions used to implement 'trick play': + Boolean readIndexRecordValues(unsigned long indexRecordNum, + unsigned long& transportPacketNum, u_int8_t& offset, + u_int8_t& size, float& pcr, u_int8_t& recordType); + float getPlayingDuration(); + void stopReading() { closeFid(); } + + int mpegVersion(); + // returns the best guess for the version of MPEG being used for data within the underlying Transport Stream file. + // (1,2,4, or 5 (representing H.264). 
0 means 'don't know' (usually because the index file is empty)) + +private: + MPEG2TransportStreamIndexFile(UsageEnvironment& env, char const* indexFileName); + + Boolean openFid(); + Boolean seekToIndexRecord(unsigned long indexRecordNumber); + Boolean readIndexRecord(unsigned long indexRecordNum); // into "fBuf" + Boolean readOneIndexRecord(unsigned long indexRecordNum); // closes "fFid" at end + void closeFid(); + + u_int8_t recordTypeFromBuf() { return fBuf[0]; } + u_int8_t offsetFromBuf() { return fBuf[1]; } + u_int8_t sizeFromBuf() { return fBuf[2]; } + float pcrFromBuf(); // after "fBuf" has been read + unsigned long tsPacketNumFromBuf(); + void setMPEGVersionFromRecordType(u_int8_t recordType); + + Boolean rewindToCleanPoint(unsigned long&ixFound); + // used to implement "lookupTSPacketNumber()" + +private: + char* fFileName; + FILE* fFid; // used internally when reading from the file + int fMPEGVersion; + unsigned long fCurrentIndexRecordNum; // within "fFid" + float fCachedPCR; + unsigned long fCachedTSPacketNumber, fCachedIndexRecordNumber; + unsigned long fNumIndexRecords; + unsigned char fBuf[INDEX_RECORD_SIZE]; // used for reading index records from file +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamMultiplexor.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamMultiplexor.hh new file mode 100644 index 0000000..e93fbaa --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamMultiplexor.hh @@ -0,0 +1,88 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A class for generating MPEG-2 Transport Stream from one or more input +// Elementary Stream data sources +// C++ header + +#ifndef _MPEG2_TRANSPORT_STREAM_MULTIPLEXOR_HH +#define _MPEG2_TRANSPORT_STREAM_MULTIPLEXOR_HH + +#ifndef _FRAMED_SOURCE_HH +#include "FramedSource.hh" +#endif +#ifndef _MPEG_1OR2_DEMUX_HH +#include "MPEG1or2Demux.hh" // for SCR +#endif + +#define PID_TABLE_SIZE 256 + +class MPEG2TransportStreamMultiplexor: public FramedSource { +protected: + MPEG2TransportStreamMultiplexor(UsageEnvironment& env); + virtual ~MPEG2TransportStreamMultiplexor(); + + virtual void awaitNewBuffer(unsigned char* oldBuffer) = 0; + // implemented by subclasses + + void handleNewBuffer(unsigned char* buffer, unsigned bufferSize, + int mpegVersion, MPEG1or2Demux::SCR scr, int16_t PID = -1); + // called by "awaitNewBuffer()" + // Note: For MPEG-4 video, set "mpegVersion" to 4; for H.264 video, set "mpegVersion" to 5. + // The buffer is assumed to be a PES packet, with a proper PES header. + // If "PID" is not -1, then it (currently, only the low 8 bits) is used as the stream's PID, + // otherwise the "stream_id" in the PES header is reused to be the stream's PID. 
+ +private: + // Redefined virtual functions: + virtual void doGetNextFrame(); + +private: + void deliverDataToClient(u_int8_t pid, unsigned char* buffer, unsigned bufferSize, + unsigned& startPositionInBuffer); + + void deliverPATPacket(); + void deliverPMTPacket(Boolean hasChanged); + + void setProgramStreamMap(unsigned frameSize); + +protected: + Boolean fHaveVideoStreams; + +private: + unsigned fOutgoingPacketCounter; + unsigned fProgramMapVersion; + u_int8_t fPreviousInputProgramMapVersion, fCurrentInputProgramMapVersion; + // These two fields are used if we see "program_stream_map"s in the input. + struct { + unsigned counter; + u_int8_t streamType; // for use in Program Maps + } fPIDState[PID_TABLE_SIZE]; + u_int8_t fPCR_PID, fCurrentPID; + // Note: We map 8-bit stream_ids directly to PIDs + MPEG1or2Demux::SCR fPCR; + unsigned char* fInputBuffer; + unsigned fInputBufferSize, fInputBufferBytesUsed; + Boolean fIsFirstAdaptationField; +}; + + +// The CRC calculation function that Transport Streams use. We make this function public +// here in case it's useful elsewhere: +u_int32_t calculateCRC(u_int8_t const* data, unsigned dataLength, u_int32_t initialValue = 0xFFFFFFFF); + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamTrickModeFilter.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamTrickModeFilter.hh new file mode 100644 index 0000000..46b55e8 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportStreamTrickModeFilter.hh @@ -0,0 +1,99 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved.// A filter that converts a MPEG Transport Stream file - with corresponding index file +// - to a corresponding Video Elementary Stream. It also uses a "scale" parameter +// to implement 'trick mode' (fast forward or reverse play, using I-frames) on +// the video stream. +// C++ header + +#ifndef _MPEG2_TRANSPORT_STREAM_TRICK_MODE_FILTER_HH +#define _MPEG2_TRANSPORT_STREAM_TRICK_MODE_FILTER_HH + +#ifndef _FRAMED_FILTER_HH +#include "FramedFilter.hh" +#endif + +#ifndef _MPEG2_TRANSPORT_STREAM_INDEX_FILE_HH +#include "MPEG2TransportStreamIndexFile.hh" +#endif + +#ifndef TRANSPORT_PACKET_SIZE +#define TRANSPORT_PACKET_SIZE 188 +#endif + +class MPEG2TransportStreamTrickModeFilter: public FramedFilter { +public: + static MPEG2TransportStreamTrickModeFilter* + createNew(UsageEnvironment& env, FramedSource* inputSource, + MPEG2TransportStreamIndexFile* indexFile, int scale); + + Boolean seekTo(unsigned long tsPacketNumber, unsigned long indexRecordNumber); + + unsigned long nextIndexRecordNum() const { return fNextIndexRecordNum; } + + void forgetInputSource() { fInputSource = NULL; } + // this lets us delete this without also deleting the input Transport Stream + +protected: + MPEG2TransportStreamTrickModeFilter(UsageEnvironment& env, FramedSource* inputSource, + MPEG2TransportStreamIndexFile* indexFile, int scale); + // called only by createNew() + virtual ~MPEG2TransportStreamTrickModeFilter(); + +private: + // Redefined virtual functions: + virtual void doGetNextFrame(); + virtual void doStopGettingFrames(); + +private: + void attemptDeliveryToClient(); + void seekToTransportPacket(unsigned long 
tsPacketNum); + void readTransportPacket(unsigned long tsPacketNum); // asynchronously + + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize); + + static void onSourceClosure(void* clientData); + void onSourceClosure1(); + +private: + Boolean fHaveStarted; + MPEG2TransportStreamIndexFile* fIndexFile; + int fScale; // absolute value + int fDirection; // 1 => forward; -1 => reverse + enum { + SKIPPING_FRAME, + DELIVERING_SAVED_FRAME, + SAVING_AND_DELIVERING_FRAME + } fState; + unsigned fFrameCount; + unsigned long fNextIndexRecordNum; // next to be read from the index file + unsigned long fNextTSPacketNum; // next to be read from the transport stream file + unsigned char fInputBuffer[TRANSPORT_PACKET_SIZE]; + unsigned long fCurrentTSPacketNum; // corresponding to data currently in the buffer + unsigned long fDesiredTSPacketNum; + u_int8_t fDesiredDataOffset, fDesiredDataSize; + float fDesiredDataPCR, fFirstPCR; + unsigned long fSavedFrameIndexRecordStart; + unsigned long fSavedSequentialIndexRecordNum; + Boolean fUseSavedFrameNextTime; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportUDPServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportUDPServerMediaSubsession.hh new file mode 100644 index 0000000..c2e3c02 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG2TransportUDPServerMediaSubsession.hh @@ -0,0 +1,55 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from an incoming UDP (or RTP/UDP) MPEG-2 Transport Stream +// C++ header + +#ifndef _MPEG2_TRANSPORT_UDP_SERVER_MEDIA_SUBSESSION_HH +#define _MPEG2_TRANSPORT_UDP_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _ON_DEMAND_SERVER_MEDIA_SUBSESSION_HH +#include "OnDemandServerMediaSubsession.hh" +#endif + +class MPEG2TransportUDPServerMediaSubsession: public OnDemandServerMediaSubsession { +public: + static MPEG2TransportUDPServerMediaSubsession* + createNew(UsageEnvironment& env, + char const* inputAddressStr, // An IP multicast address, or use "0.0.0.0" or NULL for unicast input + Port const& inputPort, + Boolean inputStreamIsRawUDP = False); // otherwise (default) the input stream is RTP/UDP +protected: + MPEG2TransportUDPServerMediaSubsession(UsageEnvironment& env, + char const* inputAddressStr, Port const& inputPort, Boolean inputStreamIsRawUDP); + // called only by createNew(); + virtual ~MPEG2TransportUDPServerMediaSubsession(); + +protected: // redefined virtual functions + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource); +protected: + char const* fInputAddressStr; + Port fInputPort; + Groupsock* fInputGroupsock; + Boolean fInputStreamIsRawUDP; +}; + +#endif 
diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG4ESVideoRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG4ESVideoRTPSink.hh new file mode 100644 index 0000000..76b0254 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG4ESVideoRTPSink.hh @@ -0,0 +1,72 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for MPEG-4 Elementary Stream video (RFC 3016) +// C++ header + +#ifndef _MPEG4ES_VIDEO_RTP_SINK_HH +#define _MPEG4ES_VIDEO_RTP_SINK_HH + +#ifndef _VIDEO_RTP_SINK_HH +#include "VideoRTPSink.hh" +#endif + +class MPEG4ESVideoRTPSink: public VideoRTPSink { +public: + static MPEG4ESVideoRTPSink* createNew(UsageEnvironment& env, + Groupsock* RTPgs, unsigned char rtpPayloadFormat, + u_int32_t rtpTimestampFrequency = 90000); + static MPEG4ESVideoRTPSink* createNew(UsageEnvironment& env, + Groupsock* RTPgs, unsigned char rtpPayloadFormat, u_int32_t rtpTimestampFrequency, + u_int8_t profileAndLevelIndication, char const* configStr); + // an optional variant of "createNew()", useful if we know, in advance, the stream's 'configuration' info. 
+ + +protected: + MPEG4ESVideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, u_int32_t rtpTimestampFrequency, + u_int8_t profileAndLevelIndication = 0, char const* configStr = NULL); + // called only by createNew() + + virtual ~MPEG4ESVideoRTPSink(); + +protected: // redefined virtual functions: + virtual Boolean sourceIsCompatibleWithUs(MediaSource& source); + + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual Boolean allowFragmentationAfterStart() const; + virtual Boolean + frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const; + + virtual char const* auxSDPLine(); + +protected: + Boolean fVOPIsPresent; + +private: + u_int8_t fProfileAndLevelIndication; + unsigned char* fConfigBytes; + unsigned fNumConfigBytes; + + char* fFmtpSDPLine; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG4ESVideoRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG4ESVideoRTPSource.hh new file mode 100644 index 0000000..2420acc --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG4ESVideoRTPSource.hh @@ -0,0 +1,51 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// MP4V-ES video RTP stream sources +// C++ header + +#ifndef _MPEG4_ES_VIDEO_RTP_SOURCE_HH +#define _MPEG4_ES_VIDEO_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +class MPEG4ESVideoRTPSource: public MultiFramedRTPSource { +public: + static MPEG4ESVideoRTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + +protected: + virtual ~MPEG4ESVideoRTPSource(); + +private: + MPEG4ESVideoRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + // called only by createNew() + +private: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG4GenericRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG4GenericRTPSink.hh new file mode 100644 index 0000000..1b9e0ed --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG4GenericRTPSink.hh @@ -0,0 +1,70 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// MPEG4-GENERIC ("audio", "video", or "application") RTP stream sinks +// C++ header + +#ifndef _MPEG4_GENERIC_RTP_SINK_HH +#define _MPEG4_GENERIC_RTP_SINK_HH + +#ifndef _MULTI_FRAMED_RTP_SINK_HH +#include "MultiFramedRTPSink.hh" +#endif + +class MPEG4GenericRTPSink: public MultiFramedRTPSink { +public: + static MPEG4GenericRTPSink* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + u_int8_t rtpPayloadFormat, u_int32_t rtpTimestampFrequency, + char const* sdpMediaTypeString, char const* mpeg4Mode, + char const* configString, + unsigned numChannels = 1); + +protected: + MPEG4GenericRTPSink(UsageEnvironment& env, Groupsock* RTPgs, + u_int8_t rtpPayloadFormat, + u_int32_t rtpTimestampFrequency, + char const* sdpMediaTypeString, + char const* mpeg4Mode, char const* configString, + unsigned numChannels); + // called only by createNew() + + virtual ~MPEG4GenericRTPSink(); + +private: // redefined virtual functions: + virtual + Boolean frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const; + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual unsigned specialHeaderSize() const; + + virtual char const* sdpMediaType() const; + + virtual char const* auxSDPLine(); // for the "a=fmtp:" SDP line + +private: + char const* fSDPMediaTypeString; + char const* fMPEG4Mode; + char const* fConfigString; + char* fFmtpSDPLine; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG4GenericRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG4GenericRTPSource.hh new file 
mode 100644 index 0000000..f38cef1 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG4GenericRTPSource.hh @@ -0,0 +1,80 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// MPEG4-GENERIC ("audio", "video", or "application") RTP stream sources +// C++ header + +#ifndef _MPEG4_GENERIC_RTP_SOURCE_HH +#define _MPEG4_GENERIC_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +class MPEG4GenericRTPSource: public MultiFramedRTPSource { +public: + static MPEG4GenericRTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency, + char const* mediumName, + char const* mode, unsigned sizeLength, unsigned indexLength, + unsigned indexDeltaLength + // add other parameters later + ); + // mediumName is "audio", "video", or "application" + // it *cannot* be NULL + +protected: + virtual ~MPEG4GenericRTPSource(); + +private: + MPEG4GenericRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency, + char const* mediumName, + char const* mode, + unsigned sizeLength, unsigned indexLength, + unsigned indexDeltaLength + ); + // 
called only by createNew() + +private: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; + +private: + char* fMIMEType; + + char* fMode; + unsigned fSizeLength, fIndexLength, fIndexDeltaLength; + unsigned fNumAUHeaders; // in the most recently read packet + unsigned fNextAUHeader; // index of the next AU Header to read + struct AUHeader* fAUHeaders; + + friend class MPEG4GenericBufferedPacket; +}; + + + +// A function that looks up the sampling frequency from an +// "AudioSpecificConfig" string. (0 means 'unknown') +unsigned samplingFrequencyFromAudioSpecificConfig(char const* configStr); + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG4LATMAudioRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG4LATMAudioRTPSink.hh new file mode 100644 index 0000000..54bedbd --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG4LATMAudioRTPSink.hh @@ -0,0 +1,69 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP sink for MPEG-4 audio, using LATM multiplexing (RFC 3016) +// (Note that the initial 'size' field is assumed to be present at the start of +// each frame.) +// C++ header + +#ifndef _MPEG4_LATM_AUDIO_RTP_SINK_HH +#define _MPEG4_LATM_AUDIO_RTP_SINK_HH + +#ifndef _AUDIO_RTP_SINK_HH +#include "AudioRTPSink.hh" +#endif + +class MPEG4LATMAudioRTPSink: public AudioRTPSink { +public: + static MPEG4LATMAudioRTPSink* createNew(UsageEnvironment& env, + Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + u_int32_t rtpTimestampFrequency, + char const* streamMuxConfigString, + unsigned numChannels, + Boolean allowMultipleFramesPerPacket = False); + +protected: + MPEG4LATMAudioRTPSink(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + u_int32_t rtpTimestampFrequency, + char const* streamMuxConfigString, + unsigned numChannels, + Boolean allowMultipleFramesPerPacket); + // called only by createNew() + + virtual ~MPEG4LATMAudioRTPSink(); + +private: // redefined virtual functions: + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual Boolean + frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const; + + virtual char const* auxSDPLine(); // for the "a=fmtp:" SDP line + +private: + char const* fStreamMuxConfigString; + char* fFmtpSDPLine; + Boolean fAllowMultipleFramesPerPacket; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG4LATMAudioRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG4LATMAudioRTPSource.hh new file mode 100644 index 0000000..9048c0e --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG4LATMAudioRTPSource.hh @@ -0,0 +1,101 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; 
either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// MPEG-4 audio, using LATM multiplexing +// C++ header + +#ifndef _MPEG4_LATM_AUDIO_RTP_SOURCE_HH +#define _MPEG4_LATM_AUDIO_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +class MPEG4LATMAudioRTPSource: public MultiFramedRTPSource { +public: + static MPEG4LATMAudioRTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + + // By default, the LATM data length field is included at the beginning of each + // returned frame. 
To omit this field, call the following: + void omitLATMDataLengthField(); + + Boolean returnedFrameIncludesLATMDataLengthField() const { return fIncludeLATMDataLengthField; } + +protected: + virtual ~MPEG4LATMAudioRTPSource(); + +private: + MPEG4LATMAudioRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + // called only by createNew() + +private: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; + +private: + Boolean fIncludeLATMDataLengthField; +}; + + +// A utility for parsing a "StreamMuxConfig" string +Boolean +parseStreamMuxConfigStr(char const* configStr, + // result parameters: + Boolean& audioMuxVersion, + Boolean& allStreamsSameTimeFraming, + unsigned char& numSubFrames, + unsigned char& numProgram, + unsigned char& numLayer, + unsigned char*& audioSpecificConfig, + unsigned& audioSpecificConfigSize); + // Parses "configStr" as a sequence of hexadecimal digits, representing + // a "StreamMuxConfig" (as defined in ISO.IEC 14496-3, table 1.21). + // Returns, in "audioSpecificConfig", a binary representation of + // the enclosed "AudioSpecificConfig" structure (of size + // "audioSpecificConfigSize" bytes). The memory for this is allocated + // dynamically by this function; the caller is responsible for + // freeing it. Other values, that precede "AudioSpecificConfig", + // are returned in the other parameters. + // Returns True iff the parsing succeeds. + // IMPORTANT NOTE: The implementation of this function currently assumes + // that everything after the first "numLayer" field is an + // "AudioSpecificConfig". Therefore, it will not work properly if + // "audioMuxVersion" != 0, "numProgram" > 0, or "numLayer" > 0. + // Also, any 'other data' or CRC info will be included at + // the end of "audioSpecificConfig". 
+ +unsigned char* parseStreamMuxConfigStr(char const* configStr, + // result parameter: + unsigned& audioSpecificConfigSize); + // A variant of the above that returns just the "AudioSpecificConfig" data + // (or NULL) if the parsing failed, without bothering with the other + // result parameters. + +unsigned char* parseGeneralConfigStr(char const* configStr, + // result parameter: + unsigned& configSize); + // A routine that parses an arbitrary config string, returning + // the result in binary form. + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG4VideoFileServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG4VideoFileServerMediaSubsession.hh new file mode 100644 index 0000000..b44c202 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG4VideoFileServerMediaSubsession.hh @@ -0,0 +1,61 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from a MPEG-4 video file. 
+// C++ header + +#ifndef _MPEG4_VIDEO_FILE_SERVER_MEDIA_SUBSESSION_HH +#define _MPEG4_VIDEO_FILE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _FILE_SERVER_MEDIA_SUBSESSION_HH +#include "FileServerMediaSubsession.hh" +#endif + +class MPEG4VideoFileServerMediaSubsession: public FileServerMediaSubsession{ +public: + static MPEG4VideoFileServerMediaSubsession* + createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource); + + // Used to implement "getAuxSDPLine()": + void checkForAuxSDPLine1(); + void afterPlayingDummy1(); + +protected: + MPEG4VideoFileServerMediaSubsession(UsageEnvironment& env, + char const* fileName, Boolean reuseFirstSource); + // called only by createNew(); + virtual ~MPEG4VideoFileServerMediaSubsession(); + + void setDoneFlag() { fDoneFlag = ~0; } + +protected: // redefined virtual functions + virtual char const* getAuxSDPLine(RTPSink* rtpSink, + FramedSource* inputSource); + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource); + +private: + char* fAuxSDPLine; + char fDoneFlag; // used when setting up "fAuxSDPLine" + RTPSink* fDummyRTPSink; // ditto +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG4VideoStreamDiscreteFramer.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG4VideoStreamDiscreteFramer.hh new file mode 100644 index 0000000..d8cad73 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG4VideoStreamDiscreteFramer.hh @@ -0,0 +1,73 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A simplified version of "MPEG4VideoStreamFramer" that takes only complete, +// discrete frames (rather than an arbitrary byte stream) as input. +// This avoids the parsing and data copying overhead of the full +// "MPEG4VideoStreamFramer". +// C++ header + +#ifndef _MPEG4_VIDEO_STREAM_DISCRETE_FRAMER_HH +#define _MPEG4_VIDEO_STREAM_DISCRETE_FRAMER_HH + +#ifndef _MPEG4_VIDEO_STREAM_FRAMER_HH +#include "MPEG4VideoStreamFramer.hh" +#endif + +class MPEG4VideoStreamDiscreteFramer: public MPEG4VideoStreamFramer { +public: + static MPEG4VideoStreamDiscreteFramer* + createNew(UsageEnvironment& env, FramedSource* inputSource, Boolean leavePresentationTimesUnmodified = False); + +protected: + MPEG4VideoStreamDiscreteFramer(UsageEnvironment& env, + FramedSource* inputSource, Boolean leavePresentationTimesUnmodified); + // called only by createNew() + virtual ~MPEG4VideoStreamDiscreteFramer(); + +protected: + // redefined virtual functions: + virtual void doGetNextFrame(); + +protected: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + + Boolean getNextFrameBit(u_int8_t& result); + Boolean getNextFrameBits(unsigned numBits, u_int32_t& result); + // Which are 
used by: + void analyzeVOLHeader(); + +protected: + Boolean fLeavePresentationTimesUnmodified; + u_int32_t vop_time_increment_resolution; + unsigned fNumVTIRBits; + // # of bits needed to count to "vop_time_increment_resolution" + struct timeval fLastNonBFramePresentationTime; + unsigned fLastNonBFrameVop_time_increment; + +private: + unsigned fNumBitsSeenSoFar; // used by the getNextFrameBit*() routines +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEG4VideoStreamFramer.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEG4VideoStreamFramer.hh new file mode 100644 index 0000000..3078b3d --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEG4VideoStreamFramer.hh @@ -0,0 +1,75 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A filter that breaks up an MPEG-4 video elementary stream into +// frames for: +// - Visual Object Sequence (VS) Header + Visual Object (VO) Header +// + Video Object Layer (VOL) Header +// - Group of VOP (GOV) Header +// - VOP frame +// C++ header + +#ifndef _MPEG4_VIDEO_STREAM_FRAMER_HH +#define _MPEG4_VIDEO_STREAM_FRAMER_HH + +#ifndef _MPEG_VIDEO_STREAM_FRAMER_HH +#include "MPEGVideoStreamFramer.hh" +#endif + +class MPEG4VideoStreamFramer: public MPEGVideoStreamFramer { +public: + static MPEG4VideoStreamFramer* + createNew(UsageEnvironment& env, FramedSource* inputSource); + + u_int8_t profile_and_level_indication() const { + return fProfileAndLevelIndication; + } + + unsigned char* getConfigBytes(unsigned& numBytes) const; + + void setConfigInfo(u_int8_t profileAndLevelIndication, char const* configStr); + // Assigns the "profile_and_level_indication" number, and the 'config' bytes. + // If this function is not called, then this data is only assigned later, when it appears in the input stream. 
+ +protected: + MPEG4VideoStreamFramer(UsageEnvironment& env, + FramedSource* inputSource, + Boolean createParser = True); + // called only by createNew(), or by subclass constructors + virtual ~MPEG4VideoStreamFramer(); + + void startNewConfig(); + void appendToNewConfig(unsigned char* newConfigBytes, + unsigned numNewBytes); + void completeNewConfig(); + +private: + // redefined virtual functions: + virtual Boolean isMPEG4VideoStreamFramer() const; + +protected: + u_int8_t fProfileAndLevelIndication; + unsigned char* fConfigBytes; + unsigned fNumConfigBytes; + +private: + unsigned char* fNewConfigBytes; + unsigned fNumNewConfigBytes; + friend class MPEG4VideoStreamParser; // hack +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MPEGVideoStreamFramer.hh b/AnyCore/lib_rtsp/liveMedia/include/MPEGVideoStreamFramer.hh new file mode 100644 index 0000000..b624396 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MPEGVideoStreamFramer.hh @@ -0,0 +1,84 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A filter that breaks up an MPEG video elementary stream into +// headers and frames +// C++ header + +#ifndef _MPEG_VIDEO_STREAM_FRAMER_HH +#define _MPEG_VIDEO_STREAM_FRAMER_HH + +#ifndef _FRAMED_FILTER_HH +#include "FramedFilter.hh" +#endif + +class TimeCode { +public: + TimeCode(); + virtual ~TimeCode(); + + int operator==(TimeCode const& arg2); + unsigned days, hours, minutes, seconds, pictures; +}; + +class MPEGVideoStreamFramer: public FramedFilter { +public: + Boolean& pictureEndMarker() { return fPictureEndMarker; } + // a hack for implementing the RTP 'M' bit + + void flushInput(); // called if there is a discontinuity (seeking) in the input + +protected: + MPEGVideoStreamFramer(UsageEnvironment& env, FramedSource* inputSource); + // we're an abstract base class + virtual ~MPEGVideoStreamFramer(); + + void computePresentationTime(unsigned numAdditionalPictures); + // sets "fPresentationTime" + void setTimeCode(unsigned hours, unsigned minutes, unsigned seconds, + unsigned pictures, unsigned picturesSinceLastGOP); + +private: // redefined virtual functions + virtual void doGetNextFrame(); + +private: + void reset(); + + static void continueReadProcessing(void* clientData, + unsigned char* ptr, unsigned size, + struct timeval presentationTime); + void continueReadProcessing(); + +protected: + double fFrameRate; // Note: For MPEG-4, this is really a 'tick rate' + unsigned fPictureCount; // hack used to implement doGetNextFrame() + Boolean fPictureEndMarker; + struct timeval fPresentationTimeBase; + + // parsing state + class MPEGVideoStreamParser* fParser; + friend class MPEGVideoStreamParser; // hack + +private: + TimeCode fCurGOPTimeCode, fPrevGOPTimeCode; + unsigned fPicturesAdjustment; + double fPictureTimeBase; + unsigned fTcSecsBase; + Boolean fHaveSeenFirstTimeCode; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MatroskaFile.hh b/AnyCore/lib_rtsp/liveMedia/include/MatroskaFile.hh new file mode 100644 index 0000000..1376845 --- 
/dev/null
+++ b/AnyCore/lib_rtsp/liveMedia/include/MatroskaFile.hh
@@ -0,0 +1,181 @@
/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// "liveMedia"
// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved.
// A class that encapsulates a Matroska file.
// C++ header

#ifndef _MATROSKA_FILE_HH
#define _MATROSKA_FILE_HH

#ifndef _RTP_SINK_HH
#include "RTPSink.hh"
#endif
#ifndef _HASH_TABLE_HH
#include "HashTable.hh"
#endif

class MatroskaTrack; // forward
class MatroskaDemux; // forward

class MatroskaFile: public Medium {
public:
  typedef void (onCreationFunc)(MatroskaFile* newFile, void* clientData);
  static void createNew(UsageEnvironment& env, char const* fileName, onCreationFunc* onCreation, void* onCreationClientData,
                        char const* preferredLanguage = "eng");
    // Note: Unlike most "createNew()" functions, this one doesn't return a new object immediately. Instead, because this class
    // requires file reading (to parse the Matroska 'Track' headers) before a new object can be initialized, the creation of a new
    // object is signalled by calling - from the event loop - an 'onCreationFunc' that is passed as a parameter to "createNew()".

  MatroskaTrack* lookup(unsigned trackNumber) const;

  // Create a demultiplexor for extracting tracks from this file. (Separate clients will typically have separate demultiplexors.)
  MatroskaDemux* newDemux();

  // Parameters of the file ('Segment'); set when the file is parsed:
  unsigned timecodeScale() { return fTimecodeScale; } // in nanoseconds
  float segmentDuration() { return fSegmentDuration; } // in units of "timecodeScale()"
  float fileDuration(); // in seconds

  char const* fileName() const { return fFileName; }

  unsigned chosenVideoTrackNumber() { return fChosenVideoTrackNumber; }
  unsigned chosenAudioTrackNumber() { return fChosenAudioTrackNumber; }
  unsigned chosenSubtitleTrackNumber() { return fChosenSubtitleTrackNumber; }

  FramedSource*
  createSourceForStreaming(FramedSource* baseSource, unsigned trackNumber,
                           unsigned& estBitrate, unsigned& numFiltersInFrontOfTrack);
    // Takes a data source (which must be a demultiplexed track from this file) and returns
    // a (possibly modified) data source that can be used for streaming.

  RTPSink* createRTPSinkForTrackNumber(unsigned trackNumber, Groupsock* rtpGroupsock,
                                       unsigned char rtpPayloadTypeIfDynamic);
    // Creates a "RTPSink" object that would be appropriate for streaming the specified track,
    // or NULL if no appropriate "RTPSink" exists

private:
  MatroskaFile(UsageEnvironment& env, char const* fileName, onCreationFunc* onCreation, void* onCreationClientData,
               char const* preferredLanguage);
      // called only by createNew()
  virtual ~MatroskaFile();

  static void handleEndOfTrackHeaderParsing(void* clientData);
  void handleEndOfTrackHeaderParsing();

  void addTrack(MatroskaTrack* newTrack, unsigned trackNumber);
  void addCuePoint(double cueTime, u_int64_t clusterOffsetInFile, unsigned blockNumWithinCluster);
  Boolean lookupCuePoint(double& cueTime, u_int64_t& resultClusterOffsetInFile, unsigned& resultBlockNumWithinCluster);
  void printCuePoints(FILE* fid);

  void removeDemux(MatroskaDemux* demux);

private:
  friend class MatroskaFileParser;
  friend class MatroskaDemux;
  char const* fFileName;
  onCreationFunc* fOnCreation;
  void* fOnCreationClientData;
  char const* fPreferredLanguage;

  unsigned fTimecodeScale; // in nanoseconds
  float fSegmentDuration; // in units of "fTimecodeScale"
  u_int64_t fSegmentDataOffset, fClusterOffset, fCuesOffset;

  class MatroskaTrackTable* fTrackTable;
  HashTable* fDemuxesTable;
  class CuePoint* fCuePoints;
  unsigned fChosenVideoTrackNumber, fChosenAudioTrackNumber, fChosenSubtitleTrackNumber;
  class MatroskaFileParser* fParserForInitialization;
};

// We define our own track type codes as bits (powers of 2), so we can use the set of track types as a bitmap, representing a set:
// (Note that MATROSKA_TRACK_TYPE_OTHER must be last, and have the largest value.)
#define MATROSKA_TRACK_TYPE_VIDEO 0x01
#define MATROSKA_TRACK_TYPE_AUDIO 0x02
#define MATROSKA_TRACK_TYPE_SUBTITLE 0x04
#define MATROSKA_TRACK_TYPE_OTHER 0x08

class MatroskaTrack {
public:
  MatroskaTrack();
  virtual ~MatroskaTrack();

  // track parameters
  unsigned trackNumber;
  u_int8_t trackType;
  Boolean isEnabled, isDefault, isForced;
  unsigned defaultDuration;
  char* name;
  char* language;
  char* codecID;
  unsigned samplingFrequency;
  unsigned numChannels;
  char const* mimeType;
  unsigned codecPrivateSize;
  u_int8_t* codecPrivate;
  Boolean codecPrivateUsesH264FormatForH265; // a hack specifically for H.265 video tracks
  Boolean codecIsOpus; // a hack for Opus audio
  unsigned headerStrippedBytesSize;
  u_int8_t* headerStrippedBytes;
  unsigned subframeSizeSize; // 0 means: frames do not have subframes (the default behavior)
  Boolean haveSubframes() const { return subframeSizeSize > 0; }
};

class MatroskaDemux: public Medium {
public:
  FramedSource* newDemuxedTrack();
  FramedSource* newDemuxedTrack(unsigned& resultTrackNumber);
      // Returns a new stream ("FramedSource" subclass) that represents the next preferred media
      // track (video, audio, subtitle - in that order) from the file. (Preferred media tracks
      // are based on the file's language preference.)
      // This function returns NULL when no more media tracks exist.

  FramedSource* newDemuxedTrackByTrackNumber(unsigned trackNumber);
      // As above, but creates a new stream for a specific track number within the Matroska file.
      // (You should not call this function more than once with the same track number.)

  // Note: We assume that:
  // - Every track created by "newDemuxedTrack()" is later read
  // - All calls to "newDemuxedTrack()" are made before any track is read

protected:
  friend class MatroskaFile;
  friend class MatroskaFileParser;
  class MatroskaDemuxedTrack* lookupDemuxedTrack(unsigned trackNumber);

  MatroskaDemux(MatroskaFile& ourFile); // we're created only by a "MatroskaFile" (a friend)
  virtual ~MatroskaDemux();

private:
  friend class MatroskaDemuxedTrack;
  void removeTrack(unsigned trackNumber);
  void continueReading(); // called by a demuxed track to tell us that it has a pending read ("doGetNextFrame()")
  void seekToTime(double& seekNPT);

  static void handleEndOfFile(void* clientData);
  void handleEndOfFile();

private:
  MatroskaFile& fOurFile;
  class MatroskaFileParser* fOurParser;
  HashTable* fDemuxedTracksTable;

  // Used to implement "newServerMediaSubsession()":
  u_int8_t fNextTrackTypeToCheck;
};

#endif
diff --git a/AnyCore/lib_rtsp/liveMedia/include/MatroskaFileServerDemux.hh b/AnyCore/lib_rtsp/liveMedia/include/MatroskaFileServerDemux.hh
new file mode 100644
index 0000000..0f8b629
--- /dev/null
+++ b/AnyCore/lib_rtsp/liveMedia/include/MatroskaFileServerDemux.hh
@@ -0,0 +1,84 @@
/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.
You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// "liveMedia"
// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved.
// A server demultiplexor for a Matroska file
// C++ header

#ifndef _MATROSKA_FILE_SERVER_DEMUX_HH
#define _MATROSKA_FILE_SERVER_DEMUX_HH

#ifndef _SERVER_MEDIA_SESSION_HH
#include "ServerMediaSession.hh"
#endif

#ifndef _MATROSKA_FILE_HH
#include "MatroskaFile.hh"
#endif

class MatroskaFileServerDemux: public Medium {
public:
  typedef void (onCreationFunc)(MatroskaFileServerDemux* newDemux, void* clientData);
  static void createNew(UsageEnvironment& env, char const* fileName,
                        onCreationFunc* onCreation, void* onCreationClientData,
                        char const* preferredLanguage = "eng");
    // Note: Unlike most "createNew()" functions, this one doesn't return a new object immediately. Instead, because this class
    // requires file reading (to parse the Matroska 'Track' headers) before a new object can be initialized, the creation of a new
    // object is signalled by calling - from the event loop - an 'onCreationFunc' that is passed as a parameter to "createNew()".

  ServerMediaSubsession* newServerMediaSubsession();
  ServerMediaSubsession* newServerMediaSubsession(unsigned& resultTrackNumber);
    // Returns a new "ServerMediaSubsession" object that represents the next preferred media track
    // (video, audio, subtitle - in that order) from the file. (Preferred media tracks are based on the file's language preference.)
    // This function returns NULL when no more media tracks exist.

  ServerMediaSubsession* newServerMediaSubsessionByTrackNumber(unsigned trackNumber);
    // As above, but creates a new "ServerMediaSubsession" object for a specific track number within the Matroska file.
    // (You should not call this function more than once with the same track number.)

  // The following public: member functions are called only by the "ServerMediaSubsession" objects:

  MatroskaFile* ourMatroskaFile() { return fOurMatroskaFile; }
  char const* fileName() const { return fFileName; }
  float fileDuration() const { return fOurMatroskaFile->fileDuration(); }

  FramedSource* newDemuxedTrack(unsigned clientSessionId, unsigned trackNumber);
    // Used by the "ServerMediaSubsession" objects to implement their "createNewStreamSource()" virtual function.

private:
  MatroskaFileServerDemux(UsageEnvironment& env, char const* fileName,
                          onCreationFunc* onCreation, void* onCreationClientData,
                          char const* preferredLanguage);
      // called only by createNew()
  virtual ~MatroskaFileServerDemux();

  static void onMatroskaFileCreation(MatroskaFile* newFile, void* clientData);
  void onMatroskaFileCreation(MatroskaFile* newFile);

private:
  char const* fFileName;
  onCreationFunc* fOnCreation;
  void* fOnCreationClientData;
  MatroskaFile* fOurMatroskaFile;

  // Used to implement "newServerMediaSubsession()":
  u_int8_t fNextTrackTypeToCheck;

  // Used to set up demuxing, to implement "newDemuxedTrack()":
  unsigned fLastClientSessionId;
  MatroskaDemux* fLastCreatedDemux;
};

#endif
diff --git a/AnyCore/lib_rtsp/liveMedia/include/Media.hh b/AnyCore/lib_rtsp/liveMedia/include/Media.hh
new file mode 100644
index 0000000..7705a14
--- /dev/null
+++ b/AnyCore/lib_rtsp/liveMedia/include/Media.hh
@@ -0,0 +1,138 @@
/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Medium +// C++ header + +#ifndef _MEDIA_HH +#define _MEDIA_HH + +#ifndef _LIVEMEDIA_VERSION_HH +#include "liveMedia_version.hh" +#endif + +#ifndef _HASH_TABLE_HH +#include "HashTable.hh" +#endif + +#ifndef _USAGE_ENVIRONMENT_HH +#include "UsageEnvironment.hh" +#endif + +// Lots of files end up needing the following, so just #include them here: +#ifndef _NET_COMMON_H +#include "NetCommon.h" +#endif +#include + +// The following makes the Borland compiler happy: +#ifdef __BORLANDC__ +#define _strnicmp strnicmp +#define fabsf(x) fabs(x) +#endif + +#define mediumNameMaxLen 30 + +class Medium { +public: + static Boolean lookupByName(UsageEnvironment& env, + char const* mediumName, + Medium*& resultMedium); + static void close(UsageEnvironment& env, char const* mediumName); + static void close(Medium* medium); // alternative close() method using ptrs + // (has no effect if medium == NULL) + + UsageEnvironment& envir() const {return fEnviron;} + + char const* name() const {return fMediumName;} + + // Test for specific types of media: + virtual Boolean isSource() const; + virtual Boolean isSink() const; + virtual Boolean isRTCPInstance() const; + virtual Boolean isRTSPClient() const; + virtual Boolean isRTSPServer() const; + virtual Boolean isMediaSession() const; + virtual Boolean isServerMediaSession() const; + virtual Boolean isDarwinInjector() const; + +protected: + friend class MediaLookupTable; + 
Medium(UsageEnvironment& env); // abstract base class + virtual ~Medium(); // instances are deleted using close() only + + TaskToken& nextTask() { + return fNextTask; + } + +private: + UsageEnvironment& fEnviron; + char fMediumName[mediumNameMaxLen]; + TaskToken fNextTask; +}; + + +// A data structure for looking up a Medium by its string name. +// (It is used only to implement "Medium", but we make it visible here, in case developers want to use it to iterate over +// the whole set of "Medium" objects that we've created.) +class MediaLookupTable { +public: + static MediaLookupTable* ourMedia(UsageEnvironment& env); + HashTable const& getTable() { return *fTable; } + +protected: + MediaLookupTable(UsageEnvironment& env); + virtual ~MediaLookupTable(); + +private: + friend class Medium; + + Medium* lookup(char const* name) const; + // Returns NULL if none already exists + + void addNew(Medium* medium, char* mediumName); + void remove(char const* name); + + void generateNewName(char* mediumName, unsigned maxLen); + +private: + UsageEnvironment& fEnv; + HashTable* fTable; + unsigned fNameGenerator; +}; + + +// The structure pointed to by the "liveMediaPriv" UsageEnvironment field: +class _Tables { +public: + static _Tables* getOurTables(UsageEnvironment& env, Boolean createIfNotPresent = True); + // returns a pointer to an "ourTables" structure (creating it if necessary) + void reclaimIfPossible(); + // used to delete ourselves when we're no longer used + + MediaLookupTable* mediaTable; + void* socketTable; + +protected: + _Tables(UsageEnvironment& env); + virtual ~_Tables(); + +private: + UsageEnvironment& fEnv; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MediaSession.hh b/AnyCore/lib_rtsp/liveMedia/include/MediaSession.hh new file mode 100644 index 0000000..2b85587 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MediaSession.hh @@ -0,0 +1,337 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the 
terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// "liveMedia"
// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved.
// A data structure that represents a session that consists of
// potentially multiple (audio and/or video) sub-sessions
// (This data structure is used for media *receivers* - i.e., clients.
//  For media streamers, use "ServerMediaSession" instead.)
// C++ header

/* NOTE: To support receiving your own custom RTP payload format, you must first define a new
   subclass of "MultiFramedRTPSource" (or "BasicUDPSource") that implements it.
   Then define your own subclass of "MediaSession" and "MediaSubsession", as follows:
   - In your subclass of "MediaSession" (named, for example, "myMediaSession"):
       - Define and implement your own static member function
           static myMediaSession* createNew(UsageEnvironment& env, char const* sdpDescription);
         and call this - instead of "MediaSession::createNew()" - in your application,
         when you create a new "MediaSession" object.
       - Reimplement the "createNewMediaSubsession()" virtual function, as follows:
           MediaSubsession* myMediaSession::createNewMediaSubsession() { return new myMediaSubsession(*this); }
   - In your subclass of "MediaSubsession" (named, for example, "myMediaSubsession"):
       - Reimplement the "createSourceObjects()" virtual function, perhaps similar to this:
           Boolean myMediaSubsession::createSourceObjects(int useSpecialRTPoffset) {
             if (strcmp(fCodecName, "X-MY-RTP-PAYLOAD-FORMAT") == 0) {
               // This subsession uses our custom RTP payload format:
               fReadSource = fRTPSource = myRTPPayloadFormatRTPSource::createNew(<parameters>);
               return True;
             } else {
               // This subsession uses some other RTP payload format - perhaps one that we already implement:
               return ::createSourceObjects(useSpecialRTPoffset);
             }
           }
*/

#ifndef _MEDIA_SESSION_HH
#define _MEDIA_SESSION_HH

#ifndef _RTCP_HH
#include "RTCP.hh"
#endif
#ifndef _FRAMED_FILTER_HH
#include "FramedFilter.hh"
#endif

class MediaSubsession; // forward

class MediaSession: public Medium {
public:
  static MediaSession* createNew(UsageEnvironment& env,
                                 char const* sdpDescription);

  static Boolean lookupByName(UsageEnvironment& env, char const* sourceName,
                              MediaSession*& resultSession);

  Boolean hasSubsessions() const { return fSubsessionsHead != NULL; }

  char* connectionEndpointName() const { return fConnectionEndpointName; }
  char const* CNAME() const { return fCNAME; }
  struct in_addr const& sourceFilterAddr() const { return fSourceFilterAddr; }
  float& scale() { return fScale; }
  char* mediaSessionType() const { return fMediaSessionType; }
  char* sessionName() const { return fSessionName; }
  char* sessionDescription() const { return fSessionDescription; }
  char const* controlPath() const { return fControlPath; }

  double& playStartTime() { return fMaxPlayStartTime; }
  double& playEndTime() { return fMaxPlayEndTime; }
  char* absStartTime() const;
  char* absEndTime() const;
  // Used only to set the local fields:
  char*& _absStartTime() { return fAbsStartTime; }
  char*& _absEndTime() { return fAbsEndTime; }

  Boolean initiateByMediaType(char const* mimeType,
                              MediaSubsession*& resultSubsession,
                              int useSpecialRTPoffset = -1);
      // Initiates the first subsession with the specified MIME type
      // Returns the resulting subsession, or 'multi source' (not both)

protected: // redefined virtual functions
  virtual Boolean isMediaSession() const;

protected:
  MediaSession(UsageEnvironment& env);
      // called only by createNew();
  virtual ~MediaSession();

  virtual MediaSubsession* createNewMediaSubsession();

  Boolean initializeWithSDP(char const* sdpDescription);
  Boolean parseSDPLine(char const* input, char const*& nextLine);
  Boolean parseSDPLine_s(char const* sdpLine);
  Boolean parseSDPLine_i(char const* sdpLine);
  Boolean parseSDPLine_c(char const* sdpLine);
  Boolean parseSDPAttribute_type(char const* sdpLine);
  Boolean parseSDPAttribute_control(char const* sdpLine);
  Boolean parseSDPAttribute_range(char const* sdpLine);
  Boolean parseSDPAttribute_source_filter(char const* sdpLine);

  static char* lookupPayloadFormat(unsigned char rtpPayloadType,
                                   unsigned& rtpTimestampFrequency,
                                   unsigned& numChannels);
  static unsigned guessRTPTimestampFrequency(char const* mediumName,
                                             char const* codecName);

protected:
  friend class MediaSubsessionIterator;
  char* fCNAME; // used for RTCP

  // Linkage fields:
  MediaSubsession* fSubsessionsHead;
  MediaSubsession* fSubsessionsTail;

  // Fields set from a SDP description:
  char* fConnectionEndpointName;
  double fMaxPlayStartTime;
  double fMaxPlayEndTime;
  char* fAbsStartTime;
  char* fAbsEndTime;
  struct in_addr fSourceFilterAddr; // used for SSM
  float fScale; // set from a RTSP "Scale:" header
  char* fMediaSessionType; // holds a=type value
  char* fSessionName; // holds s= value
  char* fSessionDescription; // holds i= value
  char* fControlPath; // holds optional a=control: string
};


class MediaSubsessionIterator {
public:
  MediaSubsessionIterator(MediaSession const& session);
  virtual ~MediaSubsessionIterator();

  MediaSubsession* next(); // NULL if none
  void reset();

private:
  MediaSession const& fOurSession;
  MediaSubsession* fNextPtr;
};


class MediaSubsession {
public:
  MediaSession& parentSession() { return fParent; }
  MediaSession const& parentSession() const { return fParent; }

  unsigned short clientPortNum() const { return fClientPortNum; }
  unsigned char rtpPayloadFormat() const { return fRTPPayloadFormat; }
  char const* savedSDPLines() const { return fSavedSDPLines; }
  char const* mediumName() const { return fMediumName; }
  char const* codecName() const { return fCodecName; }
  char const* protocolName() const { return fProtocolName; }
  char const* controlPath() const { return fControlPath; }
  Boolean isSSM() const { return fSourceFilterAddr.s_addr != 0; }

  unsigned short videoWidth() const { return fVideoWidth; }
  unsigned short videoHeight() const { return fVideoHeight; }
  unsigned videoFPS() const { return fVideoFPS; }
  unsigned numChannels() const { return fNumChannels; }
  float& scale() { return fScale; }

  RTPSource* rtpSource() { return fRTPSource; }
  RTCPInstance* rtcpInstance() { return fRTCPInstance; }
  unsigned rtpTimestampFrequency() const { return fRTPTimestampFrequency; }
  Boolean rtcpIsMuxed() const { return fMultiplexRTCPWithRTP; }
  FramedSource* readSource() { return fReadSource; }
      // This is the source that client sinks read from.
      // It is usually (but not necessarily) the same as "rtpSource()"
  void addFilter(FramedFilter* filter);
      // Changes "readSource()" to "filter" (which must have just been created with "readSource()" as its input)

  double playStartTime() const;
  double playEndTime() const;
  char* absStartTime() const;
  char* absEndTime() const;
  // Used only to set the local fields:
  double& _playStartTime() { return fPlayStartTime; }
  double& _playEndTime() { return fPlayEndTime; }
  char*& _absStartTime() { return fAbsStartTime; }
  char*& _absEndTime() { return fAbsEndTime; }

  Boolean initiate(int useSpecialRTPoffset = -1);
      // Creates a "RTPSource" for this subsession. (Has no effect if it's
      // already been created.)  Returns True iff this succeeds.
  void deInitiate(); // Destroys any previously created RTPSource
  Boolean setClientPortNum(unsigned short portNum);
      // Sets the preferred client port number that any "RTPSource" for
      // this subsession would use.  (By default, the client port number
      // is gotten from the original SDP description, or - if the SDP
      // description does not specify a client port number - an ephemeral
      // (even) port number is chosen.)  This routine must *not* be
      // called after initiate().
  void receiveRawMP3ADUs() { fReceiveRawMP3ADUs = True; } // optional hack for audio/MPA-ROBUST; must not be called after initiate()
  void receiveRawJPEGFrames() { fReceiveRawJPEGFrames = True; } // optional hack for video/JPEG; must not be called after initiate()
  char*& connectionEndpointName() { return fConnectionEndpointName; }
  char const* connectionEndpointName() const {
    return fConnectionEndpointName;
  }

  // 'Bandwidth' parameter, set in the "b=" SDP line:
  unsigned bandwidth() const { return fBandwidth; }

  // General SDP attribute accessor functions:
  char const* attrVal_str(char const* attrName) const;
      // returns "" if attribute doesn't exist (and has no default value), or is not a string
  char const* attrVal_strToLower(char const* attrName) const;
      // returns "" if attribute doesn't exist (and has no default value), or is not a string
  unsigned attrVal_int(char const* attrName) const;
      // also returns 0 if attribute doesn't exist (and has no default value)
  unsigned attrVal_unsigned(char const* attrName) const { return (unsigned)attrVal_int(attrName); }
  Boolean attrVal_bool(char const* attrName) const { return attrVal_int(attrName) != 0; }

  // Old, now-deprecated SDP attribute accessor functions, kept here for backwards-compatibility:
  char const* fmtp_config() const;
  char const* fmtp_configuration() const { return fmtp_config(); }
  char const* fmtp_spropparametersets() const { return attrVal_str("sprop-parameter-sets"); }
  char const* fmtp_spropvps() const { return attrVal_str("sprop-vps"); }
  char const* fmtp_spropsps() const { return attrVal_str("sprop-sps"); }
  char const* fmtp_sproppps() const { return attrVal_str("sprop-pps"); }

  netAddressBits connectionEndpointAddress() const;
      // Converts "fConnectionEndpointName" to an address (or 0 if unknown)
  void setDestinations(netAddressBits defaultDestAddress);
      // Uses "fConnectionEndpointName" and "serverPortNum" to set
      // the destination address and port of the RTP and RTCP objects.
      // This is typically called by RTSP clients after doing "SETUP".

  char const* sessionId() const { return fSessionId; }
  void setSessionId(char const* sessionId);

  // Public fields that external callers can use to keep state.
  // (They are responsible for all storage management on these fields)
  unsigned short serverPortNum; // in host byte order (used by RTSP)
  unsigned char rtpChannelId, rtcpChannelId; // used by RTSP (for RTP/TCP)
  MediaSink* sink; // callers can use this to keep track of who's playing us
  void* miscPtr; // callers can use this for whatever they want

  // Parameters set from a RTSP "RTP-Info:" header:
  struct {
    u_int16_t seqNum;
    u_int32_t timestamp;
    Boolean infoIsNew; // not part of the RTSP header; instead, set whenever this struct is filled in
  } rtpInfo;

  double getNormalPlayTime(struct timeval const& presentationTime);
  // Computes the stream's "Normal Play Time" (NPT) from the given "presentationTime".
  // (For the definition of "Normal Play Time", see RFC 2326, section 3.6.)
  // This function is useful only if the "rtpInfo" structure was previously filled in
  // (e.g., by a "RTP-Info:" header in a RTSP response).
  // Also, for this function to work properly, the RTP stream's presentation times must (eventually) be
  // synchronized via RTCP.
  // (Note: If this function returns a negative number, then the result should be ignored by the caller.)

protected:
  friend class MediaSession;
  friend class MediaSubsessionIterator;
  MediaSubsession(MediaSession& parent);
  virtual ~MediaSubsession();

  UsageEnvironment& env() { return fParent.envir(); }
  void setNext(MediaSubsession* next) { fNext = next; }

  void setAttribute(char const* name, char const* value = NULL, Boolean valueIsHexadecimal = False);

  Boolean parseSDPLine_c(char const* sdpLine);
  Boolean parseSDPLine_b(char const* sdpLine);
  Boolean parseSDPAttribute_rtpmap(char const* sdpLine);
  Boolean parseSDPAttribute_rtcpmux(char const* sdpLine);
  Boolean parseSDPAttribute_control(char const* sdpLine);
  Boolean parseSDPAttribute_range(char const* sdpLine);
  Boolean parseSDPAttribute_fmtp(char const* sdpLine);
  Boolean parseSDPAttribute_source_filter(char const* sdpLine);
  Boolean parseSDPAttribute_x_dimensions(char const* sdpLine);
  Boolean parseSDPAttribute_framerate(char const* sdpLine);

  virtual Boolean createSourceObjects(int useSpecialRTPoffset);
      // create "fRTPSource" and "fReadSource" member objects, after we've been initialized via SDP

protected:
  // Linkage fields:
  MediaSession& fParent;
  MediaSubsession* fNext;

  // Fields set from a SDP description:
  char* fConnectionEndpointName; // may also be set by RTSP SETUP response
  unsigned short fClientPortNum; // in host byte order
      // This field is also set by initiate()
  unsigned char fRTPPayloadFormat;
  char* fSavedSDPLines;
  char* fMediumName;
  char* fCodecName;
  char* fProtocolName;
  unsigned fRTPTimestampFrequency;
  Boolean fMultiplexRTCPWithRTP;
  char* fControlPath; // holds optional a=control: string
  struct in_addr fSourceFilterAddr; // used for SSM
  unsigned fBandwidth; // in kilobits-per-second, from b= line

  double fPlayStartTime;
  double fPlayEndTime;
  char* fAbsStartTime;
  char* fAbsEndTime;
  unsigned short fVideoWidth, fVideoHeight;
      // screen dimensions (set by an optional a=x-dimensions: <w>,<h> line)
  unsigned fVideoFPS;
      // frame rate (set by an optional "a=framerate: <fps>" or "a=x-framerate: <fps>" line)
  unsigned fNumChannels;
      // optionally set by "a=rtpmap:" lines for audio sessions.  Default: 1
  float fScale; // set from a RTSP "Scale:" header
  double fNPT_PTS_Offset; // set by "getNormalPlayTime()"; add this to a PTS to get NPT
  HashTable* fAttributeTable; // for "a=fmtp:" attributes.  (Later an array by payload type #####)

  // Fields set or used by initiate():
  Groupsock* fRTPSocket; Groupsock* fRTCPSocket; // works even for unicast
  RTPSource* fRTPSource; RTCPInstance* fRTCPInstance;
  FramedSource* fReadSource;
  Boolean fReceiveRawMP3ADUs, fReceiveRawJPEGFrames;

  // Other fields:
  char* fSessionId; // used by RTSP
};

#endif
diff --git a/AnyCore/lib_rtsp/liveMedia/include/MediaSink.hh b/AnyCore/lib_rtsp/liveMedia/include/MediaSink.hh
new file mode 100644
index 0000000..3bc2926
--- /dev/null
+++ b/AnyCore/lib_rtsp/liveMedia/include/MediaSink.hh
@@ -0,0 +1,135 @@
/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// "liveMedia"
// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved.
// Media Sinks
// C++ header

#ifndef _MEDIA_SINK_HH
#define _MEDIA_SINK_HH

#ifndef _FRAMED_SOURCE_HH
#include "FramedSource.hh"
#endif

class MediaSink: public Medium {
public:
  static Boolean lookupByName(UsageEnvironment& env, char const* sinkName,
                              MediaSink*& resultSink);

  typedef void (afterPlayingFunc)(void* clientData);
  Boolean startPlaying(MediaSource& source,
                       afterPlayingFunc* afterFunc,
                       void* afterClientData);
  virtual void stopPlaying();

  // Test for specific types of sink:
  virtual Boolean isRTPSink() const;

  FramedSource* source() const {return fSource;}

protected:
  MediaSink(UsageEnvironment& env); // abstract base class
  virtual ~MediaSink();

  virtual Boolean sourceIsCompatibleWithUs(MediaSource& source);
      // called by startPlaying()
  virtual Boolean continuePlaying() = 0;
      // called by startPlaying()

  static void onSourceClosure(void* clientData); // can be used in "getNextFrame()" calls
  void onSourceClosure();
      // should be called (on ourselves) by continuePlaying() when it
      // discovers that the source we're playing from has closed.

  FramedSource* fSource;

private:
  // redefined virtual functions:
  virtual Boolean isSink() const;

private:
  // The following fields are used when we're being played:
  afterPlayingFunc* fAfterFunc;
  void* fAfterClientData;
};

// A data structure that a sink may use for an output packet:
class OutPacketBuffer {
public:
  OutPacketBuffer(unsigned preferredPacketSize, unsigned maxPacketSize,
                  unsigned maxBufferSize = 0);
      // if "maxBufferSize" is >0, use it - instead of "maxSize" - to compute the buffer size
  ~OutPacketBuffer();

  static unsigned maxSize;
  static void increaseMaxSizeTo(unsigned newMaxSize) { if (newMaxSize > OutPacketBuffer::maxSize) OutPacketBuffer::maxSize = newMaxSize; }

  unsigned char* curPtr() const {return &fBuf[fPacketStart + fCurOffset];}
  unsigned totalBytesAvailable() const {
    return fLimit - (fPacketStart + fCurOffset);
  }
  unsigned totalBufferSize() const { return fLimit; }
  unsigned char* packet() const {return &fBuf[fPacketStart];}
  unsigned curPacketSize() const {return fCurOffset;}

  void increment(unsigned numBytes) {fCurOffset += numBytes;}

  void enqueue(unsigned char const* from, unsigned numBytes);
  void enqueueWord(u_int32_t word);
  void insert(unsigned char const* from, unsigned numBytes, unsigned toPosition);
  void insertWord(u_int32_t word, unsigned toPosition);
  void extract(unsigned char* to, unsigned numBytes, unsigned fromPosition);
  u_int32_t extractWord(unsigned fromPosition);

  void skipBytes(unsigned numBytes);

  Boolean isPreferredSize() const {return fCurOffset >= fPreferred;}
  Boolean wouldOverflow(unsigned numBytes) const {
    return (fCurOffset+numBytes) > fMax;
  }
  unsigned numOverflowBytes(unsigned numBytes) const {
    return (fCurOffset+numBytes) - fMax;
  }
  Boolean isTooBigForAPacket(unsigned numBytes) const {
    return numBytes > fMax;
  }

  void setOverflowData(unsigned overflowDataOffset,
                       unsigned overflowDataSize,
                       struct timeval const& presentationTime,
                       unsigned durationInMicroseconds);
  unsigned overflowDataSize() const {return fOverflowDataSize;}
  struct timeval overflowPresentationTime() const {return fOverflowPresentationTime;}
  unsigned overflowDurationInMicroseconds() const {return fOverflowDurationInMicroseconds;}
  Boolean haveOverflowData() const {return fOverflowDataSize > 0;}
  void useOverflowData();

  void adjustPacketStart(unsigned numBytes);
  void resetPacketStart();
  void resetOffset() { fCurOffset = 0; }
  void resetOverflowData() { fOverflowDataOffset = fOverflowDataSize = 0; }

private:
  unsigned fPacketStart, fCurOffset, fPreferred, fMax, fLimit;
  unsigned char* fBuf;

  unsigned fOverflowDataOffset, fOverflowDataSize;
  struct timeval fOverflowPresentationTime;
  unsigned fOverflowDurationInMicroseconds;
};

#endif
diff --git a/AnyCore/lib_rtsp/liveMedia/include/MediaSource.hh b/AnyCore/lib_rtsp/liveMedia/include/MediaSource.hh
new file mode 100644
index 0000000..d608a40
--- /dev/null
+++ b/AnyCore/lib_rtsp/liveMedia/include/MediaSource.hh
@@ -0,0 +1,58 @@
/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// "liveMedia"
// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved.
+// Media Sources +// C++ header + +#ifndef _MEDIA_SOURCE_HH +#define _MEDIA_SOURCE_HH + +#ifndef _MEDIA_HH +#include "Media.hh" +#endif + +class MediaSource: public Medium { +public: + static Boolean lookupByName(UsageEnvironment& env, char const* sourceName, + MediaSource*& resultSource); + virtual void getAttributes() const; + // attributes are returned in "env's" 'result message' + + // The MIME type of this source: + virtual char const* MIMEtype() const; + + // Test for specific types of source: + virtual Boolean isFramedSource() const; + virtual Boolean isRTPSource() const; + virtual Boolean isMPEG1or2VideoStreamFramer() const; + virtual Boolean isMPEG4VideoStreamFramer() const; + virtual Boolean isH264VideoStreamFramer() const; + virtual Boolean isH265VideoStreamFramer() const; + virtual Boolean isDVVideoStreamFramer() const; + virtual Boolean isJPEGVideoSource() const; + virtual Boolean isAMRAudioSource() const; + +protected: + MediaSource(UsageEnvironment& env); // abstract base class + virtual ~MediaSource(); + +private: + // redefined virtual functions: + virtual Boolean isSource() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MultiFramedRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/MultiFramedRTPSink.hh new file mode 100644 index 0000000..594f339 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MultiFramedRTPSink.hh @@ -0,0 +1,140 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for a common kind of payload format: Those which pack multiple, +// complete codec frames (as many as possible) into each RTP packet. +// C++ header + +#ifndef _MULTI_FRAMED_RTP_SINK_HH +#define _MULTI_FRAMED_RTP_SINK_HH + +#ifndef _RTP_SINK_HH +#include "RTPSink.hh" +#endif + +class MultiFramedRTPSink: public RTPSink { +public: + void setPacketSizes(unsigned preferredPacketSize, unsigned maxPacketSize); + + typedef void (onSendErrorFunc)(void* clientData); + void setOnSendErrorFunc(onSendErrorFunc* onSendErrorFunc, void* onSendErrorFuncData) { + // Can be used to set a callback function to be called if there's an error sending RTP packets on our socket. + fOnSendErrorFunc = onSendErrorFunc; + fOnSendErrorData = onSendErrorFuncData; + } + +protected: + MultiFramedRTPSink(UsageEnvironment& env, + Groupsock* rtpgs, unsigned char rtpPayloadType, + unsigned rtpTimestampFrequency, + char const* rtpPayloadFormatName, + unsigned numChannels = 1); + // we're a virtual base class + + virtual ~MultiFramedRTPSink(); + + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + // perform any processing specific to the particular payload format + virtual Boolean allowFragmentationAfterStart() const; + // whether a frame can be fragmented if other frame(s) appear earlier + // in the packet (by default: False) + virtual Boolean allowOtherFramesAfterLastFragment() const; + // whether other frames can be packed into a packet following the + // final fragment of a previous, fragmented frame (by default: False) + virtual 
Boolean frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const; + // whether this frame can appear in position >1 in a pkt (default: True) + virtual unsigned specialHeaderSize() const; + // returns the size of any special header used (following the RTP header) (default: 0) + virtual unsigned frameSpecificHeaderSize() const; + // returns the size of any frame-specific header used (before each frame + // within the packet) (default: 0) + virtual unsigned computeOverflowForNewFrame(unsigned newFrameSize) const; + // returns the number of overflow bytes that would be produced by adding a new + // frame of size "newFrameSize" to the current RTP packet. + // (By default, this just calls "numOverflowBytes()", but subclasses can redefine + // this to (e.g.) impose a granularity upon RTP payload fragments.) + + // Functions that might be called by doSpecialFrameHandling(), or other subclass virtual functions: + Boolean isFirstPacket() const { return fIsFirstPacket; } + Boolean isFirstFrameInPacket() const { return fNumFramesUsedSoFar == 0; } + unsigned curFragmentationOffset() const { return fCurFragmentationOffset; } + void setMarkerBit(); + void setTimestamp(struct timeval framePresentationTime); + void setSpecialHeaderWord(unsigned word, /* 32 bits, in host order */ + unsigned wordPosition = 0); + void setSpecialHeaderBytes(unsigned char const* bytes, unsigned numBytes, + unsigned bytePosition = 0); + void setFrameSpecificHeaderWord(unsigned word, /* 32 bits, in host order */ + unsigned wordPosition = 0); + void setFrameSpecificHeaderBytes(unsigned char const* bytes, unsigned numBytes, + unsigned bytePosition = 0); + void setFramePadding(unsigned numPaddingBytes); + unsigned numFramesUsedSoFar() const { return fNumFramesUsedSoFar; } + unsigned ourMaxPacketSize() const { return fOurMaxPacketSize; } + +public: // redefined virtual functions: + virtual void stopPlaying(); + +protected: // redefined virtual functions: + virtual 
Boolean continuePlaying(); + +private: + void buildAndSendPacket(Boolean isFirstPacket); + void packFrame(); + void sendPacketIfNecessary(); + static void sendNext(void* firstArg); + friend void sendNext(void*); + + static void afterGettingFrame(void* clientData, + unsigned numBytesRead, unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned numBytesRead, unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + Boolean isTooBigForAPacket(unsigned numBytes) const; + + static void ourHandleClosure(void* clientData); + +private: + OutPacketBuffer* fOutBuf; + + Boolean fNoFramesLeft; + unsigned fNumFramesUsedSoFar; + unsigned fCurFragmentationOffset; + Boolean fPreviousFrameEndedFragmentation; + + Boolean fIsFirstPacket; + struct timeval fNextSendTime; + unsigned fTimestampPosition; + unsigned fSpecialHeaderPosition; + unsigned fSpecialHeaderSize; // size in bytes of any special header used + unsigned fCurFrameSpecificHeaderPosition; + unsigned fCurFrameSpecificHeaderSize; // size in bytes of cur frame-specific header + unsigned fTotalFrameSpecificHeaderSizes; // size of all frame-specific hdrs in pkt + unsigned fOurMaxPacketSize; + + onSendErrorFunc* fOnSendErrorFunc; + void* fOnSendErrorData; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/MultiFramedRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/MultiFramedRTPSource.hh new file mode 100644 index 0000000..cab0ae0 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/MultiFramedRTPSource.hh @@ -0,0 +1,159 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP source for a common kind of payload format: Those which pack multiple, +// complete codec frames (as many as possible) into each RTP packet. +// C++ header + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#define _MULTI_FRAMED_RTP_SOURCE_HH + +#ifndef _RTP_SOURCE_HH +#include "RTPSource.hh" +#endif + +class BufferedPacket; // forward +class BufferedPacketFactory; // forward + +class MultiFramedRTPSource: public RTPSource { +protected: + MultiFramedRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency, + BufferedPacketFactory* packetFactory = NULL); + // virtual base class + virtual ~MultiFramedRTPSource(); + + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + // Subclasses redefine this to handle any special, payload format + // specific header that follows the RTP header. 
+ + virtual Boolean packetIsUsableInJitterCalculation(unsigned char* packet, + unsigned packetSize); + // The default implementation returns True, but this can be redefined + +protected: + Boolean fCurrentPacketBeginsFrame; + Boolean fCurrentPacketCompletesFrame; + +protected: + // redefined virtual functions: + virtual void doStopGettingFrames(); + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + virtual void setPacketReorderingThresholdTime(unsigned uSeconds); + +private: + void reset(); + void doGetNextFrame1(); + + static void networkReadHandler(MultiFramedRTPSource* source, int /*mask*/); + void networkReadHandler1(); + + Boolean fAreDoingNetworkReads; + BufferedPacket* fPacketReadInProgress; + Boolean fNeedDelivery; + Boolean fPacketLossInFragmentedFrame; + unsigned char* fSavedTo; + unsigned fSavedMaxSize; + + // A buffer to (optionally) hold incoming pkts that have been reorderered + class ReorderingPacketBuffer* fReorderingBuffer; +}; + + +// A 'packet data' class that's used to implement the above. +// Note that this can be subclassed - if desired - to redefine +// "nextEnclosedFrameSize()". 
+ +class BufferedPacket { +public: + BufferedPacket(); + virtual ~BufferedPacket(); + + Boolean hasUsableData() const { return fTail > fHead; } + unsigned useCount() const { return fUseCount; } + + Boolean fillInData(RTPInterface& rtpInterface, struct sockaddr_in& fromAddress, Boolean& packetReadWasIncomplete); + void assignMiscParams(unsigned short rtpSeqNo, unsigned rtpTimestamp, + struct timeval presentationTime, + Boolean hasBeenSyncedUsingRTCP, + Boolean rtpMarkerBit, struct timeval timeReceived); + void skip(unsigned numBytes); // used to skip over an initial header + void removePadding(unsigned numBytes); // used to remove trailing bytes + void appendData(unsigned char* newData, unsigned numBytes); + void use(unsigned char* to, unsigned toSize, + unsigned& bytesUsed, unsigned& bytesTruncated, + unsigned short& rtpSeqNo, unsigned& rtpTimestamp, + struct timeval& presentationTime, + Boolean& hasBeenSyncedUsingRTCP, Boolean& rtpMarkerBit); + + BufferedPacket*& nextPacket() { return fNextPacket; } + + unsigned short rtpSeqNo() const { return fRTPSeqNo; } + struct timeval const& timeReceived() const { return fTimeReceived; } + + unsigned char* data() const { return &fBuf[fHead]; } + unsigned dataSize() const { return fTail-fHead; } + Boolean rtpMarkerBit() const { return fRTPMarkerBit; } + Boolean& isFirstPacket() { return fIsFirstPacket; } + unsigned bytesAvailable() const { return fPacketSize - fTail; } + +protected: + virtual void reset(); + virtual unsigned nextEnclosedFrameSize(unsigned char*& framePtr, + unsigned dataSize); + // The above function has been deprecated. 
Instead, new subclasses should use: + virtual void getNextEnclosedFrameParameters(unsigned char*& framePtr, + unsigned dataSize, + unsigned& frameSize, + unsigned& frameDurationInMicroseconds); + + unsigned fPacketSize; + unsigned char* fBuf; + unsigned fHead; + unsigned fTail; + +private: + BufferedPacket* fNextPacket; // used to link together packets + + unsigned fUseCount; + unsigned short fRTPSeqNo; + unsigned fRTPTimestamp; + struct timeval fPresentationTime; // corresponding to "fRTPTimestamp" + Boolean fHasBeenSyncedUsingRTCP; + Boolean fRTPMarkerBit; + Boolean fIsFirstPacket; + struct timeval fTimeReceived; +}; + +// A 'factory' class for creating "BufferedPacket" objects. +// If you want to subclass "BufferedPacket", then you'll also +// want to subclass this, to redefine createNewPacket() + +class BufferedPacketFactory { +public: + BufferedPacketFactory(); + virtual ~BufferedPacketFactory(); + + virtual BufferedPacket* createNewPacket(MultiFramedRTPSource* ourSource); +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/OggFile.hh b/AnyCore/lib_rtsp/liveMedia/include/OggFile.hh new file mode 100644 index 0000000..040044f --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/OggFile.hh @@ -0,0 +1,177 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A class that encapsulates an Ogg file +// C++ header + +#ifndef _OGG_FILE_HH +#define _OGG_FILE_HH + +#ifndef _RTP_SINK_HH +#include "RTPSink.hh" +#endif +#ifndef _HASH_TABLE_HH +#include "HashTable.hh" +#endif + +class OggTrack; // forward +class OggDemux; // forward + +class OggFile: public Medium { +public: + typedef void (onCreationFunc)(OggFile* newFile, void* clientData); + static void createNew(UsageEnvironment& env, char const* fileName, + onCreationFunc* onCreation, void* onCreationClientData); + // Note: Unlike most "createNew()" functions, this one doesn't return a new object + // immediately. Instead, because this class requires file reading (to parse the + // Ogg track headers) before a new object can be initialized, the creation of a new object + // is signalled by calling - from the event loop - an 'onCreationFunc' that is passed as + // a parameter to "createNew()". + + OggTrack* lookup(u_int32_t trackNumber); + + OggDemux* newDemux(); + // Creates a demultiplexor for extracting tracks from this file. + // (Separate clients will typically have separate demultiplexors.) + + char const* fileName() const { return fFileName; } + unsigned numTracks() const; + + FramedSource* + createSourceForStreaming(FramedSource* baseSource, u_int32_t trackNumber, + unsigned& estBitrate, unsigned& numFiltersInFrontOfTrack); + // Takes a data source (which must be a demultiplexed track from this file) and returns + // a (possibly modified) data source that can be used for streaming. 
+ + RTPSink* createRTPSinkForTrackNumber(u_int32_t trackNumber, Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic); + // Creates a "RTPSink" object that would be appropriate for streaming the specified track, + // or NULL if no appropriate "RTPSink" exists + + class OggTrackTable& trackTable() { return *fTrackTable; } + +private: + OggFile(UsageEnvironment& env, char const* fileName, onCreationFunc* onCreation, void* onCreationClientData); + // called only by createNew() + virtual ~OggFile(); + + static void handleEndOfBosPageParsing(void* clientData); + void handleEndOfBosPageParsing(); + + void addTrack(OggTrack* newTrack); + void removeDemux(OggDemux* demux); + +private: + friend class OggFileParser; + friend class OggDemux; + char const* fFileName; + onCreationFunc* fOnCreation; + void* fOnCreationClientData; + + class OggTrackTable* fTrackTable; + HashTable* fDemuxesTable; + class OggFileParser* fParserForInitialization; +}; + +class OggTrack { +public: + OggTrack(); + virtual ~OggTrack(); + + // track parameters + u_int32_t trackNumber; // bitstream serial number + char const* mimeType; // NULL if not known + + unsigned samplingFrequency, numChannels; // for audio tracks + unsigned estBitrate; // estimate, in kbps (for RTCP) + + // Special headers for Vorbis audio, Theora video, and Opus audio tracks: + struct _vtoHdrs { + u_int8_t* header[3]; // "identification", "comment", "setup" + unsigned headerSize[3]; + + // Fields specific to Vorbis audio: + unsigned blocksize[2]; // samples per frame (packet) + unsigned uSecsPerPacket[2]; // computed as (blocksize[i]*1000000)/samplingFrequency + unsigned vorbis_mode_count; + unsigned ilog_vorbis_mode_count_minus_1; + u_int8_t* vorbis_mode_blockflag; + // an array (of size "vorbis_mode_count") of indexes into the (2-entry) "blocksize" array + + // Fields specific to Theora video: + u_int8_t KFGSHIFT; + unsigned uSecsPerFrame; + + } vtoHdrs; + + Boolean weNeedHeaders() const { + return + 
vtoHdrs.header[0] == NULL || + vtoHdrs.header[1] == NULL || + (vtoHdrs.header[2] == NULL && strcmp(mimeType, "audio/OPUS") != 0); + } +}; + +class OggTrackTableIterator { +public: + OggTrackTableIterator(class OggTrackTable& ourTable); + virtual ~OggTrackTableIterator(); + + OggTrack* next(); + +private: + HashTable::Iterator* fIter; +}; + +class OggDemux: public Medium { +public: + FramedSource* newDemuxedTrack(u_int32_t& resultTrackNumber); + // Returns a new stream ("FramedSource" subclass) that represents the next media track + // from the file. This function returns NULL when no more media tracks exist. + + FramedSource* newDemuxedTrackByTrackNumber(unsigned trackNumber); + // As above, but creates a new stream for a specific track number within the Matroska file. + // (You should not call this function more than once with the same track number.) + + // Note: We assume that: + // - Every track created by "newDemuxedTrack()" is later read + // - All calls to "newDemuxedTrack()" are made before any track is read + +protected: + friend class OggFile; + friend class OggFileParser; + class OggDemuxedTrack* lookupDemuxedTrack(u_int32_t trackNumber); + + OggDemux(OggFile& ourFile); + virtual ~OggDemux(); + +private: + friend class OggDemuxedTrack; + void removeTrack(u_int32_t trackNumber); + void continueReading(); // called by a demuxed track to tell us that it has a pending read ("doGetNextFrame()") + + static void handleEndOfFile(void* clientData); + void handleEndOfFile(); + +private: + OggFile& fOurFile; + class OggFileParser* fOurParser; + HashTable* fDemuxedTracksTable; + OggTrackTableIterator* fIter; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/OggFileServerDemux.hh b/AnyCore/lib_rtsp/liveMedia/include/OggFileServerDemux.hh new file mode 100644 index 0000000..eb2f62b --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/OggFileServerDemux.hh @@ -0,0 +1,81 @@ +/********** +This library is free software; you can redistribute it and/or modify 
it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A server demultiplexor for an Ogg file +// C++ header + +#ifndef _OGG_FILE_SERVER_DEMUX_HH +#define _OGG_FILE_SERVER_DEMUX_HH + +#ifndef _SERVER_MEDIA_SESSION_HH +#include "ServerMediaSession.hh" +#endif + +#ifndef _OGG_FILE_HH +#include "OggFile.hh" +#endif + +class OggFileServerDemux: public Medium { +public: + typedef void (onCreationFunc)(OggFileServerDemux* newDemux, void* clientData); + static void createNew(UsageEnvironment& env, char const* fileName, + onCreationFunc* onCreation, void* onCreationClientData); + // Note: Unlike most "createNew()" functions, this one doesn't return a new object immediately. Instead, because this class + // requires file reading (to parse the Ogg 'Track' headers) before a new object can be initialized, the creation of a new + // object is signalled by calling - from the event loop - an 'onCreationFunc' that is passed as a parameter to "createNew()". + + ServerMediaSubsession* newServerMediaSubsession(); + ServerMediaSubsession* newServerMediaSubsession(u_int32_t& resultTrackNumber); + // Returns a new "ServerMediaSubsession" object that represents the next media track + // from the file. This function returns NULL when no more media tracks exist. 
+ + ServerMediaSubsession* newServerMediaSubsessionByTrackNumber(u_int32_t trackNumber); + // As above, but creates a new "ServerMediaSubsession" object for a specific track number + // within the Ogg file. + // (You should not call this function more than once with the same track number.) + + // The following public: member functions are called only by the "ServerMediaSubsession" objects: + + OggFile* ourOggFile() { return fOurOggFile; } + char const* fileName() const { return fFileName; } + + FramedSource* newDemuxedTrack(unsigned clientSessionId, u_int32_t trackNumber); + // Used by the "ServerMediaSubsession" objects to implement their "createNewStreamSource()" virtual function. + +private: + OggFileServerDemux(UsageEnvironment& env, char const* fileName, + onCreationFunc* onCreation, void* onCreationClientData); + // called only by createNew() + virtual ~OggFileServerDemux(); + + static void onOggFileCreation(OggFile* newFile, void* clientData); + void onOggFileCreation(OggFile* newFile); +private: + char const* fFileName; + onCreationFunc* fOnCreation; + void* fOnCreationClientData; + OggFile* fOurOggFile; + + // Used to implement "newServerMediaSubsession()": + OggTrackTableIterator* fIter; + + // Used to set up demuxing, to implement "newDemuxedTrack()": + unsigned fLastClientSessionId; + OggDemux* fLastCreatedDemux; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/OggFileSink.hh b/AnyCore/lib_rtsp/liveMedia/include/OggFileSink.hh new file mode 100644 index 0000000..8f5a2b3 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/OggFileSink.hh @@ -0,0 +1,79 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// 'Ogg' File Sink (recording a single media track only) +// C++ header + +#ifndef _OGG_FILE_SINK_HH +#define _OGG_FILE_SINK_HH + +#ifndef _FILE_SINK_HH +#include "FileSink.hh" +#endif + +class OggFileSink: public FileSink { +public: + static OggFileSink* createNew(UsageEnvironment& env, char const* fileName, + unsigned samplingFrequency = 0, // used for granule_position + char const* configStr = NULL, + // "configStr" is an optional 'SDP format' string (Base64-encoded) + // representing 'packed configuration headers' ("identification", "comment", "setup") + // to prepend to the output. (For 'Vorbis" audio and 'Theora' video.) + unsigned bufferSize = 100000, + Boolean oneFilePerFrame = False); + // See "FileSink.hh" for a description of these parameters. 
+ +protected: + OggFileSink(UsageEnvironment& env, FILE* fid, unsigned samplingFrequency, char const* configStr, + unsigned bufferSize, char const* perFrameFileNamePrefix); + // called only by createNew() + virtual ~OggFileSink(); + +protected: // redefined virtual functions: + virtual Boolean continuePlaying(); + virtual void addData(unsigned char const* data, unsigned dataSize, + struct timeval presentationTime); + virtual void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, + struct timeval presentationTime); + +private: + static void ourOnSourceClosure(void* clientData); + void ourOnSourceClosure(); + +private: + unsigned fSamplingFrequency; + char const* fConfigStr; + Boolean fHaveWrittenFirstFrame, fHaveSeenEOF; + struct timeval fFirstPresentationTime; + int64_t fGranulePosition; + int64_t fGranulePositionAdjustment; // used to ensure that "fGranulePosition" stays monotonic + u_int32_t fPageSequenceNumber; + u_int8_t fPageHeaderBytes[27]; + // the header of each Ogg page, through the "number_page_segments" byte + + // Special fields used for Theora video: + Boolean fIsTheora; + u_int64_t fGranuleIncrementPerFrame; // == 1 << KFGSHIFT + + // Because the last Ogg page before EOF needs to have a special 'eos' bit set in the header, + // we need to defer the writing of each incoming frame. 
To do this, we maintain a 2nd buffer: + unsigned char* fAltBuffer; + unsigned fAltFrameSize, fAltNumTruncatedBytes; + struct timeval fAltPresentationTime; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/OnDemandServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/OnDemandServerMediaSubsession.hh new file mode 100644 index 0000000..28d72f5 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/OnDemandServerMediaSubsession.hh @@ -0,0 +1,199 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand. 
+// C++ header + +#ifndef _ON_DEMAND_SERVER_MEDIA_SUBSESSION_HH +#define _ON_DEMAND_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _SERVER_MEDIA_SESSION_HH +#include "ServerMediaSession.hh" +#endif +#ifndef _RTP_SINK_HH +#include "RTPSink.hh" +#endif +#ifndef _BASIC_UDP_SINK_HH +#include "BasicUDPSink.hh" +#endif +#ifndef _RTCP_HH +#include "RTCP.hh" +#endif + +class OnDemandServerMediaSubsession: public ServerMediaSubsession { +protected: // we're a virtual base class + OnDemandServerMediaSubsession(UsageEnvironment& env, Boolean reuseFirstSource, + portNumBits initialPortNum = 6970, + Boolean multiplexRTCPWithRTP = False); + virtual ~OnDemandServerMediaSubsession(); + +protected: // redefined virtual functions + virtual char const* sdpLines(); + virtual void getStreamParameters(unsigned clientSessionId, + netAddressBits clientAddress, + Port const& clientRTPPort, + Port const& clientRTCPPort, + int tcpSocketNum, + unsigned char rtpChannelId, + unsigned char rtcpChannelId, + netAddressBits& destinationAddress, + u_int8_t& destinationTTL, + Boolean& isMulticast, + Port& serverRTPPort, + Port& serverRTCPPort, + void*& streamToken); + virtual void startStream(unsigned clientSessionId, void* streamToken, + TaskFunc* rtcpRRHandler, + void* rtcpRRHandlerClientData, + unsigned short& rtpSeqNum, + unsigned& rtpTimestamp, + ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler, + void* serverRequestAlternativeByteHandlerClientData); + virtual void pauseStream(unsigned clientSessionId, void* streamToken); + virtual void seekStream(unsigned clientSessionId, void* streamToken, double& seekNPT, double streamDuration, u_int64_t& numBytes); + virtual void seekStream(unsigned clientSessionId, void* streamToken, char*& absStart, char*& absEnd); + virtual void nullSeekStream(unsigned clientSessionId, void* streamToken, + double streamEndTime, u_int64_t& numBytes); + virtual void setStreamScale(unsigned clientSessionId, void* streamToken, float scale); + virtual float 
getCurrentNPT(void* streamToken); + virtual FramedSource* getStreamSource(void* streamToken); + virtual void deleteStream(unsigned clientSessionId, void*& streamToken); + +protected: // new virtual functions, possibly redefined by subclasses + virtual char const* getAuxSDPLine(RTPSink* rtpSink, + FramedSource* inputSource); + virtual void seekStreamSource(FramedSource* inputSource, double& seekNPT, double streamDuration, u_int64_t& numBytes); + // This routine is used to seek by relative (i.e., NPT) time. + // "streamDuration", if >0.0, specifies how much data to stream, past "seekNPT". (If <=0.0, all remaining data is streamed.) + // "numBytes" returns the size (in bytes) of the data to be streamed, or 0 if unknown or unlimited. + virtual void seekStreamSource(FramedSource* inputSource, char*& absStart, char*& absEnd); + // This routine is used to seek by 'absolute' time. + // "absStart" should be a string of the form "YYYYMMDDTHHMMSSZ" or "YYYYMMDDTHHMMSS.Z". + // "absEnd" should be either NULL (for no end time), or a string of the same form as "absStart". + // These strings may be modified in-place, or can be reassigned to a newly-allocated value (after delete[]ing the original). 
+ virtual void setStreamSourceScale(FramedSource* inputSource, float scale); + virtual void setStreamSourceDuration(FramedSource* inputSource, double streamDuration, u_int64_t& numBytes); + virtual void closeStreamSource(FramedSource* inputSource); + +protected: // new virtual functions, defined by all subclasses + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate) = 0; + // "estBitrate" is the stream's estimated bitrate, in kbps + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource) = 0; + +public: + void multiplexRTCPWithRTP() { fMultiplexRTCPWithRTP = True; } + // An alternative to passing the "multiplexRTCPWithRTP" parameter as True in the constructor + +private: + void setSDPLinesFromRTPSink(RTPSink* rtpSink, FramedSource* inputSource, + unsigned estBitrate); + // used to implement "sdpLines()" + +protected: + char* fSDPLines; + HashTable* fDestinationsHashTable; // indexed by client session id + +private: + Boolean fReuseFirstSource; + portNumBits fInitialPortNum; + Boolean fMultiplexRTCPWithRTP; + void* fLastStreamToken; + char fCNAME[100]; // for RTCP + friend class StreamState; +}; + + +// A class that represents the state of an ongoing stream. This is used only internally, in the implementation of +// "OnDemandServerMediaSubsession", but we expose the definition here, in case subclasses of "OnDemandServerMediaSubsession" +// want to access it. 
+ +class Destinations { +public: + Destinations(struct in_addr const& destAddr, + Port const& rtpDestPort, + Port const& rtcpDestPort) + : isTCP(False), addr(destAddr), rtpPort(rtpDestPort), rtcpPort(rtcpDestPort) { + } + Destinations(int tcpSockNum, unsigned char rtpChanId, unsigned char rtcpChanId) + : isTCP(True), rtpPort(0) /*dummy*/, rtcpPort(0) /*dummy*/, + tcpSocketNum(tcpSockNum), rtpChannelId(rtpChanId), rtcpChannelId(rtcpChanId) { + } + +public: + Boolean isTCP; + struct in_addr addr; + Port rtpPort; + Port rtcpPort; + int tcpSocketNum; + unsigned char rtpChannelId, rtcpChannelId; +}; + +class StreamState { +public: + StreamState(OnDemandServerMediaSubsession& master, + Port const& serverRTPPort, Port const& serverRTCPPort, + RTPSink* rtpSink, BasicUDPSink* udpSink, + unsigned totalBW, FramedSource* mediaSource, + Groupsock* rtpGS, Groupsock* rtcpGS); + virtual ~StreamState(); + + void startPlaying(Destinations* destinations, + TaskFunc* rtcpRRHandler, void* rtcpRRHandlerClientData, + ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler, + void* serverRequestAlternativeByteHandlerClientData); + void pause(); + void endPlaying(Destinations* destinations); + void reclaim(); + + unsigned& referenceCount() { return fReferenceCount; } + + Port const& serverRTPPort() const { return fServerRTPPort; } + Port const& serverRTCPPort() const { return fServerRTCPPort; } + + RTPSink* rtpSink() const { return fRTPSink; } + + float streamDuration() const { return fStreamDuration; } + + FramedSource* mediaSource() const { return fMediaSource; } + float& startNPT() { return fStartNPT; } + +private: + OnDemandServerMediaSubsession& fMaster; + Boolean fAreCurrentlyPlaying; + unsigned fReferenceCount; + + Port fServerRTPPort, fServerRTCPPort; + + RTPSink* fRTPSink; + BasicUDPSink* fUDPSink; + + float fStreamDuration; + unsigned fTotalBW; + RTCPInstance* fRTCPInstance; + + FramedSource* fMediaSource; + float fStartNPT; // initial 'normal play time'; reset 
after each seek + + Groupsock* fRTPgs; + Groupsock* fRTCPgs; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/OutputFile.hh b/AnyCore/lib_rtsp/liveMedia/include/OutputFile.hh new file mode 100644 index 0000000..caca04a --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/OutputFile.hh @@ -0,0 +1,31 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Common routines for opening/closing named output files +// C++ header + +#ifndef _OUTPUT_FILE_HH +#define _OUTPUT_FILE_HH + +#include +#include + +FILE* OpenOutputFile(UsageEnvironment& env, char const* fileName); + +void CloseOutputFile(FILE* fid); + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/PassiveServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/PassiveServerMediaSubsession.hh new file mode 100644 index 0000000..ec14976 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/PassiveServerMediaSubsession.hh @@ -0,0 +1,82 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that represents an existing +// 'RTPSink', rather than one that creates new 'RTPSink's on demand. +// C++ header + +#ifndef _PASSIVE_SERVER_MEDIA_SUBSESSION_HH +#define _PASSIVE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _SERVER_MEDIA_SESSION_HH +#include "ServerMediaSession.hh" +#endif + +#ifndef _RTP_SINK_HH +#include "RTPSink.hh" +#endif +#ifndef _RTCP_HH +#include "RTCP.hh" +#endif + +class PassiveServerMediaSubsession: public ServerMediaSubsession { +public: + static PassiveServerMediaSubsession* createNew(RTPSink& rtpSink, + RTCPInstance* rtcpInstance = NULL); + +protected: + PassiveServerMediaSubsession(RTPSink& rtpSink, RTCPInstance* rtcpInstance); + // called only by createNew(); + virtual ~PassiveServerMediaSubsession(); + + virtual Boolean rtcpIsMuxed(); + +protected: // redefined virtual functions + virtual char const* sdpLines(); + virtual void getStreamParameters(unsigned clientSessionId, + netAddressBits clientAddress, + Port const& clientRTPPort, + Port const& clientRTCPPort, + int tcpSocketNum, + unsigned char rtpChannelId, + unsigned char rtcpChannelId, + netAddressBits& destinationAddress, + u_int8_t& destinationTTL, + Boolean& isMulticast, + Port& serverRTPPort, + Port& serverRTCPPort, + void*& streamToken); + virtual void startStream(unsigned clientSessionId, void* streamToken, + TaskFunc* rtcpRRHandler, + void* rtcpRRHandlerClientData, + unsigned short& rtpSeqNum, 
+ unsigned& rtpTimestamp, + ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler, + void* serverRequestAlternativeByteHandlerClientData); + virtual float getCurrentNPT(void* streamToken); + virtual void deleteStream(unsigned clientSessionId, void*& streamToken); + +protected: + char* fSDPLines; + +private: + RTPSink& fRTPSink; + RTCPInstance* fRTCPInstance; + HashTable* fClientRTCPSourceRecords; // indexed by client session id; used to implement RTCP "RR" handling +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/ProxyServerMediaSession.hh b/AnyCore/lib_rtsp/liveMedia/include/ProxyServerMediaSession.hh new file mode 100644 index 0000000..9069957 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/ProxyServerMediaSession.hh @@ -0,0 +1,211 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A subclass of "ServerMediaSession" that can be used to create a (unicast) RTSP servers that acts as a 'proxy' for +// another (unicast or multicast) RTSP/RTP stream. 
+// C++ header + +#ifndef _PROXY_SERVER_MEDIA_SESSION_HH +#define _PROXY_SERVER_MEDIA_SESSION_HH + +#ifndef _SERVER_MEDIA_SESSION_HH +#include "ServerMediaSession.hh" +#endif +#ifndef _MEDIA_SESSION_HH +#include "MediaSession.hh" +#endif +#ifndef _RTSP_CLIENT_HH +#include "RTSPClient.hh" +#endif + +// A subclass of "RTSPClient", used to refer to the particular "ProxyServerMediaSession" object being used. +// It is used only within the implementation of "ProxyServerMediaSession", but is defined here, in case developers wish to +// subclass it. + +class ProxyRTSPClient: public RTSPClient { +public: + ProxyRTSPClient(class ProxyServerMediaSession& ourServerMediaSession, char const* rtspURL, + char const* username, char const* password, + portNumBits tunnelOverHTTPPortNum, int verbosityLevel, int socketNumToServer); + virtual ~ProxyRTSPClient(); + + void continueAfterDESCRIBE(char const* sdpDescription); + void continueAfterLivenessCommand(int resultCode, Boolean serverSupportsGetParameter); + void continueAfterSETUP(); + +private: + void reset(); + + Authenticator* auth() { return fOurAuthenticator; } + + void scheduleLivenessCommand(); + static void sendLivenessCommand(void* clientData); + + void scheduleDESCRIBECommand(); + static void sendDESCRIBE(void* clientData); + + static void subsessionTimeout(void* clientData); + void handleSubsessionTimeout(); + +private: + friend class ProxyServerMediaSession; + friend class ProxyServerMediaSubsession; + ProxyServerMediaSession& fOurServerMediaSession; + char* fOurURL; + Authenticator* fOurAuthenticator; + Boolean fStreamRTPOverTCP; + class ProxyServerMediaSubsession *fSetupQueueHead, *fSetupQueueTail; + unsigned fNumSetupsDone; + unsigned fNextDESCRIBEDelay; // in seconds + Boolean fServerSupportsGetParameter, fLastCommandWasPLAY; + TaskToken fLivenessCommandTask, fDESCRIBECommandTask, fSubsessionTimerTask; +}; + + +typedef ProxyRTSPClient* +createNewProxyRTSPClientFunc(ProxyServerMediaSession& ourServerMediaSession, + 
char const* rtspURL, + char const* username, char const* password, + portNumBits tunnelOverHTTPPortNum, int verbosityLevel, + int socketNumToServer); +ProxyRTSPClient* +defaultCreateNewProxyRTSPClientFunc(ProxyServerMediaSession& ourServerMediaSession, + char const* rtspURL, + char const* username, char const* password, + portNumBits tunnelOverHTTPPortNum, int verbosityLevel, + int socketNumToServer); + +class ProxyServerMediaSession: public ServerMediaSession { +public: + static ProxyServerMediaSession* createNew(UsageEnvironment& env, + RTSPServer* ourRTSPServer, // Note: We can be used by just one "RTSPServer" + char const* inputStreamURL, // the "rtsp://" URL of the stream we'll be proxying + char const* streamName = NULL, + char const* username = NULL, char const* password = NULL, + portNumBits tunnelOverHTTPPortNum = 0, + // for streaming the *proxied* (i.e., back-end) stream + int verbosityLevel = 0, + int socketNumToServer = -1); + // Hack: "tunnelOverHTTPPortNum" == 0xFFFF (i.e., all-ones) means: Stream RTP/RTCP-over-TCP, but *not* using HTTP + // "verbosityLevel" == 1 means display basic proxy setup info; "verbosityLevel" == 2 means display RTSP client protocol also. + // If "socketNumToServer" is >= 0, then it is the socket number of an already-existing TCP connection to the server. + // (In this case, "inputStreamURL" must point to the socket's endpoint, so that it can be accessed via the socket.) + + virtual ~ProxyServerMediaSession(); + + char const* url() const; + + char describeCompletedFlag; + // initialized to 0; set to 1 when the back-end "DESCRIBE" completes. + // (This can be used as a 'watch variable' in "doEventLoop()".) + Boolean describeCompletedSuccessfully() const { return fClientMediaSession != NULL; } + // This can be used - along with "describeCompletdFlag" - to check whether the back-end "DESCRIBE" completed *successfully*. 
+ +protected: + ProxyServerMediaSession(UsageEnvironment& env, RTSPServer* ourRTSPServer, + char const* inputStreamURL, char const* streamName, + char const* username, char const* password, + portNumBits tunnelOverHTTPPortNum, int verbosityLevel, + int socketNumToServer, + createNewProxyRTSPClientFunc* ourCreateNewProxyRTSPClientFunc + = defaultCreateNewProxyRTSPClientFunc); + + // If you subclass "ProxyRTSPClient", then you will also need to define your own function + // - with signature "createNewProxyRTSPClientFunc" (see above) - that creates a new object + // of this subclass. You should also subclass "ProxyServerMediaSession" and, in your + // subclass's constructor, initialize the parent class (i.e., "ProxyServerMediaSession") + // constructor by passing your new function as the "ourCreateNewProxyRTSPClientFunc" + // parameter. + +protected: + RTSPServer* fOurRTSPServer; + ProxyRTSPClient* fProxyRTSPClient; + MediaSession* fClientMediaSession; + +private: + friend class ProxyRTSPClient; + friend class ProxyServerMediaSubsession; + void continueAfterDESCRIBE(char const* sdpDescription); + void resetDESCRIBEState(); // undoes what was done by "contineAfterDESCRIBE()" + +private: + int fVerbosityLevel; + class PresentationTimeSessionNormalizer* fPresentationTimeSessionNormalizer; + createNewProxyRTSPClientFunc* fCreateNewProxyRTSPClientFunc; +}; + + +////////// PresentationTimeSessionNormalizer and PresentationTimeSubsessionNormalizer definitions ////////// + +// The following two classes are used by proxies to convert incoming streams' presentation times into wall-clock-aligned +// presentation times that are suitable for our "RTPSink"s (for the corresponding outgoing streams). +// (For multi-subsession (i.e., audio+video) sessions, the outgoing streams' presentation times retain the same relative +// separation as those of the incoming streams.) 
+ +class PresentationTimeSubsessionNormalizer: public FramedFilter { +public: + void setRTPSink(RTPSink* rtpSink) { fRTPSink = rtpSink; } + +private: + friend class PresentationTimeSessionNormalizer; + PresentationTimeSubsessionNormalizer(PresentationTimeSessionNormalizer& parent, FramedSource* inputSource, RTPSource* rtpSource, + char const* codecName, PresentationTimeSubsessionNormalizer* next); + // called only from within "PresentationTimeSessionNormalizer" + virtual ~PresentationTimeSubsessionNormalizer(); + + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + +private: // redefined virtual functions: + virtual void doGetNextFrame(); + +private: + PresentationTimeSessionNormalizer& fParent; + RTPSource* fRTPSource; + RTPSink* fRTPSink; + char const* fCodecName; + PresentationTimeSubsessionNormalizer* fNext; +}; + +class PresentationTimeSessionNormalizer: public Medium { +public: + PresentationTimeSessionNormalizer(UsageEnvironment& env); + virtual ~PresentationTimeSessionNormalizer(); + + PresentationTimeSubsessionNormalizer* + createNewPresentationTimeSubsessionNormalizer(FramedSource* inputSource, RTPSource* rtpSource, char const* codecName); + +private: // called only from within "~PresentationTimeSubsessionNormalizer": + friend class PresentationTimeSubsessionNormalizer; + void normalizePresentationTime(PresentationTimeSubsessionNormalizer* ssNormalizer, + struct timeval& toPT, struct timeval const& fromPT); + void removePresentationTimeSubsessionNormalizer(PresentationTimeSubsessionNormalizer* ssNormalizer); + +private: + PresentationTimeSubsessionNormalizer* fSubsessionNormalizers; + PresentationTimeSubsessionNormalizer* fMasterSSNormalizer; // used for subsessions that have been RTCP-synced + 
+ struct timeval fPTAdjustment; // Added to (RTCP-synced) subsession presentation times to 'normalize' them with wall-clock time. +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/QCELPAudioRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/QCELPAudioRTPSource.hh new file mode 100644 index 0000000..9cd000a --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/QCELPAudioRTPSource.hh @@ -0,0 +1,39 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Qualcomm "PureVoice" (aka. "QCELP") Audio RTP Sources +// C++ header + +#ifndef _QCELP_AUDIO_RTP_SOURCE_HH +#define _QCELP_AUDIO_RTP_SOURCE_HH + +#ifndef _RTP_SOURCE_HH +#include "RTPSource.hh" +#endif + +class QCELPAudioRTPSource { +public: + static FramedSource* createNew(UsageEnvironment& env, + Groupsock* RTPgs, + RTPSource*& resultRTPSource, + unsigned char rtpPayloadFormat = 12, + unsigned rtpTimestampFrequency = 8000); + // This returns a source to read from, but "resultRTPSource" will + // point to RTP-related state. 
+}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/QuickTimeFileSink.hh b/AnyCore/lib_rtsp/liveMedia/include/QuickTimeFileSink.hh new file mode 100644 index 0000000..95c672e --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/QuickTimeFileSink.hh @@ -0,0 +1,188 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A sink that generates a QuickTime file from a composite media session +// C++ header + +#ifndef _QUICKTIME_FILE_SINK_HH +#define _QUICKTIME_FILE_SINK_HH + +#ifndef _MEDIA_SESSION_HH +#include "MediaSession.hh" +#endif + +class QuickTimeFileSink: public Medium { +public: + static QuickTimeFileSink* createNew(UsageEnvironment& env, + MediaSession& inputSession, + char const* outputFileName, + unsigned bufferSize = 20000, + unsigned short movieWidth = 240, + unsigned short movieHeight = 180, + unsigned movieFPS = 15, + Boolean packetLossCompensate = False, + Boolean syncStreams = False, + Boolean generateHintTracks = False, + Boolean generateMP4Format = False); + + typedef void (afterPlayingFunc)(void* clientData); + Boolean startPlaying(afterPlayingFunc* afterFunc, + void* afterClientData); + + unsigned numActiveSubsessions() const { return fNumSubsessions; } + +private: + QuickTimeFileSink(UsageEnvironment& env, MediaSession& inputSession, + char const* outputFileName, unsigned bufferSize, + unsigned short movieWidth, unsigned short movieHeight, + unsigned movieFPS, Boolean packetLossCompensate, + Boolean syncStreams, Boolean generateHintTracks, + Boolean generateMP4Format); + // called only by createNew() + virtual ~QuickTimeFileSink(); + + Boolean continuePlaying(); + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + static void onSourceClosure(void* clientData); + void onSourceClosure1(); + static void onRTCPBye(void* clientData); + void completeOutputFile(); + +private: + friend class SubsessionIOState; + MediaSession& fInputSession; + FILE* fOutFid; + unsigned fBufferSize; + Boolean fPacketLossCompensate; + Boolean fSyncStreams, fGenerateMP4Format; + struct timeval fNewestSyncTime, fFirstDataTime; + Boolean fAreCurrentlyBeingPlayed; + afterPlayingFunc* fAfterFunc; + void* fAfterClientData; + unsigned fAppleCreationTime; + unsigned 
fLargestRTPtimestampFrequency; + unsigned fNumSubsessions, fNumSyncedSubsessions; + struct timeval fStartTime; + Boolean fHaveCompletedOutputFile; + +private: + ///// Definitions specific to the QuickTime file format: + + unsigned addWord64(u_int64_t word); + unsigned addWord(unsigned word); + unsigned addHalfWord(unsigned short halfWord); + unsigned addByte(unsigned char byte) { + putc(byte, fOutFid); + return 1; + } + unsigned addZeroWords(unsigned numWords); + unsigned add4ByteString(char const* str); + unsigned addArbitraryString(char const* str, + Boolean oneByteLength = True); + unsigned addAtomHeader(char const* atomName); + unsigned addAtomHeader64(char const* atomName); + // strlen(atomName) must be 4 + void setWord(int64_t filePosn, unsigned size); + void setWord64(int64_t filePosn, u_int64_t size); + + unsigned movieTimeScale() const {return fLargestRTPtimestampFrequency;} + + // Define member functions for outputting various types of atom: +#define _atom(name) unsigned addAtom_##name() + _atom(ftyp); // for MP4 format files + _atom(moov); + _atom(mvhd); + _atom(iods); // for MP4 format files + _atom(trak); + _atom(tkhd); + _atom(edts); + _atom(elst); + _atom(tref); + _atom(hint); + _atom(mdia); + _atom(mdhd); + _atom(hdlr); + _atom(minf); + _atom(smhd); + _atom(vmhd); + _atom(gmhd); + _atom(gmin); + unsigned addAtom_hdlr2(); + _atom(dinf); + _atom(dref); + _atom(alis); + _atom(stbl); + _atom(stsd); + unsigned addAtom_genericMedia(); + unsigned addAtom_soundMediaGeneral(); + _atom(ulaw); + _atom(alaw); + _atom(Qclp); + _atom(wave); + _atom(frma); + _atom(Fclp); + _atom(Hclp); + _atom(mp4a); +// _atom(wave); +// _atom(frma); + _atom(esds); + _atom(srcq); + _atom(h263); + _atom(avc1); + _atom(avcC); + _atom(mp4v); + _atom(rtp); + _atom(tims); + _atom(stts); + _atom(stss); + _atom(stsc); + _atom(stsz); + _atom(co64); + _atom(udta); + _atom(name); + _atom(hnti); + _atom(sdp); + _atom(hinf); + _atom(totl); + _atom(npck); + _atom(tpay); + _atom(trpy); + 
_atom(nump); + _atom(tpyl); + _atom(dmed); + _atom(dimm); + _atom(drep); + _atom(tmin); + _atom(tmax); + _atom(pmax); + _atom(dmax); + _atom(payt); + unsigned addAtom_dummy(); + +private: + unsigned short fMovieWidth, fMovieHeight; + unsigned fMovieFPS; + int64_t fMDATposition; + int64_t fMVHD_durationPosn; + unsigned fMaxTrackDurationM; // in movie time units + class SubsessionIOState* fCurrentIOState; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/QuickTimeGenericRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/QuickTimeGenericRTPSource.hh new file mode 100644 index 0000000..1d140e0 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/QuickTimeGenericRTPSource.hh @@ -0,0 +1,68 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP Sources containing generic QuickTime stream data, as defined in +// +// C++ header + +#ifndef _QUICKTIME_GENERIC_RTP_SOURCE_HH +#define _QUICKTIME_GENERIC_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +class QuickTimeGenericRTPSource: public MultiFramedRTPSource { +public: + static QuickTimeGenericRTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, unsigned rtpTimestampFrequency, + char const* mimeTypeString); + + // QuickTime-specific information, set from the QuickTime header + // in each packet. This, along with the data following the header, + // is used by receivers. + struct QTState { + char PCK; + unsigned timescale; + char* sdAtom; + unsigned sdAtomSize; + unsigned short width, height; + // later add other state as needed ##### + } qtState; + +protected: + virtual ~QuickTimeGenericRTPSource(); + +private: + QuickTimeGenericRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency, + char const* mimeTypeString); + // called only by createNew() + +private: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; + +private: + char const* fMIMEtypeString; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/RTCP.hh b/AnyCore/lib_rtsp/liveMedia/include/RTCP.hh new file mode 100644 index 0000000..7cf1e88 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/RTCP.hh @@ -0,0 +1,206 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) 
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTCP +// C++ header + +#ifndef _RTCP_HH +#define _RTCP_HH + +#ifndef _RTP_SINK_HH +#include "RTPSink.hh" +#endif +#ifndef _RTP_SOURCE_HH +#include "RTPSource.hh" +#endif + +class SDESItem { +public: + SDESItem(unsigned char tag, unsigned char const* value); + + unsigned char const* data() const {return fData;} + unsigned totalSize() const; + +private: + unsigned char fData[2 + 0xFF]; // first 2 bytes are tag and length +}; + +class RTCPMemberDatabase; // forward + +class RTCPInstance: public Medium { +public: + static RTCPInstance* createNew(UsageEnvironment& env, Groupsock* RTCPgs, + unsigned totSessionBW, /* in kbps */ + unsigned char const* cname, + RTPSink* sink, + RTPSource* source, + Boolean isSSMSource = False); + + static Boolean lookupByName(UsageEnvironment& env, char const* instanceName, + RTCPInstance*& resultInstance); + + unsigned numMembers() const; + unsigned totSessionBW() const { return fTotSessionBW; } + + void setByeHandler(TaskFunc* handlerTask, void* clientData, + Boolean handleActiveParticipantsOnly = True); + // Assigns a handler routine to be called if a "BYE" arrives. + // The handler is called once only; for subsequent "BYE"s, + // "setByeHandler()" would need to be called again. 
+ // If "handleActiveParticipantsOnly" is True, then the handler is called + // only if the SSRC is for a known sender (if we have a "RTPSource"), + // or if the SSRC is for a known receiver (if we have a "RTPSink"). + // This prevents (for example) the handler for a multicast receiver being + // called if some other multicast receiver happens to exit. + // If "handleActiveParticipantsOnly" is False, then the handler is called + // for any incoming RTCP "BYE". + // (To remove an existing "BYE" handler, call "setByeHandler()" again, with a "handlerTask" of NULL.) + void setSRHandler(TaskFunc* handlerTask, void* clientData); + void setRRHandler(TaskFunc* handlerTask, void* clientData); + // Assigns a handler routine to be called if a "SR" or "RR" + // (respectively) arrives. Unlike "setByeHandler()", the handler will + // be called once for each incoming "SR" or "RR". (To turn off handling, + // call the function again with "handlerTask" (and "clientData") as NULL. + void setSpecificRRHandler(netAddressBits fromAddress, Port fromPort, + TaskFunc* handlerTask, void* clientData); + // Like "setRRHandler()", but applies only to "RR" packets that come from + // a specific source address and port. (Note that if both a specific + // and a general "RR" handler function is set, then both will be called.) 
+ void unsetSpecificRRHandler(netAddressBits fromAddress, Port fromPort); // equivalent to setSpecificRRHandler(..., NULL, NULL); + + Groupsock* RTCPgs() const { return fRTCPInterface.gs(); } + + void setStreamSocket(int sockNum, unsigned char streamChannelId); + void addStreamSocket(int sockNum, unsigned char streamChannelId); + void removeStreamSocket(int sockNum, unsigned char streamChannelId) { + fRTCPInterface.removeStreamSocket(sockNum, streamChannelId); + } + // hacks to allow sending RTP over TCP (RFC 2236, section 10.12) + + void setAuxilliaryReadHandler(AuxHandlerFunc* handlerFunc, + void* handlerClientData) { + fRTCPInterface.setAuxilliaryReadHandler(handlerFunc, + handlerClientData); + } + + void injectReport(u_int8_t const* packet, unsigned packetSize, struct sockaddr_in const& fromAddress); + // Allows an outside party to inject an RTCP report (from other than the network interface) + +protected: + RTCPInstance(UsageEnvironment& env, Groupsock* RTPgs, unsigned totSessionBW, + unsigned char const* cname, + RTPSink* sink, RTPSource* source, + Boolean isSSMSource); + // called only by createNew() + virtual ~RTCPInstance(); + +private: + // redefined virtual functions: + virtual Boolean isRTCPInstance() const; + +private: + Boolean addReport(Boolean alwaysAdd = False); + void addSR(); + void addRR(); + void enqueueCommonReportPrefix(unsigned char packetType, u_int32_t SSRC, + unsigned numExtraWords = 0); + void enqueueCommonReportSuffix(); + void enqueueReportBlock(RTPReceptionStats* receptionStats); + void addSDES(); + void addBYE(); + + void sendBuiltPacket(); + + static void onExpire(RTCPInstance* instance); + void onExpire1(); + + static void incomingReportHandler(RTCPInstance* instance, int /*mask*/); + void incomingReportHandler1(); + void processIncomingReport(unsigned packetSize, struct sockaddr_in const& fromAddress, + int tcpSocketNum, unsigned char tcpStreamChannelId); + void onReceive(int typeOfPacket, int totPacketSize, u_int32_t ssrc); + 
+private: + u_int8_t* fInBuf; + unsigned fNumBytesAlreadyRead; + OutPacketBuffer* fOutBuf; + RTPInterface fRTCPInterface; + unsigned fTotSessionBW; + RTPSink* fSink; + RTPSource* fSource; + Boolean fIsSSMSource; + + SDESItem fCNAME; + RTCPMemberDatabase* fKnownMembers; + unsigned fOutgoingReportCount; // used for SSRC member aging + + double fAveRTCPSize; + int fIsInitial; + double fPrevReportTime; + double fNextReportTime; + int fPrevNumMembers; + + int fLastSentSize; + int fLastReceivedSize; + u_int32_t fLastReceivedSSRC; + int fTypeOfEvent; + int fTypeOfPacket; + Boolean fHaveJustSentPacket; + unsigned fLastPacketSentSize; + + TaskFunc* fByeHandlerTask; + void* fByeHandlerClientData; + Boolean fByeHandleActiveParticipantsOnly; + TaskFunc* fSRHandlerTask; + void* fSRHandlerClientData; + TaskFunc* fRRHandlerTask; + void* fRRHandlerClientData; + AddressPortLookupTable* fSpecificRRHandlerTable; + +public: // because this stuff is used by an external "C" function + void schedule(double nextTime); + void reschedule(double nextTime); + void sendReport(); + void sendBYE(); + int typeOfEvent() {return fTypeOfEvent;} + int sentPacketSize() {return fLastSentSize;} + int packetType() {return fTypeOfPacket;} + int receivedPacketSize() {return fLastReceivedSize;} + int checkNewSSRC(); + void removeLastReceivedSSRC(); + void removeSSRC(u_int32_t ssrc, Boolean alsoRemoveStats); +}; + +// RTCP packet types: +const unsigned char RTCP_PT_SR = 200; +const unsigned char RTCP_PT_RR = 201; +const unsigned char RTCP_PT_SDES = 202; +const unsigned char RTCP_PT_BYE = 203; +const unsigned char RTCP_PT_APP = 204; + +// SDES tags: +const unsigned char RTCP_SDES_END = 0; +const unsigned char RTCP_SDES_CNAME = 1; +const unsigned char RTCP_SDES_NAME = 2; +const unsigned char RTCP_SDES_EMAIL = 3; +const unsigned char RTCP_SDES_PHONE = 4; +const unsigned char RTCP_SDES_LOC = 5; +const unsigned char RTCP_SDES_TOOL = 6; +const unsigned char RTCP_SDES_NOTE = 7; +const unsigned char RTCP_SDES_PRIV = 
8; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/RTPInterface.hh b/AnyCore/lib_rtsp/liveMedia/include/RTPInterface.hh new file mode 100644 index 0000000..6a2ae32 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/RTPInterface.hh @@ -0,0 +1,114 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// An abstraction of a network interface used for RTP (or RTCP). +// (This allows the RTP-over-TCP hack (RFC 2326, section 10.12) to +// be implemented transparently.) +// C++ header + +#ifndef _RTP_INTERFACE_HH +#define _RTP_INTERFACE_HH + +#ifndef _MEDIA_HH +#include <Media.hh> +#endif +#ifndef _GROUPSOCK_HH +#include "Groupsock.hh" +#endif + +// Typedef for an optional auxilliary handler function, to be called +// when each new packet is read: +typedef void AuxHandlerFunc(void* clientData, unsigned char* packet, + unsigned& packetSize); + +typedef void ServerRequestAlternativeByteHandler(void* instance, u_int8_t requestByte); +// A hack that allows a handler for RTP/RTCP packets received over TCP to process RTSP commands that may also appear within +// the same TCP connection. 
A RTSP server implementation would supply a function like this - as a parameter to +// "ServerMediaSubsession::startStream()". + +class tcpStreamRecord { +public: + tcpStreamRecord(int streamSocketNum, unsigned char streamChannelId, + tcpStreamRecord* next); + virtual ~tcpStreamRecord(); + +public: + tcpStreamRecord* fNext; + int fStreamSocketNum; + unsigned char fStreamChannelId; +}; + +class RTPInterface { +public: + RTPInterface(Medium* owner, Groupsock* gs); + virtual ~RTPInterface(); + + Groupsock* gs() const { return fGS; } + + void setStreamSocket(int sockNum, unsigned char streamChannelId); + void addStreamSocket(int sockNum, unsigned char streamChannelId); + void removeStreamSocket(int sockNum, unsigned char streamChannelId); + static void setServerRequestAlternativeByteHandler(UsageEnvironment& env, int socketNum, + ServerRequestAlternativeByteHandler* handler, void* clientData); + static void clearServerRequestAlternativeByteHandler(UsageEnvironment& env, int socketNum); + + Boolean sendPacket(unsigned char* packet, unsigned packetSize); + void startNetworkReading(TaskScheduler::BackgroundHandlerProc* + handlerProc); + Boolean handleRead(unsigned char* buffer, unsigned bufferMaxSize, + // out parameters: + unsigned& bytesRead, struct sockaddr_in& fromAddress, + int& tcpSocketNum, unsigned char& tcpStreamChannelId, + Boolean& packetReadWasIncomplete); + // Note: If "tcpSocketNum" < 0, then the packet was received over UDP, and "tcpStreamChannelId" + // is undefined (and irrelevant). + // Otherwise (if "tcpSocketNum" >= 0), the packet was received (interleaved) over TCP, and + // "tcpStreamChannelId" will return the channel id. 
+ + void stopNetworkReading(); + + UsageEnvironment& envir() const { return fOwner->envir(); } + + void setAuxilliaryReadHandler(AuxHandlerFunc* handlerFunc, + void* handlerClientData) { + fAuxReadHandlerFunc = handlerFunc; + fAuxReadHandlerClientData = handlerClientData; + } + +private: + // Helper functions for sending a RTP or RTCP packet over a TCP connection: + Boolean sendRTPorRTCPPacketOverTCP(unsigned char* packet, unsigned packetSize, + int socketNum, unsigned char streamChannelId); + Boolean sendDataOverTCP(int socketNum, u_int8_t const* data, unsigned dataSize, Boolean forceSendToSucceed); + +private: + friend class SocketDescriptor; + Medium* fOwner; + Groupsock* fGS; + tcpStreamRecord* fTCPStreams; // optional, for RTP-over-TCP streaming/receiving + + unsigned short fNextTCPReadSize; + // how much data (if any) is available to be read from the TCP stream + int fNextTCPReadStreamSocketNum; + unsigned char fNextTCPReadStreamChannelId; + TaskScheduler::BackgroundHandlerProc* fReadHandlerProc; // if any + + AuxHandlerFunc* fAuxReadHandlerFunc; + void* fAuxReadHandlerClientData; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/RTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/RTPSink.hh new file mode 100644 index 0000000..af01181 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/RTPSink.hh @@ -0,0 +1,232 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP Sinks +// C++ header + +#ifndef _RTP_SINK_HH +#define _RTP_SINK_HH + +#ifndef _MEDIA_SINK_HH +#include "MediaSink.hh" +#endif +#ifndef _RTP_INTERFACE_HH +#include "RTPInterface.hh" +#endif + +class RTPTransmissionStatsDB; // forward + +class RTPSink: public MediaSink { +public: + static Boolean lookupByName(UsageEnvironment& env, char const* sinkName, + RTPSink*& resultSink); + + // used by RTSP servers: + Groupsock const& groupsockBeingUsed() const { return *(fRTPInterface.gs()); } + Groupsock& groupsockBeingUsed() { return *(fRTPInterface.gs()); } + + unsigned char rtpPayloadType() const { return fRTPPayloadType; } + unsigned rtpTimestampFrequency() const { return fTimestampFrequency; } + void setRTPTimestampFrequency(unsigned freq) { + fTimestampFrequency = freq; + } + char const* rtpPayloadFormatName() const {return fRTPPayloadFormatName;} + + unsigned numChannels() const { return fNumChannels; } + + virtual char const* sdpMediaType() const; // for use in SDP m= lines + virtual char* rtpmapLine() const; // returns a string to be delete[]d + virtual char const* auxSDPLine(); + // optional SDP line (e.g. a=fmtp:...) + + u_int16_t currentSeqNo() const { return fSeqNo; } + u_int32_t presetNextTimestamp(); + // ensures that the next timestamp to be used will correspond to + // the current 'wall clock' time. 
+ + RTPTransmissionStatsDB& transmissionStatsDB() const { + return *fTransmissionStatsDB; + } + + Boolean nextTimestampHasBeenPreset() const { return fNextTimestampHasBeenPreset; } + Boolean& enableRTCPReports() { return fEnableRTCPReports; } + + void getTotalBitrate(unsigned& outNumBytes, double& outElapsedTime); + // returns the number of bytes sent since the last time that we + // were called, and resets the counter. + + struct timeval const& creationTime() const { return fCreationTime; } + struct timeval const& initialPresentationTime() const { return fInitialPresentationTime; } + struct timeval const& mostRecentPresentationTime() const { return fMostRecentPresentationTime; } + void resetPresentationTimes(); + + // Hacks to allow sending RTP over TCP (RFC 2326, section 10.12): + void setStreamSocket(int sockNum, unsigned char streamChannelId) { + fRTPInterface.setStreamSocket(sockNum, streamChannelId); + } + void addStreamSocket(int sockNum, unsigned char streamChannelId) { + fRTPInterface.addStreamSocket(sockNum, streamChannelId); + } + void removeStreamSocket(int sockNum, unsigned char streamChannelId) { + fRTPInterface.removeStreamSocket(sockNum, streamChannelId); + } + unsigned& estimatedBitrate() { return fEstimatedBitrate; } // kbps; usually 0 (i.e., unset) + +protected: + RTPSink(UsageEnvironment& env, + Groupsock* rtpGS, unsigned char rtpPayloadType, + u_int32_t rtpTimestampFrequency, + char const* rtpPayloadFormatName, + unsigned numChannels); + // abstract base class + + virtual ~RTPSink(); + + // used by RTCP: + friend class RTCPInstance; + friend class RTPTransmissionStats; + u_int32_t SSRC() const {return fSSRC;} + // later need a means of changing the SSRC if there's a collision ##### + u_int32_t convertToRTPTimestamp(struct timeval tv); + unsigned packetCount() const {return fPacketCount;} + unsigned octetCount() const {return fOctetCount;} + +protected: + RTPInterface fRTPInterface; + unsigned char fRTPPayloadType; + unsigned fPacketCount, 
fOctetCount, fTotalOctetCount /*incl RTP hdr*/; + struct timeval fTotalOctetCountStartTime, fInitialPresentationTime, fMostRecentPresentationTime; + u_int32_t fCurrentTimestamp; + u_int16_t fSeqNo; + +private: + // redefined virtual functions: + virtual Boolean isRTPSink() const; + +private: + u_int32_t fSSRC, fTimestampBase; + unsigned fTimestampFrequency; + Boolean fNextTimestampHasBeenPreset; + Boolean fEnableRTCPReports; // whether RTCP "SR" reports should be sent for this sink (default: True) + char const* fRTPPayloadFormatName; + unsigned fNumChannels; + struct timeval fCreationTime; + unsigned fEstimatedBitrate; // set on creation if known; otherwise 0 + + RTPTransmissionStatsDB* fTransmissionStatsDB; +}; + + +class RTPTransmissionStats; // forward + +class RTPTransmissionStatsDB { +public: + unsigned numReceivers() const { return fNumReceivers; } + + class Iterator { + public: + Iterator(RTPTransmissionStatsDB& receptionStatsDB); + virtual ~Iterator(); + + RTPTransmissionStats* next(); + // NULL if none + + private: + HashTable::Iterator* fIter; + }; + + // The following is called whenever a RTCP RR packet is received: + void noteIncomingRR(u_int32_t SSRC, struct sockaddr_in const& lastFromAddress, + unsigned lossStats, unsigned lastPacketNumReceived, + unsigned jitter, unsigned lastSRTime, unsigned diffSR_RRTime); + + // The following is called when a RTCP BYE packet is received: + void removeRecord(u_int32_t SSRC); + + RTPTransmissionStats* lookup(u_int32_t SSRC) const; + +private: // constructor and destructor, called only by RTPSink: + friend class RTPSink; + RTPTransmissionStatsDB(RTPSink& rtpSink); + virtual ~RTPTransmissionStatsDB(); + +private: + void add(u_int32_t SSRC, RTPTransmissionStats* stats); + +private: + friend class Iterator; + unsigned fNumReceivers; + RTPSink& fOurRTPSink; + HashTable* fTable; +}; + +class RTPTransmissionStats { +public: + u_int32_t SSRC() const {return fSSRC;} + struct sockaddr_in const& lastFromAddress() const {return 
fLastFromAddress;} + unsigned lastPacketNumReceived() const {return fLastPacketNumReceived;} + unsigned firstPacketNumReported() const {return fFirstPacketNumReported;} + unsigned totNumPacketsLost() const {return fTotNumPacketsLost;} + unsigned jitter() const {return fJitter;} + unsigned lastSRTime() const { return fLastSRTime; } + unsigned diffSR_RRTime() const { return fDiffSR_RRTime; } + unsigned roundTripDelay() const; + // The round-trip delay (in units of 1/65536 seconds) computed from + // the most recently-received RTCP RR packet. + struct timeval timeCreated() const {return fTimeCreated;} + struct timeval lastTimeReceived() const {return fTimeReceived;} + void getTotalOctetCount(u_int32_t& hi, u_int32_t& lo); + void getTotalPacketCount(u_int32_t& hi, u_int32_t& lo); + + // Information which requires at least two RRs to have been received: + unsigned packetsReceivedSinceLastRR() const; + u_int8_t packetLossRatio() const { return fPacketLossRatio; } + // as an 8-bit fixed-point number + int packetsLostBetweenRR() const; + +private: + // called only by RTPTransmissionStatsDB: + friend class RTPTransmissionStatsDB; + RTPTransmissionStats(RTPSink& rtpSink, u_int32_t SSRC); + virtual ~RTPTransmissionStats(); + + void noteIncomingRR(struct sockaddr_in const& lastFromAddress, + unsigned lossStats, unsigned lastPacketNumReceived, + unsigned jitter, + unsigned lastSRTime, unsigned diffSR_RRTime); + +private: + RTPSink& fOurRTPSink; + u_int32_t fSSRC; + struct sockaddr_in fLastFromAddress; + unsigned fLastPacketNumReceived; + u_int8_t fPacketLossRatio; + unsigned fTotNumPacketsLost; + unsigned fJitter; + unsigned fLastSRTime; + unsigned fDiffSR_RRTime; + struct timeval fTimeCreated, fTimeReceived; + Boolean fAtLeastTwoRRsHaveBeenReceived; + unsigned fOldLastPacketNumReceived; + unsigned fOldTotNumPacketsLost; + Boolean fFirstPacket; + unsigned fFirstPacketNumReported; + u_int32_t fLastOctetCount, fTotalOctetCount_hi, fTotalOctetCount_lo; + u_int32_t 
fLastPacketCount, fTotalPacketCount_hi, fTotalPacketCount_lo; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/RTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/RTPSource.hh new file mode 100644 index 0000000..3417a36 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/RTPSource.hh @@ -0,0 +1,266 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP Sources +// C++ header + +#ifndef _RTP_SOURCE_HH +#define _RTP_SOURCE_HH + +#ifndef _FRAMED_SOURCE_HH +#include "FramedSource.hh" +#endif +#ifndef _RTP_INTERFACE_HH +#include "RTPInterface.hh" +#endif + +class RTPReceptionStatsDB; // forward + +class RTPSource: public FramedSource { +public: + static Boolean lookupByName(UsageEnvironment& env, char const* sourceName, + RTPSource*& resultSource); + + Boolean curPacketMarkerBit() const { return fCurPacketMarkerBit; } + + unsigned char rtpPayloadFormat() const { return fRTPPayloadFormat; } + + virtual Boolean hasBeenSynchronizedUsingRTCP(); + + Groupsock* RTPgs() const { return fRTPInterface.gs(); } + + virtual void setPacketReorderingThresholdTime(unsigned uSeconds) = 0; + + // used by RTCP: + u_int32_t SSRC() const { return fSSRC; } + // Note: This is *our* SSRC, not the SSRC in incoming RTP packets. + // later need a means of changing the SSRC if there's a collision ##### + void registerForMultiplexedRTCPPackets(class RTCPInstance* rtcpInstance) { + fRTCPInstanceForMultiplexedRTCPPackets = rtcpInstance; + } + void deregisterForMultiplexedRTCPPackets() { registerForMultiplexedRTCPPackets(NULL); } + + unsigned timestampFrequency() const {return fTimestampFrequency;} + + RTPReceptionStatsDB& receptionStatsDB() const { + return *fReceptionStatsDB; + } + + u_int32_t lastReceivedSSRC() const { return fLastReceivedSSRC; } + // Note: This is the SSRC in the most recently received RTP packet; not *our* SSRC + + Boolean& enableRTCPReports() { return fEnableRTCPReports; } + Boolean const& enableRTCPReports() const { return fEnableRTCPReports; } + + void setStreamSocket(int sockNum, unsigned char streamChannelId) { + // hack to allow sending RTP over TCP (RFC 2326, section 10.12) + fRTPInterface.setStreamSocket(sockNum, streamChannelId); + } + + void setAuxilliaryReadHandler(AuxHandlerFunc* handlerFunc, + void* handlerClientData) { + fRTPInterface.setAuxilliaryReadHandler(handlerFunc, + handlerClientData); + } + + // 
Note that RTP receivers will usually not need to call either of the following two functions, because + // RTP sequence numbers and timestamps are usually not useful to receivers. + // (Our implementation of RTP reception already does all needed handling of RTP sequence numbers and timestamps.) + u_int16_t curPacketRTPSeqNum() const { return fCurPacketRTPSeqNum; } +private: friend class MediaSubsession; // "MediaSubsession" is the only outside class that ever needs to see RTP timestamps! + u_int32_t curPacketRTPTimestamp() const { return fCurPacketRTPTimestamp; } + +protected: + RTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, u_int32_t rtpTimestampFrequency); + // abstract base class + virtual ~RTPSource(); + +protected: + RTPInterface fRTPInterface; + u_int16_t fCurPacketRTPSeqNum; + u_int32_t fCurPacketRTPTimestamp; + Boolean fCurPacketMarkerBit; + Boolean fCurPacketHasBeenSynchronizedUsingRTCP; + u_int32_t fLastReceivedSSRC; + class RTCPInstance* fRTCPInstanceForMultiplexedRTCPPackets; + +private: + // redefined virtual functions: + virtual Boolean isRTPSource() const; + virtual void getAttributes() const; + +private: + unsigned char fRTPPayloadFormat; + unsigned fTimestampFrequency; + u_int32_t fSSRC; + Boolean fEnableRTCPReports; // whether RTCP "RR" reports should be sent for this source (default: True) + + RTPReceptionStatsDB* fReceptionStatsDB; +}; + + +class RTPReceptionStats; // forward + +class RTPReceptionStatsDB { +public: + unsigned totNumPacketsReceived() const { return fTotNumPacketsReceived; } + unsigned numActiveSourcesSinceLastReset() const { + return fNumActiveSourcesSinceLastReset; + } + + void reset(); + // resets periodic stats (called each time they're used to + // generate a reception report) + + class Iterator { + public: + Iterator(RTPReceptionStatsDB& receptionStatsDB); + virtual ~Iterator(); + + RTPReceptionStats* next(Boolean includeInactiveSources = False); + // NULL if none + + private: + 
HashTable::Iterator* fIter; + }; + + // The following is called whenever a RTP packet is received: + void noteIncomingPacket(u_int32_t SSRC, u_int16_t seqNum, + u_int32_t rtpTimestamp, + unsigned timestampFrequency, + Boolean useForJitterCalculation, + struct timeval& resultPresentationTime, + Boolean& resultHasBeenSyncedUsingRTCP, + unsigned packetSize /* payload only */); + + // The following is called whenever a RTCP SR packet is received: + void noteIncomingSR(u_int32_t SSRC, + u_int32_t ntpTimestampMSW, u_int32_t ntpTimestampLSW, + u_int32_t rtpTimestamp); + + // The following is called when a RTCP BYE packet is received: + void removeRecord(u_int32_t SSRC); + + RTPReceptionStats* lookup(u_int32_t SSRC) const; + +protected: // constructor and destructor, called only by RTPSource: + friend class RTPSource; + RTPReceptionStatsDB(); + virtual ~RTPReceptionStatsDB(); + +protected: + void add(u_int32_t SSRC, RTPReceptionStats* stats); + +protected: + friend class Iterator; + unsigned fNumActiveSourcesSinceLastReset; + +private: + HashTable* fTable; + unsigned fTotNumPacketsReceived; // for all SSRCs +}; + +class RTPReceptionStats { +public: + u_int32_t SSRC() const { return fSSRC; } + unsigned numPacketsReceivedSinceLastReset() const { + return fNumPacketsReceivedSinceLastReset; + } + unsigned totNumPacketsReceived() const { return fTotNumPacketsReceived; } + double totNumKBytesReceived() const; + + unsigned totNumPacketsExpected() const { + return (fHighestExtSeqNumReceived - fBaseExtSeqNumReceived) + 1; + } + + unsigned baseExtSeqNumReceived() const { return fBaseExtSeqNumReceived; } + unsigned lastResetExtSeqNumReceived() const { + return fLastResetExtSeqNumReceived; + } + unsigned highestExtSeqNumReceived() const { + return fHighestExtSeqNumReceived; + } + + unsigned jitter() const; + + unsigned lastReceivedSR_NTPmsw() const { return fLastReceivedSR_NTPmsw; } + unsigned lastReceivedSR_NTPlsw() const { return fLastReceivedSR_NTPlsw; } + struct timeval const& 
lastReceivedSR_time() const { + return fLastReceivedSR_time; + } + + unsigned minInterPacketGapUS() const { return fMinInterPacketGapUS; } + unsigned maxInterPacketGapUS() const { return fMaxInterPacketGapUS; } + struct timeval const& totalInterPacketGaps() const { + return fTotalInterPacketGaps; + } + +protected: + // called only by RTPReceptionStatsDB: + friend class RTPReceptionStatsDB; + RTPReceptionStats(u_int32_t SSRC, u_int16_t initialSeqNum); + RTPReceptionStats(u_int32_t SSRC); + virtual ~RTPReceptionStats(); + +private: + void noteIncomingPacket(u_int16_t seqNum, u_int32_t rtpTimestamp, + unsigned timestampFrequency, + Boolean useForJitterCalculation, + struct timeval& resultPresentationTime, + Boolean& resultHasBeenSyncedUsingRTCP, + unsigned packetSize /* payload only */); + void noteIncomingSR(u_int32_t ntpTimestampMSW, u_int32_t ntpTimestampLSW, + u_int32_t rtpTimestamp); + void init(u_int32_t SSRC); + void initSeqNum(u_int16_t initialSeqNum); + void reset(); + // resets periodic stats (called each time they're used to + // generate a reception report) + +protected: + u_int32_t fSSRC; + unsigned fNumPacketsReceivedSinceLastReset; + unsigned fTotNumPacketsReceived; + u_int32_t fTotBytesReceived_hi, fTotBytesReceived_lo; + Boolean fHaveSeenInitialSequenceNumber; + unsigned fBaseExtSeqNumReceived; + unsigned fLastResetExtSeqNumReceived; + unsigned fHighestExtSeqNumReceived; + int fLastTransit; // used in the jitter calculation + u_int32_t fPreviousPacketRTPTimestamp; + double fJitter; + // The following are recorded whenever we receive a RTCP SR for this SSRC: + unsigned fLastReceivedSR_NTPmsw; // NTP timestamp (from SR), most-signif + unsigned fLastReceivedSR_NTPlsw; // NTP timestamp (from SR), least-signif + struct timeval fLastReceivedSR_time; + struct timeval fLastPacketReceptionTime; + unsigned fMinInterPacketGapUS, fMaxInterPacketGapUS; + struct timeval fTotalInterPacketGaps; + +private: + // Used to convert from RTP timestamp to 'wall clock' time: 
+ Boolean fHasBeenSynchronized; + u_int32_t fSyncTimestamp; + struct timeval fSyncTime; +}; + + +Boolean seqNumLT(u_int16_t s1, u_int16_t s2); + // a 'less-than' on 16-bit sequence numbers + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/RTSPClient.hh b/AnyCore/lib_rtsp/liveMedia/include/RTSPClient.hh new file mode 100644 index 0000000..52931ba --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/RTSPClient.hh @@ -0,0 +1,369 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A generic RTSP client - for a single "rtsp://" URL +// C++ header + +#ifndef _RTSP_CLIENT_HH +#define _RTSP_CLIENT_HH + +#ifndef _MEDIA_SESSION_HH +#include "MediaSession.hh" +#endif +#ifndef _NET_ADDRESS_HH +#include "NetAddress.hh" +#endif +#ifndef _DIGEST_AUTHENTICATION_HH +#include "DigestAuthentication.hh" +#endif +#ifndef _RTSP_SERVER_HH +#include "RTSPServer.hh" // For the optional "HandlerForREGISTERCommand" mini-server +#endif + +class RTSPClient: public Medium { +public: + static RTSPClient* createNew(UsageEnvironment& env, char const* rtspURL, + int verbosityLevel = 0, + char const* applicationName = NULL, + portNumBits tunnelOverHTTPPortNum = 0, + int socketNumToServer = -1); + // If "tunnelOverHTTPPortNum" is non-zero, we tunnel RTSP (and RTP) + // over a HTTP connection with the given port number, using the technique + // described in Apple's document + // If "socketNumToServer" is >= 0, then it is the socket number of an already-existing TCP connection to the server. + // (In this case, "rtspURL" must point to the socket's endpoint, so that it can be accessed via the socket.) + + typedef void (responseHandler)(RTSPClient* rtspClient, + int resultCode, char* resultString); + // A function that is called in response to a RTSP command. The parameters are as follows: + // "rtspClient": The "RTSPClient" object on which the original command was issued. + // "resultCode": If zero, then the command completed successfully. If non-zero, then the command did not complete + // successfully, and "resultCode" indicates the error, as follows: + // A positive "resultCode" is a RTSP error code (for example, 404 means "not found") + // A negative "resultCode" indicates a socket/network error; 0-"resultCode" is the standard "errno" code. + // "resultString": A ('\0'-terminated) string returned along with the response, or else NULL. + // In particular: + // "resultString" for a successful "DESCRIBE" command will be the media session's SDP description. 
+ // "resultString" for a successful "OPTIONS" command will be a list of allowed commands. + // Note that this string can be present (i.e., not NULL) even if "resultCode" is non-zero - i.e., an error message. + // Also, "resultString" can be NULL, even if "resultCode" is zero (e.g., if the RTSP command succeeded, but without + // including an appropriate result header). + // Note also that this string is dynamically allocated, and must be freed by the handler (or the caller) + // - using "delete[]". + + unsigned sendDescribeCommand(responseHandler* responseHandler, Authenticator* authenticator = NULL); + // Issues a RTSP "DESCRIBE" command, then returns the "CSeq" sequence number that was used in the command. + // The (programmer-supplied) "responseHandler" function is called later to handle the response + // (or is called immediately - with an error code - if the command cannot be sent). + // "authenticator" (optional) is used for access control. If you have username and password strings, you can use this by + // passing an actual parameter that you created by creating an "Authenticator(username, password) object". + // (Note that if you supply a non-NULL "authenticator" parameter, you need do this only for the first command you send.) + + unsigned sendOptionsCommand(responseHandler* responseHandler, Authenticator* authenticator = NULL); + // Issues a RTSP "OPTIONS" command, then returns the "CSeq" sequence number that was used in the command. + // (The "responseHandler" and "authenticator" parameters are as described for "sendDescribeCommand".) + + unsigned sendAnnounceCommand(char const* sdpDescription, responseHandler* responseHandler, Authenticator* authenticator = NULL); + // Issues a RTSP "ANNOUNCE" command (with "sdpDescription" as parameter), + // then returns the "CSeq" sequence number that was used in the command. + // (The "responseHandler" and "authenticator" parameters are as described for "sendDescribeCommand".) 
+ + unsigned sendSetupCommand(MediaSubsession& subsession, responseHandler* responseHandler, + Boolean streamOutgoing = False, + Boolean streamUsingTCP = False, + Boolean forceMulticastOnUnspecified = False, + Authenticator* authenticator = NULL); + // Issues a RTSP "SETUP" command, then returns the "CSeq" sequence number that was used in the command. + // (The "responseHandler" and "authenticator" parameters are as described for "sendDescribeCommand".) + + unsigned sendPlayCommand(MediaSession& session, responseHandler* responseHandler, + double start = 0.0f, double end = -1.0f, float scale = 1.0f, + Authenticator* authenticator = NULL); + // Issues an aggregate RTSP "PLAY" command on "session", then returns the "CSeq" sequence number that was used in the command. + // (Note: start=-1 means 'resume'; end=-1 means 'play to end') + // (The "responseHandler" and "authenticator" parameters are as described for "sendDescribeCommand".) + unsigned sendPlayCommand(MediaSubsession& subsession, responseHandler* responseHandler, + double start = 0.0f, double end = -1.0f, float scale = 1.0f, + Authenticator* authenticator = NULL); + // Issues a RTSP "PLAY" command on "subsession", then returns the "CSeq" sequence number that was used in the command. + // (Note: start=-1 means 'resume'; end=-1 means 'play to end') + // (The "responseHandler" and "authenticator" parameters are as described for "sendDescribeCommand".) 
+ + // Alternative forms of "sendPlayCommand()", used to send "PLAY" commands that include an 'absolute' time range: + // (The "absStartTime" string (and "absEndTime" string, if present) *must* be of the form + // "YYYYMMDDTHHMMSSZ" or "YYYYMMDDTHHMMSS.Z") + unsigned sendPlayCommand(MediaSession& session, responseHandler* responseHandler, + char const* absStartTime, char const* absEndTime = NULL, float scale = 1.0f, + Authenticator* authenticator = NULL); + unsigned sendPlayCommand(MediaSubsession& subsession, responseHandler* responseHandler, + char const* absStartTime, char const* absEndTime = NULL, float scale = 1.0f, + Authenticator* authenticator = NULL); + + unsigned sendPauseCommand(MediaSession& session, responseHandler* responseHandler, Authenticator* authenticator = NULL); + // Issues an aggregate RTSP "PAUSE" command on "session", then returns the "CSeq" sequence number that was used in the command. + // (The "responseHandler" and "authenticator" parameters are as described for "sendDescribeCommand".) + unsigned sendPauseCommand(MediaSubsession& subsession, responseHandler* responseHandler, Authenticator* authenticator = NULL); + // Issues a RTSP "PAUSE" command on "subsession", then returns the "CSeq" sequence number that was used in the command. + // (The "responseHandler" and "authenticator" parameters are as described for "sendDescribeCommand".) + + unsigned sendRecordCommand(MediaSession& session, responseHandler* responseHandler, Authenticator* authenticator = NULL); + // Issues an aggregate RTSP "RECORD" command on "session", then returns the "CSeq" sequence number that was used in the command. + // (The "responseHandler" and "authenticator" parameters are as described for "sendDescribeCommand".) + unsigned sendRecordCommand(MediaSubsession& subsession, responseHandler* responseHandler, Authenticator* authenticator = NULL); + // Issues a RTSP "RECORD" command on "subsession", then returns the "CSeq" sequence number that was used in the command. 
+ // (The "responseHandler" and "authenticator" parameters are as described for "sendDescribeCommand".) + + unsigned sendTeardownCommand(MediaSession& session, responseHandler* responseHandler, Authenticator* authenticator = NULL); + // Issues an aggregate RTSP "TEARDOWN" command on "session", then returns the "CSeq" sequence number that was used in the command. + // (The "responseHandler" and "authenticator" parameters are as described for "sendDescribeCommand".) + unsigned sendTeardownCommand(MediaSubsession& subsession, responseHandler* responseHandler, Authenticator* authenticator = NULL); + // Issues a RTSP "TEARDOWN" command on "subsession", then returns the "CSeq" sequence number that was used in the command. + // (The "responseHandler" and "authenticator" parameters are as described for "sendDescribeCommand".) + + unsigned sendSetParameterCommand(MediaSession& session, responseHandler* responseHandler, + char const* parameterName, char const* parameterValue, + Authenticator* authenticator = NULL); + // Issues an aggregate RTSP "SET_PARAMETER" command on "session", then returns the "CSeq" sequence number that was used in the command. + // (The "responseHandler" and "authenticator" parameters are as described for "sendDescribeCommand".) + + unsigned sendGetParameterCommand(MediaSession& session, responseHandler* responseHandler, char const* parameterName, + Authenticator* authenticator = NULL); + // Issues an aggregate RTSP "GET_PARAMETER" command on "session", then returns the "CSeq" sequence number that was used in the command. + // (The "responseHandler" and "authenticator" parameters are as described for "sendDescribeCommand".) 
+ // Parses "url" as "rtsp://[<username>[:<password>]@]<server-address-or-name>[:<port>][/<stream-name>]"
+ + void setUserAgentString(char const* userAgentName); + // sets an alternative string to be used in RTSP "User-Agent:" headers + + unsigned sessionTimeoutParameter() const { return fSessionTimeoutParameter; } + + char const* url() const { return fBaseURL; } + + static unsigned responseBufferSize; + +public: // Some compilers complain if this is "private:" + // The state of a request-in-progress: + class RequestRecord { + public: + RequestRecord(unsigned cseq, char const* commandName, responseHandler* handler, + MediaSession* session = NULL, MediaSubsession* subsession = NULL, u_int32_t booleanFlags = 0, + double start = 0.0f, double end = -1.0f, float scale = 1.0f, char const* contentStr = NULL); + RequestRecord(unsigned cseq, responseHandler* handler, + char const* absStartTime, char const* absEndTime = NULL, float scale = 1.0f, + MediaSession* session = NULL, MediaSubsession* subsession = NULL); + // alternative constructor for creating "PLAY" requests that include 'absolute' time values + virtual ~RequestRecord(); + + RequestRecord*& next() { return fNext; } + unsigned& cseq() { return fCSeq; } + char const* commandName() const { return fCommandName; } + MediaSession* session() const { return fSession; } + MediaSubsession* subsession() const { return fSubsession; } + u_int32_t booleanFlags() const { return fBooleanFlags; } + double start() const { return fStart; } + double end() const { return fEnd; } + char const* absStartTime() const { return fAbsStartTime; } + char const* absEndTime() const { return fAbsEndTime; } + float scale() const { return fScale; } + char* contentStr() const { return fContentStr; } + responseHandler*& handler() { return fHandler; } + + private: + RequestRecord* fNext; + unsigned fCSeq; + char const* fCommandName; + MediaSession* fSession; + MediaSubsession* fSubsession; + u_int32_t fBooleanFlags; + double fStart, fEnd; + char *fAbsStartTime, *fAbsEndTime; // used for optional 'absolute' (i.e., "time=") range specifications + float 
fScale; + char* fContentStr; + responseHandler* fHandler; + }; + +protected: + RTSPClient(UsageEnvironment& env, char const* rtspURL, + int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum, int socketNumToServer); + // called only by createNew(); + virtual ~RTSPClient(); + + void reset(); + void setBaseURL(char const* url); + int grabSocket(); // allows a subclass to reuse our input socket, so that it won't get closed when we're deleted + virtual unsigned sendRequest(RequestRecord* request); + virtual Boolean setRequestFields(RequestRecord* request, + char*& cmdURL, Boolean& cmdURLWasAllocated, + char const*& protocolStr, + char*& extraHeaders, Boolean& extraHeadersWereAllocated); + // used to implement "sendRequest()"; subclasses may reimplement this (e.g., when implementing a new command name) + +private: // redefined virtual functions + virtual Boolean isRTSPClient() const; + +private: + class RequestQueue { + public: + RequestQueue(); + RequestQueue(RequestQueue& origQueue); // moves the queue contents to the new queue + virtual ~RequestQueue(); + + void enqueue(RequestRecord* request); // "request" must not be NULL + RequestRecord* dequeue(); + void putAtHead(RequestRecord* request); // "request" must not be NULL + RequestRecord* findByCSeq(unsigned cseq); + Boolean isEmpty() const { return fHead == NULL; } + + private: + RequestRecord* fHead; + RequestRecord* fTail; + }; + + void resetTCPSockets(); + void resetResponseBuffer(); + int openConnection(); // -1: failure; 0: pending; 1: success + int connectToServer(int socketNum, portNumBits remotePortNum); // used to implement "openConnection()"; result values are the same + char* createAuthenticatorString(char const* cmd, char const* url); + void handleRequestError(RequestRecord* request); + Boolean parseResponseCode(char const* line, unsigned& responseCode, char const*& responseString); + void handleIncomingRequest(); + static Boolean checkForHeader(char const* line, char const* 
headerName, unsigned headerNameLength, char const*& headerParams); + Boolean parseTransportParams(char const* paramsStr, + char*& serverAddressStr, portNumBits& serverPortNum, + unsigned char& rtpChannelId, unsigned char& rtcpChannelId); + Boolean parseScaleParam(char const* paramStr, float& scale); + Boolean parseRTPInfoParams(char const*& paramStr, u_int16_t& seqNum, u_int32_t& timestamp); + Boolean handleSETUPResponse(MediaSubsession& subsession, char const* sessionParamsStr, char const* transportParamsStr, + Boolean streamUsingTCP); + Boolean handlePLAYResponse(MediaSession& session, MediaSubsession& subsession, + char const* scaleParamsStr, char const* rangeParamsStr, char const* rtpInfoParamsStr); + Boolean handleTEARDOWNResponse(MediaSession& session, MediaSubsession& subsession); + Boolean handleGET_PARAMETERResponse(char const* parameterName, char*& resultValueString); + Boolean handleAuthenticationFailure(char const* wwwAuthenticateParamsStr); + Boolean resendCommand(RequestRecord* request); + char const* sessionURL(MediaSession const& session) const; + static void handleAlternativeRequestByte(void*, u_int8_t requestByte); + void handleAlternativeRequestByte1(u_int8_t requestByte); + void constructSubsessionURL(MediaSubsession const& subsession, + char const*& prefix, + char const*& separator, + char const*& suffix); + + // Support for tunneling RTSP-over-HTTP: + Boolean setupHTTPTunneling1(); // send the HTTP "GET" + static void responseHandlerForHTTP_GET(RTSPClient* rtspClient, int responseCode, char* responseString); + void responseHandlerForHTTP_GET1(int responseCode, char* responseString); + Boolean setupHTTPTunneling2(); // send the HTTP "POST" + + // Support for asynchronous connections to the server: + static void connectionHandler(void*, int /*mask*/); + void connectionHandler1(); + + // Support for handling data sent back by a server: + static void incomingDataHandler(void*, int /*mask*/); + void incomingDataHandler1(); + void 
handleResponseBytes(int newBytesRead); + +protected: + int fVerbosityLevel; + unsigned fCSeq; // sequence number, used in consecutive requests + Authenticator fCurrentAuthenticator; + netAddressBits fServerAddress; + +private: + portNumBits fTunnelOverHTTPPortNum; + char* fUserAgentHeaderStr; + unsigned fUserAgentHeaderStrLen; + int fInputSocketNum, fOutputSocketNum; + char* fBaseURL; + unsigned char fTCPStreamIdCount; // used for (optional) RTP/TCP + char* fLastSessionId; + unsigned fSessionTimeoutParameter; // optionally set in response "Session:" headers + char* fResponseBuffer; + unsigned fResponseBytesAlreadySeen, fResponseBufferBytesLeft; + RequestQueue fRequestsAwaitingConnection, fRequestsAwaitingHTTPTunneling, fRequestsAwaitingResponse; + + // Support for tunneling RTSP-over-HTTP: + char fSessionCookie[33]; + unsigned fSessionCookieCounter; + Boolean fHTTPTunnelingConnectionIsPending; +}; + + +////////// HandlerServerForREGISTERCommand ///////// + +// A simple server that creates a new "RTSPClient" object whenever a "REGISTER" request arrives (specifying the "rtsp://" URL +// of a stream). The new "RTSPClient" object will be created with the specified URL, and passed to the provided handler function. + +typedef void onRTSPClientCreationFunc(RTSPClient* newRTSPClient, Boolean requestStreamingOverTCP); + +class HandlerServerForREGISTERCommand: public RTSPServer { +public: + static HandlerServerForREGISTERCommand* createNew(UsageEnvironment& env, onRTSPClientCreationFunc* creationFunc, + Port ourPort = 0, UserAuthenticationDatabase* authDatabase = NULL, + int verbosityLevel = 0, char const* applicationName = NULL); + // If ourPort.num() == 0, we'll choose the port number ourself. (Use the following function to get it.) 
+option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
+#include <Media.hh> // includes some definitions perhaps needed for Borland compilers?
+option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
+// C++ header + +#ifndef _RTSP_REGISTER_SENDER_HH +#define _RTSP_REGISTER_SENDER_HH + +#ifndef _RTSP_CLIENT_HH +#include "RTSPClient.hh" +#endif + +class RTSPRegisterSender: public RTSPClient { +public: + static RTSPRegisterSender* + createNew(UsageEnvironment& env, + char const* remoteClientNameOrAddress, portNumBits remoteClientPortNum, char const* rtspURLToRegister, + RTSPClient::responseHandler* rtspResponseHandler, Authenticator* authenticator = NULL, + Boolean requestStreamingViaTCP = False, char const* proxyURLSuffix = NULL, Boolean reuseConnection = False, + int verbosityLevel = 0, char const* applicationName = NULL); + + void grabConnection(int& sock, struct sockaddr_in& remoteAddress); // so that the socket doesn't get closed when we're deleted + +protected: + RTSPRegisterSender(UsageEnvironment& env, + char const* remoteClientNameOrAddress, portNumBits remoteClientPortNum, char const* rtspURLToRegister, + RTSPClient::responseHandler* rtspResponseHandler, Authenticator* authenticator, + Boolean requestStreamingViaTCP, char const* proxyURLSuffix, Boolean reuseConnection, + int verbosityLevel, char const* applicationName); + // called only by "createNew()" + virtual ~RTSPRegisterSender(); + + // Redefined virtual functions: + virtual Boolean setRequestFields(RequestRecord* request, + char*& cmdURL, Boolean& cmdURLWasAllocated, + char const*& protocolStr, + char*& extraHeaders, Boolean& extraHeadersWereAllocated); + +public: // Some compilers complain if this is "protected:" + // A subclass of "RTSPClient::RequestRecord", specific to our "REGISTER" command: + class RequestRecord_REGISTER: public RTSPClient::RequestRecord { + public: + RequestRecord_REGISTER(unsigned cseq, RTSPClient::responseHandler* rtspResponseHandler, char const* rtspURLToRegister, + Boolean reuseConnection, Boolean requestStreamingViaTCP, char const* proxyURLSuffix); + virtual ~RequestRecord_REGISTER(); + + char const* rtspURLToRegister() const { return fRTSPURLToRegister; } + Boolean 
+option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
+#include <NetAddress.hh>
+ + static Boolean lookupByName(UsageEnvironment& env, char const* name, + RTSPServer*& resultServer); + + void addServerMediaSession(ServerMediaSession* serverMediaSession); + + virtual ServerMediaSession* + lookupServerMediaSession(char const* streamName, Boolean isFirstLookupInSession = True); + + void removeServerMediaSession(ServerMediaSession* serverMediaSession); + // Removes the "ServerMediaSession" object from our lookup table, so it will no longer be accessible by new RTSP clients. + // (However, any *existing* RTSP client sessions that use this "ServerMediaSession" object will continue streaming. + // The "ServerMediaSession" object will not get deleted until all of these RTSP client sessions have closed.) + // (To both delete the "ServerMediaSession" object *and* close all RTSP client sessions that use it, + // call "deleteServerMediaSession(serverMediaSession)" instead.) + void removeServerMediaSession(char const* streamName); + // ditto + + void closeAllClientSessionsForServerMediaSession(ServerMediaSession* serverMediaSession); + // Closes (from the server) all RTSP client sessions that are currently using this "ServerMediaSession" object. + // Note, however, that the "ServerMediaSession" object remains accessible by new RTSP clients. 
+ // 'Register' the stream represented by "serverMediaSession" with the given remote client (specified by name and port number).
+ // It tells the proxy server the suffix that it should use in its "rtsp://" URL (when front-end clients access the stream) + + char* rtspURL(ServerMediaSession const* serverMediaSession, int clientSocket = -1) const; + // returns a "rtsp://" URL that could be used to access the + // specified session (which must already have been added to + // us using "addServerMediaSession()". + // This string is dynamically allocated; caller should delete[] + // (If "clientSocket" is non-negative, then it is used (by calling "getsockname()") to determine + // the IP address to be used in the URL.) + char* rtspURLPrefix(int clientSocket = -1) const; + // like "rtspURL()", except that it returns just the common prefix used by + // each session's "rtsp://" URL. + // This string is dynamically allocated; caller should delete[] + + UserAuthenticationDatabase* setAuthenticationDatabase(UserAuthenticationDatabase* newDB); + // Changes the server's authentication database to "newDB", returning a pointer to the old database (if there was one). + // "newDB" may be NULL (you can use this to disable authentication at runtime, if desired). + + void disableStreamingRTPOverTCP() { + fAllowStreamingRTPOverTCP = False; + } + + Boolean setUpTunnelingOverHTTP(Port httpPort); + // (Attempts to) enable RTSP-over-HTTP tunneling on the specified port. + // Returns True iff the specified port can be used in this way (i.e., it's not already being used for a separate HTTP server). + // Note: RTSP-over-HTTP tunneling is described in http://developer.apple.com/quicktime/icefloe/dispatch028.html + portNumBits httpServerPortNum() const; // in host byte order. (Returns 0 if not present.) 
+ +protected: + RTSPServer(UsageEnvironment& env, + int ourSocket, Port ourPort, + UserAuthenticationDatabase* authDatabase, + unsigned reclamationTestSeconds); + // called only by createNew(); + virtual ~RTSPServer(); + + static int setUpOurSocket(UsageEnvironment& env, Port& ourPort); + + virtual char const* allowedCommandNames(); // used to implement "RTSPClientConnection::handleCmd_OPTIONS()" + virtual Boolean weImplementREGISTER(char const* proxyURLSuffix, char*& responseStr); + // used to implement "RTSPClientConnection::handleCmd_REGISTER()" + // Note: "responseStr" is dynamically allocated (or NULL), and should be delete[]d after the call + virtual void implementCmd_REGISTER(char const* url, char const* urlSuffix, int socketToRemoteServer, + Boolean deliverViaTCP, char const* proxyURLSuffix); + // used to implement "RTSPClientConnection::handleCmd_REGISTER()" + + virtual UserAuthenticationDatabase* getAuthenticationDatabaseForCommand(char const* cmdName); + virtual Boolean specialClientAccessCheck(int clientSocket, struct sockaddr_in& clientAddr, + char const* urlSuffix); + // a hook that allows subclassed servers to do server-specific access checking + // on each client (e.g., based on client IP address), without using digest authentication. + virtual Boolean specialClientUserAccessCheck(int clientSocket, struct sockaddr_in& clientAddr, + char const* urlSuffix, char const *username); + // another hook that allows subclassed servers to do server-specific access checking + // - this time after normal digest authentication has already taken place (and would otherwise allow access). + // (This test can only be used to further restrict access, not to grant additional access.) 
+ +private: // redefined virtual functions + virtual Boolean isRTSPServer() const; + +public: // should be protected, but some old compilers complain otherwise + class RTSPClientSession; // forward + // The state of a TCP connection used by a RTSP client: + class RTSPClientConnection { + public: + // A data structure that's used to implement the "REGISTER" command: + class ParamsForREGISTER { + public: + ParamsForREGISTER(RTSPClientConnection* ourConnection, char const* url, char const* urlSuffix, + Boolean reuseConnection, Boolean deliverViaTCP, char const* proxyURLSuffix); + virtual ~ParamsForREGISTER(); + private: + friend class RTSPClientConnection; + RTSPClientConnection* fOurConnection; + char* fURL; + char* fURLSuffix; + Boolean fReuseConnection, fDeliverViaTCP; + char* fProxyURLSuffix; + }; + protected: + RTSPClientConnection(RTSPServer& ourServer, int clientSocket, struct sockaddr_in clientAddr); + virtual ~RTSPClientConnection(); + + friend class RTSPServer; + friend class RTSPClientSession; + // Make the handler functions for each command virtual, to allow subclasses to reimplement them, if necessary: + virtual void handleCmd_OPTIONS(); + // You probably won't need to subclass/reimplement this function; reimplement "RTSPServer::allowedCommandNames()" instead. + virtual void handleCmd_GET_PARAMETER(char const* fullRequestStr); // when operating on the entire server + virtual void handleCmd_SET_PARAMETER(char const* fullRequestStr); // when operating on the entire server + virtual void handleCmd_DESCRIBE(char const* urlPreSuffix, char const* urlSuffix, char const* fullRequestStr); + virtual void handleCmd_REGISTER(char const* url, char const* urlSuffix, char const* fullRequestStr, + Boolean reuseConnection, Boolean deliverViaTCP, char const* proxyURLSuffix); + // You probably won't need to subclass/reimplement this function; + // reimplement "RTSPServer::weImplementREGISTER()" and "RTSPServer::implementCmd_REGISTER()" instead. 
+ virtual void handleCmd_bad(); + virtual void handleCmd_notSupported(); + virtual void handleCmd_notFound(); + virtual void handleCmd_sessionNotFound(); + virtual void handleCmd_unsupportedTransport(); + // Support for optional RTSP-over-HTTP tunneling: + virtual Boolean parseHTTPRequestString(char* resultCmdName, unsigned resultCmdNameMaxSize, + char* urlSuffix, unsigned urlSuffixMaxSize, + char* sessionCookie, unsigned sessionCookieMaxSize, + char* acceptStr, unsigned acceptStrMaxSize); + virtual void handleHTTPCmd_notSupported(); + virtual void handleHTTPCmd_notFound(); + virtual void handleHTTPCmd_OPTIONS(); + virtual void handleHTTPCmd_TunnelingGET(char const* sessionCookie); + virtual Boolean handleHTTPCmd_TunnelingPOST(char const* sessionCookie, unsigned char const* extraData, unsigned extraDataSize); + virtual void handleHTTPCmd_StreamingGET(char const* urlSuffix, char const* fullRequestStr); + protected: + UsageEnvironment& envir() { return fOurServer.envir(); } + void resetRequestBuffer(); + void closeSockets(); + static void incomingRequestHandler(void*, int /*mask*/); + void incomingRequestHandler1(); + static void handleAlternativeRequestByte(void*, u_int8_t requestByte); + void handleAlternativeRequestByte1(u_int8_t requestByte); + void handleRequestBytes(int newBytesRead); + Boolean authenticationOK(char const* cmdName, char const* urlSuffix, char const* fullRequestStr); + void changeClientInputSocket(int newSocketNum, unsigned char const* extraData, unsigned extraDataSize); + // used to implement RTSP-over-HTTP tunneling + static void continueHandlingREGISTER(ParamsForREGISTER* params); + virtual void continueHandlingREGISTER1(ParamsForREGISTER* params); + + // Shortcuts for setting up a RTSP response (prior to sending it): + void setRTSPResponse(char const* responseStr); + void setRTSPResponse(char const* responseStr, u_int32_t sessionId); + void setRTSPResponse(char const* responseStr, char const* contentStr); + void setRTSPResponse(char const* 
responseStr, u_int32_t sessionId, char const* contentStr); + + RTSPServer& fOurServer; + Boolean fIsActive; + int fClientInputSocket, fClientOutputSocket; + struct sockaddr_in fClientAddr; + unsigned char fRequestBuffer[RTSP_BUFFER_SIZE]; + unsigned fRequestBytesAlreadySeen, fRequestBufferBytesLeft; + unsigned char* fLastCRLF; + unsigned char fResponseBuffer[RTSP_BUFFER_SIZE]; + unsigned fRecursionCount; + char const* fCurrentCSeq; + Authenticator fCurrentAuthenticator; // used if access control is needed + char* fOurSessionCookie; // used for optional RTSP-over-HTTP tunneling + unsigned fBase64RemainderCount; // used for optional RTSP-over-HTTP tunneling (possible values: 0,1,2,3) + }; + + // The state of an individual client session (using one or more sequential TCP connections) handled by a RTSP server: + class RTSPClientSession { + protected: + RTSPClientSession(RTSPServer& ourServer, u_int32_t sessionId); + virtual ~RTSPClientSession(); + + friend class RTSPServer; + friend class RTSPClientConnection; + // Make the handler functions for each command virtual, to allow subclasses to redefine them: + virtual void handleCmd_SETUP(RTSPClientConnection* ourClientConnection, + char const* urlPreSuffix, char const* urlSuffix, char const* fullRequestStr); + virtual void handleCmd_withinSession(RTSPClientConnection* ourClientConnection, + char const* cmdName, + char const* urlPreSuffix, char const* urlSuffix, + char const* fullRequestStr); + virtual void handleCmd_TEARDOWN(RTSPClientConnection* ourClientConnection, + ServerMediaSubsession* subsession); + virtual void handleCmd_PLAY(RTSPClientConnection* ourClientConnection, + ServerMediaSubsession* subsession, char const* fullRequestStr); + virtual void handleCmd_PAUSE(RTSPClientConnection* ourClientConnection, + ServerMediaSubsession* subsession); + virtual void handleCmd_GET_PARAMETER(RTSPClientConnection* ourClientConnection, + ServerMediaSubsession* subsession, char const* fullRequestStr); + virtual void 
handleCmd_SET_PARAMETER(RTSPClientConnection* ourClientConnection, + ServerMediaSubsession* subsession, char const* fullRequestStr); + protected: + UsageEnvironment& envir() { return fOurServer.envir(); } + void reclaimStreamStates(); + Boolean isMulticast() const { return fIsMulticast; } + void noteLiveness(); + static void noteClientLiveness(RTSPClientSession* clientSession); + static void livenessTimeoutTask(RTSPClientSession* clientSession); + + // Shortcuts for setting up a RTSP response (prior to sending it): + void setRTSPResponse(RTSPClientConnection* ourClientConnection, char const* responseStr) { ourClientConnection->setRTSPResponse(responseStr); } + void setRTSPResponse(RTSPClientConnection* ourClientConnection, char const* responseStr, u_int32_t sessionId) { ourClientConnection->setRTSPResponse(responseStr, sessionId); } + void setRTSPResponse(RTSPClientConnection* ourClientConnection, char const* responseStr, char const* contentStr) { ourClientConnection->setRTSPResponse(responseStr, contentStr); } + void setRTSPResponse(RTSPClientConnection* ourClientConnection, char const* responseStr, u_int32_t sessionId, char const* contentStr) { ourClientConnection->setRTSPResponse(responseStr, sessionId, contentStr); } + + protected: + RTSPServer& fOurServer; + u_int32_t fOurSessionId; + ServerMediaSession* fOurServerMediaSession; + Boolean fIsMulticast, fStreamAfterSETUP; + unsigned char fTCPStreamIdCount; // used for (optional) RTP/TCP + Boolean usesTCPTransport() const { return fTCPStreamIdCount > 0; } + TaskToken fLivenessCheckTask; + unsigned fNumStreamStates; + struct streamState { + ServerMediaSubsession* subsession; + void* streamToken; + } * fStreamStates; + }; + +protected: + // If you subclass "RTSPClientConnection", then you must also redefine this virtual function in order + // to create new objects of your subclass: + virtual RTSPClientConnection* + createNewClientConnection(int clientSocket, struct sockaddr_in clientAddr); + + // If you subclass 
"RTSPClientSession", then you must also redefine this virtual function in order + // to create new objects of your subclass: + virtual RTSPClientSession* + createNewClientSession(u_int32_t sessionId); + + // An iterator over our "ServerMediaSession" objects: + class ServerMediaSessionIterator { + public: + ServerMediaSessionIterator(RTSPServer& server); + virtual ~ServerMediaSessionIterator(); + ServerMediaSession* next(); + private: + HashTable::Iterator* fOurIterator; + }; + +private: + static void incomingConnectionHandlerRTSP(void*, int /*mask*/); + void incomingConnectionHandlerRTSP1(); + + static void incomingConnectionHandlerHTTP(void*, int /*mask*/); + void incomingConnectionHandlerHTTP1(); + + void incomingConnectionHandler(int serverSocket); + +protected: + Port fRTSPServerPort; + +private: + friend class RTSPClientConnection; + friend class RTSPClientSession; + friend class ServerMediaSessionIterator; + friend class RegisterRequestRecord; + int fRTSPServerSocket; + int fHTTPServerSocket; // for optional RTSP-over-HTTP tunneling + Port fHTTPServerPort; // ditto + HashTable* fServerMediaSessions; // maps 'stream name' strings to "ServerMediaSession" objects + HashTable* fClientConnections; // the "ClientConnection" objects that we're using + HashTable* fClientConnectionsForHTTPTunneling; // maps client-supplied 'session cookie' strings to "RTSPClientConnection"s + // (used only for optional RTSP-over-HTTP tunneling) + HashTable* fClientSessions; // maps 'session id' strings to "RTSPClientSession" objects + HashTable* fPendingRegisterRequests; + unsigned fRegisterRequestCounter; + UserAuthenticationDatabase* fAuthDB; + unsigned fReclamationTestSeconds; + Boolean fAllowStreamingRTPOverTCP; // by default, True +}; + + +////////// A subclass of "RTSPServer" that implements the "REGISTER" command to set up proxying on the specified URL ////////// + +class RTSPServerWithREGISTERProxying: public RTSPServer { +public: + static RTSPServerWithREGISTERProxying* 
createNew(UsageEnvironment& env, Port ourPort = 554, + UserAuthenticationDatabase* authDatabase = NULL, + UserAuthenticationDatabase* authDatabaseForREGISTER = NULL, + unsigned reclamationTestSeconds = 65, + Boolean streamRTPOverTCP = False, + int verbosityLevelForProxying = 0); + +protected: + RTSPServerWithREGISTERProxying(UsageEnvironment& env, int ourSocket, Port ourPort, + UserAuthenticationDatabase* authDatabase, UserAuthenticationDatabase* authDatabaseForREGISTER, + unsigned reclamationTestSeconds, + Boolean streamRTPOverTCP, int verbosityLevelForProxying); + // called only by createNew(); + virtual ~RTSPServerWithREGISTERProxying(); + +protected: // redefined virtual functions + virtual char const* allowedCommandNames(); + virtual Boolean weImplementREGISTER(char const* proxyURLSuffix, char*& responseStr); + virtual void implementCmd_REGISTER(char const* url, char const* urlSuffix, int socketToRemoteServer, + Boolean deliverViaTCP, char const* proxyURLSuffix); + virtual UserAuthenticationDatabase* getAuthenticationDatabaseForCommand(char const* cmdName); + +private: + Boolean fStreamRTPOverTCP; + int fVerbosityLevelForProxying; + unsigned fRegisteredProxyCounter; + char* fAllowedCommandNames; + UserAuthenticationDatabase* fAuthDBForREGISTER; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/RTSPServerSupportingHTTPStreaming.hh b/AnyCore/lib_rtsp/liveMedia/include/RTSPServerSupportingHTTPStreaming.hh new file mode 100644 index 0000000..1bf8920 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/RTSPServerSupportingHTTPStreaming.hh @@ -0,0 +1,72 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
+ +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A server that supports both RTSP, and HTTP streaming (using Apple's "HTTP Live Streaming" protocol) +// C++ header + +#ifndef _RTSP_SERVER_SUPPORTING_HTTP_STREAMING_HH +#define _RTSP_SERVER_SUPPORTING_HTTP_STREAMING_HH + +#ifndef _RTSP_SERVER_HH +#include "RTSPServer.hh" +#endif +#ifndef _BYTE_STREAM_MEMORY_BUFFER_SOURCE_HH +#include "ByteStreamMemoryBufferSource.hh" +#endif +#ifndef _TCP_STREAM_SINK_HH +#include "TCPStreamSink.hh" +#endif + +class RTSPServerSupportingHTTPStreaming: public RTSPServer { +public: + static RTSPServerSupportingHTTPStreaming* createNew(UsageEnvironment& env, Port rtspPort = 554, + UserAuthenticationDatabase* authDatabase = NULL, + unsigned reclamationTestSeconds = 65); + + Boolean setHTTPPort(Port httpPort) { return setUpTunnelingOverHTTP(httpPort); } + +protected: + RTSPServerSupportingHTTPStreaming(UsageEnvironment& env, + int ourSocket, Port ourPort, + UserAuthenticationDatabase* authDatabase, + unsigned reclamationTestSeconds); + // called only by createNew(); + virtual ~RTSPServerSupportingHTTPStreaming(); + +protected: // redefined virtual functions + virtual RTSPClientConnection* createNewClientConnection(int clientSocket, struct sockaddr_in clientAddr); + +public: // should be protected, but some old compilers complain otherwise + class RTSPClientConnectionSupportingHTTPStreaming: public RTSPServer::RTSPClientConnection { + public: + 
RTSPClientConnectionSupportingHTTPStreaming(RTSPServer& ourServer, int clientSocket, struct sockaddr_in clientAddr); + virtual ~RTSPClientConnectionSupportingHTTPStreaming(); + + protected: // redefined virtual functions + virtual void handleHTTPCmd_StreamingGET(char const* urlSuffix, char const* fullRequestStr); + + protected: + static void afterStreaming(void* clientData); + + private: + u_int32_t fClientSessionId; + ByteStreamMemoryBufferSource* fPlaylistSource; + TCPStreamSink* fTCPSink; + }; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/SIPClient.hh b/AnyCore/lib_rtsp/liveMedia/include/SIPClient.hh new file mode 100644 index 0000000..4a9ef2c --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/SIPClient.hh @@ -0,0 +1,149 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved.
+// A generic SIP client +// C++ header + +#ifndef _SIP_CLIENT_HH +#define _SIP_CLIENT_HH + +#ifndef _MEDIA_SESSION_HH +#include "MediaSession.hh" +#endif +#ifndef _NET_ADDRESS_HH +#include "NetAddress.hh" +#endif +#ifndef _DIGEST_AUTHENTICATION_HH +#include "DigestAuthentication.hh" +#endif + +// Possible states in the "INVITE" transition diagram (RFC 3261, Figure 5) +enum inviteClientState { Calling, Proceeding, Completed, Terminated }; + +class SIPClient: public Medium { +public: + static SIPClient* createNew(UsageEnvironment& env, + unsigned char desiredAudioRTPPayloadFormat, + char const* mimeSubtype = NULL, + int verbosityLevel = 0, + char const* applicationName = NULL); + + void setProxyServer(unsigned proxyServerAddress, + portNumBits proxyServerPortNum); + + void setClientStartPortNum(portNumBits clientStartPortNum) { + fClientStartPortNum = clientStartPortNum; + } + + char* invite(char const* url, Authenticator* authenticator = NULL); + // Issues a SIP "INVITE" command + // Returns the session SDP description if this command succeeds + char* inviteWithPassword(char const* url, + char const* username, char const* password); + // Uses "invite()" to do an "INVITE" - first + // without using "password", then (if we get an Unauthorized + // response) with an authentication response computed from "password" + + Boolean sendACK(); // on current call + Boolean sendBYE(); // on current call + + static Boolean parseSIPURL(UsageEnvironment& env, char const* url, + NetAddress& address, portNumBits& portNum); + // (ignores any "[:]@" in "url") + static Boolean parseSIPURLUsernamePassword(char const* url, + char*& username, + char*& password); + char const* getInviteSdpReply() const { return fInviteSDPDescriptionReturned; } + + void setUserAgentString(char const* userAgentName); + // sets an alternative string to be used in SIP "User-Agent:" headers + +protected: + virtual ~SIPClient(); + +private: + SIPClient(UsageEnvironment& env, + unsigned char 
desiredAudioRTPPayloadFormat, + char const* mimeSubtype, + int verbosityLevel, + char const* applicationName); + // called only by createNew(); + + void reset(); + + // Routines used to implement invite*(): + char* invite1(Authenticator* authenticator); + Boolean processURL(char const* url); + Boolean sendINVITE(); + static void inviteResponseHandler(void* clientData, int mask); + void doInviteStateMachine(unsigned responseCode); + void doInviteStateTerminated(unsigned responseCode); + TaskToken fTimerA, fTimerB, fTimerD; + static void timerAHandler(void* clientData); + static void timerBHandler(void* clientData); + static void timerDHandler(void* clientData); + unsigned const fT1; // in microseconds + unsigned fTimerALen; // in microseconds; initially fT1, then doubles + unsigned fTimerACount; + + // Routines used to implement all commands: + char* createAuthenticatorString(Authenticator const* authenticator, + char const* cmd, char const* url); + Boolean sendRequest(char const* requestString, unsigned requestLength); + unsigned getResponseCode(); + unsigned getResponse(char*& responseBuffer, unsigned responseBufferSize); + Boolean parseResponseCode(char const* line, unsigned& responseCode); + +private: + // Set for all calls: + unsigned char fDesiredAudioRTPPayloadFormat; + char* fMIMESubtype; + unsigned fMIMESubtypeSize; + int fVerbosityLevel; + unsigned fCSeq; // sequence number, used in consecutive requests + char const* fApplicationName; + unsigned fApplicationNameSize; + char const* fOurAddressStr; + unsigned fOurAddressStrSize; + portNumBits fOurPortNum; + Groupsock* fOurSocket; + char* fUserAgentHeaderStr; + unsigned fUserAgentHeaderStrLen; + + // Set for each call: + char const* fURL; + unsigned fURLSize; + struct in_addr fServerAddress; + portNumBits fServerPortNum; // in host order + portNumBits fClientStartPortNum; // in host order + unsigned fCallId, fFromTag; // set by us + char const* fToTagStr; // set by the responder + unsigned fToTagStrSize; + 
Authenticator fValidAuthenticator; + char const* fUserName; // 'user' name used in "From:" & "Contact:" lines + unsigned fUserNameSize; + + char* fInviteSDPDescription; + char* fInviteSDPDescriptionReturned; + char* fInviteCmd; + unsigned fInviteCmdSize; + Authenticator* fWorkingAuthenticator; + inviteClientState fInviteClientState; + char fEventLoopStopFlag; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/ServerMediaSession.hh b/AnyCore/lib_rtsp/liveMedia/include/ServerMediaSession.hh new file mode 100644 index 0000000..0329e48 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/ServerMediaSession.hh @@ -0,0 +1,201 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A data structure that represents a session that consists of +// potentially multiple (audio and/or video) sub-sessions +// (This data structure is used for media *streamers* - i.e., servers. +// For media receivers, use "MediaSession" instead.)
+// C++ header + +#ifndef _SERVER_MEDIA_SESSION_HH +#define _SERVER_MEDIA_SESSION_HH + +#ifndef _MEDIA_HH +#include "Media.hh" +#endif +#ifndef _FRAMED_SOURCE_HH +#include "FramedSource.hh" +#endif +#ifndef _GROUPEID_HH +#include "GroupEId.hh" +#endif +#ifndef _RTP_INTERFACE_HH +#include "RTPInterface.hh" // for ServerRequestAlternativeByteHandler +#endif + +class ServerMediaSubsession; // forward + +class ServerMediaSession: public Medium { +public: + static ServerMediaSession* createNew(UsageEnvironment& env, + char const* streamName = NULL, + char const* info = NULL, + char const* description = NULL, + Boolean isSSM = False, + char const* miscSDPLines = NULL); + + static Boolean lookupByName(UsageEnvironment& env, + char const* mediumName, + ServerMediaSession*& resultSession); + + char* generateSDPDescription(); // based on the entire session + // Note: The caller is responsible for freeing the returned string + + char const* streamName() const { return fStreamName; } + + Boolean addSubsession(ServerMediaSubsession* subsession); + unsigned numSubsessions() const { return fSubsessionCounter; } + + void testScaleFactor(float& scale); // sets "scale" to the actual supported scale + float duration() const; + // a result == 0 means an unbounded session (the default) + // a result < 0 means: subsession durations differ; the result is -(the largest). 
+ // a result > 0 means: this is the duration of a bounded session + + unsigned referenceCount() const { return fReferenceCount; } + void incrementReferenceCount() { ++fReferenceCount; } + void decrementReferenceCount() { if (fReferenceCount > 0) --fReferenceCount; } + Boolean& deleteWhenUnreferenced() { return fDeleteWhenUnreferenced; } + + void deleteAllSubsessions(); + // Removes and deletes all subsessions added by "addSubsession()", returning us to an 'empty' state + // Note: If you have already added this "ServerMediaSession" to a "RTSPServer" then, before calling this function, + // you must first close any client connections that use it, + // by calling "RTSPServer::closeAllClientSessionsForServerMediaSession()". + +protected: + ServerMediaSession(UsageEnvironment& env, char const* streamName, + char const* info, char const* description, + Boolean isSSM, char const* miscSDPLines); + // called only by "createNew()" + + virtual ~ServerMediaSession(); + +private: // redefined virtual functions + virtual Boolean isServerMediaSession() const; + +private: + Boolean fIsSSM; + + // Linkage fields: + friend class ServerMediaSubsessionIterator; + ServerMediaSubsession* fSubsessionsHead; + ServerMediaSubsession* fSubsessionsTail; + unsigned fSubsessionCounter; + + char* fStreamName; + char* fInfoSDPString; + char* fDescriptionSDPString; + char* fMiscSDPLines; + struct timeval fCreationTime; + unsigned fReferenceCount; + Boolean fDeleteWhenUnreferenced; +}; + + +class ServerMediaSubsessionIterator { +public: + ServerMediaSubsessionIterator(ServerMediaSession& session); + virtual ~ServerMediaSubsessionIterator(); + + ServerMediaSubsession* next(); // NULL if none + void reset(); + +private: + ServerMediaSession& fOurSession; + ServerMediaSubsession* fNextPtr; +}; + + +class ServerMediaSubsession: public Medium { +public: + unsigned trackNumber() const { return fTrackNumber; } + char const* trackId(); + virtual char const* sdpLines() = 0; + virtual void 
getStreamParameters(unsigned clientSessionId, // in + netAddressBits clientAddress, // in + Port const& clientRTPPort, // in + Port const& clientRTCPPort, // in + int tcpSocketNum, // in (-1 means use UDP, not TCP) + unsigned char rtpChannelId, // in (used if TCP) + unsigned char rtcpChannelId, // in (used if TCP) + netAddressBits& destinationAddress, // in out + u_int8_t& destinationTTL, // in out + Boolean& isMulticast, // out + Port& serverRTPPort, // out + Port& serverRTCPPort, // out + void*& streamToken // out + ) = 0; + virtual void startStream(unsigned clientSessionId, void* streamToken, + TaskFunc* rtcpRRHandler, + void* rtcpRRHandlerClientData, + unsigned short& rtpSeqNum, + unsigned& rtpTimestamp, + ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler, + void* serverRequestAlternativeByteHandlerClientData) = 0; + virtual void pauseStream(unsigned clientSessionId, void* streamToken); + virtual void seekStream(unsigned clientSessionId, void* streamToken, double& seekNPT, + double streamDuration, u_int64_t& numBytes); + // This routine is used to seek by relative (i.e., NPT) time. + // "streamDuration", if >0.0, specifies how much data to stream, past "seekNPT". (If <=0.0, all remaining data is streamed.) + // "numBytes" returns the size (in bytes) of the data to be streamed, or 0 if unknown or unlimited. + virtual void seekStream(unsigned clientSessionId, void* streamToken, char*& absStart, char*& absEnd); + // This routine is used to seek by 'absolute' time. + // "absStart" should be a string of the form "YYYYMMDDTHHMMSSZ" or "YYYYMMDDTHHMMSS.Z". + // "absEnd" should be either NULL (for no end time), or a string of the same form as "absStart". + // These strings may be modified in-place, or can be reassigned to a newly-allocated value (after delete[]ing the original). 
+ virtual void nullSeekStream(unsigned clientSessionId, void* streamToken, + double streamEndTime, u_int64_t& numBytes); + // Called whenever we're handling a "PLAY" command without a specified start time. + virtual void setStreamScale(unsigned clientSessionId, void* streamToken, float scale); + virtual float getCurrentNPT(void* streamToken); + virtual FramedSource* getStreamSource(void* streamToken); + virtual void deleteStream(unsigned clientSessionId, void*& streamToken); + + virtual void testScaleFactor(float& scale); // sets "scale" to the actual supported scale + virtual float duration() const; + // returns 0 for an unbounded session (the default) + // returns > 0 for a bounded session + virtual void getAbsoluteTimeRange(char*& absStartTime, char*& absEndTime) const; + // Subclasses can reimplement this iff they support seeking by 'absolute' time. + + // The following may be called by (e.g.) SIP servers, for which the + // address and port number fields in SDP descriptions need to be non-zero: + void setServerAddressAndPortForSDP(netAddressBits addressBits, + portNumBits portBits); + +protected: // we're a virtual base class + ServerMediaSubsession(UsageEnvironment& env); + virtual ~ServerMediaSubsession(); + + char const* rangeSDPLine() const; + // returns a string to be delete[]d + + ServerMediaSession* fParentSession; + netAddressBits fServerAddressForSDP; + portNumBits fPortNumForSDP; + +private: + friend class ServerMediaSession; + friend class ServerMediaSubsessionIterator; + ServerMediaSubsession* fNext; + + unsigned fTrackNumber; // within an enclosing ServerMediaSession + char const* fTrackId; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/SimpleRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/SimpleRTPSink.hh new file mode 100644 index 0000000..808fc4a --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/SimpleRTPSink.hh @@ -0,0 +1,76 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the 
terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A simple RTP sink that packs frames into each outgoing +// packet, without any fragmentation or special headers. +// C++ header + +#ifndef _SIMPLE_RTP_SINK_HH +#define _SIMPLE_RTP_SINK_HH + +#ifndef _MULTI_FRAMED_RTP_SINK_HH +#include "MultiFramedRTPSink.hh" +#endif + +class SimpleRTPSink: public MultiFramedRTPSink { +public: + static SimpleRTPSink* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency, + char const* sdpMediaTypeString, + char const* rtpPayloadFormatName, + unsigned numChannels = 1, + Boolean allowMultipleFramesPerPacket = True, + Boolean doNormalMBitRule = True); + // "doNormalMBitRule" means: If the medium (i.e., "sdpMediaTypeString") is other than "audio", set the RTP "M" bit + // on each outgoing packet iff it contains the last (or only) fragment of a frame. + // Otherwise (i.e., if "doNormalMBitRule" is False, or the medium is "audio"), leave the "M" bit unset.
+ + void setMBitOnNextPacket() { fSetMBitOnNextPacket = True; } // hack for optionally setting the RTP 'M' bit from outside the class + +protected: + SimpleRTPSink(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency, + char const* sdpMediaTypeString, + char const* rtpPayloadFormatName, + unsigned numChannels, + Boolean allowMultipleFramesPerPacket, + Boolean doNormalMBitRule); + // called only by createNew() + + virtual ~SimpleRTPSink(); + +protected: // redefined virtual functions + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual + Boolean frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const; + virtual char const* sdpMediaType() const; + +private: + char const* fSDPMediaTypeString; + Boolean fAllowMultipleFramesPerPacket; + Boolean fSetMBitOnLastFrames, fSetMBitOnNextPacket; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/SimpleRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/SimpleRTPSource.hh new file mode 100644 index 0000000..1c4194a --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/SimpleRTPSource.hh @@ -0,0 +1,67 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details.
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A RTP source for a simple RTP payload format that +// - doesn't have any special headers following the RTP header +// (if necessary, the "offset" parameter can be used to specify a +// special header that we just skip over) +// - doesn't have any special framing apart from the packet data itself +// C++ header + +#ifndef _SIMPLE_RTP_SOURCE_HH +#define _SIMPLE_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +class SimpleRTPSource: public MultiFramedRTPSource { +public: + static SimpleRTPSource* createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency, + char const* mimeTypeString, + unsigned offset = 0, + Boolean doNormalMBitRule = True); + // "doNormalMBitRule" means: If the medium is not audio, use the RTP "M" + // bit on each incoming packet to indicate the last (or only) fragment + // of a frame. Otherwise (i.e., if "doNormalMBitRule" is False, or the medium is "audio"), the "M" bit is ignored. 
+ +protected: + virtual ~SimpleRTPSource(); + +protected: + SimpleRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency, + char const* mimeTypeString, unsigned offset, + Boolean doNormalMBitRule); + // called only by createNew() + +private: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; + +private: + char const* fMIMEtypeString; + unsigned fOffset; + Boolean fUseMBitForFrameEnd; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/StreamReplicator.hh b/AnyCore/lib_rtsp/liveMedia/include/StreamReplicator.hh new file mode 100644 index 0000000..2fdc3a7 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/StreamReplicator.hh @@ -0,0 +1,84 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A class that can be used to create (possibly multiple) 'replicas' of an incoming stream.
+// C++ header + +#ifndef _STREAM_REPLICATOR_HH +#define _STREAM_REPLICATOR_HH + +#ifndef _FRAMED_SOURCE_HH +#include "FramedSource.hh" +#endif + +class StreamReplica; // forward + +class StreamReplicator: public Medium { +public: + static StreamReplicator* createNew(UsageEnvironment& env, FramedSource* inputSource, Boolean deleteWhenLastReplicaDies = True); + // If "deleteWhenLastReplicaDies" is True (the default), then the "StreamReplicator" object is deleted when (and only when) + // all replicas have been deleted. (In this case, you must *not* call "Medium::close()" on the "StreamReplicator" object, + // unless you never created any replicas from it to begin with.) + // If "deleteWhenLastReplicaDies" is False, then the "StreamReplicator" object remains in existence, even when all replicas + // have been deleted. (This allows you to create new replicas later, if you wish.) In this case, you delete the + // "StreamReplicator" object by calling "Medium::close()" on it - but you must do so only when "numReplicas()" returns 0. 
+ + FramedSource* createStreamReplica(); + + unsigned numReplicas() const { return fNumReplicas; } + + FramedSource* inputSource() const { return fInputSource; } + + // Call before destruction if you want to prevent the destructor from closing the input source + void detachInputSource() { fInputSource = NULL; } + +protected: + StreamReplicator(UsageEnvironment& env, FramedSource* inputSource, Boolean deleteWhenLastReplicaDies); + // called only by "createNew()" + virtual ~StreamReplicator(); + +private: + // Routines called by replicas to implement frame delivery, and the stopping/restarting/deletion of replicas: + friend class StreamReplica; + void getNextFrame(StreamReplica* replica); + void deactivateStreamReplica(StreamReplica* replica); + void removeStreamReplica(StreamReplica* replica); + +private: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, + struct timeval presentationTime, unsigned durationInMicroseconds); + + static void onSourceClosure(void* clientData); + void onSourceClosure(); + + void deliverReceivedFrame(); + +private: + FramedSource* fInputSource; + Boolean fDeleteWhenLastReplicaDies, fInputSourceHasClosed; + unsigned fNumReplicas, fNumActiveReplicas, fNumDeliveriesMadeSoFar; + int fFrameIndex; // 0 or 1; used to figure out if a replica is requesting the current frame, or the next frame + + StreamReplica* fMasterReplica; // the first replica that requests each frame. We use its buffer when copying to the others. 
+ StreamReplica* fReplicasAwaitingCurrentFrame; // other than the 'master' replica + StreamReplica* fReplicasAwaitingNextFrame; // replicas that have already received the current frame, and have asked for the next +}; +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/T140TextRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/T140TextRTPSink.hh new file mode 100644 index 0000000..6a3d178 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/T140TextRTPSink.hh @@ -0,0 +1,103 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP sink for T.140 text (RFC 2793) +// C++ header + +#ifndef _T140_TEXT_RTP_SINK_HH +#define _T140_TEXT_RTP_SINK_HH + +#ifndef _TEXT_RTP_SINK_HH +#include "TextRTPSink.hh" +#endif +#ifndef _FRAMED_FILTER_HH +#include "FramedFilter.hh" +#endif + +class T140IdleFilter; + +class T140TextRTPSink: public TextRTPSink { +public: + static T140TextRTPSink* createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat); + +protected: + T140TextRTPSink(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat); + // called only by createNew() + + virtual ~T140TextRTPSink(); + +protected: // redefined virtual functions: + virtual Boolean continuePlaying(); + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual Boolean frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const; + +protected: + T140IdleFilter* fOurIdleFilter; + Boolean fAreInIdlePeriod; +}; + + +////////// T140IdleFilter definition ////////// + +// Because the T.140 text RTP payload format specification recommends that (empty) RTP packets be sent during 'idle periods' +// when no new text is available, we implement "T140TextRTPSink" using a separate "T140IdleFilter" class - sitting in front +// - that delivers, to the "T140TextRTPSink", a continuous sequence of (possibly) empty frames. +// (Note: This class should be used only by "T140TextRTPSink", or a subclass.) 
+ +class T140IdleFilter: public FramedFilter { +public: + T140IdleFilter(UsageEnvironment& env, FramedSource* inputSource); + virtual ~T140IdleFilter(); + +private: // redefined virtual functions: + virtual void doGetNextFrame(); + virtual void doStopGettingFrames(); + +private: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + + static void handleIdleTimeout(void* clientData); + void handleIdleTimeout(); + + void deliverFromBuffer(); + void deliverEmptyFrame(); + + static void onSourceClosure(void* clientData); + void onSourceClosure(); + +private: + TaskToken fIdleTimerTask; + unsigned fBufferSize, fNumBufferedBytes; + char* fBuffer; + unsigned fBufferedNumTruncatedBytes; // a count of truncated bytes from the upstream + struct timeval fBufferedDataPresentationTime; + unsigned fBufferedDataDurationInMicroseconds; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/TCPStreamSink.hh b/AnyCore/lib_rtsp/liveMedia/include/TCPStreamSink.hh new file mode 100644 index 0000000..c039d13 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/TCPStreamSink.hh @@ -0,0 +1,67 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A sink representing a TCP output stream +// C++ header + +#ifndef _TCP_STREAM_SINK_HH +#define _TCP_STREAM_SINK_HH + +#ifndef _MEDIA_SINK_HH +#include "MediaSink.hh" +#endif + +#define TCP_STREAM_SINK_BUFFER_SIZE 10000 + +class TCPStreamSink: public MediaSink { +public: + static TCPStreamSink* createNew(UsageEnvironment& env, int socketNum); + // "socketNum" is the socket number of an existing, writable TCP socket (which should be non-blocking). + // The caller is responsible for closing this socket later (when this object no longer exists). + +protected: + TCPStreamSink(UsageEnvironment& env, int socketNum); // called only by "createNew()" + virtual ~TCPStreamSink(); + +protected: + // Redefined virtual functions: + virtual Boolean continuePlaying(); + +private: + void processBuffer(); // common routine, called from both the 'socket writable' and 'incoming data' handlers below + + static void socketWritableHandler(void* clientData, int mask); + void socketWritableHandler1(); + + static void afterGettingFrame(void* clientData, unsigned frameSize, unsigned numTruncatedBytes, + struct timeval /*presentationTime*/, unsigned /*durationInMicroseconds*/); + void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes); + + static void ourOnSourceClosure(void* clientData); + void ourOnSourceClosure1(); + + unsigned numUnwrittenBytes() const { return fUnwrittenBytesEnd - fUnwrittenBytesStart; } + unsigned freeBufferSpace() const { return TCP_STREAM_SINK_BUFFER_SIZE - fUnwrittenBytesEnd; } + +private: + unsigned char fBuffer[TCP_STREAM_SINK_BUFFER_SIZE]; + unsigned fUnwrittenBytesStart, fUnwrittenBytesEnd; + Boolean fInputSourceIsOpen, 
fOutputSocketIsWritable; + int fOutputSocketNum; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/TextRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/TextRTPSink.hh new file mode 100644 index 0000000..32678ce --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/TextRTPSink.hh @@ -0,0 +1,41 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// A generic RTP sink for text codecs (abstract base class) +// C++ header + +#ifndef _TEXT_RTP_SINK_HH +#define _TEXT_RTP_SINK_HH + +#ifndef _MULTI_FRAMED_RTP_SINK_HH +#include "MultiFramedRTPSink.hh" +#endif + +class TextRTPSink: public MultiFramedRTPSink { +protected: + TextRTPSink(UsageEnvironment& env, + Groupsock* rtpgs, unsigned char rtpPayloadType, + unsigned rtpTimestampFrequency, + char const* rtpPayloadFormatName); + // (we're an abstract base class) + virtual ~TextRTPSink(); + +private: // redefined virtual functions: + virtual char const* sdpMediaType() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/TheoraVideoRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/TheoraVideoRTPSink.hh new file mode 100644 index 0000000..53207f8 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/TheoraVideoRTPSink.hh @@ -0,0 +1,72 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// RTP sink for Theora video +// C++ header + +#ifndef _THEORA_VIDEO_RTP_SINK_HH +#define _THEORA_VIDEO_RTP_SINK_HH + +#ifndef _VIDEO_RTP_SINK_HH +#include "VideoRTPSink.hh" +#endif + +class TheoraVideoRTPSink: public VideoRTPSink { +public: + static TheoraVideoRTPSink* + createNew(UsageEnvironment& env, Groupsock* RTPgs, u_int8_t rtpPayloadFormat, + // The following headers provide the 'configuration' information, for the SDP description: + u_int8_t* identificationHeader, unsigned identificationHeaderSize, + u_int8_t* commentHeader, unsigned commentHeaderSize, + u_int8_t* setupHeader, unsigned setupHeaderSize, + u_int32_t identField = 0xFACADE); + + static TheoraVideoRTPSink* + createNew(UsageEnvironment& env, Groupsock* RTPgs, u_int8_t rtpPayloadFormat, + char const* configStr); + // an optional variant of "createNew()" that takes a Base-64-encoded 'configuration' string, + // rather than the raw configuration headers as parameter. + +protected: + TheoraVideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs, + u_int8_t rtpPayloadFormat, + u_int8_t* identificationHeader, unsigned identificationHeaderSize, + u_int8_t* commentHeader, unsigned commentHeaderSize, + u_int8_t* setupHeader, unsigned setupHeaderSize, + u_int32_t identField); + // called only by createNew() + + virtual ~TheoraVideoRTPSink(); + +private: // redefined virtual functions: + virtual char const* auxSDPLine(); // for the "a=fmtp:" SDP line + + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual Boolean frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const; + virtual unsigned specialHeaderSize() const; + +private: + u_int32_t fIdent; // "Ident" field used by this stream. (Only the low 24 bits of this are used.) 
+ char* fFmtpSDPLine; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/TheoraVideoRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/TheoraVideoRTPSource.hh new file mode 100644 index 0000000..7cbb047 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/TheoraVideoRTPSource.hh @@ -0,0 +1,53 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Theora Video Audio RTP Sources +// C++ header + +#ifndef _THEORA_VIDEO_RTP_SOURCE_HH +#define _THEORA_VIDEO_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +class TheoraVideoRTPSource: public MultiFramedRTPSource { +public: + static TheoraVideoRTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat); + + u_int32_t curPacketIdent() const { return fCurPacketIdent; } // The current "Ident" field; only the low-order 24 bits are used + +protected: + TheoraVideoRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat); + // called only by createNew() + + virtual ~TheoraVideoRTPSource(); + +protected: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; + +private: + u_int32_t fCurPacketIdent; // only the low-order 24 bits are used +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/VP8VideoRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/VP8VideoRTPSink.hh new file mode 100644 index 0000000..e745d5e --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/VP8VideoRTPSink.hh @@ -0,0 +1,50 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// RTP sink for VP8 video +// C++ header + +#ifndef _VP8_VIDEO_RTP_SINK_HH +#define _VP8_VIDEO_RTP_SINK_HH + +#ifndef _VIDEO_RTP_SINK_HH +#include "VideoRTPSink.hh" +#endif + +class VP8VideoRTPSink: public VideoRTPSink { +public: + static VP8VideoRTPSink* createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat); + +protected: + VP8VideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat); + // called only by createNew() + + virtual ~VP8VideoRTPSink(); + +private: // redefined virtual functions: + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual + Boolean frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const; + virtual unsigned specialHeaderSize() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/VP8VideoRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/VP8VideoRTPSource.hh new file mode 100644 index 0000000..744e3d1 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/VP8VideoRTPSource.hh @@ -0,0 +1,50 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// VP8 Video RTP Sources +// C++ header + +#ifndef _VP8_VIDEO_RTP_SOURCE_HH +#define _VP8_VIDEO_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +class VP8VideoRTPSource: public MultiFramedRTPSource { +public: + static VP8VideoRTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency = 90000); + +protected: + VP8VideoRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + // called only by createNew() + + virtual ~VP8VideoRTPSource(); + +protected: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/VideoRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/VideoRTPSink.hh new file mode 100644 index 0000000..c763092 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/VideoRTPSink.hh @@ -0,0 +1,41 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A generic RTP sink for video codecs (abstract base class) +// C++ header + +#ifndef _VIDEO_RTP_SINK_HH +#define _VIDEO_RTP_SINK_HH + +#ifndef _MULTI_FRAMED_RTP_SINK_HH +#include "MultiFramedRTPSink.hh" +#endif + +class VideoRTPSink: public MultiFramedRTPSink { +protected: + VideoRTPSink(UsageEnvironment& env, + Groupsock* rtpgs, unsigned char rtpPayloadType, + unsigned rtpTimestampFrequency, + char const* rtpPayloadFormatName); + // (we're an abstract base class) + virtual ~VideoRTPSink(); + +private: // redefined virtual functions: + virtual char const* sdpMediaType() const; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/VorbisAudioRTPSink.hh b/AnyCore/lib_rtsp/liveMedia/include/VorbisAudioRTPSink.hh new file mode 100644 index 0000000..7710010 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/VorbisAudioRTPSink.hh @@ -0,0 +1,85 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. 
All rights reserved. +// RTP sink for Vorbis audio +// C++ header + +#ifndef _VORBIS_AUDIO_RTP_SINK_HH +#define _VORBIS_AUDIO_RTP_SINK_HH + +#ifndef _AUDIO_RTP_SINK_HH +#include "AudioRTPSink.hh" +#endif + +class VorbisAudioRTPSink: public AudioRTPSink { +public: + static VorbisAudioRTPSink* + createNew(UsageEnvironment& env, Groupsock* RTPgs, u_int8_t rtpPayloadFormat, + u_int32_t rtpTimestampFrequency, unsigned numChannels, + // The following headers provide the 'configuration' information, for the SDP description: + u_int8_t* identificationHeader, unsigned identificationHeaderSize, + u_int8_t* commentHeader, unsigned commentHeaderSize, + u_int8_t* setupHeader, unsigned setupHeaderSize, + u_int32_t identField = 0xFACADE); + + static VorbisAudioRTPSink* + createNew(UsageEnvironment& env, Groupsock* RTPgs, u_int8_t rtpPayloadFormat, + u_int32_t rtpTimestampFrequency, unsigned numChannels, + char const* configStr); + // an optional variant of "createNew()" that takes a Base-64-encoded 'configuration' string, + // rather than the raw configuration headers as parameter. 
+ +protected: + VorbisAudioRTPSink(UsageEnvironment& env, Groupsock* RTPgs, + u_int8_t rtpPayloadFormat, u_int32_t rtpTimestampFrequency, unsigned numChannels, + u_int8_t* identificationHeader, unsigned identificationHeaderSize, + u_int8_t* commentHeader, unsigned commentHeaderSize, + u_int8_t* setupHeader, unsigned setupHeaderSize, + u_int32_t identField); + // called only by createNew() + + virtual ~VorbisAudioRTPSink(); + +private: // redefined virtual functions: + virtual char const* auxSDPLine(); // for the "a=fmtp:" SDP line + + virtual void doSpecialFrameHandling(unsigned fragmentationOffset, + unsigned char* frameStart, + unsigned numBytesInFrame, + struct timeval framePresentationTime, + unsigned numRemainingBytes); + virtual Boolean frameCanAppearAfterPacketStart(unsigned char const* frameStart, + unsigned numBytesInFrame) const; + virtual unsigned specialHeaderSize() const; + virtual unsigned frameSpecificHeaderSize() const; + +private: + u_int32_t fIdent; // "Ident" field used by this stream. (Only the low 24 bits of this are used.) + char* fFmtpSDPLine; +}; + + +// A general function used by both "VorbisAudioRTPSink" and "TheoraVideoRTPSink" to construct +// a Base64-encoded 'config' string (for SDP) from "identification", "comment", "setup" headers. +// (Note: The result string was heap-allocated, and the caller should delete[] it afterwards.) 
+ +char* generateVorbisOrTheoraConfigStr(u_int8_t* identificationHeader, unsigned identificationHeaderSize, + u_int8_t* commentHeader, unsigned commentHeaderSize, + u_int8_t* setupHeader, unsigned setupHeaderSize, + u_int32_t identField); + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/VorbisAudioRTPSource.hh b/AnyCore/lib_rtsp/liveMedia/include/VorbisAudioRTPSource.hh new file mode 100644 index 0000000..6e11afd --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/VorbisAudioRTPSource.hh @@ -0,0 +1,66 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Vorbis Audio RTP Sources +// C++ header + +#ifndef _VORBIS_AUDIO_RTP_SOURCE_HH +#define _VORBIS_AUDIO_RTP_SOURCE_HH + +#ifndef _MULTI_FRAMED_RTP_SOURCE_HH +#include "MultiFramedRTPSource.hh" +#endif + +class VorbisAudioRTPSource: public MultiFramedRTPSource { +public: + static VorbisAudioRTPSource* + createNew(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + + u_int32_t curPacketIdent() const { return fCurPacketIdent; } // The current "Ident" field; only the low-order 24 bits are used + +protected: + VorbisAudioRTPSource(UsageEnvironment& env, Groupsock* RTPgs, + unsigned char rtpPayloadFormat, + unsigned rtpTimestampFrequency); + // called only by createNew() + + virtual ~VorbisAudioRTPSource(); + +protected: + // redefined virtual functions: + virtual Boolean processSpecialHeader(BufferedPacket* packet, + unsigned& resultSpecialHeaderSize); + virtual char const* MIMEtype() const; + +private: + u_int32_t fCurPacketIdent; // only the low-order 24 bits are used +}; + +void parseVorbisOrTheoraConfigStr(char const* configStr, + u_int8_t*& identificationHdr, unsigned& identificationHdrSize, + u_int8_t*& commentHdr, unsigned& commentHdrSize, + u_int8_t*& setupHdr, unsigned& setupHdrSize, + u_int32_t& identField); + // Returns (in each of the result parameters) unpacked Vorbis or Theora + // "identification", "comment", and "setup" headers that were specified in a + // "config" string (in the SDP description for a Vorbis/RTP or Theora/RTP stream). + // Each of the "*Hdr" result arrays are dynamically allocated by this routine, + // and must be delete[]d by the caller. 
+ +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/WAVAudioFileServerMediaSubsession.hh b/AnyCore/lib_rtsp/liveMedia/include/WAVAudioFileServerMediaSubsession.hh new file mode 100644 index 0000000..0bf4ee0 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/WAVAudioFileServerMediaSubsession.hh @@ -0,0 +1,68 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s +// on demand, from an WAV audio file. +// C++ header + +#ifndef _WAV_AUDIO_FILE_SERVER_MEDIA_SUBSESSION_HH +#define _WAV_AUDIO_FILE_SERVER_MEDIA_SUBSESSION_HH + +#ifndef _FILE_SERVER_MEDIA_SUBSESSION_HH +#include "FileServerMediaSubsession.hh" +#endif + +class WAVAudioFileServerMediaSubsession: public FileServerMediaSubsession{ +public: + static WAVAudioFileServerMediaSubsession* + createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource, + Boolean convertToULaw = False); + // If "convertToULaw" is True, 16-bit audio streams are converted to + // 8-bit u-law audio prior to streaming. 
+ +protected: + WAVAudioFileServerMediaSubsession(UsageEnvironment& env, char const* fileName, + Boolean reuseFirstSource, Boolean convertToULaw); + // called only by createNew(); + virtual ~WAVAudioFileServerMediaSubsession(); + +protected: // redefined virtual functions + virtual void seekStreamSource(FramedSource* inputSource, double& seekNPT, double streamDuration, u_int64_t& numBytes); + virtual void setStreamSourceScale(FramedSource* inputSource, float scale); + virtual void setStreamSourceDuration(FramedSource* inputSource, double streamDuration, u_int64_t& numBytes); + + virtual FramedSource* createNewStreamSource(unsigned clientSessionId, + unsigned& estBitrate); + virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, + unsigned char rtpPayloadTypeIfDynamic, + FramedSource* inputSource); + virtual void testScaleFactor(float& scale); + virtual float duration() const; + +protected: + Boolean fConvertToULaw; + + // The following parameters of the input stream are set after + // "createNewStreamSource" is called: + unsigned char fAudioFormat; + unsigned char fBitsPerSample; + unsigned fSamplingFrequency; + unsigned fNumChannels; + float fFileDuration; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/WAVAudioFileSource.hh b/AnyCore/lib_rtsp/liveMedia/include/WAVAudioFileSource.hh new file mode 100644 index 0000000..9436c08 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/WAVAudioFileSource.hh @@ -0,0 +1,86 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. 
+ +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// A WAV audio file source +// NOTE: Samples are returned in little-endian order (the same order in which +// they were stored in the file). +// C++ header + +#ifndef _WAV_AUDIO_FILE_SOURCE_HH +#define _WAV_AUDIO_FILE_SOURCE_HH + +#ifndef _AUDIO_INPUT_DEVICE_HH +#include "AudioInputDevice.hh" +#endif + +typedef enum { + WA_PCM = 0x01, + WA_PCMA = 0x06, + WA_PCMU = 0x07, + WA_IMA_ADPCM = 0x11, + WA_UNKNOWN +} WAV_AUDIO_FORMAT; + + +class WAVAudioFileSource: public AudioInputDevice { +public: + + static WAVAudioFileSource* createNew(UsageEnvironment& env, + char const* fileName); + + unsigned numPCMBytes() const; + void setScaleFactor(int scale); + void seekToPCMByte(unsigned byteNumber); + void limitNumBytesToStream(unsigned numBytesToStream); + // if "numBytesToStream" is >0, then we limit the stream to that number of bytes, before treating it as EOF + + unsigned char getAudioFormat(); + +protected: + WAVAudioFileSource(UsageEnvironment& env, FILE* fid); + // called only by createNew() + + virtual ~WAVAudioFileSource(); + + static void fileReadableHandler(WAVAudioFileSource* source, int mask); + void doReadFromFile(); + +private: + // redefined virtual functions: + virtual void doGetNextFrame(); + virtual void doStopGettingFrames(); + virtual Boolean setInputPort(int portIndex); + virtual double getAverageLevel() const; + +protected: + unsigned fPreferredFrameSize; + +private: + FILE* fFid; + double fPlayTimePerSample; // useconds + Boolean fFidIsSeekable; + unsigned fLastPlayTime; // useconds + Boolean fHaveStartedReading; + unsigned fWAVHeaderSize; + unsigned fFileSize; + int fScaleFactor; + Boolean fLimitNumBytesToStream; + unsigned fNumBytesToStream; // 
used iff "fLimitNumBytesToStream" is True + unsigned char fAudioFormat; +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/liveMedia.hh b/AnyCore/lib_rtsp/liveMedia/include/liveMedia.hh new file mode 100644 index 0000000..2e0acc2 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/liveMedia.hh @@ -0,0 +1,129 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Inclusion of header files representing the interface +// for the entire library +// +// Programs that use the library can include this header file, +// instead of each of the individual media header files + +#ifndef _LIVEMEDIA_HH +#define _LIVEMEDIA_HH + +#include "MPEG1or2AudioRTPSink.hh" +#include "MP3ADURTPSink.hh" +#include "MPEG1or2VideoRTPSink.hh" +#include "MPEG4ESVideoRTPSink.hh" +#include "AMRAudioFileSink.hh" +#include "H264VideoFileSink.hh" +#include "H265VideoFileSink.hh" +#include "OggFileSink.hh" +#include "BasicUDPSink.hh" +#include "GSMAudioRTPSink.hh" +#include "H263plusVideoRTPSink.hh" +#include "H264VideoRTPSink.hh" +#include "H265VideoRTPSink.hh" +#include "DVVideoRTPSource.hh" +#include "DVVideoRTPSink.hh" +#include "DVVideoStreamFramer.hh" +#include "H264VideoStreamFramer.hh" +#include "H265VideoStreamFramer.hh" +#include "H264VideoStreamDiscreteFramer.hh" +#include "H265VideoStreamDiscreteFramer.hh" +#include "JPEGVideoRTPSink.hh" +#include "SimpleRTPSink.hh" +#include "uLawAudioFilter.hh" +#include "MPEG2IndexFromTransportStream.hh" +#include "MPEG2TransportStreamTrickModeFilter.hh" +#include "ByteStreamMultiFileSource.hh" +#include "ByteStreamMemoryBufferSource.hh" +#include "BasicUDPSource.hh" +#include "SimpleRTPSource.hh" +#include "MPEG1or2AudioRTPSource.hh" +#include "MPEG4LATMAudioRTPSource.hh" +#include "MPEG4LATMAudioRTPSink.hh" +#include "MPEG4ESVideoRTPSource.hh" +#include "MPEG4GenericRTPSource.hh" +#include "MP3ADURTPSource.hh" +#include "QCELPAudioRTPSource.hh" +#include "AMRAudioRTPSource.hh" +#include "JPEGVideoRTPSource.hh" +#include "JPEGVideoSource.hh" +#include "MPEG1or2VideoRTPSource.hh" +#include "VorbisAudioRTPSource.hh" +#include "TheoraVideoRTPSource.hh" +#include "VP8VideoRTPSource.hh" +#include "MPEG2TransportStreamFromPESSource.hh" +#include "MPEG2TransportStreamFromESSource.hh" +#include "MPEG2TransportStreamFramer.hh" +#include "ADTSAudioFileSource.hh" +#include "H261VideoRTPSource.hh" +#include 
"H263plusVideoRTPSource.hh" +#include "H264VideoRTPSource.hh" +#include "H265VideoRTPSource.hh" +#include "MP3FileSource.hh" +#include "MP3ADU.hh" +#include "MP3ADUinterleaving.hh" +#include "MP3Transcoder.hh" +#include "MPEG1or2DemuxedElementaryStream.hh" +#include "MPEG1or2AudioStreamFramer.hh" +#include "H263plusVideoStreamFramer.hh" +#include "AC3AudioStreamFramer.hh" +#include "AC3AudioRTPSource.hh" +#include "AC3AudioRTPSink.hh" +#include "VorbisAudioRTPSink.hh" +#include "TheoraVideoRTPSink.hh" +#include "VP8VideoRTPSink.hh" +#include "MPEG4GenericRTPSink.hh" +#include "MPEG1or2VideoStreamDiscreteFramer.hh" +#include "MPEG4VideoStreamDiscreteFramer.hh" +#include "DeviceSource.hh" +#include "AudioInputDevice.hh" +#include "WAVAudioFileSource.hh" +#include "StreamReplicator.hh" +#include "RTSPRegisterSender.hh" +#include "RTSPServerSupportingHTTPStreaming.hh" +#include "RTSPClient.hh" +#include "SIPClient.hh" +#include "QuickTimeFileSink.hh" +#include "QuickTimeGenericRTPSource.hh" +#include "AVIFileSink.hh" +#include "PassiveServerMediaSubsession.hh" +#include "MPEG4VideoFileServerMediaSubsession.hh" +#include "H264VideoFileServerMediaSubsession.hh" +#include "H265VideoFileServerMediaSubsession.hh" +#include "WAVAudioFileServerMediaSubsession.hh" +#include "AMRAudioFileServerMediaSubsession.hh" +#include "AMRAudioFileSource.hh" +#include "AMRAudioRTPSink.hh" +#include "T140TextRTPSink.hh" +#include "TCPStreamSink.hh" +#include "MP3AudioFileServerMediaSubsession.hh" +#include "MPEG1or2VideoFileServerMediaSubsession.hh" +#include "MPEG1or2FileServerDemux.hh" +#include "MPEG2TransportFileServerMediaSubsession.hh" +#include "H263plusVideoFileServerMediaSubsession.hh" +#include "ADTSAudioFileServerMediaSubsession.hh" +#include "DVVideoFileServerMediaSubsession.hh" +#include "AC3AudioFileServerMediaSubsession.hh" +#include "MPEG2TransportUDPServerMediaSubsession.hh" +#include "MatroskaFileServerDemux.hh" +#include "OggFileServerDemux.hh" +#include 
"ProxyServerMediaSession.hh" +#include "DarwinInjector.hh" + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/liveMedia_version.hh b/AnyCore/lib_rtsp/liveMedia/include/liveMedia_version.hh new file mode 100644 index 0000000..1969692 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/liveMedia_version.hh @@ -0,0 +1,10 @@ +// Version information for the "liveMedia" library +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. + +#ifndef _LIVEMEDIA_VERSION_HH +#define _LIVEMEDIA_VERSION_HH + +#define LIVEMEDIA_LIBRARY_VERSION_STRING "2014.09.22" +#define LIVEMEDIA_LIBRARY_VERSION_INT 1411344000 + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/ourMD5.hh b/AnyCore/lib_rtsp/liveMedia/include/ourMD5.hh new file mode 100644 index 0000000..d0bd241 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/ourMD5.hh @@ -0,0 +1,38 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Because MD5 may not be implemented (at least, with the same interface) on all systems, +// we have our own implementation. 
+// C++ header + +#ifndef _OUR_MD5_HH +#define _OUR_MD5_HH + +extern char* our_MD5Data(unsigned char const* data, unsigned dataSize, char* outputDigest); + // "outputDigest" must be either NULL (in which case this function returns a heap-allocated + // buffer, which should be later delete[]d by the caller), or else it must point to + // a (>=)33-byte buffer (which this function will also return). + +extern unsigned char* our_MD5DataRaw(unsigned char const* data, unsigned dataSize, + unsigned char* outputDigest); + // Like "our_MD5Data()", except that it returns the digest in 'raw' binary form, rather than + // as an ASCII hex string. + // "outputDigest" must be either NULL (in which case this function returns a heap-allocated + // buffer, which should be later delete[]d by the caller), or else it must point to + // a (>=)16-byte buffer (which this function will also return). + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/include/uLawAudioFilter.hh b/AnyCore/lib_rtsp/liveMedia/include/uLawAudioFilter.hh new file mode 100644 index 0000000..d6066bc --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/include/uLawAudioFilter.hh @@ -0,0 +1,208 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 
+// Filters for converting between raw PCM audio and uLaw +// C++ header + +#ifndef _ULAW_AUDIO_FILTER_HH +#define _ULAW_AUDIO_FILTER_HH + +#ifndef _FRAMED_FILTER_HH +#include "FramedFilter.hh" +#endif + +////////// 16-bit PCM (in various byte orderings) -> 8-bit u-Law ////////// + +class uLawFromPCMAudioSource: public FramedFilter { +public: + static uLawFromPCMAudioSource* + createNew(UsageEnvironment& env, FramedSource* inputSource, + int byteOrdering = 0); + // "byteOrdering" == 0 => host order (the default) + // "byteOrdering" == 1 => little-endian order + // "byteOrdering" == 2 => network (i.e., big-endian) order + +protected: + uLawFromPCMAudioSource(UsageEnvironment& env, FramedSource* inputSource, + int byteOrdering); + // called only by createNew() + virtual ~uLawFromPCMAudioSource(); + +private: + // Redefined virtual functions: + virtual void doGetNextFrame(); + +private: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + +private: + int fByteOrdering; + unsigned char* fInputBuffer; + unsigned fInputBufferSize; +}; + + +////////// u-Law -> 16-bit PCM (in host order) ////////// + +class PCMFromuLawAudioSource: public FramedFilter { +public: + static PCMFromuLawAudioSource* + createNew(UsageEnvironment& env, FramedSource* inputSource); + +protected: + PCMFromuLawAudioSource(UsageEnvironment& env, + FramedSource* inputSource); + // called only by createNew() + virtual ~PCMFromuLawAudioSource(); + +private: + // Redefined virtual functions: + virtual void doGetNextFrame(); + +private: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned 
frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + +private: + unsigned char* fInputBuffer; + unsigned fInputBufferSize; +}; + + +////////// 16-bit values (in host order) -> 16-bit network order ////////// + +class NetworkFromHostOrder16: public FramedFilter { +public: + static NetworkFromHostOrder16* + createNew(UsageEnvironment& env, FramedSource* inputSource); + +protected: + NetworkFromHostOrder16(UsageEnvironment& env, FramedSource* inputSource); + // called only by createNew() + virtual ~NetworkFromHostOrder16(); + +private: + // Redefined virtual functions: + virtual void doGetNextFrame(); + +private: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); +}; + + +////////// 16-bit values (in network order) -> 16-bit host order ////////// + +class HostFromNetworkOrder16: public FramedFilter { +public: + static HostFromNetworkOrder16* + createNew(UsageEnvironment& env, FramedSource* inputSource); + +protected: + HostFromNetworkOrder16(UsageEnvironment& env, FramedSource* inputSource); + // called only by createNew() + virtual ~HostFromNetworkOrder16(); + +private: + // Redefined virtual functions: + virtual void doGetNextFrame(); + +private: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); +}; + + +////////// 16-bit values: little-endian <-> big-endian ////////// + +class EndianSwap16: public FramedFilter { +public: + static EndianSwap16* createNew(UsageEnvironment& env, 
FramedSource* inputSource); + +protected: + EndianSwap16(UsageEnvironment& env, FramedSource* inputSource); + // called only by createNew() + virtual ~EndianSwap16(); + +private: + // Redefined virtual functions: + virtual void doGetNextFrame(); + +private: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); +}; + + +////////// 24-bit values: little-endian <-> big-endian ////////// + +class EndianSwap24: public FramedFilter { +public: + static EndianSwap24* createNew(UsageEnvironment& env, FramedSource* inputSource); + +protected: + EndianSwap24(UsageEnvironment& env, FramedSource* inputSource); + // called only by createNew() + virtual ~EndianSwap24(); + +private: + // Redefined virtual functions: + virtual void doGetNextFrame(); + +private: + static void afterGettingFrame(void* clientData, unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); + void afterGettingFrame1(unsigned frameSize, + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); +}; + +#endif diff --git a/AnyCore/lib_rtsp/liveMedia/liveMedia.mak b/AnyCore/lib_rtsp/liveMedia/liveMedia.mak new file mode 100644 index 0000000..755d11e --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/liveMedia.mak @@ -0,0 +1,446 @@ +INCLUDES = -Iinclude -I../UsageEnvironment/include -I../groupsock/include +PREFIX = /usr/local +LIBDIR = $(PREFIX)/lib +##### Change the following for your environment: +# Comment out the following line to produce Makefiles that generate debuggable code: +NODEBUG=1 + +# The following definition ensures that we are properly matching +# the WinSock2 library file with the correct header files. 
+# (will link with "ws2_32.lib" and include "winsock2.h" & "Ws2tcpip.h") +TARGETOS = WINNT + +# If for some reason you wish to use WinSock1 instead, uncomment the +# following two definitions. +# (will link with "wsock32.lib" and include "winsock.h") +#TARGETOS = WIN95 +#APPVER = 4.0 + +!include <ntwin32.mak> + +UI_OPTS = $(guilflags) $(guilibsdll) +# Use the following to get a console (e.g., for debugging): +CONSOLE_UI_OPTS = $(conlflags) $(conlibsdll) +CPU=i386 + +TOOLS32 = c:\Program Files\DevStudio\Vc +COMPILE_OPTS = $(INCLUDES) $(cdebug) $(cflags) $(cvarsdll) -I. -I"$(TOOLS32)\include" +C = c +C_COMPILER = "$(TOOLS32)\bin\cl" +C_FLAGS = $(COMPILE_OPTS) +CPP = cpp +CPLUSPLUS_COMPILER = $(C_COMPILER) +CPLUSPLUS_FLAGS = $(COMPILE_OPTS) +OBJ = obj +LINK = $(link) -out: +LIBRARY_LINK = lib -out: +LINK_OPTS_0 = $(linkdebug) msvcirt.lib +LIBRARY_LINK_OPTS = +LINK_OPTS = $(LINK_OPTS_0) $(UI_OPTS) +CONSOLE_LINK_OPTS = $(LINK_OPTS_0) $(CONSOLE_UI_OPTS) +SERVICE_LINK_OPTS = kernel32.lib advapi32.lib shell32.lib -subsystem:console,$(APPVER) +LIB_SUFFIX = lib +LIBS_FOR_CONSOLE_APPLICATION = +LIBS_FOR_GUI_APPLICATION = +MULTIMEDIA_LIBS = winmm.lib +EXE = .exe +PLATFORM = Windows + +rc32 = "$(TOOLS32)\bin\rc" +.rc.res: + $(rc32) $< +##### End of variables to change + +NAME = libliveMedia +LIVEMEDIA_LIB = $(NAME).$(LIB_SUFFIX) +ALL = $(LIVEMEDIA_LIB) +all: $(ALL) + +.$(C).$(OBJ): + $(C_COMPILER) -c $(C_FLAGS) $< +.$(CPP).$(OBJ): + $(CPLUSPLUS_COMPILER) -c $(CPLUSPLUS_FLAGS) $< + +MP3_SOURCE_OBJS = MP3FileSource.$(OBJ) MP3Transcoder.$(OBJ) MP3ADU.$(OBJ) MP3ADUdescriptor.$(OBJ) MP3ADUinterleaving.$(OBJ) MP3ADUTranscoder.$(OBJ) MP3StreamState.$(OBJ) MP3Internals.$(OBJ) MP3InternalsHuffman.$(OBJ) MP3InternalsHuffmanTable.$(OBJ) MP3ADURTPSource.$(OBJ) +MPEG_SOURCE_OBJS = MPEG1or2Demux.$(OBJ) MPEG1or2DemuxedElementaryStream.$(OBJ) MPEGVideoStreamFramer.$(OBJ) MPEG1or2VideoStreamFramer.$(OBJ) MPEG1or2VideoStreamDiscreteFramer.$(OBJ) MPEG4VideoStreamFramer.$(OBJ) 
MPEG4VideoStreamDiscreteFramer.$(OBJ) H264or5VideoStreamFramer.$(OBJ) H264or5VideoStreamDiscreteFramer.$(OBJ) H264VideoStreamFramer.$(OBJ) H264VideoStreamDiscreteFramer.$(OBJ) H265VideoStreamFramer.$(OBJ) H265VideoStreamDiscreteFramer.$(OBJ) MPEGVideoStreamParser.$(OBJ) MPEG1or2AudioStreamFramer.$(OBJ) MPEG1or2AudioRTPSource.$(OBJ) MPEG4LATMAudioRTPSource.$(OBJ) MPEG4ESVideoRTPSource.$(OBJ) MPEG4GenericRTPSource.$(OBJ) $(MP3_SOURCE_OBJS) MPEG1or2VideoRTPSource.$(OBJ) MPEG2TransportStreamMultiplexor.$(OBJ) MPEG2TransportStreamFromPESSource.$(OBJ) MPEG2TransportStreamFromESSource.$(OBJ) MPEG2TransportStreamFramer.$(OBJ) ADTSAudioFileSource.$(OBJ) +H263_SOURCE_OBJS = H263plusVideoRTPSource.$(OBJ) H263plusVideoStreamFramer.$(OBJ) H263plusVideoStreamParser.$(OBJ) +AC3_SOURCE_OBJS = AC3AudioStreamFramer.$(OBJ) AC3AudioRTPSource.$(OBJ) +DV_SOURCE_OBJS = DVVideoStreamFramer.$(OBJ) DVVideoRTPSource.$(OBJ) +MP3_SINK_OBJS = MP3ADURTPSink.$(OBJ) +MPEG_SINK_OBJS = MPEG1or2AudioRTPSink.$(OBJ) $(MP3_SINK_OBJS) MPEG1or2VideoRTPSink.$(OBJ) MPEG4LATMAudioRTPSink.$(OBJ) MPEG4GenericRTPSink.$(OBJ) MPEG4ESVideoRTPSink.$(OBJ) +H263_SINK_OBJS = H263plusVideoRTPSink.$(OBJ) +H264_OR_5_SINK_OBJS = H264or5VideoRTPSink.$(OBJ) H264VideoRTPSink.$(OBJ) H265VideoRTPSink.$(OBJ) +DV_SINK_OBJS = DVVideoRTPSink.$(OBJ) +AC3_SINK_OBJS = AC3AudioRTPSink.$(OBJ) + +MISC_SOURCE_OBJS = MediaSource.$(OBJ) FramedSource.$(OBJ) FramedFileSource.$(OBJ) FramedFilter.$(OBJ) ByteStreamFileSource.$(OBJ) ByteStreamMultiFileSource.$(OBJ) ByteStreamMemoryBufferSource.$(OBJ) BasicUDPSource.$(OBJ) DeviceSource.$(OBJ) AudioInputDevice.$(OBJ) WAVAudioFileSource.$(OBJ) $(MPEG_SOURCE_OBJS) $(H263_SOURCE_OBJS) $(AC3_SOURCE_OBJS) $(DV_SOURCE_OBJS) JPEGVideoSource.$(OBJ) AMRAudioSource.$(OBJ) AMRAudioFileSource.$(OBJ) InputFile.$(OBJ) StreamReplicator.$(OBJ) +MISC_SINK_OBJS = MediaSink.$(OBJ) FileSink.$(OBJ) BasicUDPSink.$(OBJ) AMRAudioFileSink.$(OBJ) H264or5VideoFileSink.$(OBJ) H264VideoFileSink.$(OBJ) H265VideoFileSink.$(OBJ) 
OggFileSink.$(OBJ) $(MPEG_SINK_OBJS) $(H263_SINK_OBJS) $(H264_OR_5_SINK_OBJS) $(DV_SINK_OBJS) $(AC3_SINK_OBJS) VorbisAudioRTPSink.$(OBJ) TheoraVideoRTPSink.$(OBJ) VP8VideoRTPSink.$(OBJ) GSMAudioRTPSink.$(OBJ) JPEGVideoRTPSink.$(OBJ) SimpleRTPSink.$(OBJ) AMRAudioRTPSink.$(OBJ) T140TextRTPSink.$(OBJ) TCPStreamSink.$(OBJ) OutputFile.$(OBJ) +MISC_FILTER_OBJS = uLawAudioFilter.$(OBJ) +TRANSPORT_STREAM_TRICK_PLAY_OBJS = MPEG2IndexFromTransportStream.$(OBJ) MPEG2TransportStreamIndexFile.$(OBJ) MPEG2TransportStreamTrickModeFilter.$(OBJ) + +RTP_SOURCE_OBJS = RTPSource.$(OBJ) MultiFramedRTPSource.$(OBJ) SimpleRTPSource.$(OBJ) H261VideoRTPSource.$(OBJ) H264VideoRTPSource.$(OBJ) H265VideoRTPSource.$(OBJ) QCELPAudioRTPSource.$(OBJ) AMRAudioRTPSource.$(OBJ) JPEGVideoRTPSource.$(OBJ) VorbisAudioRTPSource.$(OBJ) TheoraVideoRTPSource.$(OBJ) VP8VideoRTPSource.$(OBJ) +RTP_SINK_OBJS = RTPSink.$(OBJ) MultiFramedRTPSink.$(OBJ) AudioRTPSink.$(OBJ) VideoRTPSink.$(OBJ) TextRTPSink.$(OBJ) +RTP_INTERFACE_OBJS = RTPInterface.$(OBJ) +RTP_OBJS = $(RTP_SOURCE_OBJS) $(RTP_SINK_OBJS) $(RTP_INTERFACE_OBJS) + +RTCP_OBJS = RTCP.$(OBJ) rtcp_from_spec.$(OBJ) +RTSP_OBJS = RTSPServer.$(OBJ) RTSPClient.$(OBJ) RTSPCommon.$(OBJ) RTSPServerSupportingHTTPStreaming.$(OBJ) RTSPRegisterSender.$(OBJ) +SIP_OBJS = SIPClient.$(OBJ) + +SESSION_OBJS = MediaSession.$(OBJ) ServerMediaSession.$(OBJ) PassiveServerMediaSubsession.$(OBJ) OnDemandServerMediaSubsession.$(OBJ) FileServerMediaSubsession.$(OBJ) MPEG4VideoFileServerMediaSubsession.$(OBJ) H264VideoFileServerMediaSubsession.$(OBJ) H265VideoFileServerMediaSubsession.$(OBJ) H263plusVideoFileServerMediaSubsession.$(OBJ) WAVAudioFileServerMediaSubsession.$(OBJ) AMRAudioFileServerMediaSubsession.$(OBJ) MP3AudioFileServerMediaSubsession.$(OBJ) MPEG1or2VideoFileServerMediaSubsession.$(OBJ) MPEG1or2FileServerDemux.$(OBJ) MPEG1or2DemuxedServerMediaSubsession.$(OBJ) MPEG2TransportFileServerMediaSubsession.$(OBJ) ADTSAudioFileServerMediaSubsession.$(OBJ) 
DVVideoFileServerMediaSubsession.$(OBJ) AC3AudioFileServerMediaSubsession.$(OBJ) MPEG2TransportUDPServerMediaSubsession.$(OBJ) ProxyServerMediaSession.$(OBJ) + +QUICKTIME_OBJS = QuickTimeFileSink.$(OBJ) QuickTimeGenericRTPSource.$(OBJ) +AVI_OBJS = AVIFileSink.$(OBJ) + +MATROSKA_FILE_OBJS = MatroskaFile.$(OBJ) MatroskaFileParser.$(OBJ) EBMLNumber.$(OBJ) MatroskaDemuxedTrack.$(OBJ) +MATROSKA_SERVER_MEDIA_SUBSESSION_OBJS = MatroskaFileServerMediaSubsession.$(OBJ) MP3AudioMatroskaFileServerMediaSubsession.$(OBJ) +MATROSKA_RTSP_SERVER_OBJS = MatroskaFileServerDemux.$(OBJ) $(MATROSKA_SERVER_MEDIA_SUBSESSION_OBJS) +MATROSKA_OBJS = $(MATROSKA_FILE_OBJS) $(MATROSKA_RTSP_SERVER_OBJS) + +OGG_FILE_OBJS = OggFile.$(OBJ) OggFileParser.$(OBJ) OggDemuxedTrack.$(OBJ) +OGG_SERVER_MEDIA_SUBSESSION_OBJS = OggFileServerMediaSubsession.$(OBJ) +OGG_RTSP_SERVER_OBJS = OggFileServerDemux.$(OBJ) $(OGG_SERVER_MEDIA_SUBSESSION_OBJS) +OGG_OBJS = $(OGG_FILE_OBJS) $(OGG_RTSP_SERVER_OBJS) + +MISC_OBJS = DarwinInjector.$(OBJ) BitVector.$(OBJ) StreamParser.$(OBJ) DigestAuthentication.$(OBJ) ourMD5.$(OBJ) Base64.$(OBJ) Locale.$(OBJ) + +LIVEMEDIA_LIB_OBJS = Media.$(OBJ) $(MISC_SOURCE_OBJS) $(MISC_SINK_OBJS) $(MISC_FILTER_OBJS) $(RTP_OBJS) $(RTCP_OBJS) $(RTSP_OBJS) $(SIP_OBJS) $(SESSION_OBJS) $(QUICKTIME_OBJS) $(AVI_OBJS) $(TRANSPORT_STREAM_TRICK_PLAY_OBJS) $(MATROSKA_OBJS) $(OGG_OBJS) $(MISC_OBJS) + +$(LIVEMEDIA_LIB): $(LIVEMEDIA_LIB_OBJS) \ + $(PLATFORM_SPECIFIC_LIB_OBJS) + $(LIBRARY_LINK)$@ $(LIBRARY_LINK_OPTS) \ + $(LIVEMEDIA_LIB_OBJS) + +Media.$(CPP): include/Media.hh +include/Media.hh: include/liveMedia_version.hh +MediaSource.$(CPP): include/MediaSource.hh +include/MediaSource.hh: include/Media.hh +FramedSource.$(CPP): include/FramedSource.hh +include/FramedSource.hh: include/MediaSource.hh +FramedFileSource.$(CPP): include/FramedFileSource.hh +include/FramedFileSource.hh: include/FramedSource.hh +FramedFilter.$(CPP): include/FramedFilter.hh +include/FramedFilter.hh: include/FramedSource.hh 
+RTPSource.$(CPP): include/RTPSource.hh +include/RTPSource.hh: include/FramedSource.hh include/RTPInterface.hh +include/RTPInterface.hh: include/Media.hh +MultiFramedRTPSource.$(CPP): include/MultiFramedRTPSource.hh include/RTCP.hh +include/MultiFramedRTPSource.hh: include/RTPSource.hh +SimpleRTPSource.$(CPP): include/SimpleRTPSource.hh +include/SimpleRTPSource.hh: include/MultiFramedRTPSource.hh +H261VideoRTPSource.$(CPP): include/H261VideoRTPSource.hh +include/H261VideoRTPSource.hh: include/MultiFramedRTPSource.hh +H264VideoRTPSource.$(CPP): include/H264VideoRTPSource.hh include/Base64.hh +include/H264VideoRTPSource.hh: include/MultiFramedRTPSource.hh +H265VideoRTPSource.$(CPP): include/H265VideoRTPSource.hh +include/H265VideoRTPSource.hh: include/MultiFramedRTPSource.hh +QCELPAudioRTPSource.$(CPP): include/QCELPAudioRTPSource.hh include/MultiFramedRTPSource.hh include/FramedFilter.hh +include/QCELPAudioRTPSource.hh: include/RTPSource.hh +AMRAudioRTPSource.$(CPP): include/AMRAudioRTPSource.hh include/MultiFramedRTPSource.hh +include/AMRAudioRTPSource.hh: include/RTPSource.hh include/AMRAudioSource.hh +JPEGVideoRTPSource.$(CPP): include/JPEGVideoRTPSource.hh +include/JPEGVideoRTPSource.hh: include/MultiFramedRTPSource.hh +VorbisAudioRTPSource.$(CPP): include/VorbisAudioRTPSource.hh include/Base64.hh +include/VorbisAudioRTPSource.hh: include/MultiFramedRTPSource.hh +TheoraVideoRTPSource.$(CPP): include/TheoraVideoRTPSource.hh +include/TheoraVideoRTPSource.hh: include/MultiFramedRTPSource.hh +VP8VideoRTPSource.$(CPP): include/VP8VideoRTPSource.hh +include/VP8VideoRTPSource.hh: include/MultiFramedRTPSource.hh +ByteStreamFileSource.$(CPP): include/ByteStreamFileSource.hh include/InputFile.hh +include/ByteStreamFileSource.hh: include/FramedFileSource.hh +ByteStreamMultiFileSource.$(CPP): include/ByteStreamMultiFileSource.hh +include/ByteStreamMultiFileSource.hh: include/ByteStreamFileSource.hh +ByteStreamMemoryBufferSource.$(CPP): 
include/ByteStreamMemoryBufferSource.hh +include/ByteStreamMemoryBufferSource.hh: include/FramedSource.hh +BasicUDPSource.$(CPP): include/BasicUDPSource.hh +include/BasicUDPSource.hh: include/FramedSource.hh +DeviceSource.$(CPP): include/DeviceSource.hh +include/DeviceSource.hh: include/FramedSource.hh +AudioInputDevice.$(CPP): include/AudioInputDevice.hh +include/AudioInputDevice.hh: include/FramedSource.hh +WAVAudioFileSource.$(CPP): include/WAVAudioFileSource.hh include/InputFile.hh +include/WAVAudioFileSource.hh: include/AudioInputDevice.hh +MPEG1or2Demux.$(CPP): include/MPEG1or2Demux.hh include/MPEG1or2DemuxedElementaryStream.hh StreamParser.hh +include/MPEG1or2Demux.hh: include/FramedSource.hh +include/MPEG1or2DemuxedElementaryStream.hh: include/MPEG1or2Demux.hh +StreamParser.hh: include/FramedSource.hh +MPEG1or2DemuxedElementaryStream.$(CPP): include/MPEG1or2DemuxedElementaryStream.hh +MPEGVideoStreamFramer.$(CPP): MPEGVideoStreamParser.hh +MPEGVideoStreamParser.hh: StreamParser.hh include/MPEGVideoStreamFramer.hh +include/MPEGVideoStreamFramer.hh: include/FramedFilter.hh +MPEG1or2VideoStreamFramer.$(CPP): include/MPEG1or2VideoStreamFramer.hh MPEGVideoStreamParser.hh +include/MPEG1or2VideoStreamFramer.hh: include/MPEGVideoStreamFramer.hh +MPEG1or2VideoStreamDiscreteFramer.$(CPP): include/MPEG1or2VideoStreamDiscreteFramer.hh +include/MPEG1or2VideoStreamDiscreteFramer.hh: include/MPEG1or2VideoStreamFramer.hh +MPEG4VideoStreamFramer.$(CPP): include/MPEG4VideoStreamFramer.hh MPEGVideoStreamParser.hh include/MPEG4LATMAudioRTPSource.hh +include/MPEG4VideoStreamFramer.hh: include/MPEGVideoStreamFramer.hh +MPEG4VideoStreamDiscreteFramer.$(CPP): include/MPEG4VideoStreamDiscreteFramer.hh +include/MPEG4VideoStreamDiscreteFramer.hh: include/MPEG4VideoStreamFramer.hh +H264or5VideoStreamFramer.$(CPP): include/H264or5VideoStreamFramer.hh MPEGVideoStreamParser.hh include/BitVector.hh +include/H264or5VideoStreamFramer.hh: include/MPEGVideoStreamFramer.hh 
+H264or5VideoStreamDiscreteFramer.$(CPP): include/H264or5VideoStreamDiscreteFramer.hh +include/H264or5VideoStreamDiscreteFramer.hh: include/H264or5VideoStreamFramer.hh +H264VideoStreamFramer.$(CPP): include/H264VideoStreamFramer.hh +include/H264VideoStreamFramer.hh: include/H264or5VideoStreamFramer.hh +H264VideoStreamDiscreteFramer.$(CPP): include/H264VideoStreamDiscreteFramer.hh +include/H264VideoStreamDiscreteFramer.hh: include/H264VideoStreamFramer.hh +H265VideoStreamFramer.$(CPP): include/H265VideoStreamFramer.hh +include/H265VideoStreamFramer.hh: include/H264or5VideoStreamFramer.hh +H265VideoStreamDiscreteFramer.$(CPP): include/H265VideoStreamDiscreteFramer.hh +include/H265VideoStreamDiscreteFramer.hh: include/H265VideoStreamFramer.hh +MPEGVideoStreamParser.$(CPP): MPEGVideoStreamParser.hh +MPEG1or2AudioStreamFramer.$(CPP): include/MPEG1or2AudioStreamFramer.hh StreamParser.hh MP3Internals.hh +include/MPEG1or2AudioStreamFramer.hh: include/FramedFilter.hh +MPEG1or2AudioRTPSource.$(CPP): include/MPEG1or2AudioRTPSource.hh +include/MPEG1or2AudioRTPSource.hh: include/MultiFramedRTPSource.hh +MPEG4LATMAudioRTPSource.$(CPP): include/MPEG4LATMAudioRTPSource.hh +include/MPEG4LATMAudioRTPSource.hh: include/MultiFramedRTPSource.hh +MPEG4ESVideoRTPSource.$(CPP): include/MPEG4ESVideoRTPSource.hh +include/MPEG4ESVideoRTPSource.hh: include/MultiFramedRTPSource.hh +MPEG4GenericRTPSource.$(CPP): include/MPEG4GenericRTPSource.hh include/BitVector.hh include/MPEG4LATMAudioRTPSource.hh +include/MPEG4GenericRTPSource.hh: include/MultiFramedRTPSource.hh +MP3FileSource.$(CPP): include/MP3FileSource.hh MP3StreamState.hh include/InputFile.hh +include/MP3FileSource.hh: include/FramedFileSource.hh +MP3StreamState.hh: MP3Internals.hh +MP3Internals.hh: include/BitVector.hh +MP3Transcoder.$(CPP): include/MP3ADU.hh include/MP3Transcoder.hh +include/MP3ADU.hh: include/FramedFilter.hh +include/MP3Transcoder.hh: include/MP3ADU.hh include/MP3ADUTranscoder.hh +include/MP3ADUTranscoder.hh: 
include/FramedFilter.hh +MP3ADU.$(CPP): include/MP3ADU.hh MP3ADUdescriptor.hh MP3Internals.hh +MP3ADUdescriptor.$(CPP): MP3ADUdescriptor.hh +MP3ADUinterleaving.$(CPP): include/MP3ADUinterleaving.hh MP3ADUdescriptor.hh +include/MP3ADUinterleaving.hh: include/FramedFilter.hh +MP3ADUTranscoder.$(CPP): include/MP3ADUTranscoder.hh MP3Internals.hh +MP3StreamState.$(CPP): MP3StreamState.hh include/InputFile.hh +MP3Internals.$(CPP): MP3InternalsHuffman.hh +MP3InternalsHuffman.hh: MP3Internals.hh +MP3InternalsHuffman.$(CPP): MP3InternalsHuffman.hh +MP3InternalsHuffmanTable.$(CPP): MP3InternalsHuffman.hh +MP3ADURTPSource.$(CPP): include/MP3ADURTPSource.hh MP3ADUdescriptor.hh +include/MP3ADURTPSource.hh: include/MultiFramedRTPSource.hh +MPEG1or2VideoRTPSource.$(CPP): include/MPEG1or2VideoRTPSource.hh +include/MPEG1or2VideoRTPSource.hh: include/MultiFramedRTPSource.hh +MPEG2TransportStreamMultiplexor.$(CPP): include/MPEG2TransportStreamMultiplexor.hh +include/MPEG2TransportStreamMultiplexor.hh: include/FramedSource.hh include/MPEG1or2Demux.hh +MPEG2TransportStreamFromPESSource.$(CPP): include/MPEG2TransportStreamFromPESSource.hh +include/MPEG2TransportStreamFromPESSource.hh: include/MPEG2TransportStreamMultiplexor.hh include/MPEG1or2DemuxedElementaryStream.hh +MPEG2TransportStreamFromESSource.$(CPP): include/MPEG2TransportStreamFromESSource.hh +include/MPEG2TransportStreamFromESSource.hh: include/MPEG2TransportStreamMultiplexor.hh +MPEG2TransportStreamFramer.$(CPP): include/MPEG2TransportStreamFramer.hh +include/MPEG2TransportStreamFramer.hh: include/FramedFilter.hh include/MPEG2TransportStreamIndexFile.hh +ADTSAudioFileSource.$(CPP): include/ADTSAudioFileSource.hh include/InputFile.hh +include/ADTSAudioFileSource.hh: include/FramedFileSource.hh +H263plusVideoRTPSource.$(CPP): include/H263plusVideoRTPSource.hh +include/H263plusVideoRTPSource.hh: include/MultiFramedRTPSource.hh +H263plusVideoStreamFramer.$(CPP): include/H263plusVideoStreamFramer.hh H263plusVideoStreamParser.hh 
+include/H263plusVideoStreamFramer.hh: include/FramedFilter.hh +H263plusVideoStreamParser.hh: StreamParser.hh +H263plusVideoStreamParser.$(CPP): H263plusVideoStreamParser.hh include/H263plusVideoStreamFramer.hh +AC3AudioStreamFramer.$(CPP): include/AC3AudioStreamFramer.hh StreamParser.hh +include/AC3AudioStreamFramer.hh: include/FramedFilter.hh +AC3AudioRTPSource.$(CPP): include/AC3AudioRTPSource.hh +include/AC3AudioRTPSource.hh: include/MultiFramedRTPSource.hh +DVVideoRTPSource.$(CPP): include/DVVideoRTPSource.hh +include/DVVideoRTPSource.hh: include/MultiFramedRTPSource.hh +JPEGVideoSource.$(CPP): include/JPEGVideoSource.hh +include/JPEGVideoSource.hh: include/FramedSource.hh +AMRAudioSource.$(CPP): include/AMRAudioSource.hh +include/AMRAudioSource.hh: include/FramedSource.hh +AMRAudioFileSource.$(CPP): include/AMRAudioFileSource.hh include/InputFile.hh +include/AMRAudioFileSource.hh: include/AMRAudioSource.hh +InputFile.$(CPP): include/InputFile.hh +StreamReplicator.$(CPP): include/StreamReplicator.hh +include/StreamReplicator.hh: include/FramedSource.hh +MediaSink.$(CPP): include/MediaSink.hh +include/MediaSink.hh: include/FramedSource.hh +FileSink.$(CPP): include/FileSink.hh include/OutputFile.hh +include/FileSink.hh: include/MediaSink.hh +BasicUDPSink.$(CPP): include/BasicUDPSink.hh +include/BasicUDPSink.hh: include/MediaSink.hh +AMRAudioFileSink.$(CPP): include/AMRAudioFileSink.hh include/AMRAudioSource.hh include/OutputFile.hh +include/AMRAudioFileSink.hh: include/FileSink.hh +H264or5VideoFileSink.$(CPP): include/H264or5VideoFileSink.hh include/H264VideoRTPSource.hh +include/H264or5VideoFileSink.hh: include/FileSink.hh +H264VideoFileSink.$(CPP): include/H264VideoFileSink.hh include/OutputFile.hh +include/H264VideoFileSink.hh: include/H264or5VideoFileSink.hh +H265VideoFileSink.$(CPP): include/H265VideoFileSink.hh include/OutputFile.hh +include/H265VideoFileSink.hh: include/H264or5VideoFileSink.hh +OggFileSink.$(CPP): include/OggFileSink.hh 
include/OutputFile.hh include/VorbisAudioRTPSource.hh include/MPEG2TransportStreamMultiplexor.hh include/FramedSource.hh +include/OggFileSink.hh: include/FileSink.hh +RTPSink.$(CPP): include/RTPSink.hh +include/RTPSink.hh: include/MediaSink.hh include/RTPInterface.hh +MultiFramedRTPSink.$(CPP): include/MultiFramedRTPSink.hh +include/MultiFramedRTPSink.hh: include/RTPSink.hh +AudioRTPSink.$(CPP): include/AudioRTPSink.hh +include/AudioRTPSink.hh: include/MultiFramedRTPSink.hh +VideoRTPSink.$(CPP): include/VideoRTPSink.hh +include/VideoRTPSink.hh: include/MultiFramedRTPSink.hh +TextRTPSink.$(CPP): include/TextRTPSink.hh +include/TextRTPSink.hh: include/MultiFramedRTPSink.hh +RTPInterface.$(CPP): include/RTPInterface.hh +MPEG1or2AudioRTPSink.$(CPP): include/MPEG1or2AudioRTPSink.hh +include/MPEG1or2AudioRTPSink.hh: include/AudioRTPSink.hh +MP3ADURTPSink.$(CPP): include/MP3ADURTPSink.hh +include/MP3ADURTPSink.hh: include/AudioRTPSink.hh +MPEG1or2VideoRTPSink.$(CPP): include/MPEG1or2VideoRTPSink.hh include/MPEG1or2VideoStreamFramer.hh +include/MPEG1or2VideoRTPSink.hh: include/VideoRTPSink.hh +MPEG4LATMAudioRTPSink.$(CPP): include/MPEG4LATMAudioRTPSink.hh +include/MPEG4LATMAudioRTPSink.hh: include/AudioRTPSink.hh +MPEG4GenericRTPSink.$(CPP): include/MPEG4GenericRTPSink.hh include/Locale.hh +include/MPEG4GenericRTPSink.hh: include/MultiFramedRTPSink.hh +MPEG4ESVideoRTPSink.$(CPP): include/MPEG4ESVideoRTPSink.hh include/MPEG4VideoStreamFramer.hh include/MPEG4LATMAudioRTPSource.hh +include/MPEG4ESVideoRTPSink.hh: include/VideoRTPSink.hh +H263plusVideoRTPSink.$(CPP): include/H263plusVideoRTPSink.hh +include/H263plusVideoRTPSink.hh: include/VideoRTPSink.hh +H264or5VideoRTPSink.$(CPP): include/H264or5VideoRTPSink.hh include/H264or5VideoStreamFramer.hh +include/H264or5VideoRTPSink.hh: include/VideoRTPSink.hh include/FramedFilter.hh +H264VideoRTPSink.$(CPP): include/H264VideoRTPSink.hh include/H264VideoStreamFramer.hh include/Base64.hh include/H264VideoRTPSource.hh 
+include/H264VideoRTPSink.hh: include/H264or5VideoRTPSink.hh +H265VideoRTPSink.$(CPP): include/H265VideoRTPSink.hh include/H265VideoStreamFramer.hh include/Base64.hh include/BitVector.hh include/H264VideoRTPSource.hh +include/H265VideoRTPSink.hh: include/H264or5VideoRTPSink.hh +DVVideoRTPSink.$(CPP): include/DVVideoRTPSink.hh +include/DVVideoRTPSink.hh: include/VideoRTPSink.hh include/DVVideoStreamFramer.hh +include/DVVideoStreamFramer.hh: include/FramedFilter.hh +AC3AudioRTPSink.$(CPP): include/AC3AudioRTPSink.hh +include/AC3AudioRTPSink.hh: include/AudioRTPSink.hh +VorbisAudioRTPSink.$(CPP): include/VorbisAudioRTPSink.hh include/Base64.hh include/VorbisAudioRTPSource.hh +include/VorbisAudioRTPSink.hh: include/AudioRTPSink.hh +TheoraVideoRTPSink.$(CPP): include/TheoraVideoRTPSink.hh include/Base64.hh include/VorbisAudioRTPSource.hh include/VorbisAudioRTPSink.hh +include/TheoraVideoRTPSink.hh: include/VideoRTPSink.hh +VP8VideoRTPSink.$(CPP): include/VP8VideoRTPSink.hh +include/VP8VideoRTPSink.hh: include/VideoRTPSink.hh +GSMAudioRTPSink.$(CPP): include/GSMAudioRTPSink.hh +include/GSMAudioRTPSink.hh: include/AudioRTPSink.hh +JPEGVideoRTPSink.$(CPP): include/JPEGVideoRTPSink.hh include/JPEGVideoSource.hh +include/JPEGVideoRTPSink.hh: include/VideoRTPSink.hh +SimpleRTPSink.$(CPP): include/SimpleRTPSink.hh +include/SimpleRTPSink.hh: include/MultiFramedRTPSink.hh +AMRAudioRTPSink.$(CPP): include/AMRAudioRTPSink.hh include/AMRAudioSource.hh +include/AMRAudioRTPSink.hh: include/AudioRTPSink.hh +T140TextRTPSink.$(CPP): include/T140TextRTPSink.hh +include/T140TextRTPSink.hh: include/TextRTPSink.hh include/FramedFilter.hh +TCPStreamSink.$(CPP): include/TCPStreamSink.hh include/RTSPCommon.hh +include/TCPStreamSink.hh: include/MediaSink.hh +OutputFile.$(CPP): include/OutputFile.hh +uLawAudioFilter.$(CPP): include/uLawAudioFilter.hh +include/uLawAudioFilter.hh: include/FramedFilter.hh +MPEG2IndexFromTransportStream.$(CPP): include/MPEG2IndexFromTransportStream.hh 
+include/MPEG2IndexFromTransportStream.hh: include/FramedFilter.hh +MPEG2TransportStreamIndexFile.$(CPP): include/MPEG2TransportStreamIndexFile.hh include/InputFile.hh +include/MPEG2TransportStreamIndexFile.hh: include/Media.hh +MPEG2TransportStreamTrickModeFilter.$(CPP): include/MPEG2TransportStreamTrickModeFilter.hh include/ByteStreamFileSource.hh +include/MPEG2TransportStreamTrickModeFilter.hh: include/FramedFilter.hh include/MPEG2TransportStreamIndexFile.hh +RTCP.$(CPP): include/RTCP.hh rtcp_from_spec.h +include/RTCP.hh: include/RTPSink.hh include/RTPSource.hh +rtcp_from_spec.$(C): rtcp_from_spec.h +RTSPServer.$(CPP): include/RTSPServer.hh include/RTSPCommon.hh include/RTSPRegisterSender.hh include/ProxyServerMediaSession.hh include/Base64.hh +include/RTSPServer.hh: include/ServerMediaSession.hh include/DigestAuthentication.hh include/RTSPCommon.hh +include/ServerMediaSession.hh: include/Media.hh include/FramedSource.hh include/RTPInterface.hh +RTSPClient.$(CPP): include/RTSPClient.hh include/RTSPCommon.hh include/Base64.hh include/Locale.hh include/ourMD5.hh +include/RTSPClient.hh: include/MediaSession.hh include/DigestAuthentication.hh +RTSPCommon.$(CPP): include/RTSPCommon.hh include/Locale.hh +RTSPServerSupportingHTTPStreaming.$(CPP): include/RTSPServerSupportingHTTPStreaming.hh include/RTSPCommon.hh +include/RTSPServerSupportingHTTPStreaming.hh: include/RTSPServer.hh include/ByteStreamMemoryBufferSource.hh include/TCPStreamSink.hh +RTSPRegisterSender.$(CPP): include/RTSPRegisterSender.hh +include/RTSPRegisterSender.hh: include/RTSPClient.hh +SIPClient.$(CPP): include/SIPClient.hh +include/SIPClient.hh: include/MediaSession.hh include/DigestAuthentication.hh +MediaSession.$(CPP): include/liveMedia.hh include/Locale.hh +include/MediaSession.hh: include/RTCP.hh include/FramedFilter.hh +ServerMediaSession.$(CPP): include/ServerMediaSession.hh +PassiveServerMediaSubsession.$(CPP): include/PassiveServerMediaSubsession.hh +include/PassiveServerMediaSubsession.hh: 
include/ServerMediaSession.hh include/RTPSink.hh include/RTCP.hh +OnDemandServerMediaSubsession.$(CPP): include/OnDemandServerMediaSubsession.hh +include/OnDemandServerMediaSubsession.hh: include/ServerMediaSession.hh include/RTPSink.hh include/BasicUDPSink.hh include/RTCP.hh +FileServerMediaSubsession.$(CPP): include/FileServerMediaSubsession.hh +include/FileServerMediaSubsession.hh: include/OnDemandServerMediaSubsession.hh +MPEG4VideoFileServerMediaSubsession.$(CPP): include/MPEG4VideoFileServerMediaSubsession.hh include/MPEG4ESVideoRTPSink.hh include/ByteStreamFileSource.hh include/MPEG4VideoStreamFramer.hh +include/MPEG4VideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +H264VideoFileServerMediaSubsession.$(CPP): include/H264VideoFileServerMediaSubsession.hh include/H264VideoRTPSink.hh include/ByteStreamFileSource.hh include/H264VideoStreamFramer.hh +include/H264VideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +H265VideoFileServerMediaSubsession.$(CPP): include/H265VideoFileServerMediaSubsession.hh include/H265VideoRTPSink.hh include/ByteStreamFileSource.hh include/H265VideoStreamFramer.hh +include/H265VideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +H263plusVideoFileServerMediaSubsession.$(CPP): include/H263plusVideoFileServerMediaSubsession.hh include/H263plusVideoRTPSink.hh include/ByteStreamFileSource.hh include/H263plusVideoStreamFramer.hh +include/H263plusVideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +WAVAudioFileServerMediaSubsession.$(CPP): include/WAVAudioFileServerMediaSubsession.hh include/WAVAudioFileSource.hh include/uLawAudioFilter.hh include/SimpleRTPSink.hh +include/WAVAudioFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +AMRAudioFileServerMediaSubsession.$(CPP): include/AMRAudioFileServerMediaSubsession.hh include/AMRAudioRTPSink.hh include/AMRAudioFileSource.hh +include/AMRAudioFileServerMediaSubsession.hh: 
include/FileServerMediaSubsession.hh +MP3AudioFileServerMediaSubsession.$(CPP): include/MP3AudioFileServerMediaSubsession.hh include/MPEG1or2AudioRTPSink.hh include/MP3ADURTPSink.hh include/MP3FileSource.hh include/MP3ADU.hh +include/MP3AudioFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh include/MP3ADUinterleaving.hh +MPEG1or2VideoFileServerMediaSubsession.$(CPP): include/MPEG1or2VideoFileServerMediaSubsession.hh include/MPEG1or2VideoRTPSink.hh include/ByteStreamFileSource.hh include/MPEG1or2VideoStreamFramer.hh +include/MPEG1or2VideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +MPEG1or2FileServerDemux.$(CPP): include/MPEG1or2FileServerDemux.hh include/MPEG1or2DemuxedServerMediaSubsession.hh include/ByteStreamFileSource.hh +include/MPEG1or2FileServerDemux.hh: include/ServerMediaSession.hh include/MPEG1or2DemuxedElementaryStream.hh +MPEG1or2DemuxedServerMediaSubsession.$(CPP): include/MPEG1or2DemuxedServerMediaSubsession.hh include/MPEG1or2AudioStreamFramer.hh include/MPEG1or2AudioRTPSink.hh include/MPEG1or2VideoStreamFramer.hh include/MPEG1or2VideoRTPSink.hh include/AC3AudioStreamFramer.hh include/AC3AudioRTPSink.hh include/ByteStreamFileSource.hh +include/MPEG1or2DemuxedServerMediaSubsession.hh: include/OnDemandServerMediaSubsession.hh include/MPEG1or2FileServerDemux.hh +MPEG2TransportFileServerMediaSubsession.$(CPP): include/MPEG2TransportFileServerMediaSubsession.hh include/SimpleRTPSink.hh +include/MPEG2TransportFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh include/MPEG2TransportStreamFramer.hh include/ByteStreamFileSource.hh include/MPEG2TransportStreamTrickModeFilter.hh include/MPEG2TransportStreamFromESSource.hh +ADTSAudioFileServerMediaSubsession.$(CPP): include/ADTSAudioFileServerMediaSubsession.hh include/ADTSAudioFileSource.hh include/MPEG4GenericRTPSink.hh +include/ADTSAudioFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +DVVideoFileServerMediaSubsession.$(CPP): 
include/DVVideoFileServerMediaSubsession.hh include/DVVideoRTPSink.hh include/ByteStreamFileSource.hh include/DVVideoStreamFramer.hh +include/DVVideoFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +AC3AudioFileServerMediaSubsession.$(CPP): include/AC3AudioFileServerMediaSubsession.hh include/AC3AudioRTPSink.hh include/ByteStreamFileSource.hh include/AC3AudioStreamFramer.hh +include/AC3AudioFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh +MPEG2TransportUDPServerMediaSubsession.$(CPP): include/MPEG2TransportUDPServerMediaSubsession.hh include/BasicUDPSource.hh include/SimpleRTPSource.hh include/MPEG2TransportStreamFramer.hh include/SimpleRTPSink.hh +include/MPEG2TransportUDPServerMediaSubsession.hh: include/OnDemandServerMediaSubsession.hh +ProxyServerMediaSession.$(CPP): include/liveMedia.hh include/RTSPCommon.hh +include/ProxyServerMediaSession.hh: include/ServerMediaSession.hh include/MediaSession.hh include/RTSPClient.hh +QuickTimeFileSink.$(CPP): include/QuickTimeFileSink.hh include/InputFile.hh include/OutputFile.hh include/QuickTimeGenericRTPSource.hh include/H263plusVideoRTPSource.hh include/MPEG4GenericRTPSource.hh include/MPEG4LATMAudioRTPSource.hh +include/QuickTimeFileSink.hh: include/MediaSession.hh +QuickTimeGenericRTPSource.$(CPP): include/QuickTimeGenericRTPSource.hh +include/QuickTimeGenericRTPSource.hh: include/MultiFramedRTPSource.hh +AVIFileSink.$(CPP): include/AVIFileSink.hh include/InputFile.hh include/OutputFile.hh +include/AVIFileSink.hh: include/MediaSession.hh +MatroskaFile.$(CPP): MatroskaFileParser.hh MatroskaDemuxedTrack.hh include/ByteStreamFileSource.hh include/H264VideoStreamDiscreteFramer.hh include/H265VideoStreamDiscreteFramer.hh include/MPEG1or2AudioRTPSink.hh include/MPEG4GenericRTPSink.hh include/AC3AudioRTPSink.hh include/SimpleRTPSink.hh include/VorbisAudioRTPSink.hh include/H264VideoRTPSink.hh include/H265VideoRTPSink.hh include/VP8VideoRTPSink.hh include/T140TextRTPSink.hh 
+MatroskaFileParser.hh: StreamParser.hh include/MatroskaFile.hh EBMLNumber.hh +include/MatroskaFile.hh: include/RTPSink.hh +MatroskaDemuxedTrack.hh: include/FramedSource.hh +MatroskaFileParser.$(CPP): MatroskaFileParser.hh MatroskaDemuxedTrack.hh include/ByteStreamFileSource.hh +EBMLNumber.$(CPP): EBMLNumber.hh +MatroskaDemuxedTrack.$(CPP): MatroskaDemuxedTrack.hh include/MatroskaFile.hh +MatroskaFileServerMediaSubsession.$(CPP): MatroskaFileServerMediaSubsession.hh MatroskaDemuxedTrack.hh include/FramedFilter.hh +MatroskaFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh include/MatroskaFileServerDemux.hh +MP3AudioMatroskaFileServerMediaSubsession.$(CPP): MP3AudioMatroskaFileServerMediaSubsession.hh MatroskaDemuxedTrack.hh +MP3AudioMatroskaFileServerMediaSubsession.hh: include/MP3AudioFileServerMediaSubsession.hh include/MatroskaFileServerDemux.hh +MatroskaFileServerDemux.$(CPP): include/MatroskaFileServerDemux.hh MP3AudioMatroskaFileServerMediaSubsession.hh MatroskaFileServerMediaSubsession.hh +include/MatroskaFileServerDemux.hh: include/ServerMediaSession.hh include/MatroskaFile.hh +OggFile.$(CPP): OggFileParser.hh OggDemuxedTrack.hh include/ByteStreamFileSource.hh include/VorbisAudioRTPSink.hh include/SimpleRTPSink.hh include/TheoraVideoRTPSink.hh +OggFileParser.hh: StreamParser.hh include/OggFile.hh +include/OggFile.hh: include/RTPSink.hh +OggDemuxedTrack.hh: include/FramedSource.hh +OggFileParser.$(CPP): OggFileParser.hh OggDemuxedTrack.hh +OggDemuxedTrack.$(CPP): OggDemuxedTrack.hh include/OggFile.hh +OggFileServerMediaSubsession.$(CPP): OggFileServerMediaSubsession.hh OggDemuxedTrack.hh include/FramedFilter.hh +OggFileServerMediaSubsession.hh: include/FileServerMediaSubsession.hh include/OggFileServerDemux.hh +OggFileServerDemux.$(CPP): include/OggFileServerDemux.hh OggFileServerMediaSubsession.hh +include/OggFileServerDemux.hh: include/ServerMediaSession.hh include/OggFile.hh +DarwinInjector.$(CPP): include/DarwinInjector.hh 
+include/DarwinInjector.hh: include/RTSPClient.hh include/RTCP.hh +BitVector.$(CPP): include/BitVector.hh +StreamParser.$(CPP): StreamParser.hh +DigestAuthentication.$(CPP): include/DigestAuthentication.hh include/ourMD5.hh +ourMD5.$(CPP): include/ourMD5.hh +Base64.$(CPP): include/Base64.hh +Locale.$(CPP): include/Locale.hh + +include/liveMedia.hh:: include/MPEG1or2AudioRTPSink.hh include/MP3ADURTPSink.hh include/MPEG1or2VideoRTPSink.hh include/MPEG4ESVideoRTPSink.hh include/BasicUDPSink.hh include/AMRAudioFileSink.hh include/H264VideoFileSink.hh include/H265VideoFileSink.hh include/OggFileSink.hh include/GSMAudioRTPSink.hh include/H263plusVideoRTPSink.hh include/H264VideoRTPSink.hh include/H265VideoRTPSink.hh include/DVVideoRTPSource.hh include/DVVideoRTPSink.hh include/DVVideoStreamFramer.hh include/H264VideoStreamFramer.hh include/H265VideoStreamFramer.hh include/H264VideoStreamDiscreteFramer.hh include/H265VideoStreamDiscreteFramer.hh include/JPEGVideoRTPSink.hh include/SimpleRTPSink.hh include/uLawAudioFilter.hh include/MPEG2IndexFromTransportStream.hh include/MPEG2TransportStreamTrickModeFilter.hh include/ByteStreamMultiFileSource.hh include/ByteStreamMemoryBufferSource.hh include/BasicUDPSource.hh include/SimpleRTPSource.hh include/MPEG1or2AudioRTPSource.hh include/MPEG4LATMAudioRTPSource.hh include/MPEG4LATMAudioRTPSink.hh include/MPEG4ESVideoRTPSource.hh include/MPEG4GenericRTPSource.hh include/MP3ADURTPSource.hh include/QCELPAudioRTPSource.hh include/AMRAudioRTPSource.hh include/JPEGVideoRTPSource.hh include/JPEGVideoSource.hh include/MPEG1or2VideoRTPSource.hh include/VorbisAudioRTPSource.hh include/TheoraVideoRTPSource.hh include/VP8VideoRTPSource.hh + +include/liveMedia.hh:: include/MPEG2TransportStreamFromPESSource.hh include/MPEG2TransportStreamFromESSource.hh include/MPEG2TransportStreamFramer.hh include/ADTSAudioFileSource.hh include/H261VideoRTPSource.hh include/H263plusVideoRTPSource.hh include/H264VideoRTPSource.hh include/H265VideoRTPSource.hh 
include/MP3FileSource.hh include/MP3ADU.hh include/MP3ADUinterleaving.hh include/MP3Transcoder.hh include/MPEG1or2DemuxedElementaryStream.hh include/MPEG1or2AudioStreamFramer.hh include/MPEG1or2VideoStreamDiscreteFramer.hh include/MPEG4VideoStreamDiscreteFramer.hh include/H263plusVideoStreamFramer.hh include/AC3AudioStreamFramer.hh include/AC3AudioRTPSource.hh include/AC3AudioRTPSink.hh include/VorbisAudioRTPSink.hh include/TheoraVideoRTPSink.hh include/VP8VideoRTPSink.hh include/MPEG4GenericRTPSink.hh include/DeviceSource.hh include/AudioInputDevice.hh include/WAVAudioFileSource.hh include/StreamReplicator.hh include/RTSPRegisterSender.hh + +include/liveMedia.hh:: include/RTSPServerSupportingHTTPStreaming.hh include/RTSPClient.hh include/SIPClient.hh include/QuickTimeFileSink.hh include/QuickTimeGenericRTPSource.hh include/AVIFileSink.hh include/PassiveServerMediaSubsession.hh include/MPEG4VideoFileServerMediaSubsession.hh include/H264VideoFileServerMediaSubsession.hh include/H265VideoFileServerMediaSubsession.hh include/WAVAudioFileServerMediaSubsession.hh include/AMRAudioFileServerMediaSubsession.hh include/AMRAudioFileSource.hh include/AMRAudioRTPSink.hh include/T140TextRTPSink.hh include/TCPStreamSink.hh include/MP3AudioFileServerMediaSubsession.hh include/MPEG1or2VideoFileServerMediaSubsession.hh include/MPEG1or2FileServerDemux.hh include/MPEG2TransportFileServerMediaSubsession.hh include/H263plusVideoFileServerMediaSubsession.hh include/ADTSAudioFileServerMediaSubsession.hh include/DVVideoFileServerMediaSubsession.hh include/AC3AudioFileServerMediaSubsession.hh include/MPEG2TransportUDPServerMediaSubsession.hh include/MatroskaFileServerDemux.hh include/OggFileServerDemux.hh include/ProxyServerMediaSession.hh include/DarwinInjector.hh + +clean: + -rm -rf *.$(OBJ) $(ALL) core *.core *~ include/*~ + +install: install1 $(INSTALL2) +install1: $(LIVEMEDIA_LIB) + install -d $(DESTDIR)$(PREFIX)/include/liveMedia $(DESTDIR)$(LIBDIR) + install -m 644 include/*.hh 
$(DESTDIR)$(PREFIX)/include/liveMedia + install -m 644 $(LIVEMEDIA_LIB) $(DESTDIR)$(LIBDIR) +install_shared_libraries: $(LIVEMEDIA_LIB) + ln -s $(NAME).$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/$(NAME).$(SHORT_LIB_SUFFIX) + ln -s $(NAME).$(LIB_SUFFIX) $(DESTDIR)$(LIBDIR)/$(NAME).so + +##### Any additional, platform-specific rules come here: diff --git a/AnyCore/lib_rtsp/liveMedia/liveMedia.vcproj b/AnyCore/lib_rtsp/liveMedia/liveMedia.vcproj new file mode 100644 index 0000000..0bef9ec --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/liveMedia.vcproj @@ -0,0 +1,894 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/AnyCore/lib_rtsp/liveMedia/liveMedia.vcproj.Eric-PC.Eric.user b/AnyCore/lib_rtsp/liveMedia/liveMedia.vcproj.Eric-PC.Eric.user new file mode 100644 index 0000000..c3c730f --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/liveMedia.vcproj.Eric-PC.Eric.user @@ -0,0 +1,65 @@ + + + + + + + + + + + diff --git a/AnyCore/lib_rtsp/liveMedia/liveMedia.vcxproj b/AnyCore/lib_rtsp/liveMedia/liveMedia.vcxproj new file mode 100644 index 0000000..4c9c730 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/liveMedia.vcxproj @@ -0,0 +1,275 @@ +锘 + + + + Debug + Win32 + + + Release + Win32 + + + + 
{A00FF0DF-A6A2-4B4B-8B21-1ED7C32ACDA3} + liveMedia + Win32Proj + + + + StaticLibrary + v140 + Unicode + true + + + StaticLibrary + v140 + Unicode + + + + + + + + + + + + + <_ProjectFileVersion>14.0.25431.1 + + + $(SolutionDir)$(Configuration)\ + $(Configuration)\ + + + $(SolutionDir)$(Configuration)\ + $(Configuration)\ + + + + Disabled + ./include;../groupsock/include;../UsageEnvironment/include;%(AdditionalIncludeDirectories) + WIN32;_DEBUG;_LIB;%(PreprocessorDefinitions) + true + EnableFastChecks + MultiThreadedDebugDLL + + $(IntDir) + $(IntDir)vc90.pdb + $(IntDir) + Level3 + EditAndContinue + + + + + + + + MaxSpeed + true + ./include;../groupsock/include;../UsageEnvironment/include;%(AdditionalIncludeDirectories) + WIN32;NDEBUG;_LIB;%(PreprocessorDefinitions) + MultiThreadedDLL + true + + Level3 + ProgramDatabase + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/AnyCore/lib_rtsp/liveMedia/liveMedia.vcxproj.filters b/AnyCore/lib_rtsp/liveMedia/liveMedia.vcxproj.filters new file mode 100644 index 0000000..62abbf3 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/liveMedia.vcxproj.filters @@ -0,0 +1,559 @@ +锘 + + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + {93995380-89BD-4b04-88EB-625FBE52EBFB} + h;hpp;hxx;hm;inl;inc;xsd + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav + + + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 
婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + 婧愭枃浠 + + + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + 澶存枃浠 + + + + + + + \ No newline at end of file diff --git a/AnyCore/lib_rtsp/liveMedia/ourMD5.cpp b/AnyCore/lib_rtsp/liveMedia/ourMD5.cpp new file mode 100644 index 0000000..15463e9 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/ourMD5.cpp @@ 
-0,0 +1,325 @@ +/********** +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the +Free Software Foundation; either version 2.1 of the License, or (at your +option) any later version. (See .) + +This library is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for +more details. + +You should have received a copy of the GNU Lesser General Public License +along with this library; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +**********/ +// "liveMedia" +// Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. +// Because MD5 may not be implemented (at least, with the same interface) on all systems, +// we have our own implementation. +// Implementation + +#include "ourMD5.hh" +#include // for u_int32_t, u_int64_t +#include + +#define DIGEST_SIZE_IN_BYTES 16 +#define DIGEST_SIZE_IN_HEX_DIGITS (2*DIGEST_SIZE_IN_BYTES) +#define DIGEST_SIZE_AS_STRING (DIGEST_SIZE_IN_HEX_DIGITS+1) + +// The state of a MD5 computation in progress: + +class MD5Context { +public: + MD5Context(); + ~MD5Context(); + + void addData(unsigned char const* inputData, unsigned inputDataSize); + void end(char* outputDigest /*must point to an array of size DIGEST_SIZE_AS_STRING*/); + void finalize(unsigned char* outputDigestInBytes); + // Like "end()", except that the argument is a byte array, of size DIGEST_SIZE_IN_BYTES. + // This function is used to implement "end()". 
+ +private: + void zeroize(); // to remove potentially sensitive information + void transform64Bytes(unsigned char const block[64]); // does the actual MD5 transform + +private: + u_int32_t fState[4]; // ABCD + u_int64_t fBitCount; // number of bits, modulo 2^64 + unsigned char fWorkingBuffer[64]; +}; + +char* our_MD5Data(unsigned char const* data, unsigned dataSize, char* outputDigest) { + MD5Context ctx; + + ctx.addData(data, dataSize); + + if (outputDigest == NULL) outputDigest = new char[DIGEST_SIZE_AS_STRING]; + ctx.end(outputDigest); + + return outputDigest; +} + +unsigned char* our_MD5DataRaw(unsigned char const* data, unsigned dataSize, + unsigned char* outputDigest) { + MD5Context ctx; + + ctx.addData(data, dataSize); + + if (outputDigest == NULL) outputDigest = new unsigned char[DIGEST_SIZE_IN_BYTES]; + ctx.finalize(outputDigest); + + return outputDigest; +} + + +////////// MD5Context implementation ////////// + +MD5Context::MD5Context() + : fBitCount(0) { + // Initialize with magic constants: + fState[0] = 0x67452301; + fState[1] = 0xefcdab89; + fState[2] = 0x98badcfe; + fState[3] = 0x10325476; +} + +MD5Context::~MD5Context() { + zeroize(); +} + +void MD5Context::addData(unsigned char const* inputData, unsigned inputDataSize) { + // Begin by noting how much of our 64-byte working buffer remains unfilled: + u_int64_t const byteCount = fBitCount>>3; + unsigned bufferBytesInUse = (unsigned)(byteCount&0x3F); + unsigned bufferBytesRemaining = 64 - bufferBytesInUse; + + // Then update our bit count: + fBitCount += inputDataSize<<3; + + unsigned i = 0; + if (inputDataSize >= bufferBytesRemaining) { + // We have enough input data to do (64-byte) MD5 transforms. + // Do this now, starting with a transform on our working buffer, then with + // (as many as possible) transforms on rest of the input data. 
+ + memcpy((unsigned char*)&fWorkingBuffer[bufferBytesInUse], (unsigned char*)inputData, bufferBytesRemaining); + transform64Bytes(fWorkingBuffer); + bufferBytesInUse = 0; + + for (i = bufferBytesRemaining; i + 63 < inputDataSize; i += 64) { + transform64Bytes(&inputData[i]); + } + } + + // Copy any remaining (and currently un-transformed) input data into our working buffer: + if (i < inputDataSize) { + memcpy((unsigned char*)&fWorkingBuffer[bufferBytesInUse], (unsigned char*)&inputData[i], inputDataSize - i); + } +} + +void MD5Context::end(char* outputDigest) { + unsigned char digestInBytes[DIGEST_SIZE_IN_BYTES]; + finalize(digestInBytes); + + // Convert the digest from bytes (binary) to hex digits: + static char const hex[]="0123456789abcdef"; + unsigned i; + for (i = 0; i < DIGEST_SIZE_IN_BYTES; ++i) { + outputDigest[2*i] = hex[digestInBytes[i] >> 4]; + outputDigest[2*i+1] = hex[digestInBytes[i] & 0x0F]; + } + outputDigest[2*i] = '\0'; +} + +// Routines that unpack 32 and 64-bit values into arrays of bytes (in little-endian order). +// (These are used to implement "finalize()".) 
+ +static void unpack32(unsigned char out[4], u_int32_t in) { + for (unsigned i = 0; i < 4; ++i) { + out[i] = (unsigned char)((in>>(8*i))&0xFF); + } +} + +static void unpack64(unsigned char out[8], u_int64_t in) { + for (unsigned i = 0; i < 8; ++i) { + out[i] = (unsigned char)((in>>(8*i))&0xFF); + } +} + +static unsigned char const PADDING[64] = { + 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +}; + +void MD5Context::finalize(unsigned char* outputDigestInBytes) { + // Unpack our bit count: + unsigned char bitCountInBytes[8]; + unpack64(bitCountInBytes, fBitCount); + + // Before 'finalizing', make sure that we transform any remaining bytes in our working buffer: + u_int64_t const byteCount = fBitCount>>3; + unsigned bufferBytesInUse = (unsigned)(byteCount&0x3F); + unsigned numPaddingBytes + = (bufferBytesInUse < 56) ? (56 - bufferBytesInUse) : (64 + 56 - bufferBytesInUse); + addData(PADDING, numPaddingBytes); + + addData(bitCountInBytes, 8); + + // Unpack our 'state' into the output digest: + unpack32(&outputDigestInBytes[0], fState[0]); + unpack32(&outputDigestInBytes[4], fState[1]); + unpack32(&outputDigestInBytes[8], fState[2]); + unpack32(&outputDigestInBytes[12], fState[3]); + + zeroize(); +} + +void MD5Context::zeroize() { + fState[0] = fState[1] = fState[2] = fState[3] = 0; + fBitCount = 0; + for (unsigned i = 0; i < 64; ++i) fWorkingBuffer[i] = 0; +} + + +////////// Implementation of the MD5 transform ("MD5Context::transform64Bytes()") ////////// + +// Constants for the transform: +#define S11 7 +#define S12 12 +#define S13 17 +#define S14 22 +#define S21 5 +#define S22 9 +#define S23 14 +#define S24 20 +#define S31 4 +#define S32 11 +#define S33 16 +#define S34 23 +#define S41 6 +#define S42 10 +#define S43 15 +#define S44 21 + +// Basic MD5 functions: +#define F(x, y, z) (((x) & (y)) | ((~x) & (z))) 
+#define G(x, y, z) (((x) & (z)) | ((y) & (~z))) +#define H(x, y, z) ((x) ^ (y) ^ (z)) +#define I(x, y, z) ((y) ^ ((x) | (~z))) + +// Rotate "x" left "n" bits: +#define ROTATE_LEFT(x, n) (((x) << (n)) | ((x) >> (32-(n)))) + +// Other transforms: +#define FF(a, b, c, d, x, s, ac) { \ + (a) += F((b), (c), (d)) + (x) + (u_int32_t)(ac); \ + (a) = ROTATE_LEFT((a), (s)); \ + (a) += (b); \ +} +#define GG(a, b, c, d, x, s, ac) { \ + (a) += G((b), (c), (d)) + (x) + (u_int32_t)(ac); \ + (a) = ROTATE_LEFT((a), (s)); \ + (a) += (b); \ +} +#define HH(a, b, c, d, x, s, ac) { \ + (a) += H((b), (c), (d)) + (x) + (u_int32_t)(ac); \ + (a) = ROTATE_LEFT((a), (s)); \ + (a) += (b); \ +} +#define II(a, b, c, d, x, s, ac) { \ + (a) += I((b), (c), (d)) + (x) + (u_int32_t)(ac); \ + (a) = ROTATE_LEFT((a), (s)); \ + (a) += (b); \ +} + +void MD5Context::transform64Bytes(unsigned char const block[64]) { + u_int32_t a = fState[0], b = fState[1], c = fState[2], d = fState[3]; + + // Begin by packing "block" into an array ("x") of 16 32-bit values (in little-endian order): + u_int32_t x[16]; + for (unsigned i = 0, j = 0; i < 16; ++i, j += 4) { + x[i] = ((u_int32_t)block[j]) | (((u_int32_t)block[j+1]) << 8) | (((u_int32_t)block[j+2]) << 16) | (((u_int32_t)block[j+3]) << 24); + } + + // Now, perform the transform on the array "x": + + // Round 1 + FF(a, b, c, d, x[0], S11, 0xd76aa478); // 1 + FF(d, a, b, c, x[1], S12, 0xe8c7b756); // 2 + FF(c, d, a, b, x[2], S13, 0x242070db); // 3 + FF(b, c, d, a, x[3], S14, 0xc1bdceee); // 4 + FF(a, b, c, d, x[4], S11, 0xf57c0faf); // 5 + FF(d, a, b, c, x[5], S12, 0x4787c62a); // 6 + FF(c, d, a, b, x[6], S13, 0xa8304613); // 7 + FF(b, c, d, a, x[7], S14, 0xfd469501); // 8 + FF(a, b, c, d, x[8], S11, 0x698098d8); // 9 + FF(d, a, b, c, x[9], S12, 0x8b44f7af); // 10 + FF(c, d, a, b, x[10], S13, 0xffff5bb1); // 11 + FF(b, c, d, a, x[11], S14, 0x895cd7be); // 12 + FF(a, b, c, d, x[12], S11, 0x6b901122); // 13 + FF(d, a, b, c, x[13], S12, 0xfd987193); // 14 + FF(c, d, 
a, b, x[14], S13, 0xa679438e); // 15 + FF(b, c, d, a, x[15], S14, 0x49b40821); // 16 + + // Round 2 + GG(a, b, c, d, x[1], S21, 0xf61e2562); // 17 + GG(d, a, b, c, x[6], S22, 0xc040b340); // 18 + GG(c, d, a, b, x[11], S23, 0x265e5a51); // 19 + GG(b, c, d, a, x[0], S24, 0xe9b6c7aa); // 20 + GG(a, b, c, d, x[5], S21, 0xd62f105d); // 21 + GG(d, a, b, c, x[10], S22, 0x2441453); // 22 + GG(c, d, a, b, x[15], S23, 0xd8a1e681); // 23 + GG(b, c, d, a, x[4], S24, 0xe7d3fbc8); // 24 + GG(a, b, c, d, x[9], S21, 0x21e1cde6); // 25 + GG(d, a, b, c, x[14], S22, 0xc33707d6); // 26 + GG(c, d, a, b, x[3], S23, 0xf4d50d87); // 27 + GG(b, c, d, a, x[8], S24, 0x455a14ed); // 28 + GG(a, b, c, d, x[13], S21, 0xa9e3e905); // 29 + GG(d, a, b, c, x[2], S22, 0xfcefa3f8); // 30 + GG(c, d, a, b, x[7], S23, 0x676f02d9); // 31 + GG(b, c, d, a, x[12], S24, 0x8d2a4c8a); // 32 + + // Round 3 + HH(a, b, c, d, x[5], S31, 0xfffa3942); // 33 + HH(d, a, b, c, x[8], S32, 0x8771f681); // 34 + HH(c, d, a, b, x[11], S33, 0x6d9d6122); // 35 + HH(b, c, d, a, x[14], S34, 0xfde5380c); // 36 + HH(a, b, c, d, x[1], S31, 0xa4beea44); // 37 + HH(d, a, b, c, x[4], S32, 0x4bdecfa9); // 38 + HH(c, d, a, b, x[7], S33, 0xf6bb4b60); // 39 + HH(b, c, d, a, x[10], S34, 0xbebfbc70); // 40 + HH(a, b, c, d, x[13], S31, 0x289b7ec6); // 41 + HH(d, a, b, c, x[0], S32, 0xeaa127fa); // 42 + HH(c, d, a, b, x[3], S33, 0xd4ef3085); // 43 + HH(b, c, d, a, x[6], S34, 0x4881d05); // 44 + HH(a, b, c, d, x[9], S31, 0xd9d4d039); // 45 + HH(d, a, b, c, x[12], S32, 0xe6db99e5); // 46 + HH(c, d, a, b, x[15], S33, 0x1fa27cf8); // 47 + HH(b, c, d, a, x[2], S34, 0xc4ac5665); // 48 + + // Round 4 + II(a, b, c, d, x[0], S41, 0xf4292244); // 49 + II(d, a, b, c, x[7], S42, 0x432aff97); // 50 + II(c, d, a, b, x[14], S43, 0xab9423a7); // 51 + II(b, c, d, a, x[5], S44, 0xfc93a039); // 52 + II(a, b, c, d, x[12], S41, 0x655b59c3); // 53 + II(d, a, b, c, x[3], S42, 0x8f0ccc92); // 54 + II(c, d, a, b, x[10], S43, 0xffeff47d); // 55 + II(b, c, d, a, x[1], 
S44, 0x85845dd1); // 56 + II(a, b, c, d, x[8], S41, 0x6fa87e4f); // 57 + II(d, a, b, c, x[15], S42, 0xfe2ce6e0); // 58 + II(c, d, a, b, x[6], S43, 0xa3014314); // 59 + II(b, c, d, a, x[13], S44, 0x4e0811a1); // 60 + II(a, b, c, d, x[4], S41, 0xf7537e82); // 61 + II(d, a, b, c, x[11], S42, 0xbd3af235); // 62 + II(c, d, a, b, x[2], S43, 0x2ad7d2bb); // 63 + II(b, c, d, a, x[9], S44, 0xeb86d391); // 64 + + fState[0] += a; fState[1] += b; fState[2] += c; fState[3] += d; + + // Zeroize sensitive information. + for (unsigned k = 0; k < 16; ++k) x[k] = 0; +} diff --git a/AnyCore/lib_rtsp/liveMedia/rtcp_from_spec.c b/AnyCore/lib_rtsp/liveMedia/rtcp_from_spec.c new file mode 100644 index 0000000..a828ec7 --- /dev/null +++ b/AnyCore/lib_rtsp/liveMedia/rtcp_from_spec.c @@ -0,0 +1,289 @@ +/* RTCP code taken directly from the most recent RTP specification: + * RFC 3550 + * Implementation + */ + +#include "rtcp_from_spec.h" + +/***** + +A.7 Computing the RTCP Transmission Interval + + The following functions implement the RTCP transmission and reception + rules described in Section 6.2. These rules are coded in several + functions: + + o rtcp_interval() computes the deterministic calculated + interval, measured in seconds. The parameters are defined in + Section 6.3. + + o OnExpire() is called when the RTCP transmission timer expires. + + o OnReceive() is called whenever an RTCP packet is received. + + Both OnExpire() and OnReceive() have event e as an argument. This is + the next scheduled event for that participant, either an RTCP report + or a BYE packet. It is assumed that the following functions are + available: + + o Schedule(time t, event e) schedules an event e to occur at + time t. When time t arrives, the function OnExpire is called + with e as an argument. + + o Reschedule(time t, event e) reschedules a previously scheduled + event e for time t. + + o SendRTCPReport(event e) sends an RTCP report. + + o SendBYEPacket(event e) sends a BYE packet. 
+ + o TypeOfEvent(event e) returns EVENT_BYE if the event being + processed is for a BYE packet to be sent, else it returns + EVENT_REPORT. + + o PacketType(p) returns PACKET_RTCP_REPORT if packet p is an + RTCP report (not BYE), PACKET_BYE if its a BYE RTCP packet, + and PACKET_RTP if its a regular RTP data packet. + + o ReceivedPacketSize() and SentPacketSize() return the size of + the referenced packet in octets. + + o NewMember(p) returns a 1 if the participant who sent packet p + is not currently in the member list, 0 otherwise. Note this + function is not sufficient for a complete implementation + because each CSRC identifier in an RTP packet and each SSRC in + a BYE packet should be processed. + + o NewSender(p) returns a 1 if the participant who sent packet p + is not currently in the sender sublist of the member list, 0 + otherwise. + + o AddMember() and RemoveMember() to add and remove participants + from the member list. + + o AddSender() and RemoveSender() to add and remove participants + from the sender sublist of the member list. +*****/ + + + double rtcp_interval(int members, + int senders, + double rtcp_bw, + int we_sent, + double avg_rtcp_size, + int initial) + { + /* + * Minimum average time between RTCP packets from this site (in + * seconds). This time prevents the reports from `clumping' when + * sessions are small and the law of large numbers isn't helping + * to smooth out the traffic. It also keeps the report interval + * from becoming ridiculously small during transient outages like + * a network partition. + */ + double const RTCP_MIN_TIME = 5.; + /* + * Fraction of the RTCP bandwidth to be shared among active + * senders. (This fraction was chosen so that in a typical + * session with one or two active senders, the computed report + * time would be roughly equal to the minimum report time so that + * we don't unnecessarily slow down receiver reports.) The + * receiver fraction must be 1 - the sender fraction. 
+ */ + double const RTCP_SENDER_BW_FRACTION = 0.25; + double const RTCP_RCVR_BW_FRACTION = (1-RTCP_SENDER_BW_FRACTION); + /* + * To compensate for "unconditional reconsideration" converging to a + * value below the intended average. + */ + double const COMPENSATION = 2.71828 - 1.5; + + double t; /* interval */ + double rtcp_min_time = RTCP_MIN_TIME; + int n; /* no. of members for computation */ + + /* + * Very first call at application start-up uses half the min + * delay for quicker notification while still allowing some time + * before reporting for randomization and to learn about other + * sources so the report interval will converge to the correct + * interval more quickly. + */ + if (initial) { + rtcp_min_time /= 2; + } + + /* + * If there were active senders, give them at least a minimum + * share of the RTCP bandwidth. Otherwise all participants share + * the RTCP bandwidth equally. + */ + n = members; + if (senders > 0 && senders < members * RTCP_SENDER_BW_FRACTION) { + if (we_sent) { + rtcp_bw *= RTCP_SENDER_BW_FRACTION; + n = senders; + } else { + rtcp_bw *= RTCP_RCVR_BW_FRACTION; + n -= senders; + } + } + + /* + * The effective number of sites times the average packet size is + * the total number of octets sent when each site sends a report. + * Dividing this by the effective bandwidth gives the time + * interval over which those packets must be sent in order to + * meet the bandwidth target, with a minimum enforced. In that + * time interval we send one report so this time is also our + * average time between reports. + */ + t = avg_rtcp_size * n / rtcp_bw; + if (t < rtcp_min_time) t = rtcp_min_time; + + /* + * To avoid traffic bursts from unintended synchronization with + * other sites, we then pick our actual next report interval as a + * random number uniformly distributed between 0.5*t and 1.5*t. 
+ */ + t = t * (drand48() + 0.5); + t = t / COMPENSATION; + return t; + } + + void OnExpire(event e, + int members, + int senders, + double rtcp_bw, + int we_sent, + double *avg_rtcp_size, + int *initial, + time_tp tc, + time_tp *tp, + int *pmembers) + { + /* This function is responsible for deciding whether to send + * an RTCP report or BYE packet now, or to reschedule transmission. + * It is also responsible for updating the pmembers, initial, tp, + * and avg_rtcp_size state variables. This function should be called + * upon expiration of the event timer used by Schedule(). */ + + double t; /* Interval */ + double tn; /* Next transmit time */ + + /* In the case of a BYE, we use "unconditional reconsideration" to + * reschedule the transmission of the BYE if necessary */ + + if (TypeOfEvent(e) == EVENT_BYE) { + t = rtcp_interval(members, + senders, + rtcp_bw, + we_sent, + *avg_rtcp_size, + *initial); + tn = *tp + t; + if (tn <= tc) { + SendBYEPacket(e); + exit(1); + } else { + Schedule(tn, e); + } + + } else if (TypeOfEvent(e) == EVENT_REPORT) { + t = rtcp_interval(members, + senders, + rtcp_bw, + we_sent, + *avg_rtcp_size, + *initial); + tn = *tp + t; + + if (tn <= tc) { + SendRTCPReport(e); + *avg_rtcp_size = (1./16.)*SentPacketSize(e) + + (15./16.)*(*avg_rtcp_size); + *tp = tc; + + /* We must redraw the interval. 
Don't reuse the + one computed above, since its not actually + distributed the same, as we are conditioned + on it being small enough to cause a packet to + be sent */ + + t = rtcp_interval(members, + senders, + rtcp_bw, + we_sent, + *avg_rtcp_size, + *initial); + + Schedule(t+tc,e); + *initial = 0; + } else { + Schedule(tn, e); + } + *pmembers = members; + } + } + + + void OnReceive(packet p, + event e, + int *members, + int *pmembers, + int *senders, + double *avg_rtcp_size, + double *tp, + double tc, + double tn) + { + /* What we do depends on whether we have left the group, and + * are waiting to send a BYE (TypeOfEvent(e) == EVENT_BYE) or + * an RTCP report. p represents the packet that was just received. */ + + if (PacketType(p) == PACKET_RTCP_REPORT) { + if (NewMember(p) && (TypeOfEvent(e) == EVENT_REPORT)) { + AddMember(p); + *members += 1; + } + *avg_rtcp_size = (1./16.)*ReceivedPacketSize(p) + + (15./16.)*(*avg_rtcp_size); + } else if (PacketType(p) == PACKET_RTP) { + if (NewMember(p) && (TypeOfEvent(e) == EVENT_REPORT)) { + AddMember(p); + *members += 1; + } + if (NewSender(p) && (TypeOfEvent(e) == EVENT_REPORT)) { + AddSender(p); + *senders += 1; + } + } else if (PacketType(p) == PACKET_BYE) { + *avg_rtcp_size = (1./16.)*ReceivedPacketSize(p) + + (15./16.)*(*avg_rtcp_size); + + if (TypeOfEvent(e) == EVENT_REPORT) { + if (NewSender(p) == FALSE) { + RemoveSender(p); + *senders -= 1; + } + + if (NewMember(p) == FALSE) { + RemoveMember(p); + *members -= 1; + } + + if(*members < *pmembers) { + tn = tc + (((double) *members)/(*pmembers))*(tn - tc); + *tp = tc - (((double) *members)/(*pmembers))*(tc - *tp); + + /* Reschedule the next report for time tn */ + + Reschedule(tn, e); + *pmembers = *members; + } + + } else if (TypeOfEvent(e) == EVENT_BYE) { + *members += 1; + } + } + } diff --git a/README.md b/README.md index 58fe223..d1803d1 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,7 @@ **8锛屽疄鐢ㄤ富涔夛紝閭d簺浠涔堝潙浠涔堜紭鍖栫瓑姒傚康璇锋悳绱㈢浉鍏虫枃绔**
**9锛孫penH264杞欢缂栫爜锛孎FMpeg杞欢瑙g爜锛孎AAC/FAAD杞欢缂栬В鐮侊紝閫傞厤涓嶅悓绯荤粺鐨勭‖浠剁紪瑙g爜缁熺粺鍖呭惈**
**10锛屾敮鎸丼RS銆丯ginx-RTMP绛夋爣鍑哛TMP鏈嶅姟锛涘悓鏃舵敮鎸佸悇澶DN鍘傚晢鐨勬帴鍏**
+**11锛屾洿澶氬崗璁敮鎸; ???璁″垝鏀寔rtsp鎾斁???**
## 椤圭洰灞曠ず ![Chat](http://git.oschina.net/dynctm/AnyRTC-RTMP/raw/master/Pictures/IMG_0779.png) ![Chat](http://git.oschina.net/dynctm/AnyRTC-RTMP/raw/master/Pictures/IMG_0777.png) ![Chat](http://git.oschina.net/dynctm/AnyRTC-RTMP/raw/master/Pictures/IMG_0778.png) @@ -21,42 +22,43 @@ # 涓轰粈涔堝紑婧愶紵 鏈叕鍙告娆″紑婧愮Щ鍔ㄧ洿鎾В鍐虫柟妗堢殑鏍规湰鐩殑锛氬洖棣堝紑婧愮ぞ鍖猴紝鐗瑰埆鏄疭RS鍜學ebRTC椤圭洰锛屽ぇ瀹朵篃鍙互鐪嬪埌鏈寮婧愰」鐩殑妗嗘灦浣跨敤浜哤ebRTC锛孯TMP鍗忚閮ㄥ垎浣跨敤鐨勬槸srs_librtmp锛涜繖涓や釜寮婧愰」鐩彲浠ヨ鍦ㄦ祦濯掍綋棰嗗煙缁欎簣浜嗗ぇ瀹跺お澶氾紝鎵浠ラ傚綋鐨勫洖棣堟槸鐞嗘墍搴斿綋銆 -##缂栬瘧鐜 +##缂栬瘧鐜
Android Studio銆丯DK(鏀寔Windows銆丩inux銆丮acOS绛堿ndroid寮鍙戠幆澧)
XCode
VS2015
-##鏀寔鐨勭郴缁熷钩鍙 +##鏀寔鐨勭郴缁熷钩鍙
Android 4.0鍙婁互涓
iOS 8.0鍙婁互涓
Windows 7鍙婁互涓
-##鏀寔鐨凜PU鏋舵瀯 +##鏀寔鐨凜PU鏋舵瀯
Android armv7 arm64
iOS armv7 armv7s arm64
Windows win32銆亁64
-##绗笁鏂瑰簱鐗堟湰 +##绗笁鏂瑰簱鐗堟湰
libfaac 1.28
libfaad2 2.7
ffmpeg 3.0
libyuv newest
openh264 1.6.0
-##鎶鏈氦娴 -鐩存挱鎶鏈疩Q缇わ細554714720
+##鎶鏈氦娴
+鐩存挱鎶鏈疩Q缇わ細554714720(鍗冲皢婊″憳)
杩為害鎶鏈疩Q缇わ細580477436(鍗冲皢婊″憳)
-##鎺堟潈澹版槑 +##鎺堟潈澹版槑
鏈寮婧愬湪鏈巿鏉冩儏鍐典笅涓嶅彲搴旂敤浜庝换浣曠殑闂簮鍟嗕笟椤圭洰锛屽叿浣撹鍙傜収GNU License涓殑澹版槑銆
鍜ㄨQQ:2628840833
鑱旂郴鐢佃瘽:021-65650071
-##鐗堟潈澹版槑 +##鐗堟潈澹版槑
鑻ユ湰寮婧愰」鐩秹鍙婂埌鍏朵粬杞欢鐨勭増鏉冿紝璇峰強鏃惰仈绯讳綔鑰呰繘琛屼慨姝c
-##鎹愯禒 +##鎹愯禒
鏈」鐩笉鎺ュ彈浠讳綍褰㈠紡鐨勬崘璧狅紝鎮ㄧ殑鏀寔灏辨槸鏈澶х殑鍔ㄥ姏銆
## License AnyRTC-RTMP is available under the GNU license. See the LICENSE file for more info. + -- Gitee