The main RTSP methods (RFC 2326): OPTIONS, DESCRIBE, ANNOUNCE, SETUP, PLAY, PAUSE, TEARDOWN, GET_PARAMETER, SET_PARAMETER, REDIRECT, RECORD.

RTSP status codes:

    Status-Code = "100"  ; Continue
                | "200"  ; OK
                | "201"  ; Created
                | "250"  ; Low on Storage Space
                | "300"  ; Multiple Choices
                | "301"  ; Moved Permanently
                | "302"  ; Moved Temporarily
                | "303"  ; See Other
                | "304"  ; Not Modified
                | "305"  ; Use Proxy
                | "400"  ; Bad Request
                | "401"  ; Unauthorized
                | "402"  ; Payment Required
                | "403"  ; Forbidden
                | "404"  ; Not Found
                | "405"  ; Method Not Allowed
                | "406"  ; Not Acceptable
                | "407"  ; Proxy Authentication Required
                | "408"  ; Request Time-out
                | "410"  ; Gone
                | "411"  ; Length Required
                | "412"  ; Precondition Failed
                | "413"  ; Request Entity Too Large
                | "414"  ; Request-URI Too Large
                | "415"  ; Unsupported Media Type
                | "451"  ; Parameter Not Understood
                | "452"  ; Conference Not Found
                | "453"  ; Not Enough Bandwidth
                | "454"  ; Session Not Found
                | "455"  ; Method Not Valid in This State
                | "456"  ; Header Field Not Valid for Resource
                | "457"  ; Invalid Range
                | "458"  ; Parameter Is Read-Only
                | "459"  ; Aggregate operation not allowed
                | "460"  ; Only aggregate operation allowed
                | "461"  ; Unsupported transport
                | "462"  ; Destination unreachable
                | "500"  ; Internal Server Error
                | "501"  ; Not Implemented
                | "502"  ; Bad Gateway
                | "503"  ; Service Unavailable
                | "504"  ; Gateway Time-out
                | "505"  ; RTSP Version not supported
                | "551"  ; Option not supported

The SDP format:
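As an illustration, an SDP session description (the body of an RTSP DESCRIBE response) for a stream with one AAC audio track and one H.264 video track typically looks like the following; the addresses, codec parameters and track IDs are made up for the example rather than taken from a real server:

    v=0
    o=- 1234567890 1 IN IP4 192.0.2.10
    s=Example Stream
    t=0 0
    a=control:*
    m=audio 0 RTP/AVP 96
    a=rtpmap:96 MPEG4-GENERIC/44100/2
    a=fmtp:96 streamtype=5; profile-level-id=15; mode=AAC-hbr; config=1210; sizeLength=13; indexLength=3; indexDeltaLength=3
    a=control:trackID=1
    m=video 0 RTP/AVP 97
    a=rtpmap:97 H264/90000
    a=fmtp:97 packetization-mode=1; profile-level-id=42C01E
    a=control:trackID=2

In the RTSP code below, MyHandler parses this description (via ASessionDescription) to discover each track's control URL, and the per-track fmtp parameters are what the RTP assemblers, such as the AMPEG4AudioAssembler shown next, use to reassemble access units.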
ARTPAssembler::AssemblyStatus AMPEG4AudioAssembler::addPacket(
        const sp<ARTPSource> &source) {
    // ... (collects RTP packets belonging to the current access unit and
    // calls submitAccessUnit() once the unit is complete)
}
void AMPEG4AudioAssembler::submitAccessUnit() {
    CHECK(!mPackets.empty());

#if VERBOSE
    LOG(VERBOSE) << "Access unit complete (" << mPackets.size() << " packets)";
#endif

    sp<ABuffer> accessUnit = MakeCompoundFromPackets(mPackets);
    // ...
}
case MyHandler::kWhatAccessUnit:
{
    size_t trackIndex;
    CHECK(msg->findSize("trackIndex", &trackIndex));

    if (mTSParser == NULL) {
        CHECK_LT(trackIndex, mTracks.size());
    } else {
        CHECK_EQ(trackIndex, 0u);
    }

    sp<ABuffer> accessUnit;
    CHECK(msg->findBuffer("accessUnit", &accessUnit));
    // ...
}
void NuPlayer::onStart(int64_t startPositionUs) {
    if (!mSourceStarted) {
        mSourceStarted = true;
        mSource->start();
    }

    mOffloadAudio = false;
    mAudioEOS = false;
    mVideoEOS = false;
    mStarted = true;

    uint32_t flags = 0;
    // ...
}
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
    // ... (builds a Vector of codecs matching the requested MIME type and
    // allocates the first OMX component that can be instantiated)
}
Configuring the decoder:

status_t MediaCodec::configure(
        const sp<AMessage> &format,
        const sp<Surface> &surface,
        const sp<ICrypto> &crypto,
        uint32_t flags) {
    // ...
}
status_t ACodec::configureCodec(const char *mime, const sp<AMessage> &msg) {
    // ...
    mInputMetadataType = kMetadataBufferTypeInvalid;
    mOutputMetadataType = kMetadataBufferTypeInvalid;

    status_t err = setComponentRole(encoder /* isEncoder */, mime);
    if (err != OK) {
        return err;
    }

    int32_t bitRate = 0;
    // FLAC encoder doesn't need a bitrate, other encoders do
    if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
            && !msg->findInt32("bitrate", &bitRate)) {
        return INVALID_OPERATION;
    }

    int32_t storeMeta;
    if (encoder
            && msg->findInt32("store-metadata-in-buffers", &storeMeta)
            && storeMeta != 0) {
        err = mOMX->storeMetaDataInBuffers(
                mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType);
        if (err != OK) {
            ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d",
                    mComponentName.c_str(), err);
            return err;
        }
        // For this specific case we could be using camera source even if storeMetaDataInBuffers
        // returns Gralloc source. Pretend that we are; this will force us to use nBufferSize.
        if (mInputMetadataType == kMetadataBufferTypeGrallocSource) {
            mInputMetadataType = kMetadataBufferTypeCameraSource;
        }

        uint32_t usageBits;
        if (mOMX->getParameter(
                mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
                &usageBits, sizeof(usageBits)) == OK) {
            inputFormat->setInt32(
                    "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
        }
    }

    int32_t prependSPSPPS = 0;
    if (encoder
            && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS)
            && prependSPSPPS != 0) {
        OMX_INDEXTYPE index;
        err = mOMX->getExtensionIndex(
                mNode, "OMX.google.android.index.prependSPSPPSToIDRFrames", &index);

        if (err == OK) {
            PrependSPSPPSToIDRFramesParams params;
            InitOMXParams(&params);
            params.bEnable = OMX_TRUE;

            err = mOMX->setParameter(mNode, index, &params, sizeof(params));
        }

        if (err != OK) {
            ALOGE("Encoder could not be configured to emit SPS/PPS before "
                  "IDR frames. (err %d)", err);
            return err;
        }
    }

    // Only enable metadata mode on encoder output if encoder can prepend
    // sps/pps to idr frames, since in metadata mode the bitstream is in an
    // opaque handle, to which we don't have access.
    int32_t video = !strncasecmp(mime, "video/", 6);
    mIsVideo = video;
    if (encoder && video) {
        OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS
                && msg->findInt32("store-metadata-in-buffers-output", &storeMeta)
                && storeMeta != 0);
        err = mOMX->storeMetaDataInBuffers(
                mNode, kPortIndexOutput, enable, &mOutputMetadataType);
        if (err != OK) {
            ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d",
                    mComponentName.c_str(), err);
        }

        if (!msg->findInt64("repeat-previous-frame-after", &mRepeatFrameDelayUs)) {
            mRepeatFrameDelayUs = -1ll;
        }

        if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) {
            mMaxPtsGapUs = -1ll;
        }

        if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) {
            mMaxFps = -1;
        }

        if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) {
            mTimePerCaptureUs = -1ll;
        }

        if (!msg->findInt32(
                "create-input-buffers-suspended",
                (int32_t*)&mCreateInputBuffersSuspended)) {
            mCreateInputBuffersSuspended = false;
        }
    }

    // NOTE: we only use native window for video decoders
    sp<RefBase> obj;
    // ...
        int32_t audioHwSync = 0;
        if (!msg->findInt32("audio-hw-sync", &audioHwSync)) {
            ALOGW("No Audio HW Sync provided for video tunnel");
        }
        err = configureTunneledVideoPlayback(audioHwSync, nativeWindow);
        if (err != OK) {
            ALOGE("configureTunneledVideoPlayback(%d,%p) failed!",
                    audioHwSync, nativeWindow.get());
            return err;
        }

        int32_t maxWidth = 0, maxHeight = 0;
        if (msg->findInt32("max-width", &maxWidth)
                && msg->findInt32("max-height", &maxHeight)) {

            err = mOMX->prepareForAdaptivePlayback(
                    mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
            if (err != OK) {
                ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d",
                        mComponentName.c_str(), err);
                // allow failure
                err = OK;
            } else {
                inputFormat->setInt32("max-width", maxWidth);
                inputFormat->setInt32("max-height", maxHeight);
                inputFormat->setInt32("adaptive-playback", true);
            }
        }
    } else {
        ALOGV("Configuring CPU controlled video playback.");
        mTunneled = false;

        // Explicity reset the sideband handle of the window for
        // non-tunneled video in case the window was previously used
        // for a tunneled video playback.
        err = native_window_set_sideband_stream(nativeWindow.get(), NULL);
        if (err != OK) {
            ALOGE("set_sideband_stream(NULL) failed! (err %d).", err);
            return err;
        }

        // Always try to enable dynamic output buffers on native surface
        err = mOMX->storeMetaDataInBuffers(
                mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType);
        if (err != OK) {
            ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d",
                    mComponentName.c_str(), err);

            // if adaptive playback has been requested, try JB fallback
            // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS
            // LARGE MEMORY REQUIREMENT

            // we will not do adaptive playback on software accessed
            // surfaces as they never had to respond to changes in the
            // crop window, and we don't trust that they will be able to.
            int usageBits = 0;
            bool canDoAdaptivePlayback;

            if (nativeWindow->query(
                    nativeWindow.get(),
                    NATIVE_WINDOW_CONSUMER_USAGE_BITS,
                    &usageBits) != OK) {
                canDoAdaptivePlayback = false;
            } else {
                canDoAdaptivePlayback =
                        (usageBits &
                                (GRALLOC_USAGE_SW_READ_MASK |
                                 GRALLOC_USAGE_SW_WRITE_MASK)) == 0;
            }

            int32_t maxWidth = 0, maxHeight = 0;
            if (canDoAdaptivePlayback
                    && msg->findInt32("max-width", &maxWidth)
                    && msg->findInt32("max-height", &maxHeight)) {
                ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)",
                        mComponentName.c_str(), maxWidth, maxHeight);

                err = mOMX->prepareForAdaptivePlayback(
                        mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
                ALOGW_IF(err != OK,
                        "[%s] prepareForAdaptivePlayback failed w/ err %d",
                        mComponentName.c_str(), err);

                if (err == OK) {
                    inputFormat->setInt32("max-width", maxWidth);
                    inputFormat->setInt32("max-height", maxHeight);
                    inputFormat->setInt32("adaptive-playback", true);
                }
            }
            // allow failure
            err = OK;
        } else {
            ALOGV("[%s] storeMetaDataInBuffers succeeded",
                    mComponentName.c_str());
            CHECK(storingMetadataInDecodedBuffers());
            mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled(
                    "legacy-adaptive", !msg->contains("no-experiments"));

            inputFormat->setInt32("adaptive-playback", true);
        }

        int32_t push;
        if (msg->findInt32("push-blank-buffers-on-shutdown", &push)
                && push != 0) {
            mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
        }
    }

    int32_t rotationDegrees;
    if (msg->findInt32("rotation-degrees", &rotationDegrees)) {
        mRotationDegrees = rotationDegrees;
    } else {
        mRotationDegrees = 0;
    }
}

if (video) {
    // determine need for software renderer
    bool usingSwRenderer = false;
    if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) {
        usingSwRenderer = true;
        haveNativeWindow = false;
    }

    if (encoder) {
        err = setupVideoEncoder(mime, msg);
    } else {
        err = setupVideoDecoder(mime, msg, haveNativeWindow);
    }

    if (err != OK) {
        return err;
    }

    if (haveNativeWindow) {
        mNativeWindow = static_cast<Surface *>(obj.get());
        // ...
            if (!outputFormat->findInt32("color-format", &colorFormat)) {
                ALOGE("ouptut port did not have a color format (wrong domain?)");
                return BAD_VALUE;
            }
            ALOGD("[%s] Requested output format %#x and got %#x.",
                    mComponentName.c_str(), requestedColorFormat, colorFormat);
            if (!isFlexibleColorFormat(
                    mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent)
                    || flexibleEquivalent != (OMX_U32)requestedColorFormat) {
                // device did not handle flex-YUV request for native window, fall back
                // to SW renderer
                ALOGI("[%s] Falling back to software renderer", mComponentName.c_str());
                mNativeWindow.clear();
                mNativeWindowUsageBits = 0;
                haveNativeWindow = false;
                usingSwRenderer = true;
                if (storingMetadataInDecodedBuffers()) {
                    err = mOMX->storeMetaDataInBuffers(
                            mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType);
                    mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case
                    // TODO: implement adaptive-playback support for bytebuffer mode.
                    // This is done by SW codecs, but most HW codecs don't support it.
                    inputFormat->setInt32("adaptive-playback", false);
                }
                if (err == OK) {
                    err = mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_FALSE);
                }
                if (mFlags & kFlagIsGrallocUsageProtected) {
                    // fallback is not supported for protected playback
                    err = PERMISSION_DENIED;
                } else if (err == OK) {
                    err = setupVideoDecoder(mime, msg, false);
                }
            }
        }
    }

    if (usingSwRenderer) {
        outputFormat->setInt32("using-sw-renderer", 1);
    }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) {
    int32_t numChannels, sampleRate;
    if (!msg->findInt32("channel-count", &numChannels)
            || !msg->findInt32("sample-rate", &sampleRate)) {
        // Since we did not always check for these, leave them optional
        // and have the decoder figure it all out.
        err = OK;
    } else {
        err = setupRawAudioFormat(
                encoder ? kPortIndexInput : kPortIndexOutput,
                sampleRate,
                numChannels);
    }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
    int32_t numChannels, sampleRate;
    if (!msg->findInt32("channel-count", &numChannels)
            || !msg->findInt32("sample-rate", &sampleRate)) {
        err = INVALID_OPERATION;
    } else {
        int32_t isADTS, aacProfile;
        int32_t sbrMode;
        int32_t maxOutputChannelCount;
        int32_t pcmLimiterEnable;
        drcParams_t drc;
        if (!msg->findInt32("is-adts", &isADTS)) {
            isADTS = 0;
        }
        if (!msg->findInt32("aac-profile", &aacProfile)) {
            aacProfile = OMX_AUDIO_AACObjectNull;
        }
        if (!msg->findInt32("aac-sbr-mode", &sbrMode)) {
            sbrMode = -1;
        }
        if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) {
            maxOutputChannelCount = -1;
        }
        if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) {
            // value is unknown
            pcmLimiterEnable = -1;
        }
        if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) {
            // value is unknown
            drc.encodedTargetLevel = -1;
        }
        if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) {
            // value is unknown
            drc.drcCut = -1;
        }
        if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) {
            // value is unknown
            drc.drcBoost = -1;
        }
        if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) {
            // value is unknown
            drc.heavyCompression = -1;
        }
        if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) {
            // value is unknown
            drc.targetRefLevel = -1;
        }

        err = setupAACCodec(
                encoder, numChannels, sampleRate, bitRate, aacProfile,
                isADTS != 0, sbrMode, maxOutputChannelCount, drc,
                pcmLimiterEnable);
    }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
    err = setupAMRCodec(encoder, false /* isWAMR */, bitRate);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
    err = setupAMRCodec(encoder, true /* isWAMR */, bitRate);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW)
        || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) {
    // These are PCM-like formats with a fixed sample rate but
    // a variable number of channels.

    int32_t numChannels;
    if (!msg->findInt32("channel-count", &numChannels)) {
        err = INVALID_OPERATION;
    } else {
        int32_t sampleRate;
        if (!msg->findInt32("sample-rate", &sampleRate)) {
            sampleRate = 8000;
        }
        err = setupG711Codec(encoder, sampleRate, numChannels);
    }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) {
    int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1;
    if (encoder
            && (!msg->findInt32("channel-count", &numChannels)
                    || !msg->findInt32("sample-rate", &sampleRate))) {
        ALOGE("missing channel count or sample rate for FLAC encoder");
        err = INVALID_OPERATION;
    } else {
        if (encoder) {
            if (!msg->findInt32("complexity", &compressionLevel)
                    && !msg->findInt32("flac-compression-level", &compressionLevel)) {
                compressionLevel = 5; // default FLAC compression level
            } else if (compressionLevel < 0) {
                ALOGW("compression level %d outside [0..8] range, using 0",
                        compressionLevel);
                compressionLevel = 0;
            } else if (compressionLevel > 8) {
                ALOGW("compression level %d outside [0..8] range, using 8",
                        compressionLevel);
                compressionLevel = 8;
            }
        }
        err = setupFlacCodec(
                encoder, numChannels, sampleRate, compressionLevel);
    }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
    int32_t numChannels, sampleRate;
    if (encoder
            || !msg->findInt32("channel-count", &numChannels)
            || !msg->findInt32("sample-rate", &sampleRate)) {
        err = INVALID_OPERATION;
    } else {
        err = setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels);
    }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) {
    int32_t numChannels;
    int32_t sampleRate;
    if (!msg->findInt32("channel-count", &numChannels)
            || !msg->findInt32("sample-rate", &sampleRate)) {
        err = INVALID_OPERATION;
    } else {
        err = setupAC3Codec(encoder, numChannels, sampleRate);
    }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) {
    int32_t numChannels;
    int32_t sampleRate;
    if (!msg->findInt32("channel-count", &numChannels)
            || !msg->findInt32("sample-rate", &sampleRate)) {
        err = INVALID_OPERATION;
    } else {
        err = setupEAC3Codec(encoder, numChannels, sampleRate);
    }
}

if (err != OK) {
    return err;
}

if (!msg->findInt32("encoder-delay", &mEncoderDelay)) {
    mEncoderDelay = 0;
}

if (!msg->findInt32("encoder-padding", &mEncoderPadding)) {
    mEncoderPadding = 0;
}

if (msg->findInt32("channel-mask", &mChannelMask)) {
    mChannelMaskPresent = true;
} else {
    mChannelMaskPresent = false;
}

int32_t maxInputSize;
if (msg->findInt32("max-input-size", &maxInputSize)) {
    err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize);
} else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) {
    err = setMinBufferSize(kPortIndexInput, 8192);  // XXX
}

int32_t priority;
if (msg->findInt32("priority", &priority)) {
    err = setPriority(priority);
}

int32_t rateInt = -1;
float rateFloat = -1;
if (!msg->findFloat("operating-rate", &rateFloat)) {
    msg->findInt32("operating-rate", &rateInt);
    rateFloat = (float)rateInt;  // 16MHz (FLINTMAX) is OK for upper bound.
}
if (rateFloat > 0) {
    err = setOperatingRate(rateFloat, video);
}

mBaseOutputFormat = outputFormat;

err = getPortFormat(kPortIndexInput, inputFormat);
if (err == OK) {
    err = getPortFormat(kPortIndexOutput, outputFormat);
    if (err == OK) {
        mInputFormat = inputFormat;
        mOutputFormat = outputFormat;
    }
}

return err;
}
void ACodec::ExecutingState::resume() {
    submitOutputBuffers();

    // Post all available input buffers
    if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
        ALOGW("[%s] we don't have any input buffers to resume",
                mCodec->mComponentName.c_str());
    }

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            postFillThisBuffer(info);
        }
    }
    // ...
}

void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    sp<AMessage> notify = mCodec->mNotify->dup();
    // ...
}
// MediaCodec::onMessageReceived(), handling CodecBase::kWhatFillThisBuffer:
// ..........
            if (mFlags & kFlagIsAsync) {
                if (!mHaveInputSurface) {
                    if (mState == FLUSHED) {
                        mHavePendingInputBuffers = true;
                    } else {
                        onInputBufferAvailable();
                    }
                }
            } else if (mFlags & kFlagDequeueInputPending) {
                CHECK(handleDequeueInputBuffer(mDequeueInputReplyID));

                ++mDequeueInputTimeoutGeneration;
                mFlags &= ~kFlagDequeueInputPending;
                mDequeueInputReplyID = 0;
            } else {
                postActivityNotificationIfPossible();
            }
            break;
        }

void MediaCodec::onInputBufferAvailable() {
    int32_t index;
    while ((index = dequeuePortBuffer(kPortIndexInput)) >= 0) {
        sp<AMessage> msg = mCallback->dup();
        msg->setInt32("callbackID", CB_INPUT_AVAILABLE);
        msg->setInt32("index", index);
        msg->post();
    }
}
bool NuPlayer::Decoder::handleAnInputBuffer(size_t index) {
    if (isDiscontinuityPending()) {
        return false;
    }

    sp<ABuffer> buffer;
    mCodec->getInputBuffer(index, &buffer);
    // ...
}
void NuPlayer::DecoderBase::onRequestInputBuffers() {
    if (mRequestInputBuffersPending) {
        return;
    }

    // doRequestBuffers() return true if we should request more data
    if (doRequestBuffers()) {
        mRequestInputBuffersPending = true;

        sp<AMessage> msg = new AMessage(kWhatRequestInputBuffers, this);
        msg->post(10 * 1000ll);
    }
}
// NuPlayer::Decoder::fetchInputData(), discontinuity handling:
            } else if (timeChange) {
                rememberCodecSpecificData(newFormat);
                mTimeChangePending = true;
                err = ERROR_END_OF_STREAM;
            } else if (seamlessFormatChange) {
                // reuse existing decoder and don't flush
                rememberCodecSpecificData(newFormat);
                continue;
            } else {
                // This stream is unaffected by the discontinuity
                return -EWOULDBLOCK;
            }
        }

        // reply should only be returned without a buffer set
        // when there is an error (including EOS)
        CHECK(err != OK);

        reply->setInt32("err", err);
        return ERROR_END_OF_STREAM;
    }

    dropAccessUnit = false;
    if (!mIsAudio
            && !mIsSecure
            && mRenderer->getVideoLateByUs() > 100000ll
            && mIsVideoAVC
            && !IsAVCReferenceFrame(accessUnit)) {
        dropAccessUnit = true;
        ++mNumInputFramesDropped;
    }
} while (dropAccessUnit);

// ALOGV("returned a valid buffer of %s data", mIsAudio ? "mIsAudio" : "video");
#if 0
int64_t mediaTimeUs;
CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
ALOGV("[%s] feeding input buffer at media time %.3f",
        mIsAudio ? "audio" : "video",
        mediaTimeUs / 1E6);
#endif

if (mCCDecoder != NULL) {
    mCCDecoder->decode(accessUnit);
}

reply->setBuffer("buffer", accessUnit);

return OK;
}