前言

image.png

编解码器信息收集的步骤

image.png

收集音频编解码器的时机

image.png

将编解码器信息与SDP建立联系

image.png

代码分析

打断点,跟踪以下调用路径:

Conductor::InitializePeerConnection ->webrtc::CreatePeerConnectionFactory

  1. peer_connection_factory_ = webrtc::CreatePeerConnectionFactory(
  2. nullptr /* network_thread */, nullptr /* worker_thread */,
  3. nullptr /* signaling_thread */, nullptr /* default_adm */,
  4. webrtc::CreateBuiltinAudioEncoderFactory(),
  5. webrtc::CreateBuiltinAudioDecoderFactory(),
  6. webrtc::CreateBuiltinVideoEncoderFactory(),
  7. webrtc::CreateBuiltinVideoDecoderFactory(), nullptr /* audio_mixer */,
  8. nullptr /* audio_processing */);

webrtc::CreateBuiltinAudioEncoderFactory(),
->
CreateAudioEncoderFactory 可变参数模板
->
audio_encoder_factory_template_impl::AudioEncoderFactoryT<Ts...>()
->
class AudioEncoderFactoryT : public AudioEncoderFactory {
public:
std::vector<AudioCodecSpec> GetSupportedEncoders() override {
std::vector<AudioCodecSpec> specs;
Helper::AppendSupportedEncoders(&specs);
return specs;
}

ChannelManager::Init

  1. cricket::ChannelManager::Init() 123 C++
  2. webrtc::ConnectionContext::Create(webrtc::PeerConnectionFactoryDependencies * dependencies) 79 C++
  3. webrtc::PeerConnectionFactory::Create(webrtc::PeerConnectionFactoryDependencies dependencies) 84 C++
  4. webrtc::CreateModularPeerConnectionFactory(webrtc::PeerConnectionFactoryDependencies dependencies) 70 C++
  5. webrtc::CreatePeerConnectionFactory(rtc::Thread * network_thread, rtc::Thread * worker_thread, rtc::Thread * signaling_thread, rtc::scoped_refptr<webrtc::AudioDeviceModule> default_adm, rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory, rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory, std::__1::unique_ptr<webrtc::VideoEncoderFactory,std::default_delete<webrtc::VideoEncoderFactory>> video_encoder_factory, std::__1::unique_ptr<webrtc::VideoDecoderFactory,std::default_delete<webrtc::VideoDecoderFactory>> video_decoder_factory, rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer, rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing, webrtc::AudioFrameProcessor * audio_frame_processor) 71 C++
  6. Conductor::InitializePeerConnection() 133 C++
  7. Conductor::ConnectToPeer(int peer_id) 450 C++
  8. MainWnd::OnDefaultAction() 348 C++
  9. MainWnd::OnMessage(unsigned int msg, unsigned __int64 wp, __int64 lp, __int64 * result) 392 C++
  10. MainWnd::WndProc(HWND__ * hwnd, unsigned int msg, unsigned __int64 wp, __int64 lp) 419 C++
  1. bool ChannelManager::Init() {
  2. RTC_DCHECK(!initialized_);
  3. if (initialized_) {
  4. return false;
  5. }
  6. RTC_DCHECK(network_thread_);
  7. RTC_DCHECK(worker_thread_);
  8. if (!network_thread_->IsCurrent()) {
  9. // Do not allow invoking calls to other threads on the network thread.
  10. network_thread_->Invoke<void>(
  11. RTC_FROM_HERE, [&] { network_thread_->DisallowBlockingCalls(); });
  12. }
  13. if (media_engine_) {
  14. initialized_ = worker_thread_->Invoke<bool>(
  15. RTC_FROM_HERE, [&] { return media_engine_->Init(); });
  16. RTC_DCHECK(initialized_);
  17. } else {
  18. initialized_ = true;
  19. }
  20. return initialized_;
  21. }

WebRtcVoiceEngine::Init

image.png

  1. cricket::WebRtcVoiceEngine::Init() 287 C++
  2. cricket::CompositeMediaEngine::Init() 182 C++
  3. cricket::ChannelManager::Init() 123 C++
  4. webrtc::ConnectionContext::Create(webrtc::PeerConnectionFactoryDependencies * dependencies) 79 C++
  5. webrtc::PeerConnectionFactory::Create(webrtc::PeerConnectionFactoryDependencies dependencies) 84 C++
  6. webrtc::CreateModularPeerConnectionFactory(webrtc::PeerConnectionFactoryDependencies dependencies) 70 C++
  7. webrtc::CreatePeerConnectionFactory(rtc::Thread * network_thread, rtc::Thread * worker_thread, rtc::Thread * signaling_thread, rtc::scoped_refptr<webrtc::AudioDeviceModule> default_adm, rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory, rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory, std::__1::unique_ptr<webrtc::VideoEncoderFactory,std::default_delete<webrtc::VideoEncoderFactory>> video_encoder_factory, std::__1::unique_ptr<webrtc::VideoDecoderFactory,std::default_delete<webrtc::VideoDecoderFactory>> video_decoder_factory, rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer, rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing, webrtc::AudioFrameProcessor * audio_frame_processor) 71 C++
  8. Conductor::InitializePeerConnection() 133 C++
  9. Conductor::ConnectToPeer(int peer_id) 450 C++
  10. MainWnd::OnDefaultAction() 348 C++
  11. MainWnd::OnMessage(unsigned int msg, unsigned __int64 wp, __int64 lp, __int64 * result) 392 C++
  12. MainWnd::WndProc(HWND__ * hwnd, unsigned int msg, unsigned __int64 wp, __int64 lp) 419 C++
  1. void WebRtcVoiceEngine::Init() {
  2. RTC_DCHECK_RUN_ON(&worker_thread_checker_);
  3. RTC_LOG(LS_INFO) << "WebRtcVoiceEngine::Init";
  4. // TaskQueue expects to be created/destroyed on the same thread.
  5. low_priority_worker_queue_.reset(
  6. new rtc::TaskQueue(task_queue_factory_->CreateTaskQueue(
  7. "rtc-low-prio", webrtc::TaskQueueFactory::Priority::LOW)));
  8. // Load our audio codec lists.
  9. RTC_LOG(LS_VERBOSE) << "Supported send codecs in order of preference:";
  10. send_codecs_ = CollectCodecs(encoder_factory_->GetSupportedEncoders());
  11. for (const AudioCodec& codec : send_codecs_) {
  12. RTC_LOG(LS_VERBOSE) << ToString(codec);
  13. }
  14. RTC_LOG(LS_VERBOSE) << "Supported recv codecs in order of preference:";
  15. recv_codecs_ = CollectCodecs(decoder_factory_->GetSupportedDecoders());
  16. for (const AudioCodec& codec : recv_codecs_) {
  17. RTC_LOG(LS_VERBOSE) << ToString(codec);
  18. // ...(省略后续初始化代码)
  19. }

CollectCodecs(encoder_factory_->GetSupportedEncoders());
image.png

AudioEncoderOpusImpl::AppendSupportedEncoders

H:\webrtc-20210315\webrtc-20210315\webrtc\webrtc-checkout\src\modules\audio_coding\codecs\opus\audio_encoder_opus.cc
image.png

WebRtcVoiceEngine::CollectCodecs

收集编码器信息。
image.png

  1. std::vector<AudioCodec> WebRtcVoiceEngine::CollectCodecs(
  2. const std::vector<webrtc::AudioCodecSpec>& specs) const {
  3. PayloadTypeMapper mapper;
  4. std::vector<AudioCodec> out;
  5. // Only generate CN payload types for these clockrates:
  6. std::map<int, bool, std::greater<int>> generate_cn = {
  7. {8000, false}, {16000, false}, {32000, false}};
  8. // Only generate telephone-event payload types for these clockrates:
  9. std::map<int, bool, std::greater<int>> generate_dtmf = {
  10. {8000, false}, {16000, false}, {32000, false}, {48000, false}};
  11. auto map_format = [&mapper](const webrtc::SdpAudioFormat& format,
  12. std::vector<AudioCodec>* out) {
  13. absl::optional<AudioCodec> opt_codec = mapper.ToAudioCodec(format);
  14. if (opt_codec) {
  15. if (out) {
  16. out->push_back(*opt_codec);
  17. }
  18. } else {
  19. RTC_LOG(LS_ERROR) << "Unable to assign payload type to format: "
  20. << rtc::ToString(format);
  21. }
  22. return opt_codec;
  23. };
  24. for (const auto& spec : specs) {
  25. // We need to do some extra stuff before adding the main codecs to out.
  26. absl::optional<AudioCodec> opt_codec = map_format(spec.format, nullptr);
  27. if (opt_codec) {
  28. AudioCodec& codec = *opt_codec;
  29. if (spec.info.supports_network_adaption) {
  30. codec.AddFeedbackParam(
  31. FeedbackParam(kRtcpFbParamTransportCc, kParamValueEmpty));
  32. }
  33. if (spec.info.allow_comfort_noise) {
  34. // Generate a CN entry if the decoder allows it and we support the
  35. // clockrate.
  36. auto cn = generate_cn.find(spec.format.clockrate_hz);
  37. if (cn != generate_cn.end()) {
  38. cn->second = true;
  39. }
  40. }
  41. // Generate a telephone-event entry if we support the clockrate.
  42. auto dtmf = generate_dtmf.find(spec.format.clockrate_hz);
  43. if (dtmf != generate_dtmf.end()) {
  44. dtmf->second = true;
  45. }
  46. out.push_back(codec);
  47. if (codec.name == kOpusCodecName && audio_red_for_opus_trial_enabled_) {
  48. map_format({kRedCodecName, 48000, 2}, &out);
  49. }
  50. }
  51. }
  52. // Add CN codecs after "proper" audio codecs.
  53. for (const auto& cn : generate_cn) {
  54. if (cn.second) {
  55. map_format({kCnCodecName, cn.first, 1}, &out);
  56. }
  57. }
  58. // Add telephone-event codecs last.
  59. for (const auto& dtmf : generate_dtmf) {
  60. if (dtmf.second) {
  61. map_format({kDtmfCodecName, dtmf.first, 1}, &out);
  62. }
  63. }
  64. return out;
  65. }

PayloadTypeMapper::PayloadTypeMapper

保存编解码器的 payload type 映射信息(含 RFC 3551 静态分配)

  1. PayloadTypeMapper::PayloadTypeMapper()
  2. // RFC 3551 reserves payload type numbers in the range 96-127 exclusively
  3. // for dynamic assignment. Once those are used up, it is recommended that
  4. // payload types unassigned by the RFC are used for dynamic payload type
  5. // mapping, before any static payload ids. At this point, we only support
  6. // mapping within the exclusive range.
  7. : next_unused_payload_type_(96),
  8. max_payload_type_(127),
  9. mappings_(
  10. {// Static payload type assignments according to RFC 3551.
  11. {{kPcmuCodecName, 8000, 1}, 0},
  12. {{"GSM", 8000, 1}, 3},
  13. {{"G723", 8000, 1}, 4},
  14. {{"DVI4", 8000, 1}, 5},
  15. {{"DVI4", 16000, 1}, 6},
  16. {{"LPC", 8000, 1}, 7},
  17. {{kPcmaCodecName, 8000, 1}, 8},
  18. {{kG722CodecName, 8000, 1}, 9},
  19. {{kL16CodecName, 44100, 2}, 10},
  20. {{kL16CodecName, 44100, 1}, 11},
  21. {{"QCELP", 8000, 1}, 12},
  22. {{kCnCodecName, 8000, 1}, 13},
  23. // RFC 4566 is a bit ambiguous on the contents of the "encoding
  24. // parameters" field, which, for audio, encodes the number of
  25. // channels. It is "optional and may be omitted if the number of
  26. // channels is one". Does that necessarily imply that an omitted
  27. // encoding parameter means one channel? Since RFC 3551 doesn't
  28. // specify a value for this parameter for MPA, I've included both 0
  29. // and 1 here, to increase the chances it will be correctly used if
  30. // someone implements an MPEG audio encoder/decoder.
  31. {{"MPA", 90000, 0}, 14},
  32. {{"MPA", 90000, 1}, 14},
  33. {{"G728", 8000, 1}, 15},
  34. {{"DVI4", 11025, 1}, 16},
  35. {{"DVI4", 22050, 1}, 17},
  36. {{"G729", 8000, 1}, 18},
  37. // Payload type assignments currently used by WebRTC.
  38. // Includes data to reduce collisions (and thus reassignments)
  39. {{kGoogleRtpDataCodecName, 0, 0}, kGoogleRtpDataCodecPlType},
  40. {{kIlbcCodecName, 8000, 1}, 102},
  41. {{kIsacCodecName, 16000, 1}, 103},
  42. {{kIsacCodecName, 32000, 1}, 104},
  43. {{kCnCodecName, 16000, 1}, 105},
  44. {{kCnCodecName, 32000, 1}, 106},
  45. {{kOpusCodecName,
  46. 48000,
  47. 2,
  48. {{kCodecParamMinPTime, "10"},
  49. {kCodecParamUseInbandFec, kParamValueTrue}}},
  50. 111},
  51. // TODO(solenberg): Remove the hard coded 16k,32k,48k DTMF once we
  52. // assign payload types dynamically for send side as well.
  53. {{kDtmfCodecName, 48000, 1}, 110},
  54. {{kDtmfCodecName, 32000, 1}, 112},
  55. {{kDtmfCodecName, 16000, 1}, 113},
  56. {{kDtmfCodecName, 8000, 1}, 126}}) {
  57. // TODO(ossu): Try to keep this as change-proof as possible until we're able
  58. // to remove the payload type constants from everywhere in the code.
  59. for (const auto& mapping : mappings_) {
  60. used_payload_types_.insert(mapping.second);
  61. }
  62. }

小结

image.png