一、对象创建流程
connection.js 封装并调用了rtcConn.node;
wrtcConnection.js 封装并调用了rtcFrame.node;
oem@svr1804:~/git/owt-server/dist-debug$ ll -h webrtc_agent/rtcFrame/build/Release/rtcFrame.node
-rwxrwxr-x 1 oem oem 7.7M 5月 14 22:57 webrtc_agent/rtcFrame/build/Release/rtcFrame.node*
oem@svr1804:~/git/owt-server/dist-debug$ ll -h webrtc_agent/rtcConn/build/Release/rtcConn.node
-rwxrwxr-x 1 oem oem 25M 5月 14 22:57 webrtc_agent/rtcConn/build/Release/rtcConn.node*
/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/index.js
var addon = require('../rtcConn/build/Release/rtcConn.node');
(只调用了addon.ThreadPool)
/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/connection.js
const addon = require('../rtcConn/build/Release/rtcConn');
_createWrtc() {
var wrtc = new addon.WebRtcConnection(
this.threadPool, this.ioThreadPool, this.id,
global.config.webrtc.stunserver,
global.config.webrtc.stunport,
global.config.webrtc.minport,
global.config.webrtc.maxport,
false, //this.trickleIce,
this._getMediaConfiguration(this.mediaConfiguration),
'', //networkinterface
this.ipAddresses
);
_createMediaStream(id, options = {}, isPublisher = true) {
log.debug(`message: _createMediaStream, connectionId: ${this.id}, ` +
`mediaStreamId: ${id}, isPublisher: ${isPublisher}`);
const mediaStream = new addon.MediaStream(this.threadPool, this.wrtc, id,
options.label, this._getMediaConfiguration(this.mediaConfiguration), isPublisher);
/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/wrtcConnection.js
const {
AudioFrameConstructor,
AudioFramePacketizer,
VideoFrameConstructor,
VideoFramePacketizer,
CallBase,
} = require('../rtcFrame/build/Release/rtcFrame.node');
收到that.publish()调用,创建WebRTC对象
CALL STACK(从下往上调用):
Connection (/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/connection.js:69)
class Connection extends EventEmitter {
constructor (id, threadPool, ioThreadPool, options = {}) {
super();
log.info(`message: Connection, id: ${id}`);
this.wrtc = this._createWrtc();
}
_createWrtc() {
var wrtc = new addon.WebRtcConnection(
module.exports (/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/wrtcConnection.js:635)
wrtc = new Connection(wrtcId, threadPool, ioThreadPool, { ipAddresses });
wrtc.callBase = new CallBase();
// wrtc.addMediaStream(wrtcId, {label: ''}, direction === 'in');
initWebRtcConnection(wrtc);
createWebRTCConnection (/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/index.js:137)
var createWebRTCConnection = function (transportId, controller, owner) {
var connection = new WrtcConnection({
connectionId: transportId,
threadPool: threadPool,
ioThreadPool: ioThreadPool,
network_interfaces: global.config.webrtc.network_interfaces,
owner,
}
that.publish (/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/index.js:234)
that.publish = function (operationId, connectionType, options, callback) {
conn = createWebRTCConnection(options.transportId, options.controller, options.owner);
rtcConn.node C++模块部分代码:
erizo C++ 实现层:
/home/oem/git/owt-server/third_party/licode/erizo/src/erizo/WebRtcConnection.cpp
WebRtcConnection::WebRtcConnection(std::shared_ptr<Worker> worker, std::shared_ptr<IOWorker> io_worker,
const std::string& connection_id, const IceConfig& ice_config, const std::vector<RtpMap> rtp_mappings,
const std::vector<ExtMap> ext_mappings, WebRtcConnectionEventListener* listener) :
connection_id_{connection_id},
NAN封装层:
/home/oem/git/owt-server/source/agent/webrtc/rtcConn/WebRtcConnection.cc
NAN_METHOD(WebRtcConnection::New) {
WebRtcConnection* obj = new WebRtcConnection();
obj->me = std::make_shared<erizo::WebRtcConnection>(worker, io_worker, wrtcId, iceConfig,
rtp_mappings, ext_mappings, obj);
uv_async_init(uv_default_loop(), &obj->async_, &WebRtcConnection::eventsCallback);
/home/oem/git/owt-server/third_party/licode/erizo/src/erizo/WebRtcConnection.cpp
bool WebRtcConnection::init() {
maybeNotifyWebRtcConnectionEvent(global_state_, "");
return true;
}
/home/oem/git/owt-server/third_party/licode/erizo/src/erizo/WebRtcConnection.cpp
NAN_METHOD(WebRtcConnection::init) {
bool r = me->init();
}
二、processOffer 并且 CreateMediaStream
Nodejs: onSignalling --> processOffer(从下往上执行)
processOfferMedia (/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/wrtcConnection.js:460)
const processOfferMedia = function (mid) {
const setupTransport = function (mid) {
processOffer (/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/wrtcConnection.js:493)
const processOffer = function (sdp) {
for (const mid of mids) {
processOfferMedia(mid);
}
for (const mid of mids) {
if (remoteSdp.getMediaPort(mid) !== 0) {
opId = setupTransport(mid);
}
}
processSignalling (/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/wrtcConnection.js:612)
var processSignalling = function () {
if (msg.type === 'offer') {
processOffer(msg.sdp);
}
that.onSignalling (/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/wrtcConnection.js:621)
if (wrtc) {
processSignalling();
}
that.onTransportSignaling (/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/index.js:314)
that.onTransportSignaling = function (connectionId, msg, callback) {
var conn = getWebRTCConnection(connectionId);
if (conn) {
conn.onSignalling(msg, connectionId);
NodeJS: CreateMediaStream(从下往上执行):
_createMediaStream (/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/connection.js:107)
_createMediaStream(id, options = {}, isPublisher = true) {
const mediaStream = new addon.MediaStream(this.threadPool, this.wrtc, id,
addMediaStream (/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/connection.js:217)
addMediaStream(id, options, isPublisher) {
if (this.mediaStreams.get(id) === undefined) {
const mediaStream = this._createMediaStream(id, options, isPublisher);
this.wrtc.addMediaStream(mediaStream);
this.mediaStreams.set(id, mediaStream);
}
}
WrtcStream (/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/wrtcConnection.js:57)
class WrtcStream extends EventEmitter {
/*
* audio: { format, ssrc, mid, midExtId }
* video: { format, ssrcs, mid, midExtId, transportcc, red, ulpfec }
*/
constructor(id, wrtc, direction, {audio, video, owner}) {
if (direction === 'in') {
wrtc.addMediaStream(id, {label: id}, true);
} else {
wrtc.addMediaStream(id, {label: id}, false);
setupTransport (/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/wrtcConnection.js:396)
const setupTransport = function (mid) {
// No simulcast
if (!trackMap.has(mid)) {
trackMap.set(mid, new WrtcStream(mid, wrtc, direction, trackSettings));
processOffer (/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/wrtcConnection.js:507)
processSignalling (/home/oem/git/owt-server/dist-debug/webrtc_agent/webrtc/wrtcConnection.js:612)
C++ MediaStream:
/home/oem/git/owt-server/third_party/licode/erizo/src/erizo/MediaStream.cpp
MediaStream::MediaStream(std::shared_ptr<Worker> worker,
std::shared_ptr<WebRtcConnection> connection,
rtcConn.node!erizo::MediaStream::setRemoteSdp(erizo::MediaStream * const this, std::shared_ptr<SdpInfo> sdp) (/home/oem/git/owt-server/third_party/licode/erizo/src/erizo/MediaStream.cpp:150)
bool MediaStream::setRemoteSdp(std::shared_ptr<SdpInfo> sdp) {
rtcConn.node!erizo::WebRtcConnection::setRemoteSdpsToMediaStreams(std::string)::<lambda(const std::shared_ptr<erizo::MediaStream>&)>::operator()(const std::shared_ptr<erizo::MediaStream> & media_stream) const (/home/oem/git/owt-server/third_party/licode/erizo/src/erizo/WebRtcConnection.cpp:290)
void WebRtcConnection::setRemoteSdpsToMediaStreams(std::string stream_id) {
if (auto connection = weak_this.lock()) {
media_stream->setRemoteSdp(connection->remote_sdp_);
ELOG_DEBUG("%s message: setting remote SDP to stream, stream: %s", connection->toLog(), media_stream->getId());
connection->onRemoteSdpsSetToMediaStreams(stream_id);
三、receive audio 流程
rtcFrame.node!owt_base::AudioFramePacketizer::onFrame(owt_base::AudioFramePacketizer * const this, const owt_base::Frame & frame) (/home/oem/git/owt-server/source/core/owt_base/AudioFramePacketizer.cpp:84)
void AudioFramePacketizer::onFrame(const Frame& frame)
{
rtcFrame.node!owt_base::FrameSource::deliverFrame(owt_base::FrameSource * const this, const owt_base::Frame & frame) (/home/oem/git/owt-server/source/core/owt_base/MediaFramePipeline.cpp:85)
void FrameSource::deliverFrame(const Frame& frame)
{
(*it)->onFrame(frame);
rtcFrame.node!owt_base::AudioFrameConstructor::deliverAudioData_(owt_base::AudioFrameConstructor * const this, std::shared_ptr<erizo::DataPacket> audio_packet) (/home/oem/git/owt-server/source/core/owt_base/AudioFrameConstructor.cpp:138)
int AudioFrameConstructor::deliverAudioData_(std::shared_ptr<erizo::DataPacket> audio_packet)
{
deliverFrame(frame);
rtcConn.node!erizo::MediaSink::deliverAudioData(erizo::MediaSink * const this, std::shared_ptr<DataPacket> data_packet) (/home/oem/git/owt-server/third_party/licode/erizo/src/erizo/MediaDefinitions.h:124)
int deliverAudioData(std::shared_ptr<DataPacket> data_packet) {
return this->deliverAudioData_(data_packet);
}
rtcConn.node!erizo::MediaStream::read(erizo::MediaStream * const this, std::shared_ptr<DataPacket> packet) (/home/oem/git/owt-server/third_party/licode/erizo/src/erizo/MediaStream.cpp:505)
void MediaStream::read(std::shared_ptr<DataPacket> packet) {
audio_sink_->deliverAudioData(std::move(packet));
rtcConn.node!erizo::PacketReader::read(std::shared_ptr<DataPacket> packet, erizo::InboundHandler::Context * ctx, erizo::PacketReader * const this) (/home/oem/git/owt-server/third_party/licode/erizo/src/erizo/MediaStream.h:232)
media_stream_->read(std::move(packet));
rtcConn.node!erizo::InboundContextImpl::read(erizo::InboundContextImpl * const this, std::shared_ptr<DataPacket> packet) (/home/oem/git/owt-server/third_party/licode/erizo/src/erizo/pipeline/HandlerContext-inl.h:319)
this->handler_->read(this, std::move(packet));
rtcConn.node!erizo::ContextImpl::fireRead(erizo::ContextImpl * const this, std::shared_ptr<DataPacket> packet) (/home/oem/git/owt-server/third_party/licode/erizo/src/erizo/pipeline/HandlerContext-inl.h:177)
void fireRead(std::shared_ptr<DataPacket> packet) override {
this->nextIn_->read(std::move(packet));
rtcConn.node!erizo::WoogeenHandler::read(erizo::WoogeenHandler * const this, erizo::Handler::Context * ctx, std::shared_ptr<DataPacket> packet) (/home/oem/git/owt-server/source/agent/webrtc/rtcConn/conn_handler/WoogeenHandler.cpp:35)
void WoogeenHandler::read(Context *ctx, std::shared_ptr<DataPacket> packet) {
ctx->fireRead(std::move(packet));
rtcConn.node!non-virtual thunk to erizo::ContextImpl::read(std::shared_ptr<DataPacket>)() (/usr/include/c++/7/bits/shared_ptr_base.h:684)
rtcConn.node!erizo::Pipeline::read(erizo::Pipeline * const this, std::shared_ptr<DataPacket> packet) (/home/oem/git/owt-server/third_party/licode/erizo/src/erizo/pipeline/Pipeline.cpp:72)
void Pipeline::read(std::shared_ptr<DataPacket> packet) {
front_->read(std::move(packet));
}
rtcConn.node!erizo::MediaStream::onTransportData(std::shared_ptr<DataPacket>, Transport*)::<lambda()>::operator()(void) const (const erizo::MediaStream::<lambda()> * const __closure) (/home/oem/git/owt-server/third_party/licode/erizo/src/erizo/MediaStream.cpp:473)
void MediaStream::onTransportData(std::shared_ptr incoming_packet, Transport *transport) {
stream_ptr->pipeline_->read(std::move(packet));
四、receive video 流程
/home/oem/git/owt-server/third_party/webrtc-m88/src/modules/rtp_rtcp/source/rtp_sender_video.cc
bool RTPSenderVideo::SendVideo(
int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
int64_t capture_time_ms,
rtc::ArrayView<const uint8_t> payload,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms,
absl::optional<int64_t> estimated_capture_clock_offset_ms) {
/home/oem/git/owt-server/source/core/rtc_adapter/VideoSendAdapter.cc
void VideoSendAdapterImpl::onFrame(const Frame& frame)
{
h.video_type_header.emplace<RTPVideoHeaderH265>();
m_senderVideo->SendVideo(
H265_90000_PT,
webrtc::kVideoCodecH265,
timeStamp,
timeStamp,
rtc::ArrayView<const uint8_t>(frame.payload, frame.length),
h,
m_rtpRtcp->ExpectedRetransmissionTimeMs(),
0);
rtcFrame.node!owt_base::VideoFramePacketizer::onFrame(owt_base::VideoFramePacketizer * const this, const owt_base::Frame & frame) (/home/oem/git/owt-server/source/core/owt_base/VideoFramePacketizer.cpp:179)
void VideoFramePacketizer::onFrame(const Frame& frame)
{
m_videoSend->onFrame(frame);
rtcFrame.node!owt_base::FrameSource::deliverFrame(owt_base::FrameSource * const this, const owt_base::Frame & frame) (/home/oem/git/owt-server/source/core/owt_base/MediaFramePipeline.cpp:90)
void FrameSource::deliverFrame(const Frame& frame)
{
else if (isVideoFrame(frame)) {
for (auto it = m_video_dests.begin(); it != m_video_dests.end(); ++it) {
(*it)->onFrame(frame);
}
欢迎分享,转载请注明来源:内存溢出
评论列表(0条)