Commit 7e5cb333 by ziyue

Start integrating with the JS client

parent 978917b4
......@@ -136,7 +136,7 @@ charSet=utf-8
#HTTP connection timeout in seconds
keepAliveSecond=30
#Maximum HTTP request body size in bytes; if a POST body is too large, it is not suitable to cache it in memory
maxReqSize=4096
maxReqSize=40960
#Content of the 404 page; users can customize the 404 page
notFound=<html><head><title>404 Not Found</title></head><body bgcolor="white"><center><h1>The requested resource does not exist!</h1></center><hr><center>ZLMediaKit-4.0</center></body></html>
#HTTP server listening port
......
......@@ -127,6 +127,26 @@ static HttpApi toApi(const function<void(API_ARGS_JSON)> &cb) {
});
}
static HttpApi toApi(const function<void(API_ARGS_STRING_ASYNC)> &cb) {
return [cb](const Parser &parser, const HttpSession::HttpResponseInvoker &invoker, SockInfo &sender) {
GET_CONFIG(string, charSet, Http::kCharSet);
HttpSession::KeyValue headerOut;
headerOut["Content-Type"] = string("application/json; charset=") + charSet;
Json::Value val;
val["code"] = API::Success;
cb(sender, parser.getHeader(), headerOut, parser, val, invoker);
};
}
static HttpApi toApi(const function<void(API_ARGS_STRING)> &cb) {
return toApi([cb](API_ARGS_STRING_ASYNC) {
cb(API_ARGS_VALUE);
invoker(200, headerOut, val.toStyledString());
});
}
void api_regist(const string &api_path, const function<void(API_ARGS_MAP)> &func) {
s_map_api.emplace(api_path, toApi(func));
}
......@@ -143,6 +163,14 @@ void api_regist(const string &api_path, const function<void(API_ARGS_JSON_ASYNC)
s_map_api.emplace(api_path, toApi(func));
}
void api_regist(const string &api_path, const function<void(API_ARGS_STRING)> &func){
s_map_api.emplace(api_path, toApi(func));
}
void api_regist(const string &api_path, const function<void(API_ARGS_STRING_ASYNC)> &func){
s_map_api.emplace(api_path, toApi(func));
}
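For reference, a minimal usage sketch of the new raw-request registration (the path /index/api/echo is hypothetical; the wrapper added above fills val["code"] = API::Success and serializes val with toStyledString() when the handler returns):
api_regist("/index/api/echo", [](API_ARGS_STRING) {
    CHECK_ARGS("app");                                        // checked against the URL query
    val["app"]  = allArgs.getUrlArgs()["app"];                // url parameter
    val["size"] = (Json::UInt64) allArgs.Content().size();    // raw POST body length
});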
//Get the URL parameters and content (body) parameters from an HTTP request
static ApiArgsType getAllArgs(const Parser &parser) {
ApiArgsType allArgs;
......@@ -1054,9 +1082,9 @@ void installWebApi() {
#ifdef ENABLE_WEBRTC
static list<WebRtcTransportImp::Ptr> rtcs;
api_regist("/webrtc",[](API_ARGS_MAP_ASYNC){
api_regist("/index/api/webrtc",[](API_ARGS_STRING){
CHECK_ARGS("app", "stream");
auto src = dynamic_pointer_cast<RtspMediaSource>(MediaSource::find(RTSP_SCHEMA, DEFAULT_VHOST, allArgs["app"], allArgs["stream"]));
auto src = dynamic_pointer_cast<RtspMediaSource>(MediaSource::find(RTSP_SCHEMA, DEFAULT_VHOST, allArgs.getUrlArgs()["app"], allArgs.getUrlArgs()["stream"]));
if (!src) {
throw ApiRetException("流不存在", API::NotFound);
}
......@@ -1064,7 +1092,8 @@ void installWebApi() {
headerOut["Access-Control-Allow-Origin"] = "*";
auto rtc = WebRtcTransportImp::create(EventPollerPool::Instance().getPoller());
rtc->attach(src);
invoker(200, headerOut, rtc->GetLocalSdp());
val["sdp"] = rtc->getAnswerSdp(allArgs.Content());
val["type"] = "answer";
rtcs.emplace_back(rtc);
});
#endif
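For the JS side this commit starts integrating with, a hedged sketch of the JSON the handler above returns on success (field names come from the code; the sdp value is a placeholder):
Json::Value reply;
reply["code"] = API::Success;  // filled by the toApi(API_ARGS_STRING) wrapper
reply["sdp"]  = "v=0\r\n...";  // answer SDP from rtc->getAnswerSdp(offer); placeholder here
reply["type"] = "answer";
// the wrapper serializes this with toStyledString() and replies via invoker(200, headerOut, ...)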
......
......@@ -85,6 +85,8 @@ using ApiArgsType = map<string, variant, StrCaseCompare>;
#define API_ARGS_MAP_ASYNC API_ARGS_MAP, const HttpSession::HttpResponseInvoker &invoker
#define API_ARGS_JSON SockInfo &sender, HttpSession::KeyValue &headerIn, HttpSession::KeyValue &headerOut, Json::Value &allArgs, Json::Value &val
#define API_ARGS_JSON_ASYNC API_ARGS_JSON, const HttpSession::HttpResponseInvoker &invoker
#define API_ARGS_STRING SockInfo &sender, HttpSession::KeyValue &headerIn, HttpSession::KeyValue &headerOut, const Parser &allArgs, Json::Value &val
#define API_ARGS_STRING_ASYNC API_ARGS_STRING, const HttpSession::HttpResponseInvoker &invoker
#define API_ARGS_VALUE sender, headerIn, headerOut, allArgs, val
//Register an HTTP API whose request arguments are of type map<string, variant, StrCaseCompare>
......@@ -97,6 +99,11 @@ void api_regist(const string &api_path, const function<void(API_ARGS_JSON)> &fun
//Register an HTTP API whose request arguments are of type Json::Value and which can reply asynchronously
void api_regist(const string &api_path, const function<void(API_ARGS_JSON_ASYNC)> &func);
//Register an HTTP API whose request arguments are the raw HTTP request
void api_regist(const string &api_path, const function<void(API_ARGS_STRING)> &func);
//Register an HTTP API whose request arguments are the raw HTTP request and which can reply asynchronously
void api_regist(const string &api_path, const function<void(API_ARGS_STRING_ASYNC)> &func);
template<typename Args, typename First>
bool checkArgs(Args &&args, First &&first) {
return !args[first].empty();
......@@ -107,6 +114,16 @@ bool checkArgs(Args &&args, First &&first, KeyTypes &&...keys) {
return !args[first].empty() && checkArgs(std::forward<Args>(args), std::forward<KeyTypes>(keys)...);
}
template<typename First>
bool checkArgs(const Parser &args, First &&first) {
return !args.getUrlArgs()[first].empty();
}
template<typename First, typename ...KeyTypes>
bool checkArgs(const Parser &args, First &&first, KeyTypes &&...keys) {
return !args.getUrlArgs()[first].empty() && checkArgs(args, std::forward<KeyTypes>(keys)...);
}
//Macro that checks whether HTTP parameters are empty
#define CHECK_ARGS(...) \
if(!checkArgs(allArgs,##__VA_ARGS__)){ \
......
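A minimal sketch (hypothetical request) of what the new Parser overloads of checkArgs look at: only the URL query string, which is why the SDP offer is read from the body separately:
// for "POST /index/api/webrtc?app=live&stream=test" carrying an SDP offer as the body:
void demo(const Parser &allArgs) {
    bool ok = checkArgs(allArgs, "app", "stream");   // true: both keys are in the query
    const std::string &offer = allArgs.Content();    // raw POST body, not touched by CHECK_ARGS
    (void) ok; (void) offer;
}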
......@@ -25,6 +25,7 @@
#include "Rtp/RtpServer.h"
#include "WebApi.h"
#include "WebHook.h"
#include "../webrtc/Sdp.h"
#if defined(ENABLE_VERSION)
#include "Version.h"
......@@ -209,6 +210,153 @@ static void inline listen_shell_input(){
//Global variable, used in WebApi to store the config file path
string g_ini_file;
void test_sdp(){
char str1[] = "v=0\n"
"o=- 380154348540553537 2 IN IP4 127.0.0.1\n"
"s=-\n"
"b=CT:1900\n"
"t=0 0\n"
"a=group:BUNDLE video\n"
"a=msid-semantic: WMS\n"
"m=video 9 RTP/SAVPF 96\n"
"c=IN IP4 0.0.0.0\n"
"a=rtcp:9 IN IP4 0.0.0.0\n"
"a=ice-ufrag:1ZFN\n"
"a=ice-pwd:70P3H0jPlGz1fiJl5XZfXMZH\n"
"a=ice-options:trickle\n"
"a=fingerprint:sha-256 3E:10:35:6B:9A:9E:B0:55:AC:2A:88:F5:74:C1:70:32:B5:8D:88:1D:37:B0:9C:69:A6:DD:07:10:73:27:1A:16\n"
"a=setup:active\n"
"a=mid:video\n"
"a=recvonly\n"
"a=rtcp-mux\n"
"a=rtpmap:96 H264/90000\n"
"a=fmtp:96 level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=42e01f";
char str2[] = "v=0\n"
"o=- 2584450093346841581 2 IN IP4 127.0.0.1\n"
"s=-\n"
"t=0 0\n"
"a=group:BUNDLE audio video data\n"
"a=msid-semantic: WMS 616cfbb1-33a3-4d8c-8275-a199d6005549\n"
"m=audio 9 UDP/TLS/RTP/SAVPF 111 103 104 9 0 8 106 105 13 110 112 113 126\n"
"c=IN IP4 0.0.0.0\n"
"a=rtcp:9 IN IP4 0.0.0.0\n"
"a=ice-ufrag:sXJ3\n"
"a=ice-pwd:yEclOTrLg1gEubBFefOqtmyV\n"
"a=fingerprint:sha-256 22:14:B5:AF:66:12:C7:C7:8D:EF:4B:DE:40:25:ED:5D:8F:17:54:DD:88:33:C0:13:2E:FD:1A:FA:7E:7A:1B:79\n"
"a=setup:actpass\n"
"a=mid:audio\n"
"a=extmap:1/sendonly urn:ietf:params:rtp-hdrext:ssrc-audio-level\n"
"a=sendrecv\n"
"a=rtcp-mux\n"
"a=rtpmap:111 opus/48000/2\n"
"a=rtcp-fb:111 transport-cc\n"
"a=fmtp:111 minptime=10;useinbandfec=1\n"
"a=rtpmap:103 ISAC/16000\n"
"a=rtpmap:104 ISAC/32000\n"
"a=rtpmap:9 G722/8000\n"
"a=rtpmap:0 PCMU/8000\n"
"a=rtpmap:8 PCMA/8000\n"
"a=rtpmap:106 CN/32000\n"
"a=rtpmap:105 CN/16000\n"
"a=rtpmap:13 CN/8000\n"
"a=rtpmap:110 telephone-event/48000\n"
"a=rtpmap:112 telephone-event/32000\n"
"a=rtpmap:113 telephone-event/16000\n"
"a=rtpmap:126 telephone-event/8000\n"
"a=ssrc:120276603 cname:iSkJ2vn5cYYubTve\n"
"a=ssrc:120276603 msid:616cfbb1-33a3-4d8c-8275-a199d6005549 1da3d329-7399-4fe9-b20f-69606bebd363\n"
"a=ssrc:120276603 mslabel:616cfbb1-33a3-4d8c-8275-a199d6005549\n"
"a=ssrc:120276603 label:1da3d329-7399-4fe9-b20f-69606bebd363\n"
"m=video 9 UDP/TLS/RTP/SAVPF 96 98 100 102 127 97 99 101 125\n"
"c=IN IP4 0.0.0.0\n"
"a=rtcp:9 IN IP4 0.0.0.0\n"
"a=ice-ufrag:sXJ3\n"
"a=ice-pwd:yEclOTrLg1gEubBFefOqtmyV\n"
"a=fingerprint:sha-256 22:14:B5:AF:66:12:C7:C7:8D:EF:4B:DE:40:25:ED:5D:8F:17:54:DD:88:33:C0:13:2E:FD:1A:FA:7E:7A:1B:79\n"
"a=setup:actpass\n"
"a=mid:video\n"
"a=extmap:2 urn:ietf:params:rtp-hdrext:toffset\n"
"a=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time\n"
"a=extmap:4 urn:3gpp:video-orientation\n"
"a=extmap:5 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01\n"
"a=extmap:6 http://www.webrtc.org/experiments/rtp-hdrext/playout-delay\n"
"a=sendrecv\n"
"a=rtcp-mux\n"
"a=rtcp-rsize\n"
"a=rtpmap:96 VP8/90000\n"
"a=rtcp-fb:96 ccm fir\n"
"a=rtcp-fb:96 nack\n"
"a=rtcp-fb:96 nack pli\n"
"a=rtcp-fb:96 goog-remb\n"
"a=rtcp-fb:96 transport-cc\n"
"a=rtpmap:98 VP9/90000\n"
"a=rtcp-fb:98 ccm fir\n"
"a=rtcp-fb:98 nack\n"
"a=rtcp-fb:98 nack pli\n"
"a=rtcp-fb:98 goog-remb\n"
"a=rtcp-fb:98 transport-cc\n"
"a=rtpmap:100 H264/90000\n"
"a=rtcp-fb:100 ccm fir\n"
"a=rtcp-fb:100 nack\n"
"a=rtcp-fb:100 nack pli\n"
"a=rtcp-fb:100 goog-remb\n"
"a=rtcp-fb:100 transport-cc\n"
"a=fmtp:100 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f\n"
"a=rtpmap:102 red/90000\n"
"a=rtpmap:127 ulpfec/90000\n"
"a=rtpmap:97 rtx/90000\n"
"a=fmtp:97 apt=96\n"
"a=rtpmap:99 rtx/90000\n"
"a=fmtp:99 apt=98\n"
"a=rtpmap:101 rtx/90000\n"
"a=fmtp:101 apt=100\n"
"a=rtpmap:125 rtx/90000\n"
"a=fmtp:125 apt=102\n"
"a=ssrc-group:FID 2580761338 611523443\n"
"a=ssrc:2580761338 cname:iSkJ2vn5cYYubTve\n"
"a=ssrc:2580761338 msid:616cfbb1-33a3-4d8c-8275-a199d6005549 bf270496-a23e-47b5-b901-ef23096cd961\n"
"a=ssrc:2580761338 mslabel:616cfbb1-33a3-4d8c-8275-a199d6005549\n"
"a=ssrc:2580761338 label:bf270496-a23e-47b5-b901-ef23096cd961\n"
"a=ssrc:611523443 cname:iSkJ2vn5cYYubTve\n"
"a=ssrc:611523443 msid:616cfbb1-33a3-4d8c-8275-a199d6005549 bf270496-a23e-47b5-b901-ef23096cd961\n"
"a=ssrc:611523443 mslabel:616cfbb1-33a3-4d8c-8275-a199d6005549\n"
"a=ssrc:611523443 label:bf270496-a23e-47b5-b901-ef23096cd961\n"
"a=candidate:3575467457 1 udp 2113937151 10.15.83.23 57857 typ host generation 0 ufrag 6R0z network-cost 999\n"
"m=application 9 DTLS/SCTP 5000\n"
"c=IN IP4 0.0.0.0\n"
"a=ice-ufrag:sXJ3\n"
"a=ice-pwd:yEclOTrLg1gEubBFefOqtmyV\n"
"a=fingerprint:sha-256 22:14:B5:AF:66:12:C7:C7:8D:EF:4B:DE:40:25:ED:5D:8F:17:54:DD:88:33:C0:13:2E:FD:1A:FA:7E:7A:1B:79\n"
"a=setup:actpass\n"
"a=mid:data\n"
"a=sctpmap:5000 webrtc-datachannel 1024\n"
"a=sctp-port:5000";
RtcSessionSdp sdp1;
sdp1.parse(str1);
RtcSessionSdp sdp2;
sdp2.parse(str2);
for (auto media : sdp1.medias) {
InfoL << getRtpDirectionString(media.getDirection());
}
for (auto media : sdp2.medias) {
InfoL << getRtpDirectionString(media.getDirection());
}
InfoL << sdp1.toString();
InfoL << sdp2.toString();
RtcSession session1;
session1.loadFrom(str1);
RtcSession session2;
session2.loadFrom(str2);
DebugL << session1.toString();
DebugL << session2.toString();
}
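A hedged sketch of how a parsed offer like the ones above is turned into an answer elsewhere in this commit (this mirrors WebRtcTransport::getAnswerSdp further below; str1 refers to the first offer in test_sdp):
RtcSession offer;
offer.loadFrom(str1);                          // parse the browser's offer
RtcConfigure configure;                        // answer-side configuration, as used in getAnswerSdp
auto answer = configure.createAnswer(offer);   // returns the answer session (RtcSession::Ptr in this commit)
DebugL << answer->toString();                  // SDP answer text sent back to the JS client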
int start_main(int argc,char *argv[]) {
{
CMD_main cmd_main;
......@@ -266,6 +414,7 @@ int start_main(int argc,char *argv[]) {
});
}
// test_sdp();
uint16_t shellPort = mINI::Instance()[Shell::kPort];
uint16_t rtspPort = mINI::Instance()[Rtsp::kPort];
uint16_t rtspsPort = mINI::Instance()[Rtsp::kSSLPort];
......
......@@ -101,7 +101,10 @@ static bool registerAllItem(){
registerSdpItem<SdpDirectionRecvonly>();
registerSdpItem<SdpDirectionSendrecv>();
registerSdpItem<SdpDirectionInactive>();
registerSdpItem<SdpAttrMsid>();
registerSdpItem<SdpAttrExtmapAllowMixed>();
registerSdpItem<SdpAttrRid>();
registerSdpItem<SdpAttrSimulcast>();
return true;
}
......
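A hedged sketch of how a further attribute would be registered the same way (SdpAttrFoo is hypothetical; the real classes added by this commit appear in Sdp.h below):
class SdpAttrFoo : public SdpItem {
public:
    const char* getKey() const override { return "foo"; }  // key the parser matches, e.g. "a=foo:..."
};
// ...then inside registerAllItem():
//     registerSdpItem<SdpAttrFoo>();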
......@@ -445,6 +445,28 @@ public:
const char* getKey() const override { return "candidate";}
};
class SdpAttrMsid : public SdpItem{
public:
const char* getKey() const override { return "msid";}
};
class SdpAttrExtmapAllowMixed : public SdpItem{
public:
const char* getKey() const override { return "extmap-allow-mixed";}
};
class SdpAttrSimulcast : public SdpItem{
public:
//todo
const char* getKey() const override { return "simulcast";}
};
class SdpAttrRid : public SdpItem{
public:
//todo
const char* getKey() const override { return "rid";}
};
class RtcSdpBase {
public:
vector<SdpItem::Ptr> items;
......@@ -575,6 +597,7 @@ public:
RtcSSRC rtx_ssrc;
//////// simulcast ////////
bool simulcast{false};
RtcSSRC rtp_ssrc_low;
RtcSSRC rtp_ssrc_mid;
RtcSSRC rtp_ssrc_high;
......@@ -611,6 +634,8 @@ public:
class RtcSession{
public:
using Ptr = std::shared_ptr<RtcSession>;
uint32_t version;
SdpOrigin origin;
string session_name;
......
......@@ -62,7 +62,17 @@ void WebRtcTransport::onWrite(const char *buf, size_t len){
onWrite(buf, len, (struct sockaddr_in *)tuple);
}
std::string WebRtcTransport::GetLocalSdp() {
std::string WebRtcTransport::getAnswerSdp(const string &offer){
InfoL << offer;
_offer_sdp = std::make_shared<RtcSession>();
_offer_sdp->loadFrom(offer);
RtcConfigure configure;
_answer_sdp = configure.createAnswer(*_offer_sdp);
return _answer_sdp->toString();
}
std::string WebRtcTransport::getOfferSdp() {
RTC::DtlsTransport::Fingerprint remote_fingerprint;
remote_fingerprint.algorithm = RTC::DtlsTransport::GetFingerprintAlgorithm("sha-256");
remote_fingerprint.value = "";
......
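A hedged usage sketch of the new getAnswerSdp (it mirrors the /index/api/webrtc handler above; src is an RtspMediaSource::Ptr and offer is the SDP string posted by the browser):
auto rtc = WebRtcTransportImp::create(EventPollerPool::Instance().getPoller());
rtc->attach(src);                               // bind the transport to the RTSP source
std::string answer = rtc->getAnswerSdp(offer);  // parse the offer, build and return the answer
// keep rtc alive (the handler pushes it into the static rtcs list) until the session ends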
......@@ -7,6 +7,7 @@
#include "IceServer.hpp"
#include "SrtpSession.hpp"
#include "StunPacket.hpp"
#include "Sdp.h"
class WebRtcTransport : public RTC::DtlsTransport::Listener, public RTC::IceServer::Listener {
public:
......@@ -17,9 +18,9 @@ public:
/// Destroy the object
virtual void onDestory();
/// Get the local SDP
/// \return
std::string GetLocalSdp();
std::string getAnswerSdp(const string &offer);
std::string getOfferSdp();
/// Received UDP data
/// \param buf
......@@ -76,6 +77,8 @@ private:
std::shared_ptr<RTC::IceServer> ice_server_;
std::shared_ptr<RTC::DtlsTransport> dtls_transport_;
std::shared_ptr<RTC::SrtpSession> srtp_session_;
RtcSession::Ptr _offer_sdp;
RtcSession::Ptr _answer_sdp;
};
#include "Poller/EventPoller.h"
......
(Two additional source files in this commit are too large to display as a diff.)
<!DOCTYPE html>
<!--
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
-->
<html>
<head>
<title>PeerConnection PRANSWER Demo</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<style>
video {
border:5px solid black;
width:800px;
height:600px;
}
</style>
</head>
<body>
<video id="vid2" autoplay></video>
<br>
<p>ip_address</p>
<input id="input1" type="text" name="ip_address" value="https://rp.zlmediakit.com:20443/webrtc?app=live&stream=test">
<br>
<button id="btn1">Call</button>
<button id="btn3">Hang Up</button>
</body>
<script src="js/adapter.js"></script>
<script src="js/common.js"></script>
<script src="js/main.js"></script>
</html>
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>ZLM RTC demo</title>
<script src="./ZLMRTCClient.js"></script>
</head>
<body>
<div style="text-align: center;">
<div>
<video id='video' controls autoplay style="text-align:left;">
Your browser is too old and does not support HTML5 video.
</video>
<video id='selfVideo' controls autoplay style="text-align:right;">
Your browser is too old and does not support HTML5 video.
</video>
</div>
<div>
<p>
<label for="streamUrl">url:</label>
<input type="text" id='streamUrl' value="https://rp.zlmediakit.com:20443/index/api/webrtc?app=live&stream=test">
</p>
<p>
<label for="simulecast">simulecast:</label>
<input type="checkbox" id='simulecast' checked="checked">
</p>
<button onclick="start()">开始</button>
<button onclick="stop()">停止</button>
</div>
</div>
<script>
var player = null
function start()
{
stop();
player = new ZLMRTCClient.Endpoint(
{
element: document.getElementById('video'),// the video element
debug: true,// whether to print logs
zlmsdpUrl:document.getElementById('streamUrl').value,// stream URL
simulecast:document.getElementById('simulecast').checked
}
);
player.on(ZLMRTCClient.Events.WEBRTC_ICE_CANDIDATE_ERROR,function(e)
{// ICE negotiation failed
console.log('ICE negotiation failed')
});
player.on(ZLMRTCClient.Events.WEBRTC_ON_REMOTE_STREAMS,function(e)
{// got the remote stream, ready to play
console.log('playback started', e.streams)
});
player.on(ZLMRTCClient.Events.WEBRTC_OFFER_ANWSER_EXCHANGE_FAILED,function(e)
{// offer/answer exchange failed
console.log('offer/answer exchange failed', e)
});
player.on(ZLMRTCClient.Events.WEBRTC_ON_LOCAL_STREAM,function(s)
{// got the local stream
document.getElementById('selfVideo').srcObject=s;
//console.log('offer/answer exchange failed', e)
});
}
function stop()
{
if(player)
{
player.close();
player = null;
var local = document.getElementById('selfVideo');
local.removeAttribute('srcObject');
local.load();
}
}
</script>
</body>
<script>
</script>
</html>
\ No newline at end of file
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.adapter = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
/* eslint-env node */
'use strict';
// Shimming starts here.
(function() {
// Utils.
var logging = require('./utils').log;
var browserDetails = require('./utils').browserDetails;
// Export to the adapter global object visible in the browser.
module.exports.browserDetails = browserDetails;
module.exports.extractVersion = require('./utils').extractVersion;
module.exports.disableLog = require('./utils').disableLog;
// Comment out the line below if you want logging to occur, including logging
// for the switch statement below. Can also be turned on in the browser via
// adapter.disableLog(false), but then logging from the switch statement below
// will not appear.
require('./utils').disableLog(true);
// Browser shims.
var chromeShim = require('./chrome/chrome_shim') || null;
var edgeShim = require('./edge/edge_shim') || null;
var firefoxShim = require('./firefox/firefox_shim') || null;
var safariShim = require('./safari/safari_shim') || null;
// Shim browser if found.
switch (browserDetails.browser) {
case 'opera': // fallthrough as it uses chrome shims
case 'chrome':
if (!chromeShim || !chromeShim.shimPeerConnection) {
logging('Chrome shim is not included in this adapter release.');
return;
}
logging('adapter.js shimming chrome.');
// Export to the adapter global object visible in the browser.
module.exports.browserShim = chromeShim;
chromeShim.shimGetUserMedia();
chromeShim.shimSourceObject();
chromeShim.shimPeerConnection();
chromeShim.shimOnTrack();
break;
case 'firefox':
if (!firefoxShim || !firefoxShim.shimPeerConnection) {
logging('Firefox shim is not included in this adapter release.');
return;
}
logging('adapter.js shimming firefox.');
// Export to the adapter global object visible in the browser.
module.exports.browserShim = firefoxShim;
firefoxShim.shimGetUserMedia();
firefoxShim.shimSourceObject();
firefoxShim.shimPeerConnection();
firefoxShim.shimOnTrack();
break;
case 'edge':
if (!edgeShim || !edgeShim.shimPeerConnection) {
logging('MS edge shim is not included in this adapter release.');
return;
}
logging('adapter.js shimming edge.');
// Export to the adapter global object visible in the browser.
module.exports.browserShim = edgeShim;
edgeShim.shimPeerConnection();
break;
case 'safari':
if (!safariShim) {
logging('Safari shim is not included in this adapter release.');
return;
}
logging('adapter.js shimming safari.');
// Export to the adapter global object visible in the browser.
module.exports.browserShim = safariShim;
safariShim.shimGetUserMedia();
break;
default:
logging('Unsupported browser!');
}
})();
},{"./chrome/chrome_shim":2,"./edge/edge_shim":5,"./firefox/firefox_shim":6,"./safari/safari_shim":8,"./utils":9}],2:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
/* eslint-env node */
'use strict';
var logging = require('../utils.js').log;
var browserDetails = require('../utils.js').browserDetails;
var chromeShim = {
shimOnTrack: function() {
if (typeof window === 'object' && window.RTCPeerConnection && !('ontrack' in
window.RTCPeerConnection.prototype)) {
Object.defineProperty(window.RTCPeerConnection.prototype, 'ontrack', {
get: function() {
return this._ontrack;
},
set: function(f) {
var self = this;
if (this._ontrack) {
this.removeEventListener('track', this._ontrack);
this.removeEventListener('addstream', this._ontrackpoly);
}
this.addEventListener('track', this._ontrack = f);
this.addEventListener('addstream', this._ontrackpoly = function(e) {
// onaddstream does not fire when a track is added to an existing
// stream. But stream.onaddtrack is implemented so we use that.
e.stream.addEventListener('addtrack', function(te) {
var event = new Event('track');
event.track = te.track;
event.receiver = {track: te.track};
event.streams = [e.stream];
self.dispatchEvent(event);
});
e.stream.getTracks().forEach(function(track) {
var event = new Event('track');
event.track = track;
event.receiver = {track: track};
event.streams = [e.stream];
this.dispatchEvent(event);
}.bind(this));
}.bind(this));
}
});
}
},
shimSourceObject: function() {
if (typeof window === 'object') {
if (window.HTMLMediaElement &&
!('srcObject' in window.HTMLMediaElement.prototype)) {
// Shim the srcObject property, once, when HTMLMediaElement is found.
Object.defineProperty(window.HTMLMediaElement.prototype, 'srcObject', {
get: function() {
return this._srcObject;
},
set: function(stream) {
var self = this;
// Use _srcObject as a private property for this shim
this._srcObject = stream;
if (this.src) {
URL.revokeObjectURL(this.src);
}
if (!stream) {
this.src = '';
return;
}
this.src = URL.createObjectURL(stream);
// We need to recreate the blob url when a track is added or
// removed. Doing it manually since we want to avoid a recursion.
stream.addEventListener('addtrack', function() {
if (self.src) {
URL.revokeObjectURL(self.src);
}
self.src = URL.createObjectURL(stream);
});
stream.addEventListener('removetrack', function() {
if (self.src) {
URL.revokeObjectURL(self.src);
}
self.src = URL.createObjectURL(stream);
});
}
});
}
}
},
shimPeerConnection: function() {
// The RTCPeerConnection object.
window.RTCPeerConnection = function(pcConfig, pcConstraints) {
// Translate iceTransportPolicy to iceTransports,
// see https://code.google.com/p/webrtc/issues/detail?id=4869
logging('PeerConnection');
if (pcConfig && pcConfig.iceTransportPolicy) {
pcConfig.iceTransports = pcConfig.iceTransportPolicy;
}
var pc = new webkitRTCPeerConnection(pcConfig, pcConstraints);
var origGetStats = pc.getStats.bind(pc);
pc.getStats = function(selector, successCallback, errorCallback) {
var self = this;
var args = arguments;
// If selector is a function then we are in the old style stats so just
// pass back the original getStats format to avoid breaking old users.
if (arguments.length > 0 && typeof selector === 'function') {
return origGetStats(selector, successCallback);
}
var fixChromeStats_ = function(response) {
var standardReport = {};
var reports = response.result();
reports.forEach(function(report) {
var standardStats = {
id: report.id,
timestamp: report.timestamp,
type: report.type
};
report.names().forEach(function(name) {
standardStats[name] = report.stat(name);
});
standardReport[standardStats.id] = standardStats;
});
return standardReport;
};
if (arguments.length >= 2) {
var successCallbackWrapper_ = function(response) {
args[1](fixChromeStats_(response));
};
return origGetStats.apply(this, [successCallbackWrapper_,
arguments[0]]);
}
// promise-support
return new Promise(function(resolve, reject) {
if (args.length === 1 && typeof selector === 'object') {
origGetStats.apply(self,
[function(response) {
resolve.apply(null, [fixChromeStats_(response)]);
}, reject]);
} else {
origGetStats.apply(self, [resolve, reject]);
}
});
};
return pc;
};
window.RTCPeerConnection.prototype = webkitRTCPeerConnection.prototype;
// wrap static methods. Currently just generateCertificate.
if (webkitRTCPeerConnection.generateCertificate) {
Object.defineProperty(window.RTCPeerConnection, 'generateCertificate', {
get: function() {
return webkitRTCPeerConnection.generateCertificate;
}
});
}
// add promise support
['createOffer', 'createAnswer'].forEach(function(method) {
var nativeMethod = webkitRTCPeerConnection.prototype[method];
webkitRTCPeerConnection.prototype[method] = function() {
var self = this;
if (arguments.length < 1 || (arguments.length === 1 &&
typeof(arguments[0]) === 'object')) {
var opts = arguments.length === 1 ? arguments[0] : undefined;
return new Promise(function(resolve, reject) {
nativeMethod.apply(self, [resolve, reject, opts]);
});
}
return nativeMethod.apply(this, arguments);
};
});
['setLocalDescription', 'setRemoteDescription', 'addIceCandidate']
.forEach(function(method) {
var nativeMethod = webkitRTCPeerConnection.prototype[method];
webkitRTCPeerConnection.prototype[method] = function() {
var args = arguments;
var self = this;
args[0] = new ((method === 'addIceCandidate')?
RTCIceCandidate : RTCSessionDescription)(args[0]);
return new Promise(function(resolve, reject) {
nativeMethod.apply(self, [args[0],
function() {
resolve();
if (args.length >= 2) {
args[1].apply(null, []);
}
},
function(err) {
reject(err);
if (args.length >= 3) {
args[2].apply(null, [err]);
}
}]
);
});
};
});
},
// Attach a media stream to an element.
attachMediaStream: function(element, stream) {
logging('DEPRECATED, attachMediaStream will soon be removed.');
if (browserDetails.version >= 43) {
element.srcObject = stream;
} else if (typeof element.src !== 'undefined') {
element.src = URL.createObjectURL(stream);
} else {
logging('Error attaching stream to element.');
}
},
reattachMediaStream: function(to, from) {
logging('DEPRECATED, reattachMediaStream will soon be removed.');
if (browserDetails.version >= 43) {
to.srcObject = from.srcObject;
} else {
to.src = from.src;
}
}
};
// Expose public methods.
module.exports = {
shimOnTrack: chromeShim.shimOnTrack,
shimSourceObject: chromeShim.shimSourceObject,
shimPeerConnection: chromeShim.shimPeerConnection,
shimGetUserMedia: require('./getusermedia'),
attachMediaStream: chromeShim.attachMediaStream,
reattachMediaStream: chromeShim.reattachMediaStream
};
},{"../utils.js":9,"./getusermedia":3}],3:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
/* eslint-env node */
'use strict';
var logging = require('../utils.js').log;
// Expose public methods.
module.exports = function() {
var constraintsToChrome_ = function(c) {
if (typeof c !== 'object' || c.mandatory || c.optional) {
return c;
}
var cc = {};
Object.keys(c).forEach(function(key) {
if (key === 'require' || key === 'advanced' || key === 'mediaSource') {
return;
}
var r = (typeof c[key] === 'object') ? c[key] : {ideal: c[key]};
if (r.exact !== undefined && typeof r.exact === 'number') {
r.min = r.max = r.exact;
}
var oldname_ = function(prefix, name) {
if (prefix) {
return prefix + name.charAt(0).toUpperCase() + name.slice(1);
}
return (name === 'deviceId') ? 'sourceId' : name;
};
if (r.ideal !== undefined) {
cc.optional = cc.optional || [];
var oc = {};
if (typeof r.ideal === 'number') {
oc[oldname_('min', key)] = r.ideal;
cc.optional.push(oc);
oc = {};
oc[oldname_('max', key)] = r.ideal;
cc.optional.push(oc);
} else {
oc[oldname_('', key)] = r.ideal;
cc.optional.push(oc);
}
}
if (r.exact !== undefined && typeof r.exact !== 'number') {
cc.mandatory = cc.mandatory || {};
cc.mandatory[oldname_('', key)] = r.exact;
} else {
['min', 'max'].forEach(function(mix) {
if (r[mix] !== undefined) {
cc.mandatory = cc.mandatory || {};
cc.mandatory[oldname_(mix, key)] = r[mix];
}
});
}
});
if (c.advanced) {
cc.optional = (cc.optional || []).concat(c.advanced);
}
return cc;
};
var getUserMedia_ = function(constraints, onSuccess, onError) {
constraints = JSON.parse(JSON.stringify(constraints));
if (constraints.audio) {
constraints.audio = constraintsToChrome_(constraints.audio);
}
if (constraints.video) {
constraints.video = constraintsToChrome_(constraints.video);
}
logging('chrome: ' + JSON.stringify(constraints));
return navigator.webkitGetUserMedia(constraints, onSuccess, onError);
};
navigator.getUserMedia = getUserMedia_;
// Returns the result of getUserMedia as a Promise.
var getUserMediaPromise_ = function(constraints) {
return new Promise(function(resolve, reject) {
navigator.getUserMedia(constraints, resolve, reject);
});
};
if (!navigator.mediaDevices) {
navigator.mediaDevices = {
getUserMedia: getUserMediaPromise_,
enumerateDevices: function() {
return new Promise(function(resolve) {
var kinds = {audio: 'audioinput', video: 'videoinput'};
return MediaStreamTrack.getSources(function(devices) {
resolve(devices.map(function(device) {
return {label: device.label,
kind: kinds[device.kind],
deviceId: device.id,
groupId: ''};
}));
});
});
}
};
}
// A shim for getUserMedia method on the mediaDevices object.
// TODO(KaptenJansson) remove once implemented in Chrome stable.
if (!navigator.mediaDevices.getUserMedia) {
navigator.mediaDevices.getUserMedia = function(constraints) {
return getUserMediaPromise_(constraints);
};
} else {
// Even though Chrome 45 has navigator.mediaDevices and a getUserMedia
// function which returns a Promise, it does not accept spec-style
// constraints.
var origGetUserMedia = navigator.mediaDevices.getUserMedia.
bind(navigator.mediaDevices);
navigator.mediaDevices.getUserMedia = function(c) {
if (c) {
logging('spec: ' + JSON.stringify(c)); // whitespace for alignment
c.audio = constraintsToChrome_(c.audio);
c.video = constraintsToChrome_(c.video);
logging('chrome: ' + JSON.stringify(c));
}
return origGetUserMedia(c);
}.bind(this);
}
// Dummy devicechange event methods.
// TODO(KaptenJansson) remove once implemented in Chrome stable.
if (typeof navigator.mediaDevices.addEventListener === 'undefined') {
navigator.mediaDevices.addEventListener = function() {
logging('Dummy mediaDevices.addEventListener called.');
};
}
if (typeof navigator.mediaDevices.removeEventListener === 'undefined') {
navigator.mediaDevices.removeEventListener = function() {
logging('Dummy mediaDevices.removeEventListener called.');
};
}
};
},{"../utils.js":9}],4:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
/* eslint-env node */
'use strict';
// SDP helpers.
var SDPUtils = {};
// Generate an alphanumeric identifier for cname or mids.
// TODO: use UUIDs instead? https://gist.github.com/jed/982883
SDPUtils.generateIdentifier = function() {
return Math.random().toString(36).substr(2, 10);
};
// The RTCP CNAME used by all peerconnections from the same JS.
SDPUtils.localCName = SDPUtils.generateIdentifier();
// Splits SDP into lines, dealing with both CRLF and LF.
SDPUtils.splitLines = function(blob) {
return blob.trim().split('\n').map(function(line) {
return line.trim();
});
};
// Splits SDP into sessionpart and mediasections. Ensures CRLF.
SDPUtils.splitSections = function(blob) {
var parts = blob.split('\nm=');
return parts.map(function(part, index) {
return (index > 0 ? 'm=' + part : part).trim() + '\r\n';
});
};
// Returns lines that start with a certain prefix.
SDPUtils.matchPrefix = function(blob, prefix) {
return SDPUtils.splitLines(blob).filter(function(line) {
return line.indexOf(prefix) === 0;
});
};
// Parses an ICE candidate line. Sample input:
// candidate:702786350 2 udp 41819902 8.8.8.8 60769 typ relay raddr 8.8.8.8
// rport 55996"
SDPUtils.parseCandidate = function(line) {
var parts;
// Parse both variants.
if (line.indexOf('a=candidate:') === 0) {
parts = line.substring(12).split(' ');
} else {
parts = line.substring(10).split(' ');
}
var candidate = {
foundation: parts[0],
component: parts[1],
protocol: parts[2].toLowerCase(),
priority: parseInt(parts[3], 10),
ip: parts[4],
port: parseInt(parts[5], 10),
// skip parts[6] == 'typ'
type: parts[7]
};
for (var i = 8; i < parts.length; i += 2) {
switch (parts[i]) {
case 'raddr':
candidate.relatedAddress = parts[i + 1];
break;
case 'rport':
candidate.relatedPort = parseInt(parts[i + 1], 10);
break;
case 'tcptype':
candidate.tcpType = parts[i + 1];
break;
default: // Unknown extensions are silently ignored.
break;
}
}
return candidate;
};
// Translates a candidate object into SDP candidate attribute.
SDPUtils.writeCandidate = function(candidate) {
var sdp = [];
sdp.push(candidate.foundation);
sdp.push(candidate.component);
sdp.push(candidate.protocol.toUpperCase());
sdp.push(candidate.priority);
sdp.push(candidate.ip);
sdp.push(candidate.port);
var type = candidate.type;
sdp.push('typ');
sdp.push(type);
if (type !== 'host' && candidate.relatedAddress &&
candidate.relatedPort) {
sdp.push('raddr');
sdp.push(candidate.relatedAddress); // was: relAddr
sdp.push('rport');
sdp.push(candidate.relatedPort); // was: relPort
}
if (candidate.tcpType && candidate.protocol.toLowerCase() === 'tcp') {
sdp.push('tcptype');
sdp.push(candidate.tcpType);
}
return 'candidate:' + sdp.join(' ');
};
// Parses an rtpmap line, returns RTCRtpCodecParameters. Sample input:
// a=rtpmap:111 opus/48000/2
SDPUtils.parseRtpMap = function(line) {
var parts = line.substr(9).split(' ');
var parsed = {
payloadType: parseInt(parts.shift(), 10) // was: id
};
parts = parts[0].split('/');
parsed.name = parts[0];
parsed.clockRate = parseInt(parts[1], 10); // was: clockrate
// was: channels
parsed.numChannels = parts.length === 3 ? parseInt(parts[2], 10) : 1;
return parsed;
};
// Generate an a=rtpmap line from RTCRtpCodecCapability or
// RTCRtpCodecParameters.
SDPUtils.writeRtpMap = function(codec) {
var pt = codec.payloadType;
if (codec.preferredPayloadType !== undefined) {
pt = codec.preferredPayloadType;
}
return 'a=rtpmap:' + pt + ' ' + codec.name + '/' + codec.clockRate +
(codec.numChannels !== 1 ? '/' + codec.numChannels : '') + '\r\n';
};
// Parses an a=extmap line (headerextension from RFC 5285). Sample input:
// a=extmap:2 urn:ietf:params:rtp-hdrext:toffset
SDPUtils.parseExtmap = function(line) {
var parts = line.substr(9).split(' ');
return {
id: parseInt(parts[0], 10),
uri: parts[1]
};
};
// Generates a=extmap line from RTCRtpHeaderExtensionParameters or
// RTCRtpHeaderExtension.
SDPUtils.writeExtmap = function(headerExtension) {
return 'a=extmap:' + (headerExtension.id || headerExtension.preferredId) +
' ' + headerExtension.uri + '\r\n';
};
// Parses an fmtp line, returns dictionary. Sample input:
// a=fmtp:96 vbr=on;cng=on
// Also deals with vbr=on; cng=on
SDPUtils.parseFmtp = function(line) {
var parsed = {};
var kv;
var parts = line.substr(line.indexOf(' ') + 1).split(';');
for (var j = 0; j < parts.length; j++) {
kv = parts[j].trim().split('=');
parsed[kv[0].trim()] = kv[1];
}
return parsed;
};
// Generates an a=fmtp line from RTCRtpCodecCapability or RTCRtpCodecParameters.
SDPUtils.writeFmtp = function(codec) {
var line = '';
var pt = codec.payloadType;
if (codec.preferredPayloadType !== undefined) {
pt = codec.preferredPayloadType;
}
if (codec.parameters && Object.keys(codec.parameters).length) {
var params = [];
Object.keys(codec.parameters).forEach(function(param) {
params.push(param + '=' + codec.parameters[param]);
});
line += 'a=fmtp:' + pt + ' ' + params.join(';') + '\r\n';
}
return line;
};
// Parses an rtcp-fb line, returns RTCPRtcpFeedback object. Sample input:
// a=rtcp-fb:98 nack rpsi
SDPUtils.parseRtcpFb = function(line) {
var parts = line.substr(line.indexOf(' ') + 1).split(' ');
return {
type: parts.shift(),
parameter: parts.join(' ')
};
};
// Generate a=rtcp-fb lines from RTCRtpCodecCapability or RTCRtpCodecParameters.
SDPUtils.writeRtcpFb = function(codec) {
var lines = '';
var pt = codec.payloadType;
if (codec.preferredPayloadType !== undefined) {
pt = codec.preferredPayloadType;
}
if (codec.rtcpFeedback && codec.rtcpFeedback.length) {
// FIXME: special handling for trr-int?
codec.rtcpFeedback.forEach(function(fb) {
lines += 'a=rtcp-fb:' + pt + ' ' + fb.type + ' ' + fb.parameter +
'\r\n';
});
}
return lines;
};
// Parses an RFC 5576 ssrc media attribute. Sample input:
// a=ssrc:3735928559 cname:something
SDPUtils.parseSsrcMedia = function(line) {
var sp = line.indexOf(' ');
var parts = {
ssrc: parseInt(line.substr(7, sp - 7), 10)
};
var colon = line.indexOf(':', sp);
if (colon > -1) {
parts.attribute = line.substr(sp + 1, colon - sp - 1);
parts.value = line.substr(colon + 1);
} else {
parts.attribute = line.substr(sp + 1);
}
return parts;
};
// Extracts DTLS parameters from SDP media section or sessionpart.
// FIXME: for consistency with other functions this should only
// get the fingerprint line as input. See also getIceParameters.
SDPUtils.getDtlsParameters = function(mediaSection, sessionpart) {
var lines = SDPUtils.splitLines(mediaSection);
// Search in session part, too.
lines = lines.concat(SDPUtils.splitLines(sessionpart));
var fpLine = lines.filter(function(line) {
return line.indexOf('a=fingerprint:') === 0;
})[0].substr(14);
// Note: a=setup line is ignored since we use the 'auto' role.
var dtlsParameters = {
role: 'auto',
fingerprints: [{
algorithm: fpLine.split(' ')[0],
value: fpLine.split(' ')[1]
}]
};
return dtlsParameters;
};
// Serializes DTLS parameters to SDP.
SDPUtils.writeDtlsParameters = function(params, setupType) {
var sdp = 'a=setup:' + setupType + '\r\n';
params.fingerprints.forEach(function(fp) {
sdp += 'a=fingerprint:' + fp.algorithm + ' ' + fp.value + '\r\n';
});
return sdp;
};
// Parses ICE information from SDP media section or sessionpart.
// FIXME: for consistency with other functions this should only
// get the ice-ufrag and ice-pwd lines as input.
SDPUtils.getIceParameters = function(mediaSection, sessionpart) {
var lines = SDPUtils.splitLines(mediaSection);
// Search in session part, too.
lines = lines.concat(SDPUtils.splitLines(sessionpart));
var iceParameters = {
usernameFragment: lines.filter(function(line) {
return line.indexOf('a=ice-ufrag:') === 0;
})[0].substr(12),
password: lines.filter(function(line) {
return line.indexOf('a=ice-pwd:') === 0;
})[0].substr(10)
};
return iceParameters;
};
// Serializes ICE parameters to SDP.
SDPUtils.writeIceParameters = function(params) {
return 'a=ice-ufrag:' + params.usernameFragment + '\r\n' +
'a=ice-pwd:' + params.password + '\r\n';
};
// Parses the SDP media section and returns RTCRtpParameters.
SDPUtils.parseRtpParameters = function(mediaSection) {
var description = {
codecs: [],
headerExtensions: [],
fecMechanisms: [],
rtcp: []
};
var lines = SDPUtils.splitLines(mediaSection);
var mline = lines[0].split(' ');
for (var i = 3; i < mline.length; i++) { // find all codecs from mline[3..]
var pt = mline[i];
var rtpmapline = SDPUtils.matchPrefix(
mediaSection, 'a=rtpmap:' + pt + ' ')[0];
if (rtpmapline) {
var codec = SDPUtils.parseRtpMap(rtpmapline);
var fmtps = SDPUtils.matchPrefix(
mediaSection, 'a=fmtp:' + pt + ' ');
// Only the first a=fmtp:<pt> is considered.
codec.parameters = fmtps.length ? SDPUtils.parseFmtp(fmtps[0]) : {};
codec.rtcpFeedback = SDPUtils.matchPrefix(
mediaSection, 'a=rtcp-fb:' + pt + ' ')
.map(SDPUtils.parseRtcpFb);
description.codecs.push(codec);
// parse FEC mechanisms from rtpmap lines.
switch (codec.name.toUpperCase()) {
case 'RED':
case 'ULPFEC':
description.fecMechanisms.push(codec.name.toUpperCase());
break;
default: // only RED and ULPFEC are recognized as FEC mechanisms.
break;
}
}
}
SDPUtils.matchPrefix(mediaSection, 'a=extmap:').forEach(function(line) {
description.headerExtensions.push(SDPUtils.parseExtmap(line));
});
// FIXME: parse rtcp.
return description;
};
// Generates parts of the SDP media section describing the capabilities /
// parameters.
SDPUtils.writeRtpDescription = function(kind, caps) {
var sdp = '';
// Build the mline.
sdp += 'm=' + kind + ' ';
sdp += caps.codecs.length > 0 ? '9' : '0'; // reject if no codecs.
sdp += ' UDP/TLS/RTP/SAVPF ';
sdp += caps.codecs.map(function(codec) {
if (codec.preferredPayloadType !== undefined) {
return codec.preferredPayloadType;
}
return codec.payloadType;
}).join(' ') + '\r\n';
sdp += 'c=IN IP4 0.0.0.0\r\n';
sdp += 'a=rtcp:9 IN IP4 0.0.0.0\r\n';
// Add a=rtpmap lines for each codec. Also fmtp and rtcp-fb.
caps.codecs.forEach(function(codec) {
sdp += SDPUtils.writeRtpMap(codec);
sdp += SDPUtils.writeFmtp(codec);
sdp += SDPUtils.writeRtcpFb(codec);
});
// FIXME: add headerExtensions, fecMechanisms and rtcp.
sdp += 'a=rtcp-mux\r\n';
return sdp;
};
// Parses the SDP media section and returns an array of
// RTCRtpEncodingParameters.
SDPUtils.parseRtpEncodingParameters = function(mediaSection) {
var encodingParameters = [];
var description = SDPUtils.parseRtpParameters(mediaSection);
var hasRed = description.fecMechanisms.indexOf('RED') !== -1;
var hasUlpfec = description.fecMechanisms.indexOf('ULPFEC') !== -1;
// filter a=ssrc:... cname:, ignore PlanB-msid
var ssrcs = SDPUtils.matchPrefix(mediaSection, 'a=ssrc:')
.map(function(line) {
return SDPUtils.parseSsrcMedia(line);
})
.filter(function(parts) {
return parts.attribute === 'cname';
});
var primarySsrc = ssrcs.length > 0 && ssrcs[0].ssrc;
var secondarySsrc;
var flows = SDPUtils.matchPrefix(mediaSection, 'a=ssrc-group:FID')
.map(function(line) {
var parts = line.split(' ');
parts.shift();
return parts.map(function(part) {
return parseInt(part, 10);
});
});
if (flows.length > 0 && flows[0].length > 1 && flows[0][0] === primarySsrc) {
secondarySsrc = flows[0][1];
}
description.codecs.forEach(function(codec) {
if (codec.name.toUpperCase() === 'RTX' && codec.parameters.apt) {
var encParam = {
ssrc: primarySsrc,
codecPayloadType: parseInt(codec.parameters.apt, 10),
rtx: {
ssrc: secondarySsrc
}
};
encodingParameters.push(encParam);
if (hasRed) {
encParam = JSON.parse(JSON.stringify(encParam));
encParam.fec = {
ssrc: secondarySsrc,
mechanism: hasUlpfec ? 'red+ulpfec' : 'red'
};
encodingParameters.push(encParam);
}
}
});
if (encodingParameters.length === 0 && primarySsrc) {
encodingParameters.push({
ssrc: primarySsrc
});
}
// we support both b=AS and b=TIAS but interpret AS as TIAS.
var bandwidth = SDPUtils.matchPrefix(mediaSection, 'b=');
if (bandwidth.length) {
if (bandwidth[0].indexOf('b=TIAS:') === 0) {
bandwidth = parseInt(bandwidth[0].substr(7), 10);
} else if (bandwidth[0].indexOf('b=AS:') === 0) {
bandwidth = parseInt(bandwidth[0].substr(5), 10);
}
encodingParameters.forEach(function(params) {
params.maxBitrate = bandwidth;
});
}
return encodingParameters;
};
SDPUtils.writeSessionBoilerplate = function() {
// FIXME: sess-id should be an NTP timestamp.
return 'v=0\r\n' +
'o=thisisadapterortc 8169639915646943137 2 IN IP4 127.0.0.1\r\n' +
's=-\r\n' +
't=0 0\r\n';
};
SDPUtils.writeMediaSection = function(transceiver, caps, type, stream) {
var sdp = SDPUtils.writeRtpDescription(transceiver.kind, caps);
// Map ICE parameters (ufrag, pwd) to SDP.
sdp += SDPUtils.writeIceParameters(
transceiver.iceGatherer.getLocalParameters());
// Map DTLS parameters to SDP.
sdp += SDPUtils.writeDtlsParameters(
transceiver.dtlsTransport.getLocalParameters(),
type === 'offer' ? 'actpass' : 'active');
sdp += 'a=mid:' + transceiver.mid + '\r\n';
if (transceiver.rtpSender && transceiver.rtpReceiver) {
sdp += 'a=sendrecv\r\n';
} else if (transceiver.rtpSender) {
sdp += 'a=sendonly\r\n';
} else if (transceiver.rtpReceiver) {
sdp += 'a=recvonly\r\n';
} else {
sdp += 'a=inactive\r\n';
}
// FIXME: for RTX there might be multiple SSRCs. Not implemented in Edge yet.
if (transceiver.rtpSender) {
var msid = 'msid:' + stream.id + ' ' +
transceiver.rtpSender.track.id + '\r\n';
sdp += 'a=' + msid;
sdp += 'a=ssrc:' + transceiver.sendEncodingParameters[0].ssrc +
' ' + msid;
}
// FIXME: this should be written by writeRtpDescription.
sdp += 'a=ssrc:' + transceiver.sendEncodingParameters[0].ssrc +
' cname:' + SDPUtils.localCName + '\r\n';
return sdp;
};
// Gets the direction from the mediaSection or the sessionpart.
SDPUtils.getDirection = function(mediaSection, sessionpart) {
// Look for sendrecv, sendonly, recvonly, inactive, default to sendrecv.
var lines = SDPUtils.splitLines(mediaSection);
for (var i = 0; i < lines.length; i++) {
switch (lines[i]) {
case 'a=sendrecv':
case 'a=sendonly':
case 'a=recvonly':
case 'a=inactive':
return lines[i].substr(2);
default:
// FIXME: What should happen here?
}
}
if (sessionpart) {
return SDPUtils.getDirection(sessionpart);
}
return 'sendrecv';
};
// Expose public methods.
module.exports = SDPUtils;
},{}],5:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
/* eslint-env node */
'use strict';
var SDPUtils = require('./edge_sdp');
var logging = require('../utils').log;
var edgeShim = {
shimPeerConnection: function() {
if (window.RTCIceGatherer) {
// ORTC defines an RTCIceCandidate object but no constructor.
// Not implemented in Edge.
if (!window.RTCIceCandidate) {
window.RTCIceCandidate = function(args) {
return args;
};
}
// ORTC does not have a session description object but
// other browsers (i.e. Chrome) that will support both PC and ORTC
// in the future might have this defined already.
if (!window.RTCSessionDescription) {
window.RTCSessionDescription = function(args) {
return args;
};
}
}
window.RTCPeerConnection = function(config) {
var self = this;
var _eventTarget = document.createDocumentFragment();
['addEventListener', 'removeEventListener', 'dispatchEvent']
.forEach(function(method) {
self[method] = _eventTarget[method].bind(_eventTarget);
});
this.onicecandidate = null;
this.onaddstream = null;
this.ontrack = null;
this.onremovestream = null;
this.onsignalingstatechange = null;
this.oniceconnectionstatechange = null;
this.onnegotiationneeded = null;
this.ondatachannel = null;
this.localStreams = [];
this.remoteStreams = [];
this.getLocalStreams = function() {
return self.localStreams;
};
this.getRemoteStreams = function() {
return self.remoteStreams;
};
this.localDescription = new RTCSessionDescription({
type: '',
sdp: ''
});
this.remoteDescription = new RTCSessionDescription({
type: '',
sdp: ''
});
this.signalingState = 'stable';
this.iceConnectionState = 'new';
this.iceGatheringState = 'new';
this.iceOptions = {
gatherPolicy: 'all',
iceServers: []
};
if (config && config.iceTransportPolicy) {
switch (config.iceTransportPolicy) {
case 'all':
case 'relay':
this.iceOptions.gatherPolicy = config.iceTransportPolicy;
break;
case 'none':
// FIXME: remove once implementation and spec have added this.
throw new TypeError('iceTransportPolicy "none" not supported');
default:
// don't set iceTransportPolicy.
break;
}
}
if (config && config.iceServers) {
// Edge does not like
// 1) stun:
// 2) turn: that does not have all of turn:host:port?transport=udp
this.iceOptions.iceServers = config.iceServers.filter(function(server) {
if (server && server.urls) {
server.urls = server.urls.filter(function(url) {
return url.indexOf('turn:') === 0 &&
url.indexOf('transport=udp') !== -1;
})[0];
return !!server.urls;
}
return false;
});
}
// per-track iceGathers, iceTransports, dtlsTransports, rtpSenders, ...
// everything that is needed to describe a SDP m-line.
this.transceivers = [];
// since the iceGatherer is currently created in createOffer but we
// must not emit candidates until after setLocalDescription we buffer
// them in this array.
this._localIceCandidatesBuffer = [];
};
window.RTCPeerConnection.prototype._emitBufferedCandidates = function() {
var self = this;
var sections = SDPUtils.splitSections(self.localDescription.sdp);
// FIXME: need to apply ice candidates in a way which is async but
// in-order
this._localIceCandidatesBuffer.forEach(function(event) {
var end = !event.candidate || Object.keys(event.candidate).length === 0;
if (end) {
for (var j = 1; j < sections.length; j++) {
if (sections[j].indexOf('\r\na=end-of-candidates\r\n') === -1) {
sections[j] += 'a=end-of-candidates\r\n';
}
}
} else if (event.candidate.candidate.indexOf('typ endOfCandidates')
=== -1) {
sections[event.candidate.sdpMLineIndex + 1] +=
'a=' + event.candidate.candidate + '\r\n';
}
self.localDescription.sdp = sections.join('');
self.dispatchEvent(event);
if (self.onicecandidate !== null) {
self.onicecandidate(event);
}
if (!event.candidate && self.iceGatheringState !== 'complete') {
var complete = self.transceivers.every(function(transceiver) {
return transceiver.iceGatherer &&
transceiver.iceGatherer.state === 'completed';
});
if (complete) {
self.iceGatheringState = 'complete';
}
}
});
this._localIceCandidatesBuffer = [];
};
window.RTCPeerConnection.prototype.addStream = function(stream) {
// Clone is necessary for local demos mostly, attaching directly
// to two different senders does not work (build 10547).
this.localStreams.push(stream.clone());
this._maybeFireNegotiationNeeded();
};
window.RTCPeerConnection.prototype.removeStream = function(stream) {
var idx = this.localStreams.indexOf(stream);
if (idx > -1) {
this.localStreams.splice(idx, 1);
this._maybeFireNegotiationNeeded();
}
};
// Determines the intersection of local and remote capabilities.
window.RTCPeerConnection.prototype._getCommonCapabilities =
function(localCapabilities, remoteCapabilities) {
var commonCapabilities = {
codecs: [],
headerExtensions: [],
fecMechanisms: []
};
localCapabilities.codecs.forEach(function(lCodec) {
for (var i = 0; i < remoteCapabilities.codecs.length; i++) {
var rCodec = remoteCapabilities.codecs[i];
if (lCodec.name.toLowerCase() === rCodec.name.toLowerCase() &&
lCodec.clockRate === rCodec.clockRate &&
lCodec.numChannels === rCodec.numChannels) {
// push rCodec so we reply with offerer payload type
commonCapabilities.codecs.push(rCodec);
// FIXME: also need to determine intersection between
// .rtcpFeedback and .parameters
break;
}
}
});
localCapabilities.headerExtensions
.forEach(function(lHeaderExtension) {
for (var i = 0; i < remoteCapabilities.headerExtensions.length;
i++) {
var rHeaderExtension = remoteCapabilities.headerExtensions[i];
if (lHeaderExtension.uri === rHeaderExtension.uri) {
commonCapabilities.headerExtensions.push(rHeaderExtension);
break;
}
}
});
// FIXME: fecMechanisms
return commonCapabilities;
};
// Create ICE gatherer, ICE transport and DTLS transport.
window.RTCPeerConnection.prototype._createIceAndDtlsTransports =
function(mid, sdpMLineIndex) {
var self = this;
var iceGatherer = new RTCIceGatherer(self.iceOptions);
var iceTransport = new RTCIceTransport(iceGatherer);
iceGatherer.onlocalcandidate = function(evt) {
var event = new Event('icecandidate');
event.candidate = {sdpMid: mid, sdpMLineIndex: sdpMLineIndex};
var cand = evt.candidate;
var end = !cand || Object.keys(cand).length === 0;
// Edge emits an empty object for RTCIceCandidateComplete.
if (end) {
// polyfill since RTCIceGatherer.state is not implemented in
// Edge 10547 yet.
if (iceGatherer.state === undefined) {
iceGatherer.state = 'completed';
}
// Emit a candidate with type endOfCandidates to make the samples
// work. Edge requires addIceCandidate with this empty candidate
// to start checking. The real solution is to signal
// end-of-candidates to the other side when getting the null
// candidate but some apps (like the samples) don't do that.
event.candidate.candidate =
'candidate:1 1 udp 1 0.0.0.0 9 typ endOfCandidates';
} else {
// RTCIceCandidate doesn't have a component, needs to be added
cand.component = iceTransport.component === 'RTCP' ? 2 : 1;
event.candidate.candidate = SDPUtils.writeCandidate(cand);
}
var complete = self.transceivers.every(function(transceiver) {
return transceiver.iceGatherer &&
transceiver.iceGatherer.state === 'completed';
});
// Emit candidate if localDescription is set.
// Also emits null candidate when all gatherers are complete.
switch (self.iceGatheringState) {
case 'new':
self._localIceCandidatesBuffer.push(event);
if (end && complete) {
self._localIceCandidatesBuffer.push(
new Event('icecandidate'));
}
break;
case 'gathering':
self._emitBufferedCandidates();
self.dispatchEvent(event);
if (self.onicecandidate !== null) {
self.onicecandidate(event);
}
if (complete) {
self.dispatchEvent(new Event('icecandidate'));
if (self.onicecandidate !== null) {
self.onicecandidate(new Event('icecandidate'));
}
self.iceGatheringState = 'complete';
}
break;
case 'complete':
// should not happen... currently!
break;
default: // no-op.
break;
}
};
iceTransport.onicestatechange = function() {
self._updateConnectionState();
};
var dtlsTransport = new RTCDtlsTransport(iceTransport);
dtlsTransport.ondtlsstatechange = function() {
self._updateConnectionState();
};
dtlsTransport.onerror = function() {
// onerror does not set state to failed by itself.
dtlsTransport.state = 'failed';
self._updateConnectionState();
};
return {
iceGatherer: iceGatherer,
iceTransport: iceTransport,
dtlsTransport: dtlsTransport
};
};
// Start the RTP Sender and Receiver for a transceiver.
window.RTCPeerConnection.prototype._transceive = function(transceiver,
send, recv) {
var params = this._getCommonCapabilities(transceiver.localCapabilities,
transceiver.remoteCapabilities);
if (send && transceiver.rtpSender) {
params.encodings = transceiver.sendEncodingParameters;
params.rtcp = {
cname: SDPUtils.localCName
};
if (transceiver.recvEncodingParameters.length) {
params.rtcp.ssrc = transceiver.recvEncodingParameters[0].ssrc;
}
transceiver.rtpSender.send(params);
}
if (recv && transceiver.rtpReceiver) {
params.encodings = transceiver.recvEncodingParameters;
params.rtcp = {
cname: transceiver.cname
};
if (transceiver.sendEncodingParameters.length) {
params.rtcp.ssrc = transceiver.sendEncodingParameters[0].ssrc;
}
transceiver.rtpReceiver.receive(params);
}
};
window.RTCPeerConnection.prototype.setLocalDescription =
function(description) {
var self = this;
var sections;
var sessionpart;
if (description.type === 'offer') {
// FIXME: What was the purpose of this empty if statement?
// if (!this._pendingOffer) {
// } else {
if (this._pendingOffer) {
// VERY limited support for SDP munging. Limited to:
// * changing the order of codecs
sections = SDPUtils.splitSections(description.sdp);
sessionpart = sections.shift();
sections.forEach(function(mediaSection, sdpMLineIndex) {
var caps = SDPUtils.parseRtpParameters(mediaSection);
self._pendingOffer[sdpMLineIndex].localCapabilities = caps;
});
this.transceivers = this._pendingOffer;
delete this._pendingOffer;
}
} else if (description.type === 'answer') {
sections = SDPUtils.splitSections(self.remoteDescription.sdp);
sessionpart = sections.shift();
sections.forEach(function(mediaSection, sdpMLineIndex) {
var transceiver = self.transceivers[sdpMLineIndex];
var iceGatherer = transceiver.iceGatherer;
var iceTransport = transceiver.iceTransport;
var dtlsTransport = transceiver.dtlsTransport;
var localCapabilities = transceiver.localCapabilities;
var remoteCapabilities = transceiver.remoteCapabilities;
var rejected = mediaSection.split('\n', 1)[0]
.split(' ', 2)[1] === '0';
if (!rejected) {
var remoteIceParameters = SDPUtils.getIceParameters(
mediaSection, sessionpart);
iceTransport.start(iceGatherer, remoteIceParameters,
'controlled');
var remoteDtlsParameters = SDPUtils.getDtlsParameters(
mediaSection, sessionpart);
dtlsTransport.start(remoteDtlsParameters);
// Calculate intersection of capabilities.
var params = self._getCommonCapabilities(localCapabilities,
remoteCapabilities);
// Start the RTCRtpSender. The RTCRtpReceiver for this
// transceiver has already been started in setRemoteDescription.
self._transceive(transceiver,
params.codecs.length > 0,
false);
}
});
}
this.localDescription = {
type: description.type,
sdp: description.sdp
};
switch (description.type) {
case 'offer':
this._updateSignalingState('have-local-offer');
break;
case 'answer':
this._updateSignalingState('stable');
break;
default:
throw new TypeError('unsupported type "' + description.type +
'"');
}
// If a success callback was provided, emit ICE candidates after it
// has been executed. Otherwise, emit callback after the Promise is
// resolved.
var hasCallback = arguments.length > 1 &&
typeof arguments[1] === 'function';
if (hasCallback) {
var cb = arguments[1];
window.setTimeout(function() {
cb();
if (self.iceGatheringState === 'new') {
self.iceGatheringState = 'gathering';
}
self._emitBufferedCandidates();
}, 0);
}
var p = Promise.resolve();
p.then(function() {
if (!hasCallback) {
if (self.iceGatheringState === 'new') {
self.iceGatheringState = 'gathering';
}
// Usually candidates will be emitted earlier.
window.setTimeout(self._emitBufferedCandidates.bind(self), 500);
}
});
return p;
};
window.RTCPeerConnection.prototype.setRemoteDescription =
function(description) {
var self = this;
var stream = new MediaStream();
var receiverList = [];
var sections = SDPUtils.splitSections(description.sdp);
var sessionpart = sections.shift();
sections.forEach(function(mediaSection, sdpMLineIndex) {
var lines = SDPUtils.splitLines(mediaSection);
var mline = lines[0].substr(2).split(' ');
var kind = mline[0];
var rejected = mline[1] === '0';
var direction = SDPUtils.getDirection(mediaSection, sessionpart);
var transceiver;
var iceGatherer;
var iceTransport;
var dtlsTransport;
var rtpSender;
var rtpReceiver;
var sendEncodingParameters;
var recvEncodingParameters;
var localCapabilities;
var track;
// FIXME: ensure the mediaSection has rtcp-mux set.
var remoteCapabilities = SDPUtils.parseRtpParameters(mediaSection);
var remoteIceParameters;
var remoteDtlsParameters;
if (!rejected) {
remoteIceParameters = SDPUtils.getIceParameters(mediaSection,
sessionpart);
remoteDtlsParameters = SDPUtils.getDtlsParameters(mediaSection,
sessionpart);
}
recvEncodingParameters =
SDPUtils.parseRtpEncodingParameters(mediaSection);
var mid = SDPUtils.matchPrefix(mediaSection, 'a=mid:');
if (mid.length) {
mid = mid[0].substr(6);
} else {
mid = SDPUtils.generateIdentifier();
}
var cname;
// Gets the first SSRC. Note that with RTX there might be multiple
// SSRCs.
var remoteSsrc = SDPUtils.matchPrefix(mediaSection, 'a=ssrc:')
.map(function(line) {
return SDPUtils.parseSsrcMedia(line);
})
.filter(function(obj) {
return obj.attribute === 'cname';
})[0];
if (remoteSsrc) {
cname = remoteSsrc.value;
}
var isComplete = SDPUtils.matchPrefix(mediaSection,
'a=end-of-candidates').length > 0;
var cands = SDPUtils.matchPrefix(mediaSection, 'a=candidate:')
.map(function(cand) {
return SDPUtils.parseCandidate(cand);
})
.filter(function(cand) {
return cand.component === '1';
});
if (description.type === 'offer' && !rejected) {
var transports = self._createIceAndDtlsTransports(mid,
sdpMLineIndex);
if (isComplete) {
transports.iceTransport.setRemoteCandidates(cands);
}
localCapabilities = RTCRtpReceiver.getCapabilities(kind);
sendEncodingParameters = [{
ssrc: (2 * sdpMLineIndex + 2) * 1001
}];
rtpReceiver = new RTCRtpReceiver(transports.dtlsTransport, kind);
track = rtpReceiver.track;
receiverList.push([track, rtpReceiver]);
// FIXME: not correct when there are multiple streams but that is
// not currently supported in this shim.
stream.addTrack(track);
// FIXME: look at direction.
if (self.localStreams.length > 0 &&
self.localStreams[0].getTracks().length >= sdpMLineIndex) {
// FIXME: actually more complicated, needs to match types etc
var localtrack = self.localStreams[0]
.getTracks()[sdpMLineIndex];
rtpSender = new RTCRtpSender(localtrack,
transports.dtlsTransport);
}
self.transceivers[sdpMLineIndex] = {
iceGatherer: transports.iceGatherer,
iceTransport: transports.iceTransport,
dtlsTransport: transports.dtlsTransport,
localCapabilities: localCapabilities,
remoteCapabilities: remoteCapabilities,
rtpSender: rtpSender,
rtpReceiver: rtpReceiver,
kind: kind,
mid: mid,
cname: cname,
sendEncodingParameters: sendEncodingParameters,
recvEncodingParameters: recvEncodingParameters
};
// Start the RTCRtpReceiver now. The RTPSender is started in
// setLocalDescription.
self._transceive(self.transceivers[sdpMLineIndex],
false,
direction === 'sendrecv' || direction === 'sendonly');
} else if (description.type === 'answer' && !rejected) {
transceiver = self.transceivers[sdpMLineIndex];
iceGatherer = transceiver.iceGatherer;
iceTransport = transceiver.iceTransport;
dtlsTransport = transceiver.dtlsTransport;
rtpSender = transceiver.rtpSender;
rtpReceiver = transceiver.rtpReceiver;
sendEncodingParameters = transceiver.sendEncodingParameters;
localCapabilities = transceiver.localCapabilities;
self.transceivers[sdpMLineIndex].recvEncodingParameters =
recvEncodingParameters;
self.transceivers[sdpMLineIndex].remoteCapabilities =
remoteCapabilities;
self.transceivers[sdpMLineIndex].cname = cname;
if (isComplete) {
iceTransport.setRemoteCandidates(cands);
}
iceTransport.start(iceGatherer, remoteIceParameters,
'controlling');
dtlsTransport.start(remoteDtlsParameters);
self._transceive(transceiver,
direction === 'sendrecv' || direction === 'recvonly',
direction === 'sendrecv' || direction === 'sendonly');
if (rtpReceiver &&
(direction === 'sendrecv' || direction === 'sendonly')) {
track = rtpReceiver.track;
receiverList.push([track, rtpReceiver]);
stream.addTrack(track);
} else {
// FIXME: actually the receiver should be created later.
delete transceiver.rtpReceiver;
}
}
});
this.remoteDescription = {
type: description.type,
sdp: description.sdp
};
switch (description.type) {
case 'offer':
this._updateSignalingState('have-remote-offer');
break;
case 'answer':
this._updateSignalingState('stable');
break;
default:
throw new TypeError('unsupported type "' + description.type +
'"');
}
if (stream.getTracks().length) {
self.remoteStreams.push(stream);
window.setTimeout(function() {
var event = new Event('addstream');
event.stream = stream;
self.dispatchEvent(event);
if (self.onaddstream !== null) {
window.setTimeout(function() {
self.onaddstream(event);
}, 0);
}
receiverList.forEach(function(item) {
var track = item[0];
var receiver = item[1];
var trackEvent = new Event('track');
trackEvent.track = track;
trackEvent.receiver = receiver;
trackEvent.streams = [stream];
self.dispatchEvent(trackEvent);
if (self.ontrack !== null) {
window.setTimeout(function() {
self.ontrack(trackEvent);
}, 0);
}
});
}, 0);
}
if (arguments.length > 1 && typeof arguments[1] === 'function') {
window.setTimeout(arguments[1], 0);
}
return Promise.resolve();
};
window.RTCPeerConnection.prototype.close = function() {
this.transceivers.forEach(function(transceiver) {
/* not yet
if (transceiver.iceGatherer) {
transceiver.iceGatherer.close();
}
*/
if (transceiver.iceTransport) {
transceiver.iceTransport.stop();
}
if (transceiver.dtlsTransport) {
transceiver.dtlsTransport.stop();
}
if (transceiver.rtpSender) {
transceiver.rtpSender.stop();
}
if (transceiver.rtpReceiver) {
transceiver.rtpReceiver.stop();
}
});
// FIXME: clean up tracks, local streams, remote streams, etc
this._updateSignalingState('closed');
};
// Update the signaling state.
window.RTCPeerConnection.prototype._updateSignalingState =
function(newState) {
this.signalingState = newState;
var event = new Event('signalingstatechange');
this.dispatchEvent(event);
if (this.onsignalingstatechange !== null) {
this.onsignalingstatechange(event);
}
};
// Determine whether to fire the negotiationneeded event.
window.RTCPeerConnection.prototype._maybeFireNegotiationNeeded =
function() {
// Fire away (for now).
var event = new Event('negotiationneeded');
this.dispatchEvent(event);
if (this.onnegotiationneeded !== null) {
this.onnegotiationneeded(event);
}
};
// Update the connection state.
window.RTCPeerConnection.prototype._updateConnectionState = function() {
var self = this;
var newState;
var states = {
  'new': 0,
  closed: 0,
  connecting: 0,
  checking: 0,
  connected: 0,
  completed: 0,
  disconnected: 0,
  failed: 0
};
this.transceivers.forEach(function(transceiver) {
states[transceiver.iceTransport.state]++;
states[transceiver.dtlsTransport.state]++;
});
// ICETransport.completed and connected are the same for this purpose.
states.connected += states.completed;
newState = 'new';
if (states.failed > 0) {
newState = 'failed';
} else if (states.connecting > 0 || states.checking > 0) {
newState = 'connecting';
} else if (states.disconnected > 0) {
newState = 'disconnected';
} else if (states.new > 0) {
newState = 'new';
} else if (states.connected > 0 || states.completed > 0) {
newState = 'connected';
}
if (newState !== self.iceConnectionState) {
self.iceConnectionState = newState;
var event = new Event('iceconnectionstatechange');
this.dispatchEvent(event);
if (this.oniceconnectionstatechange !== null) {
this.oniceconnectionstatechange(event);
}
}
};
window.RTCPeerConnection.prototype.createOffer = function() {
var self = this;
if (this._pendingOffer) {
throw new Error('createOffer called while there is a pending offer.');
}
var offerOptions;
if (arguments.length === 1 && typeof arguments[0] !== 'function') {
offerOptions = arguments[0];
} else if (arguments.length === 3) {
offerOptions = arguments[2];
}
var tracks = [];
var numAudioTracks = 0;
var numVideoTracks = 0;
// Default to sendrecv.
if (this.localStreams.length) {
numAudioTracks = this.localStreams[0].getAudioTracks().length;
numVideoTracks = this.localStreams[0].getVideoTracks().length;
}
// Determine number of audio and video tracks we need to send/recv.
if (offerOptions) {
// Reject Chrome legacy constraints.
if (offerOptions.mandatory || offerOptions.optional) {
throw new TypeError(
'Legacy mandatory/optional constraints not supported.');
}
if (offerOptions.offerToReceiveAudio !== undefined) {
numAudioTracks = offerOptions.offerToReceiveAudio;
}
if (offerOptions.offerToReceiveVideo !== undefined) {
numVideoTracks = offerOptions.offerToReceiveVideo;
}
}
if (this.localStreams.length) {
// Push local streams.
this.localStreams[0].getTracks().forEach(function(track) {
tracks.push({
kind: track.kind,
track: track,
wantReceive: track.kind === 'audio' ?
numAudioTracks > 0 : numVideoTracks > 0
});
if (track.kind === 'audio') {
numAudioTracks--;
} else if (track.kind === 'video') {
numVideoTracks--;
}
});
}
// Create M-lines for recvonly streams.
while (numAudioTracks > 0 || numVideoTracks > 0) {
if (numAudioTracks > 0) {
tracks.push({
kind: 'audio',
wantReceive: true
});
numAudioTracks--;
}
if (numVideoTracks > 0) {
tracks.push({
kind: 'video',
wantReceive: true
});
numVideoTracks--;
}
}
var sdp = SDPUtils.writeSessionBoilerplate();
var transceivers = [];
tracks.forEach(function(mline, sdpMLineIndex) {
// For each track, create an ice gatherer, ice transport,
// dtls transport, potentially rtpsender and rtpreceiver.
var track = mline.track;
var kind = mline.kind;
var mid = SDPUtils.generateIdentifier();
var transports = self._createIceAndDtlsTransports(mid, sdpMLineIndex);
var localCapabilities = RTCRtpSender.getCapabilities(kind);
var rtpSender;
var rtpReceiver;
// generate an ssrc now, to be used later in rtpSender.send
var sendEncodingParameters = [{
ssrc: (2 * sdpMLineIndex + 1) * 1001
}];
if (track) {
rtpSender = new RTCRtpSender(track, transports.dtlsTransport);
}
if (mline.wantReceive) {
rtpReceiver = new RTCRtpReceiver(transports.dtlsTransport, kind);
}
transceivers[sdpMLineIndex] = {
iceGatherer: transports.iceGatherer,
iceTransport: transports.iceTransport,
dtlsTransport: transports.dtlsTransport,
localCapabilities: localCapabilities,
remoteCapabilities: null,
rtpSender: rtpSender,
rtpReceiver: rtpReceiver,
kind: kind,
mid: mid,
sendEncodingParameters: sendEncodingParameters,
recvEncodingParameters: null
};
var transceiver = transceivers[sdpMLineIndex];
sdp += SDPUtils.writeMediaSection(transceiver,
transceiver.localCapabilities, 'offer', self.localStreams[0]);
});
this._pendingOffer = transceivers;
var desc = new RTCSessionDescription({
type: 'offer',
sdp: sdp
});
if (arguments.length && typeof arguments[0] === 'function') {
window.setTimeout(arguments[0], 0, desc);
}
return Promise.resolve(desc);
};
window.RTCPeerConnection.prototype.createAnswer = function() {
var self = this;
var sdp = SDPUtils.writeSessionBoilerplate();
this.transceivers.forEach(function(transceiver) {
// Calculate intersection of capabilities.
var commonCapabilities = self._getCommonCapabilities(
transceiver.localCapabilities,
transceiver.remoteCapabilities);
sdp += SDPUtils.writeMediaSection(transceiver, commonCapabilities,
'answer', self.localStreams[0]);
});
var desc = new RTCSessionDescription({
type: 'answer',
sdp: sdp
});
if (arguments.length && typeof arguments[0] === 'function') {
window.setTimeout(arguments[0], 0, desc);
}
return Promise.resolve(desc);
};
window.RTCPeerConnection.prototype.addIceCandidate = function(candidate) {
var mLineIndex = candidate.sdpMLineIndex;
if (candidate.sdpMid) {
for (var i = 0; i < this.transceivers.length; i++) {
if (this.transceivers[i].mid === candidate.sdpMid) {
mLineIndex = i;
break;
}
}
}
var transceiver = this.transceivers[mLineIndex];
if (transceiver) {
var cand = Object.keys(candidate.candidate).length > 0 ?
SDPUtils.parseCandidate(candidate.candidate) : {};
// Ignore Chrome's invalid candidates since Edge does not like them.
if (cand.protocol === 'tcp' && cand.port === 0) {
return;
}
// Ignore RTCP candidates, we assume RTCP-MUX.
if (cand.component !== '1') {
return;
}
// A dirty hack to make samples work.
if (cand.type === 'endOfCandidates') {
cand = {};
}
transceiver.iceTransport.addRemoteCandidate(cand);
// update the remoteDescription.
var sections = SDPUtils.splitSections(this.remoteDescription.sdp);
sections[mLineIndex + 1] += (cand.type ? candidate.candidate.trim()
: 'a=end-of-candidates') + '\r\n';
this.remoteDescription.sdp = sections.join('');
}
if (arguments.length > 1 && typeof arguments[1] === 'function') {
window.setTimeout(arguments[1], 0);
}
return Promise.resolve();
};
window.RTCPeerConnection.prototype.getStats = function() {
var promises = [];
this.transceivers.forEach(function(transceiver) {
['rtpSender', 'rtpReceiver', 'iceGatherer', 'iceTransport',
'dtlsTransport'].forEach(function(method) {
if (transceiver[method]) {
promises.push(transceiver[method].getStats());
}
});
});
var cb = arguments.length > 1 && typeof arguments[1] === 'function' &&
arguments[1];
return new Promise(function(resolve) {
var results = {};
Promise.all(promises).then(function(res) {
res.forEach(function(result) {
Object.keys(result).forEach(function(id) {
results[id] = result[id];
});
});
if (cb) {
window.setTimeout(cb, 0, results);
}
resolve(results);
});
});
};
},
// Attach a media stream to an element.
attachMediaStream: function(element, stream) {
logging('DEPRECATED, attachMediaStream will soon be removed.');
element.srcObject = stream;
},
reattachMediaStream: function(to, from) {
logging('DEPRECATED, reattachMediaStream will soon be removed.');
to.srcObject = from.srcObject;
}
};
// Expose public methods.
module.exports = {
shimPeerConnection: edgeShim.shimPeerConnection,
attachMediaStream: edgeShim.attachMediaStream,
reattachMediaStream: edgeShim.reattachMediaStream
};
},{"../utils":9,"./edge_sdp":4}],6:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
/* eslint-env node */
'use strict';
var logging = require('../utils').log;
var browserDetails = require('../utils').browserDetails;
var firefoxShim = {
shimOnTrack: function() {
if (typeof window === 'object' && window.RTCPeerConnection && !('ontrack' in
window.RTCPeerConnection.prototype)) {
Object.defineProperty(window.RTCPeerConnection.prototype, 'ontrack', {
get: function() {
return this._ontrack;
},
set: function(f) {
if (this._ontrack) {
this.removeEventListener('track', this._ontrack);
this.removeEventListener('addstream', this._ontrackpoly);
}
this.addEventListener('track', this._ontrack = f);
this.addEventListener('addstream', this._ontrackpoly = function(e) {
e.stream.getTracks().forEach(function(track) {
var event = new Event('track');
event.track = track;
event.receiver = {track: track};
event.streams = [e.stream];
this.dispatchEvent(event);
}.bind(this));
}.bind(this));
}
});
}
},
shimSourceObject: function() {
// Firefox has supported mozSrcObject since FF22, unprefixed in 42.
if (typeof window === 'object') {
if (window.HTMLMediaElement &&
!('srcObject' in window.HTMLMediaElement.prototype)) {
// Shim the srcObject property, once, when HTMLMediaElement is found.
Object.defineProperty(window.HTMLMediaElement.prototype, 'srcObject', {
get: function() {
return this.mozSrcObject;
},
set: function(stream) {
this.mozSrcObject = stream;
}
});
}
}
},
shimPeerConnection: function() {
// The RTCPeerConnection object.
if (!window.RTCPeerConnection) {
window.RTCPeerConnection = function(pcConfig, pcConstraints) {
if (browserDetails.version < 38) {
// .urls is not supported in FF < 38.
// create RTCIceServers with a single url.
if (pcConfig && pcConfig.iceServers) {
var newIceServers = [];
for (var i = 0; i < pcConfig.iceServers.length; i++) {
var server = pcConfig.iceServers[i];
if (server.hasOwnProperty('urls')) {
for (var j = 0; j < server.urls.length; j++) {
var newServer = {
url: server.urls[j]
};
if (server.urls[j].indexOf('turn') === 0) {
newServer.username = server.username;
newServer.credential = server.credential;
}
newIceServers.push(newServer);
}
} else {
newIceServers.push(pcConfig.iceServers[i]);
}
}
pcConfig.iceServers = newIceServers;
}
}
return new mozRTCPeerConnection(pcConfig, pcConstraints);
};
window.RTCPeerConnection.prototype = mozRTCPeerConnection.prototype;
// wrap static methods. Currently just generateCertificate.
if (mozRTCPeerConnection.generateCertificate) {
Object.defineProperty(window.RTCPeerConnection, 'generateCertificate', {
get: function() {
return mozRTCPeerConnection.generateCertificate;
}
});
}
window.RTCSessionDescription = mozRTCSessionDescription;
window.RTCIceCandidate = mozRTCIceCandidate;
}
// shim away need for obsolete RTCIceCandidate/RTCSessionDescription.
['setLocalDescription', 'setRemoteDescription', 'addIceCandidate']
.forEach(function(method) {
var nativeMethod = RTCPeerConnection.prototype[method];
RTCPeerConnection.prototype[method] = function() {
arguments[0] = new ((method === 'addIceCandidate')?
RTCIceCandidate : RTCSessionDescription)(arguments[0]);
return nativeMethod.apply(this, arguments);
};
});
},
shimGetUserMedia: function() {
// getUserMedia constraints shim.
var getUserMedia_ = function(constraints, onSuccess, onError) {
var constraintsToFF37_ = function(c) {
if (typeof c !== 'object' || c.require) {
return c;
}
var require = [];
Object.keys(c).forEach(function(key) {
if (key === 'require' || key === 'advanced' ||
key === 'mediaSource') {
return;
}
var r = c[key] = (typeof c[key] === 'object') ?
c[key] : {ideal: c[key]};
if (r.min !== undefined ||
r.max !== undefined || r.exact !== undefined) {
require.push(key);
}
if (r.exact !== undefined) {
if (typeof r.exact === 'number') {
r.min = r.max = r.exact;
} else {
c[key] = r.exact;
}
delete r.exact;
}
if (r.ideal !== undefined) {
c.advanced = c.advanced || [];
var oc = {};
if (typeof r.ideal === 'number') {
oc[key] = {min: r.ideal, max: r.ideal};
} else {
oc[key] = r.ideal;
}
c.advanced.push(oc);
delete r.ideal;
if (!Object.keys(r).length) {
delete c[key];
}
}
});
if (require.length) {
c.require = require;
}
return c;
};
constraints = JSON.parse(JSON.stringify(constraints));
if (browserDetails.version < 38) {
logging('spec: ' + JSON.stringify(constraints));
if (constraints.audio) {
constraints.audio = constraintsToFF37_(constraints.audio);
}
if (constraints.video) {
constraints.video = constraintsToFF37_(constraints.video);
}
logging('ff37: ' + JSON.stringify(constraints));
}
return navigator.mozGetUserMedia(constraints, onSuccess, onError);
};
navigator.getUserMedia = getUserMedia_;
// Returns the result of getUserMedia as a Promise.
var getUserMediaPromise_ = function(constraints) {
return new Promise(function(resolve, reject) {
navigator.getUserMedia(constraints, resolve, reject);
});
};
// Shim for mediaDevices on older versions.
if (!navigator.mediaDevices) {
navigator.mediaDevices = {getUserMedia: getUserMediaPromise_,
addEventListener: function() { },
removeEventListener: function() { }
};
}
navigator.mediaDevices.enumerateDevices =
navigator.mediaDevices.enumerateDevices || function() {
return new Promise(function(resolve) {
var infos = [
{kind: 'audioinput', deviceId: 'default', label: '', groupId: ''},
{kind: 'videoinput', deviceId: 'default', label: '', groupId: ''}
];
resolve(infos);
});
};
if (browserDetails.version < 41) {
// Work around http://bugzil.la/1169665
var orgEnumerateDevices =
navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices);
navigator.mediaDevices.enumerateDevices = function() {
return orgEnumerateDevices().then(undefined, function(e) {
if (e.name === 'NotFoundError') {
return [];
}
throw e;
});
};
}
},
// Attach a media stream to an element.
attachMediaStream: function(element, stream) {
logging('DEPRECATED, attachMediaStream will soon be removed.');
element.srcObject = stream;
},
reattachMediaStream: function(to, from) {
logging('DEPRECATED, reattachMediaStream will soon be removed.');
to.srcObject = from.srcObject;
}
};
// Expose public methods.
module.exports = {
shimOnTrack: firefoxShim.shimOnTrack,
shimSourceObject: firefoxShim.shimSourceObject,
shimPeerConnection: firefoxShim.shimPeerConnection,
shimGetUserMedia: require('./getusermedia'),
attachMediaStream: firefoxShim.attachMediaStream,
reattachMediaStream: firefoxShim.reattachMediaStream
};
},{"../utils":9,"./getusermedia":7}],7:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
/* eslint-env node */
'use strict';
var logging = require('../utils').log;
var browserDetails = require('../utils').browserDetails;
// Expose public methods.
module.exports = function() {
// getUserMedia constraints shim.
var getUserMedia_ = function(constraints, onSuccess, onError) {
var constraintsToFF37_ = function(c) {
if (typeof c !== 'object' || c.require) {
return c;
}
var require = [];
Object.keys(c).forEach(function(key) {
if (key === 'require' || key === 'advanced' || key === 'mediaSource') {
return;
}
var r = c[key] = (typeof c[key] === 'object') ?
c[key] : {ideal: c[key]};
if (r.min !== undefined ||
r.max !== undefined || r.exact !== undefined) {
require.push(key);
}
if (r.exact !== undefined) {
if (typeof r.exact === 'number') {
r.min = r.max = r.exact;
} else {
c[key] = r.exact;
}
delete r.exact;
}
if (r.ideal !== undefined) {
c.advanced = c.advanced || [];
var oc = {};
if (typeof r.ideal === 'number') {
oc[key] = {min: r.ideal, max: r.ideal};
} else {
oc[key] = r.ideal;
}
c.advanced.push(oc);
delete r.ideal;
if (!Object.keys(r).length) {
delete c[key];
}
}
});
if (require.length) {
c.require = require;
}
return c;
};
constraints = JSON.parse(JSON.stringify(constraints));
if (browserDetails.version < 38) {
logging('spec: ' + JSON.stringify(constraints));
if (constraints.audio) {
constraints.audio = constraintsToFF37_(constraints.audio);
}
if (constraints.video) {
constraints.video = constraintsToFF37_(constraints.video);
}
logging('ff37: ' + JSON.stringify(constraints));
}
return navigator.mozGetUserMedia(constraints, onSuccess, onError);
};
navigator.getUserMedia = getUserMedia_;
// Returns the result of getUserMedia as a Promise.
var getUserMediaPromise_ = function(constraints) {
return new Promise(function(resolve, reject) {
navigator.getUserMedia(constraints, resolve, reject);
});
};
// Shim for mediaDevices on older versions.
if (!navigator.mediaDevices) {
navigator.mediaDevices = {getUserMedia: getUserMediaPromise_,
addEventListener: function() { },
removeEventListener: function() { }
};
}
navigator.mediaDevices.enumerateDevices =
navigator.mediaDevices.enumerateDevices || function() {
return new Promise(function(resolve) {
var infos = [
{kind: 'audioinput', deviceId: 'default', label: '', groupId: ''},
{kind: 'videoinput', deviceId: 'default', label: '', groupId: ''}
];
resolve(infos);
});
};
if (browserDetails.version < 41) {
// Work around http://bugzil.la/1169665
var orgEnumerateDevices =
navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices);
navigator.mediaDevices.enumerateDevices = function() {
return orgEnumerateDevices().then(undefined, function(e) {
if (e.name === 'NotFoundError') {
return [];
}
throw e;
});
};
}
};
},{"../utils":9}],8:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
'use strict';
var safariShim = {
// TODO: DrAlex, should be here, double check against LayoutTests
// shimOnTrack: function() { },
// TODO: DrAlex
// attachMediaStream: function(element, stream) { },
// reattachMediaStream: function(to, from) { },
// TODO: once the back-end for the mac port is done, add.
// TODO: check for webkitGTK+
// shimPeerConnection: function() { },
shimGetUserMedia: function() {
navigator.getUserMedia = navigator.webkitGetUserMedia;
}
};
// Expose public methods.
module.exports = {
shimGetUserMedia: safariShim.shimGetUserMedia
// TODO
// shimOnTrack: safariShim.shimOnTrack,
// shimPeerConnection: safariShim.shimPeerConnection,
// attachMediaStream: safariShim.attachMediaStream,
// reattachMediaStream: safariShim.reattachMediaStream
};
},{}],9:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
/* eslint-env node */
'use strict';
var logDisabled_ = false;
// Utility methods.
var utils = {
disableLog: function(bool) {
if (typeof bool !== 'boolean') {
return new Error('Argument type: ' + typeof bool +
'. Please use a boolean.');
}
logDisabled_ = bool;
return (bool) ? 'adapter.js logging disabled' :
'adapter.js logging enabled';
},
log: function() {
if (typeof window === 'object') {
if (logDisabled_) {
return;
}
if (typeof console !== 'undefined' && typeof console.log === 'function') {
console.log.apply(console, arguments);
}
}
},
/**
* Extract browser version out of the provided user agent string.
*
* @param {!string} uastring userAgent string.
* @param {!string} expr Regular expression used as match criteria.
* @param {!number} pos position in the version string to be returned.
* @return {!number} browser version.
*/
extractVersion: function(uastring, expr, pos) {
var match = uastring.match(expr);
return match && match.length >= pos && parseInt(match[pos], 10);
},
/**
* Browser detector.
*
* @return {object} result containing browser, version and minVersion
* properties.
*/
detectBrowser: function() {
// Returned result object.
var result = {};
result.browser = null;
result.version = null;
result.minVersion = null;
// Fail early if it's not a browser
if (typeof window === 'undefined' || !window.navigator) {
result.browser = 'Not a browser.';
return result;
}
// Firefox.
if (navigator.mozGetUserMedia) {
result.browser = 'firefox';
result.version = this.extractVersion(navigator.userAgent,
/Firefox\/([0-9]+)\./, 1);
result.minVersion = 31;
// all webkit-based browsers
} else if (navigator.webkitGetUserMedia) {
// Chrome, Chromium, Webview, Opera, all use the chrome shim for now
if (window.webkitRTCPeerConnection) {
result.browser = 'chrome';
result.version = this.extractVersion(navigator.userAgent,
/Chrom(e|ium)\/([0-9]+)\./, 2);
result.minVersion = 38;
// Safari or unknown webkit-based
// for the time being Safari has support for MediaStreams but not webRTC
} else {
// Safari UA substrings of interest for reference:
// - webkit version: AppleWebKit/602.1.25 (also used in Op,Cr)
// - safari UI version: Version/9.0.3 (unique to Safari)
// - safari UI webkit version: Safari/601.4.4 (also used in Op,Cr)
//
// if the webkit version and safari UI webkit versions are equals,
// ... this is a stable version.
//
// only the internal webkit version is important today to know if
// media streams are supported
//
if (navigator.userAgent.match(/Version\/(\d+).(\d+)/)) {
result.browser = 'safari';
result.version = this.extractVersion(navigator.userAgent,
/AppleWebKit\/([0-9]+)\./, 1);
result.minVersion = 602;
// unknown webkit-based browser
} else {
result.browser = 'Unsupported webkit-based browser ' +
'with GUM support but no WebRTC support.';
return result;
}
}
// Edge.
} else if (navigator.mediaDevices &&
navigator.userAgent.match(/Edge\/(\d+).(\d+)$/)) {
result.browser = 'edge';
result.version = this.extractVersion(navigator.userAgent,
/Edge\/(\d+).(\d+)$/, 2);
result.minVersion = 10547;
// Default fallthrough: not supported.
} else {
result.browser = 'Not a supported browser.';
return result;
}
// Warn if version is less than minVersion.
if (result.version < result.minVersion) {
utils.log('Browser: ' + result.browser + ' Version: ' + result.version +
' < minimum supported version: ' + result.minVersion +
'\n some things might not work!');
}
return result;
}
};
// Export.
module.exports = {
log: utils.log,
disableLog: utils.disableLog,
browserDetails: utils.detectBrowser(),
extractVersion: utils.extractVersion
};
},{}]},{},[1])(1)
});
\ No newline at end of file
function trace(text) {
// This function is used for logging.
if (text[text.length - 1] === '\n') {
text = text.substring(0, text.length - 1);
}
if (window.performance) {
var now = (window.performance.now() / 1000).toFixed(3);
console.log(now + ': ' + text);
} else {
console.log(text);
}
}
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
'use strict';
var vid2 = document.getElementById('vid2');
var btn1 = document.getElementById('btn1');
var btn3 = document.getElementById('btn3');
var input1 = document.getElementById('input1');
btn1.addEventListener('click', start);
btn3.addEventListener('click', stop);
btn1.disabled = false;
btn3.disabled = true;
var pc2 = null;
var xmlhttp = null;
function start() {
btn1.disabled = true;
btn3.disabled = false;
trace('Starting Call');
var servers = null;
var addr = input1.value;
pc2 = new RTCPeerConnection(servers);
trace('Created remote peer connection object pc2');
pc2.onicecandidate = iceCallback;
pc2.onaddstream = gotRemoteStream;
  if (window.XMLHttpRequest) {
    // Code path for IE7+, Firefox, Chrome, Opera, Safari.
    xmlhttp = new XMLHttpRequest();
  } else {
    // Code path for IE6, IE5.
    xmlhttp = new ActiveXObject('Microsoft.XMLHTTP');
  }
  xmlhttp.onreadystatechange = function() {
    if (xmlhttp.readyState === 4 && xmlhttp.status === 200) {
      var res = xmlhttp.responseText;
      gotoffer(res);
    }
  };
  xmlhttp.open('GET', addr, true);
  xmlhttp.send();
}
function onCreateSessionDescriptionError(error) {
trace('Failed to create session description: ' + error.toString());
stop();
}
function onCreateAnswerError(error) {
  trace('Failed to create answer: ' + error.toString());
stop();
}
function onSetLocalDescriptionError(error) {
  trace('Failed to set local description: ' + error.toString());
stop();
}
function onSetLocalDescriptionSuccess() {
trace('localDescription success.');
}
function gotoffer(offer) {
trace('Offer from server \n' + offer);
  // Wrap the offer SDP returned by the server in an RTCSessionDescription.
  var desc = new RTCSessionDescription();
  desc.sdp = offer;
  desc.type = 'offer';
  // This side sends no media, so no extra constraints are needed; answering
  // simply accepts the incoming offer of audio and video.
  pc2.setRemoteDescription(desc)
    .then(function() {
      return pc2.createAnswer();
    })
    .then(
      gotDescription2,
      onCreateSessionDescriptionError
    );
}
function gotDescription2(desc) {
  // To send a provisional answer instead, set a=inactive and change the sdp
  // type to 'pranswer' (left commented out here):
  /*desc.sdp = desc.sdp.replace(/a=recvonly/g, 'a=inactive');
  desc.type = 'pranswer';*/
  pc2.setLocalDescription(desc).then(
    onSetLocalDescriptionSuccess,
    onSetLocalDescriptionError
  );
  trace('Answer from pc2 \n' + desc.sdp);
  //conn.send(JSON.stringify(desc));
  // send desc.sdp to the signaling server (see the sketch after iceCallback).
}
function stop() {
trace('Ending Call' + '\n\n');
pc2.close();
pc2 = null;
}
function gotRemoteStream(e) {
vid2.srcObject = e.stream;
trace('Received remote stream');
}
function iceCallback(event) {
if (event.candidate) {
trace('Remote ICE candidate: \n ' + event.candidate.candidate);
//conn.send(JSON.stringify(event.candidate));
}
else {
// All ICE candidates have been sent
}
}
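// --- Hedged sketch (not part of the original demo) --------------------------
// gotDescription2 and iceCallback above leave the "send to server" step as
// commented-out hints. The sketch below shows one minimal way that step could
// be completed: POSTing the local answer SDP back with the same XHR style the
// demo already uses for fetching the offer. The function name
// sendAnswerToServer and the answerUrl parameter are hypothetical placeholders,
// not an API defined in this commit; adapt them to whatever signaling URL the
// server actually exposes.
function sendAnswerToServer(answerUrl, desc) {
  var xhr = new XMLHttpRequest();
  xhr.onreadystatechange = function() {
    if (xhr.readyState === 4 && xhr.status === 200) {
      trace('Answer delivered to server');
    }
  };
  xhr.open('POST', answerUrl, true);
  xhr.setRequestHeader('Content-Type', 'text/plain');
  // desc is the RTCSessionDescription passed to gotDescription2; only the raw
  // SDP text is sent here, mirroring how the offer arrived as plain text.
  xhr.send(desc.sdp);
}
// Possible usage (assumption, not verified against the server): inside
// gotDescription2, after setLocalDescription succeeds, call
//   sendAnswerToServer(input1.value, desc);
// if the same address used to fetch the offer also accepts the answer.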