
Android IOS WebRTC Audio/Video Development Summary (8)

This article mainly covers implementing a WebRTCDemo on iOS and the points to watch out for (when reposting articles in this series, please credit the source: http://www.cnblogs.com/lingyunhu).

Quite a few people have asked: the WebRTC sources ship a WebRTCDemo for Android, so why can't one be found for iOS? Where does it live?

Answer: WebRTCDemo is simply not implemented on iOS. Implementing one is not hard, though: since the Android version already exists, you can port it to iOS by following the same pattern, provided you are reasonably comfortable reading the Android (Java) code. Reference code for iOS is given below:

-(BOOL)initWebrtcObjects
{
    // Please credit the source when reposting: RTC_Blacker http://www.cnblogs.com/lingyunhu

    // Create the VoiceEngine and obtain its sub-interfaces.
    if ((voE = webrtc::VoiceEngine::Create()) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeBase = webrtc::VoEBase::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeCodec = webrtc::VoECodec::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeFile = webrtc::VoEFile::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }
    if ((voeHardware = webrtc::VoEHardware::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeNetwork = webrtc::VoENetwork::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeAudioProccessing = webrtc::VoEAudioProcessing::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((voeRtpRtcp = webrtc::VoERTP_RTCP::GetInterface(voE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    if (voeBase->Init() != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }

    // Create the VideoEngine and obtain its sub-interfaces.
    if ((viE = webrtc::VideoEngine::Create()) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieBase = webrtc::ViEBase::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieCapture = webrtc::ViECapture::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieRender = webrtc::ViERender::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieCodec = webrtc::ViECodec::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieNetwork = webrtc::ViENetwork::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if ((vieRtpRtcp = webrtc::ViERTP_RTCP::GetInterface(viE)) == NULL) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    if (vieBase->Init() != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    [self initAudioCodec];
    [self initVideoCodec];

    captureID = 0;
    videoChannel = -1;

    return TRUE;
}

-(void)initAudioCodec
{
    memset(&voeCodecInst, 0, sizeof(webrtc::CodecInst));

    if (voeCodec != NULL) {
        // Walk the codec list and pick iSAC as the audio send codec.
        for (int index = 0; index < voeCodec->NumOfCodecs(); index++) {
            webrtc::CodecInst ci;
            voeCodec->GetCodec(index, ci);
            if (strncmp(ci.plname, "ISAC", 4) == 0) {
                memcpy(&voeCodecInst, &ci, sizeof(webrtc::CodecInst));
                break;
            }
        }
        //voeCodecInst.channels = 1;
        //voeCodecInst.rate = -1;
    }
}

-(BOOL)start
{
    // VoEBase::CreateChannel() returns the new channel id, or -1 on failure.
    if ((audioChannel = voeBase->CreateChannel()) < 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->CreateChannel(videoChannel) != 0) {
        DebugLog(@"AVErr: %d %s at line %d", vieBase->LastError(), __FUNCTION__, __LINE__);
        return FALSE;
    }
    DebugLog(@"AVInfo: CreateChannel success! %d, %d", videoChannel, audioChannel);

    //vieCodec->SetReceiveCodec(videoChannel, videoCodec);

    // Audio processing: echo control (AECM), AGC and noise suppression.
    if (voeAudioProccessing->SetAecmMode() != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    voeAudioProccessing->SetAgcStatus(TRUE, webrtc::kAgcDefault);
    voeAudioProccessing->SetNsStatus(TRUE, webrtc::kNsHighSuppression);
    _voice_capture_device_index = -1;
    voeHardware->SetRecordingDevice(_voice_capture_device_index);
    voeHardware->SetPlayoutDevice(_voice_playback_device_index);
    if (voeHardware->SetLoudspeakerStatus(true) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
    }
    voeCodec->SetSendCodec(audioChannel, voeCodecInst);

    // Audio transport: local receiver and remote send destination.
    RtpRtcpStreamStruct streamStruct = [self createRtpStreamStruct];
    voeChannelTransport = new webrtc::test::VoiceChannelTransport(voeNetwork, audioChannel);
    voeChannelTransport->SetLocalReceiver2(localARtpPort.rtp, streamStruct);
    voeChannelTransport->SetSendDestination2([remoteIPAddress UTF8String], remoteARtpPort.rtp, remoteARtpPort.rtcp);

    // Video send codec and RTP/RTCP options.
    if (vieCodec->SetSendCodec(videoChannel, videoCodec) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    vieRtpRtcp->SetNACKStatus(videoChannel, TRUE);
    vieRtpRtcp->SetRTCPStatus(videoChannel, webrtc::kRtcpNonCompound_RFC5506);
    vieRtpRtcp->SetKeyFrameRequestMethod(videoChannel, webrtc::kViEKeyFrameRequestPliRtcp);

    // Tie the audio channel to the video channel (lip sync).
    vieBase->SetVoiceEngine(voE);
    if (vieBase->ConnectAudioChannel(videoChannel, audioChannel)) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Pick a capture device if none was chosen yet.
    if (deviceUniqueID == nil) {
        DebugLog(@"AVInfo NumberOfCaptureDevices is %d", vieCapture->NumberOfCaptureDevices());
        int list_count = vieCapture->NumberOfCaptureDevices();
        if (list_count > 0) {
            int list_number = 0;
            if (list_count > 1) {
                list_number = 1; //[[AVShareData instance] isUseFrontCamera]?0:1;
            }
            char device_name[KMaxDeviceNameLength];
            char unique_id[KMaxUniqueIdLength];
            memset(unique_id, 0, KMaxUniqueIdLength);
            vieCapture->GetCaptureDevice(list_number, device_name, KMaxDeviceNameLength, unique_id, KMaxUniqueIdLength);
            deviceUniqueID = [NSString stringWithFormat:@"%s", unique_id];
        }
    }
    DebugLog(@"AVInfo deviceUniqueID is %@", deviceUniqueID);

    if ((vieCapture->AllocateCaptureDevice([deviceUniqueID UTF8String], deviceUniqueID.length, captureID)) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    DebugLog(@"AVInfo captureID is %d", captureID);

    if (vieCapture->ConnectCaptureDevice(captureID, videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Start capturing at CIF resolution with VP8.
    webrtc::CaptureCapability captureCapability;
    captureCapability.width = 352;
    captureCapability.height = 288;
    captureCapability.codecType = webrtc::kVideoCodecVP8;
    captureCapability.maxFPS = DEFAULT_VIDEO_CODEC_MAX_FRAMERATE;
    //vieCapture->SetRotateCapturedFrames(captureID, <#const webrtc::RotateCapturedFrame rotation#>);
    if (vieCapture->StartCapture(captureID, captureCapability) != 0) {
    //if (vieCapture->StartCapture(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Local preview renderer.
    if ((vieRender->AddRenderer(captureID, [self localRenderView], 0, 0.0, 0.0, 1.0, 1.0)) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    /*
    if ((vieRender->AddRenderer(captureID, [self localRenderView2], 0, 0.0, 0.0, 1.0, 1.0)) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    */

    if (vieRender->StartRender(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    // Remote video renderer.
    if (vieRender->AddRenderer(videoChannel, [self remoteRenderView], 1, 0.0f, 0.0f, 1.0f, 1.0f) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->StartRender(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    // Start receiving/sending video, then audio.
    if (vieBase->StartReceive(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->StartSend(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StartReceive(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StartPlayout(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StartSend(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    //webrtc::CodecInst ci;
    //voeFile->StartRecordingMicrophone(@"a.avi", ci, 1000);

    DebugLog(@"AVInfo: %s at line %d success!", __FUNCTION__, __LINE__);
    return TRUE;
}

-(BOOL)stop
{
    // Stop audio first, then video, then release capture/render resources.
    if (voeBase->StopSend(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StopReceive(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (voeBase->StopPlayout(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    if (vieBase->StopSend(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->StopReceive(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieCapture->StopCapture(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieCapture->ReleaseCaptureDevice(captureID) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->StopRender(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieRender->RemoveRenderer(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    if (voeBase->DeleteChannel(audioChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (vieBase->DeleteChannel(videoChannel) != 0) {
        DebugLog(@"AVErr: %s at line %d", __FUNCTION__, __LINE__);
        return FALSE;
    }

    DebugLog(@"AVInfo: %s at line %d success", __FUNCTION__, __LINE__);

    return TRUE;
}

 

Related notes:

1. Audio codecs:

WebRTC supports quite a few audio codecs: iLBC, iSAC, G.711, G.722, Opus and so on, with iSAC as the default. In practice, noise and echo behaviour varies a lot from phone to phone, which seems closely tied to the audio codec and the AEC. Overall, audio quality on iOS is noticeably better (the price point shows, and it also helps a lot that iOS integrates AEC directly in hardware), while the Xiaomi we tested was mediocre (not bashing Xiaomi, just reporting what we saw; the office has a Mi 2S).
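If you want to experiment with a different codec, the send codec is just another entry in VoECodec's table. Below is a minimal sketch, not the demo's actual code, reusing the voeCodec, voeCodecInst and audioChannel members from the listing above; whether payload names such as "ILBC" or "opus" are available depends on what the build includes.

// Sketch: pick the audio send codec by payload name instead of the default iSAC.
// Assumes voeCodec, voeCodecInst and audioChannel from the listing above.
bool SelectAudioSendCodec(const char* plname) {
    for (int i = 0; i < voeCodec->NumOfCodecs(); ++i) {
        webrtc::CodecInst ci;
        voeCodec->GetCodec(i, ci);
        if (strncmp(ci.plname, plname, strlen(plname)) == 0) {
            memcpy(&voeCodecInst, &ci, sizeof(webrtc::CodecInst));
            return voeCodec->SetSendCodec(audioChannel, voeCodecInst) == 0;
        }
    }
    return false;  // codec not compiled into this build
}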

2. Video codecs:

WebRTC encodes video with VP8, which is currently the only video codec it ships with, although people have already added H.264 support to it. H.264 has been in use for a long time and is widely deployed, and some projects have to stay compatible with legacy systems. I won't compare the merits of the two codecs here; there is plenty of material online.
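As a rough sketch of where the videoCodec structure passed to SetSendCodec in the listing above could come from, the VP8 entry can be read back from ViECodec and adjusted before sending. This is an illustration only; the resolution and bitrate numbers are made up, not values from the demo.

// Sketch: fill a webrtc::VideoCodec with the built-in VP8 entry and adjust it.
// Assumes vieCodec and videoChannel from the listing; the numbers are illustrative.
webrtc::VideoCodec videoCodec;
memset(&videoCodec, 0, sizeof(videoCodec));
for (int i = 0; i < vieCodec->NumberOfCodecs(); ++i) {
    vieCodec->GetCodec(i, videoCodec);
    if (videoCodec.codecType == webrtc::kVideoCodecVP8) {
        videoCodec.width = 352;         // match the CIF capture capability
        videoCodec.height = 288;
        videoCodec.startBitrate = 300;  // kbit/s, illustrative
        videoCodec.maxBitrate = 600;    // kbit/s, illustrative
        vieCodec->SetSendCodec(videoChannel, videoCodec);
        break;
    }
}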

3. NACK and FEC:

These are also core pieces of WebRTC: when network conditions are poor, packet loss is handled through NACK (retransmission) and FEC (forward error correction). If you are interested, read the code to see how key frames are handled there; image corruption (garbled/smeared frames) is closely related to this as well.
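For reference, these knobs live on ViERTP_RTCP. The sketch below uses the vieRtpRtcp and videoChannel members from the listing above; the RED/FEC payload type numbers (96/97) are illustrative dynamic payload types, not values prescribed by the demo.

// Sketch: packet-loss protection options on a video channel (old ViERTP_RTCP API).
vieRtpRtcp->SetNACKStatus(videoChannel, true);                      // retransmission only
// vieRtpRtcp->SetFECStatus(videoChannel, true, 96, 97);            // forward error correction only
// vieRtpRtcp->SetHybridNACKFECStatus(videoChannel, true, 96, 97);  // NACK + FEC combined
// Ask the sender for a key frame via RTCP PLI when decoding breaks (reduces smearing):
vieRtpRtcp->SetKeyFrameRequestMethod(videoChannel, webrtc::kViEKeyFrameRequestPliRtcp);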

4. AudioChannel and VideoChannel:

As the code shows, these two channel ids are effectively the thread that ties the individual modules together: transport, encoder, network, rtpRtcp.
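Condensed for illustration, using only calls that already appear in the listing above, every module is addressed through these two ids:

// Sketch: the channel ids are the handles every interface keys off.
int audioChannel = voeBase->CreateChannel();               // audio channel id from VoiceEngine
int videoChannel = -1;
vieBase->CreateChannel(videoChannel);                      // VideoEngine fills in the id
voeCodec->SetSendCodec(audioChannel, voeCodecInst);        // audio encoder
vieCodec->SetSendCodec(videoChannel, videoCodec);          // video encoder
vieRtpRtcp->SetNACKStatus(videoChannel, true);             // rtpRtcp
vieBase->ConnectAudioChannel(videoChannel, audioChannel);  // link audio and video (lip sync)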

 

If you spot mistakes or have questions about any of the above, corrections and additions are welcome. Thanks!

 

 

 
