Flutter webrtc音频不能在android上工作



在Flutter中,我希望在两个对等体之间进行语音呼叫。我用的是Flutter-WebRTC。我正在做一些测试和视频似乎与webrtc工作,但没有音频。我看到了远程对等方的视频,但没有听到任何一方的音频。

一个是我的android手机,另一个是模拟器

我的 main.dart 代码如下:

import 'dart:convert';
import 'package:flutter/material.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:sdp_transform/sdp_transform.dart';
import 'dart:developer' as developer;
/// Application entry point.
void main() => runApp(MyApp());
/// Root widget: wraps [MyHomePage] in a [MaterialApp] shell.
class MyApp extends StatelessWidget {
  @override
  Widget build(BuildContext context) => MaterialApp(
        title: 'Flutter Demo',
        theme: ThemeData(
          primarySwatch: Colors.blue,
          visualDensity: VisualDensity.adaptivePlatformDensity,
        ),
        home: MyHomePage(title: 'WebRTC lets learn together'),
      );
}
/// Home screen that hosts the video views and the manual-signalling UI.
class MyHomePage extends StatefulWidget {
  MyHomePage({Key key, this.title}) : super(key: key);

  /// Text shown in the app bar.
  final String title;

  @override
  _MyHomePageState createState() => new _MyHomePageState();
}
class _MyHomePageState extends State<MyHomePage> {
  /// True once this peer has created the offer; used in
  /// [_setRemoteDescription] to label the pasted remote SDP as the
  /// counterpart type ('answer' when we offered, 'offer' otherwise).
  bool _offer = false;

  RTCPeerConnection _peerConnection;
  MediaStream _localStream;
  RTCVideoRenderer _localRenderer = new RTCVideoRenderer();
  RTCVideoRenderer _remoteRenderer = new RTCVideoRenderer();

  /// Receives the remote peer's SDP / ICE-candidate JSON, pasted by hand
  /// (this demo uses copy/paste instead of a signalling server).
  final sdpController = TextEditingController();

  @override
  dispose() {
    _localRenderer.dispose();
    _remoteRenderer.dispose();
    sdpController.dispose();
    super.dispose();
  }

  @override
  void initState() {
    initRenderers();
    _createPeerConnection().then((pc) {
      _peerConnection = pc;
    });
    super.initState();
  }

  /// Initializes the texture-backed renderers; must complete before a
  /// stream is assigned to either renderer's srcObject.
  initRenderers() async {
    await _localRenderer.initialize();
    await _remoteRenderer.initialize();
  }

  /// Creates an SDP offer, prints it as JSON (to be copied to the other
  /// peer), and installs it as the local description.
  void _createOffer() async {
    RTCSessionDescription description = await _peerConnection
        .createOffer({'offerToReceiveAudio': 1, 'offerToReceiveVideo': 1});
    var session = parse(description.sdp);
    print(json.encode(session));
    _offer = true; // remember we are the offering side
    _peerConnection.setLocalDescription(description);
  }

  /// Creates an SDP answer to a previously-set remote offer, prints it,
  /// and installs it as the local description.
  void _createAnswer() async {
    RTCSessionDescription description = await _peerConnection
        .createAnswer({'offerToReceiveAudio': 1, 'offerToReceiveVideo': 1});
    var session = parse(description.sdp);
    print(json.encode(session));
    _peerConnection.setLocalDescription(description);
  }

  /// Parses the JSON pasted into [sdpController] back into SDP text and
  /// applies it as the remote description.
  void _setRemoteDescription() async {
    String jsonString = sdpController.text;
    dynamic session = await jsonDecode('$jsonString');
    String sdp = write(session, null);
    // If we made the offer, the pasted SDP must be the answer, and vice
    // versa.
    RTCSessionDescription description =
        new RTCSessionDescription(sdp, _offer ? 'answer' : 'offer');
    print(description.toMap());
    await _peerConnection.setRemoteDescription(description);
  }

  /// Adds one ICE candidate (pasted as JSON) to the peer connection.
  void _addCandidate() async {
    String jsonString = sdpController.text;
    dynamic session = await jsonDecode('$jsonString');
    print(session['candidate']);
    dynamic candidate = new RTCIceCandidate(
        session['candidate'], session['sdpMid'], session['sdpMlineIndex']);
    await _peerConnection.addCandidate(candidate);
  }

  /// Builds the peer connection: captures local media, attaches it, and
  /// wires up ICE-candidate / remote-stream callbacks.
  _createPeerConnection() async {
    Map<String, dynamic> configuration = {
      "iceServers": [
        {"url": "stun:stun.l.google.com:19302"},
      ]
    };
    final Map<String, dynamic> offerSdpConstraints = {
      "mandatory": {
        "OfferToReceiveAudio": true,
        "OfferToReceiveVideo": true,
      },
      "optional": [],
    };
    _localStream = await _getUserMedia();
    RTCPeerConnection pc =
        await createPeerConnection(configuration, offerSdpConstraints);
    // The local stream must be attached BEFORE the offer/answer is
    // created, or its tracks are not negotiated.
    pc.addStream(_localStream);
    pc.onIceCandidate = (e) {
      if (e.candidate != null) {
        // Printed so the user can copy the candidate to the other peer.
        print(json.encode({
          'candidate': e.candidate.toString(),
          'sdpMid': e.sdpMid.toString(),
          'sdpMlineIndex': e.sdpMlineIndex,
        }));
      }
    };
    pc.onIceConnectionState = (e) {
      print(e);
    };
    pc.onAddStream = (stream) {
      print('addStream: ' + stream.id);
      _remoteRenderer.srcObject = stream;
    };
    return pc;
  }

  /// Captures the local camera and microphone and shows the preview.
  ///
  /// Returns the captured [MediaStream].
  _getUserMedia() async {
    final Map<String, dynamic> mediaConstraints = {
      // BUG FIX: this was `false`, so no audio track was ever captured
      // and therefore no audio media line was negotiated in the SDP —
      // which is exactly why neither side heard any sound. It must be
      // `true` for a voice call.
      'audio': true,
      'video': {
        'facingMode': 'user',
      },
    };
    MediaStream stream = await MediaDevices.getUserMedia(mediaConstraints);
    _localRenderer.srcObject = stream;
    return stream;
  }

  /// Side-by-side local (left) and remote (right) video previews.
  SizedBox videoRenderers() => SizedBox(
      height: 210,
      child: Row(children: [
        Flexible(
          child: new Container(
              key: new Key("local"),
              margin: new EdgeInsets.fromLTRB(5.0, 5.0, 5.0, 5.0),
              decoration: new BoxDecoration(color: Colors.black),
              child: new RTCVideoView(_localRenderer)),
        ),
        Flexible(
          child: new Container(
              key: new Key("remote"),
              margin: new EdgeInsets.fromLTRB(5.0, 5.0, 5.0, 5.0),
              decoration: new BoxDecoration(color: Colors.black),
              child: new RTCVideoView(_remoteRenderer)),
        )
      ]));

  /// Buttons that start the offer/answer exchange.
  Row offerAndAnswerButtons() =>
      Row(mainAxisAlignment: MainAxisAlignment.spaceEvenly, children: <Widget>[
        new RaisedButton(
          onPressed: _createOffer,
          child: Text('Offer'),
          color: Colors.amber,
        ),
        RaisedButton(
          onPressed: _createAnswer,
          child: Text('Answer'),
          color: Colors.amber,
        ),
      ]);

  /// Buttons that apply the pasted remote SDP / ICE candidate.
  Row sdpCandidateButtons() =>
      Row(mainAxisAlignment: MainAxisAlignment.spaceEvenly, children: <Widget>[
        RaisedButton(
          onPressed: _setRemoteDescription,
          child: Text('Set Remote Desc'),
          color: Colors.amber,
        ),
        RaisedButton(
          onPressed: _addCandidate,
          child: Text('Add Candidate'),
          color: Colors.amber,
        )
      ]);

  /// Text field where the user pastes the remote peer's JSON blobs.
  Padding sdpCandidatesTF() => Padding(
        padding: const EdgeInsets.all(16.0),
        child: TextField(
          controller: sdpController,
          keyboardType: TextInputType.multiline,
          maxLines: 4,
          maxLength: TextField.noMaxLength,
        ),
      );

  @override
  Widget build(BuildContext context) {
    return Scaffold(
        appBar: AppBar(
          title: Text(widget.title),
        ),
        body: Container(
            child: Column(children: [
          videoRenderers(),
          offerAndAnswerButtons(),
          sdpCandidatesTF(),
          sdpCandidateButtons(),
        ])));
  }
}

在 build.gradle 中,我已将 minSdkVersion 改为 21。

在AndroidManifest.xml中,添加:

<uses-permission android:name="android.permission.INTERNET"/>
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />

我看到了远程对等端的视频,但没有听到任何一侧的音频。我错过什么了吗?

一个月前我遇到了完全相同的问题。我是通过在设置中确保模拟器的麦克风处于启用状态、并使用主机麦克风来解决的。另外需要注意的一点是,音频只有在从模拟器发起呼叫时才能正常工作。

当我在我的真实手机上点击呼叫按钮时,相机打开了,但没有音频。但是当我先点击模拟器上的按钮时,一切都运行良好。

如果你正在使用Android studio,请注意每次启动模拟器时,使用主机音频输入的选项都会被禁用。

如文档所述:

如果你想使用主机的音频输入,可以打开 扩展控制(Extended Controls)> 麦克风,并启用"虚拟麦克风使用主机音频输入(Virtual microphone uses host audio input)"选项。模拟器重新启动后,此选项会被自动禁用。

_getUserMedia() async {
  final Map<String, dynamic> mediaConstraints = {
    // Must be `true`, otherwise no audio track is captured and no audio
    // is negotiated in the SDP.
    'audio': true,
    'video': {
      'facingMode': 'user',
    },
  };
  MediaStream stream = await MediaDevices.getUserMedia(mediaConstraints);
  _localRenderer.srcObject = stream;
  return stream;
}

将 audio 设置为 true。

你应该使用真正的信令通道(例如 Firebase 数据库或 socket)来保存和传递 offer 与 answer,而不是把它们打印到控制台,再通过通知把 ID 发送给接收方。因为打印 offer 时,内容往往会被截断而显示不完整。

最新更新