Flutter-WebRTC
Flutter WebRTC plugin for iOS/Android.
Usage
Add `flutter_webrtc` as a dependency in your `pubspec.yaml` file.
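As a minimal sketch, the `dependencies` section of `pubspec.yaml` might look like this; the `any` constraint is only a placeholder, so pin the latest published flutter_webrtc release in a real project:

dependencies:
  flutter:
    sdk: flutter
  # Placeholder constraint; replace with the latest flutter_webrtc version from pub.
  flutter_webrtc: any

After editing `pubspec.yaml`, run `flutter pub get` (or let your IDE fetch packages) to install the plugin.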
iOS
Add the following entries to your Info.plist file, located at `<project root>/ios/Runner/Info.plist`:
<key>NSCameraUsageDescription</key>
<string>$(PRODUCT_NAME) Camera Usage!</string>
<key>NSMicrophoneUsageDescription</key>
<string>$(PRODUCT_NAME) Microphone Usage!</string>
<key>NSPhotoLibraryUsageDescription</key>
<string>$(PRODUCT_NAME) Photo Library Usage!</string>
These entries allow your app to access the camera, the microphone, and the photo library.
Android
Ensure that the following permissions are present in your Android manifest file, located at `<project root>/android/app/src/main/AndroidManifest.xml`:
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
The Flutter project template adds it, so it may already be present.
Features
We plan to implement the following features:
- [ ] Data Channels
- [ ] Port to Flutter-Desktop-Embedding
- [ ] Screen Capture
- [ ] ORTC API
- [ ] Port to Fuchsia
Example
import 'package:flutter/material.dart';
import 'package:flutter_webrtc/webrtc.dart';
import 'dart:core';

/// getUserMedia sample
class GetUserMediaSample extends StatefulWidget {
  static String tag = 'get_usermedia_sample';

  @override
  _GetUserMediaSampleState createState() => new _GetUserMediaSampleState();
}

class _GetUserMediaSampleState extends State<GetUserMediaSample> {
  MediaStream _localStream;
  final _localRenderer = new RTCVideoRenderer();
  bool _inCalling = false;

  @override
  void initState() {
    super.initState();
    initRenderers();
  }

  @override
  void deactivate() {
    super.deactivate();
    // Release the local stream when the widget leaves the tree mid-call.
    if (_inCalling) {
      _hangUp();
    }
  }

  initRenderers() async {
    await _localRenderer.initialize();
  }

  // Platform messages are asynchronous, so we initialize in an async method.
  _makeCall() async {
    final Map<String, dynamic> mediaConstraints = {
      "audio": true,
      "video": {
        "mandatory": {
          "minWidth": '640', // Provide your own width, height and frame rate here
          "minHeight": '480',
          "minFrameRate": '30',
        },
        "facingMode": "user",
        "optional": [],
      }
    };

    try {
      // Wait for the local media stream before attaching it to the renderer.
      var stream = await navigator.getUserMedia(mediaConstraints);
      _localStream = stream;
      _localRenderer.srcObject = _localStream;
    } catch (e) {
      print(e.toString());
    }
    if (!mounted) return;

    setState(() {
      _inCalling = true;
    });
  }

  // Stop the local stream and detach it from the renderer.
  _hangUp() async {
    try {
      await _localStream.dispose();
      _localRenderer.srcObject = null;
    } catch (e) {
      print(e.toString());
    }
    setState(() {
      _inCalling = false;
    });
  }

  @override
  Widget build(BuildContext context) {
    return new Scaffold(
      appBar: new AppBar(
        title: new Text('GetUserMedia API Test'),
      ),
      body: new OrientationBuilder(
        builder: (context, orientation) {
          return new Center(
            child: new Container(
              margin: new EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0),
              width: MediaQuery.of(context).size.width,
              height: MediaQuery.of(context).size.height,
              child: RTCVideoView(_localRenderer),
              decoration: new BoxDecoration(color: Colors.black54),
            ),
          );
        },
      ),
      floatingActionButton: new FloatingActionButton(
        onPressed: _inCalling ? _hangUp : _makeCall,
        tooltip: _inCalling ? 'Hangup' : 'Call',
        child: new Icon(_inCalling ? Icons.call_end : Icons.phone),
      ),
    );
  }
}
For more examples, please refer to flutter-webrtc-demo.