diff --git a/OAT.xml b/OAT.xml new file mode 100644 index 0000000000000000000000000000000000000000..035d39316699526415d69c03fdcd39ec79b1776c --- /dev/null +++ b/OAT.xml @@ -0,0 +1,105 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/example/lib/main.dart b/example/lib/main.dart index 9b24aef841ec536e4f1bca2174f163199533862e..1583087d0922a7068a2c339bcd2595193871809a 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -8,6 +8,7 @@ import 'package:flutter_webrtc/flutter_webrtc.dart'; import 'package:flutter_webrtc_example/src/capture_frame_sample.dart'; import 'src/device_enumeration_sample.dart'; +import 'src/device_enumeration_sample_remote.dart'; import 'src/get_display_media_sample.dart'; import 'src/get_user_media_sample.dart' if (dart.library.html) 'src/get_user_media_sample_web.dart'; @@ -83,14 +84,6 @@ class _MyAppState extends State { void _initItems() { items = [ - RouteItem( - title: 'GetUserMedia', - push: (BuildContext context) { - Navigator.push( - context, - MaterialPageRoute( - builder: (BuildContext context) => GetUserMediaSample())); - }), RouteItem( title: 'Device Enumeration', push: (BuildContext context) { @@ -101,40 +94,59 @@ class _MyAppState extends State { DeviceEnumerationSample())); }), RouteItem( - title: 'GetDisplayMedia', - push: (BuildContext context) { - Navigator.push( - context, - MaterialPageRoute( - builder: (BuildContext context) => - GetDisplayMediaSample())); - }), - RouteItem( - title: 'LoopBack Sample (Unified Tracks)', + title: 'Device Enumeration remote', push: (BuildContext context) { Navigator.push( context, MaterialPageRoute( builder: (BuildContext context) => - LoopBackSampleUnifiedTracks())); - }), - RouteItem( - title: 'DataChannelLoopBackSample', - push: (BuildContext context) { - Navigator.push( - context, - MaterialPageRoute( - builder: (BuildContext context) => - 
DataChannelLoopBackSample())); - }), - RouteItem( - title: 'Capture Frame', - push: (BuildContext context) { - Navigator.push( - context, - MaterialPageRoute( - builder: (BuildContext context) => CaptureFrameSample())); + DeviceEnumerationSampleRemote())); }), + if (!WebRTC.platformIsOhos) ...[ + RouteItem( + title: 'GetUserMedia', + push: (BuildContext context) { + Navigator.push( + context, + MaterialPageRoute( + builder: (BuildContext context) => GetUserMediaSample())); + }), + RouteItem( + title: 'GetDisplayMedia', + push: (BuildContext context) { + Navigator.push( + context, + MaterialPageRoute( + builder: (BuildContext context) => + GetDisplayMediaSample())); + }), + RouteItem( + title: 'LoopBack Sample (Unified Tracks)', + push: (BuildContext context) { + Navigator.push( + context, + MaterialPageRoute( + builder: (BuildContext context) => + LoopBackSampleUnifiedTracks())); + }), + RouteItem( + title: 'DataChannelLoopBackSample', + push: (BuildContext context) { + Navigator.push( + context, + MaterialPageRoute( + builder: (BuildContext context) => + DataChannelLoopBackSample())); + }), + RouteItem( + title: 'Capture Frame', + push: (BuildContext context) { + Navigator.push( + context, + MaterialPageRoute( + builder: (BuildContext context) => CaptureFrameSample())); + }), + ] ]; } } diff --git a/example/lib/src/device_enumeration_sample.dart b/example/lib/src/device_enumeration_sample.dart index 4630001572cbc5ead80e0433cae5cd9512bfce5f..a9e3708ccf1b04d98a211844e7178fbe04a0f6c4 100644 --- a/example/lib/src/device_enumeration_sample.dart +++ b/example/lib/src/device_enumeration_sample.dart @@ -354,58 +354,58 @@ class _DeviceEnumerationSampleState extends State { _speakerphoneOn ? 
Icons.speaker_phone : Icons.phone_android), tooltip: 'Switch SpeakerPhone', ), - PopupMenuButton( - onSelected: _selectVideoInput, - icon: Icon(Icons.switch_camera), - itemBuilder: (BuildContext context) { - return _devices - .where((device) => device.kind == 'videoinput') - .map((device) { - return PopupMenuItem( - value: device.deviceId, - child: Text(device.label), - ); - }).toList(); - }, - ), - PopupMenuButton( - onSelected: _selectVideoFps, - icon: Icon(Icons.menu), - itemBuilder: (BuildContext context) { - return [ - PopupMenuItem( - value: _selectedVideoFPS, - child: Text('Select FPS ($_selectedVideoFPS)'), - ), - PopupMenuDivider(), - ...['8', '15', '30', '60'] - .map((fps) => PopupMenuItem( - value: fps, - child: Text(fps), - )) - .toList() - ]; - }, - ), - PopupMenuButton( - onSelected: _selectVideoSize, - icon: Icon(Icons.screenshot_monitor), - itemBuilder: (BuildContext context) { - return [ - PopupMenuItem( - value: _selectedVideoSize.toString(), - child: Text('Select Video Size ($_selectedVideoSize)'), - ), - PopupMenuDivider(), - ...['320x180', '640x360', '1280x720', '1920x1080'] - .map((fps) => PopupMenuItem( - value: fps, - child: Text(fps), - )) - .toList() - ]; - }, - ), + // PopupMenuButton( + // onSelected: _selectVideoInput, + // icon: Icon(Icons.switch_camera), + // itemBuilder: (BuildContext context) { + // return _devices + // .where((device) => device.kind == 'videoinput') + // .map((device) { + // return PopupMenuItem( + // value: device.deviceId, + // child: Text(device.label), + // ); + // }).toList(); + // }, + // ), + // PopupMenuButton( + // onSelected: _selectVideoFps, + // icon: Icon(Icons.menu), + // itemBuilder: (BuildContext context) { + // return [ + // PopupMenuItem( + // value: _selectedVideoFPS, + // child: Text('Select FPS ($_selectedVideoFPS)'), + // ), + // PopupMenuDivider(), + // ...['8', '15', '30', '60'] + // .map((fps) => PopupMenuItem( + // value: fps, + // child: Text(fps), + // )) + // .toList() + // ]; + // }, + 
// ), + // PopupMenuButton( + // onSelected: _selectVideoSize, + // icon: Icon(Icons.screenshot_monitor), + // itemBuilder: (BuildContext context) { + // return [ + // PopupMenuItem( + // value: _selectedVideoSize.toString(), + // child: Text('Select Video Size ($_selectedVideoSize)'), + // ), + // PopupMenuDivider(), + // ...['320x180', '640x360', '1280x720', '1920x1080'] + // .map((fps) => PopupMenuItem( + // value: fps, + // child: Text(fps), + // )) + // .toList() + // ]; + // }, + // ), ], ), body: OrientationBuilder( @@ -420,14 +420,20 @@ class _DeviceEnumerationSampleState extends State { child: Container( margin: const EdgeInsets.fromLTRB(0, 0, 0, 0), decoration: BoxDecoration(color: Colors.black54), - child: RTCVideoView(_localRenderer), + child: RTCVideoView(_localRenderer, + onRendererUpdated: (data) { + _localRenderer.surfaceId = data; + }), ), ), Expanded( child: Container( margin: const EdgeInsets.fromLTRB(0, 0, 0, 0), decoration: BoxDecoration(color: Colors.black54), - child: RTCVideoView(_remoteRenderer), + child: RTCVideoView(_remoteRenderer, + onRendererUpdated: (data) { + _remoteRenderer.surfaceId = data; + }), ), ), ], diff --git a/example/lib/src/device_enumeration_sample_remote.dart b/example/lib/src/device_enumeration_sample_remote.dart new file mode 100644 index 0000000000000000000000000000000000000000..98859962d03e9441189c86d94f284c9777e8bf95 --- /dev/null +++ b/example/lib/src/device_enumeration_sample_remote.dart @@ -0,0 +1,589 @@ +import 'dart:convert'; +import 'dart:core'; +import 'dart:math'; +import 'package:collection/collection.dart'; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/material.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:permission_handler/permission_handler.dart'; +import 'package:web_socket_channel/web_socket_channel.dart'; + +class VideoSize { + VideoSize(this.width, this.height); + + factory VideoSize.fromString(String size) { + final parts = size.split('x'); + 
return VideoSize(int.parse(parts[0]), int.parse(parts[1])); + } + + final int width; + final int height; + + @override + String toString() { + return '$width x $height'; + } +} + +/* + * DeviceEnumerationSampleRemote + */ +class DeviceEnumerationSampleRemote extends StatefulWidget { + static String tag = 'DeviceEnumerationSampleRemote'; + + @override + _DeviceEnumerationSampleRemoteState createState() => + _DeviceEnumerationSampleRemoteState(); +} + +class _DeviceEnumerationSampleRemoteState + extends State { + MediaStream? _localStream; + final RTCVideoRenderer _localRenderer = RTCVideoRenderer(); + final RTCVideoRenderer _remoteRenderer = RTCVideoRenderer(); + bool _inCalling = false; + + List _devices = []; + + List get audioInputs => + _devices.where((device) => device.kind == 'audioinput').toList(); + + List get audioOutputs => + _devices.where((device) => device.kind == 'audiooutput').toList(); + + List get videoInputs => + _devices.where((device) => device.kind == 'videoinput').toList(); + + String? _selectedVideoInputId; + String? _selectedAudioInputId; + + MediaDeviceInfo get selectedAudioInput => audioInputs.firstWhere( + (device) => device.deviceId == _selectedVideoInputId, + orElse: () => audioInputs.first); + + String? 
_selectedVideoFPS = '30'; + + String localId = '-1'; + + VideoSize _selectedVideoSize = VideoSize(1280, 720); + final TextEditingController _textController = TextEditingController(); + + late WebSocketChannel _channel; + + @override + void initState() { + super.initState(); + //初始化视频渲染器 + initRenderers(); + //扫描本地外设 + loadDevices(); + navigator.mediaDevices.ondevicechange = (event) { + loadDevices(); + }; + + print('创建信令服务器链接'); + // 初始化信令服务器连接 + _channel = WebSocketChannel.connect( + Uri.parse('wss://youzhi.life:8443'), + ); + + // 监听信令消息 + _channel.stream.listen((message) { + //收到服务器的消息 + print('Connected and received message: $message'); + if (message == 'SESSION_OK') { + _handleOffer(); + } else if (message == 'OFFER_REQUEST') { + _handleOffer(); + } else if (message == 'HELLO') { + _start(); + } else { + Map msg = json.decode(message); + if (msg.containsKey('sdp')) { + receiveSdp(msg['sdp']); + } else if (msg.containsKey('ice')) { + receiveIce(msg['ice']); + } + } + }, onDone: () { + // 连接被关闭 + print('Connection closed'); + }, onError: (error) { + // 连接失败或出错 + print('Connection error: $error'); + }); + } + + void receiveSdp(dynamic sdp) async { + print('receiveSdp in'); + print('receiveSdp in : ' + sdp['type']); + if (sdp['type'] == 'offer') { + await pc?.setRemoteDescription( + RTCSessionDescription(sdp['sdp'], sdp['type'])); + await _handleAnswer(); + } else if (sdp['type'] == 'answer') { + await pc?.setRemoteDescription( + RTCSessionDescription(sdp['sdp'], sdp['type'])); + } + } + + void receiveIce(dynamic ice) async { + print('receiveIce in'); + await pc?.addCandidate( + RTCIceCandidate(ice['candidate'], ice['sdpMid'], ice['sdpMLineIndex'])); + } + + String generateNumericId() { + // 随机生成一个 2 到 4 位的数字 + var min = pow(10, 1 + Random().nextInt(3)).toInt(); + var max = min * 10 - 1; + var numericId = min + Random().nextInt(max - min + 1); + return numericId.toString(); + } + + @override + void deactivate() { + super.deactivate(); + _stop(); + 
_localRenderer.dispose(); + _remoteRenderer.dispose(); + navigator.mediaDevices.ondevicechange = null; + } + + RTCPeerConnection? pc; + var senders = []; + + Future initPCs() async { + // 初始化 WebRTC 配置 + final configuration = { + 'iceServers': [ + { + 'urls': 'stun:stun.l.google.com:19302', + }, + ] + }; + // 创建 PeerConnection + pc ??= await createPeerConnection(configuration); + // pc ??= await createPeerConnection({}); + + pc?.onConnectionState = (state) { + print('connectionState : $state'); + }; + + pc!.onIceCandidate = (candidate) { + print('onIceCandidate $candidate'); + if (candidate != null) { + _channel.sink.add(json.encode({'ice': candidate.toMap()})); + } + }; + + pc!.onTrack = (event) { + print('onTrack $event'); + print('onTrack : ' + event.track.toString()); + print('onTrack : ' + event.streams.length.toString()); + if (event.track.kind == 'video') { + _remoteRenderer.srcObject = event.streams[0]; + } + }; + } + + Future _handleOffer() async { + print('_handleOffer in'); + final offer = await pc!.createOffer(); + await pc!.setLocalDescription(offer); + print('_handleOffer in : ' + json.encode({'sdp': offer.toMap()})); + _channel.sink.add(json.encode({'sdp': offer.toMap()})); + } + + Future _handleAnswer() async { + final answer = await pc!.createAnswer(); + await pc!.setLocalDescription(answer); + var session = await pc!.getLocalDescription(); + print('_handleAnswer in : ' + session!.type! ?? ''); + _channel.sink.add(json.encode({'sdp': session.toMap()})); + } + + Future _handleCandidate(Map data) async { + final candidate = RTCIceCandidate( + data['candidate'], + data['sdpMid'], + data['sdpMLineIndex'], + ); + await pc!.addCandidate(candidate); + } + + Future stopPCs() async { + await pc?.close(); + pc = null; + } + + Future loadDevices() async { + print('loadDevices'); + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + //Ask for runtime permissions if necessary. 
+ var status = await Permission.bluetooth.request(); + if (status.isPermanentlyDenied) { + print('BLEpermdisabled'); + } + + status = await Permission.bluetoothConnect.request(); + if (status.isPermanentlyDenied) { + print('ConnectPermdisabled'); + } + } + print('loadDevices 2'); + final devices = await navigator.mediaDevices.enumerateDevices(); + setState(() { + _devices = devices; + }); + } + + Future _selectVideoFps(String fps) async { + _selectedVideoFPS = fps; + if (!_inCalling) { + return; + } + await _selectVideoInput(_selectedVideoInputId); + setState(() {}); + } + + Future _selectVideoSize(String size) async { + _selectedVideoSize = VideoSize.fromString(size); + if (!_inCalling) { + return; + } + await _selectVideoInput(_selectedVideoInputId); + setState(() {}); + } + + Future _selectAudioInput(String? deviceId) async { + _selectedAudioInputId = deviceId; + if (!_inCalling) { + return; + } + + var newLocalStream = await navigator.mediaDevices.getUserMedia({ + 'audio': { + if (_selectedAudioInputId != null && kIsWeb) + 'deviceId': _selectedAudioInputId, + if (_selectedAudioInputId != null && !kIsWeb) + 'optional': [ + {'sourceId': _selectedAudioInputId} + ], + }, + 'video': false, + }); + + // replace track. + var newTrack = newLocalStream.getAudioTracks().first; + print('track.settings ' + newTrack.getSettings().toString()); + var sender = + senders.firstWhereOrNull((sender) => sender.track?.kind == 'audio'); + await sender?.replaceTrack(newTrack); + } + + Future _selectAudioOutput(String? deviceId) async { + if (!_inCalling) { + return; + } + await _localRenderer.audioOutput(deviceId!); + } + + var _speakerphoneOn = false; + + Future _setSpeakerphoneOn() async { + _speakerphoneOn = !_speakerphoneOn; + await Helper.setSpeakerphoneOn(_speakerphoneOn); + setState(() {}); + } + + Future _selectVideoInput(String? deviceId) async { + print('_selectVideoInput : ' + (deviceId ?? 
'')); + print('_selectVideoInput : ' + _inCalling.toString()); + _selectedVideoInputId = deviceId; + if (!_inCalling) { + return; + } + // 2) replace track. + // stop old track. + _localRenderer.srcObject = null; + + _localStream?.getTracks().forEach((track) async { + print('track.id : ' + + track.id.toString() + + ' , kind : ' + + (track.kind ?? '')); + await track.stop(); + }); + await _localStream?.dispose(); + + var newLocalStream = await navigator.mediaDevices.getUserMedia({ + 'audio': true, + 'video': { + if (_selectedVideoInputId != null && kIsWeb) + 'deviceId': _selectedVideoInputId, + if (_selectedVideoInputId != null && !kIsWeb) + 'optional': [ + {'sourceId': _selectedVideoInputId} + ], + 'width': _selectedVideoSize.width, + 'height': _selectedVideoSize.height, + 'frameRate': _selectedVideoFPS, + }, + }); + print('track._selectVideoInput : ownerTag :' + newLocalStream.ownerTag ?? + '0'); + + _localStream = newLocalStream; + _localRenderer.srcObject = _localStream; + // replace track. 
+ var newTrack = _localStream?.getVideoTracks().first; + print('track.settings ' + newTrack!.id.toString()); + var sender = + senders.firstWhereOrNull((sender) => sender.track?.kind == 'video'); + var params = sender!.parameters; + print('params degradationPreference' + + params.degradationPreference.toString()); + params.degradationPreference = RTCDegradationPreference.MAINTAIN_RESOLUTION; + print('track.setParameters : 111'); + await sender.setParameters(params); + print('track.setParameters : 222'); + await sender.replaceTrack(newTrack); + print('track.setParameters : 333'); + } + + Future initRenderers() async { + await _localRenderer.initialize(); + await _remoteRenderer.initialize(); + } + + Future _sendId() async { + // 向信令服务器发送 HELLO 消息 + localId = generateNumericId(); + print('生成的id为: ' + localId); + _channel.sink.add('HELLO ' + localId); + print('Sent: HELLO ' + localId); + } + + Future _start() async { + try { + _localStream = await navigator.mediaDevices.getUserMedia({ + 'audio': true, + 'video': { + if (_selectedVideoInputId != null && kIsWeb) + 'deviceId': _selectedVideoInputId, + if (_selectedVideoInputId != null && !kIsWeb) + 'optional': [ + {'sourceId': _selectedVideoInputId} + ], + 'width': _selectedVideoSize.width, + 'height': _selectedVideoSize.height, + 'frameRate': _selectedVideoFPS, + 'facingMode': 'user', + }, + }); + _localRenderer.srcObject = _localStream; + _inCalling = true; + + await initPCs(); + + _localStream?.getTracks().forEach((track) async { + var rtpSender = await pc?.addTrack(track, _localStream!); + print('track.settings ' + track.getSettings().toString()); + senders.add(rtpSender!); + }); + + setState(() {}); + } catch (e) { + print(e.toString()); + } + } + + Future _stop() async { + try { + _localStream?.getTracks().forEach((track) async { + await track.stop(); + }); + await _localStream?.dispose(); + _localStream = null; + _localRenderer.srcObject = null; + _remoteRenderer.srcObject = null; + senders.clear(); + _inCalling = 
false; + await stopPCs(); + _speakerphoneOn = false; + await Helper.setSpeakerphoneOn(_speakerphoneOn); + setState(() {}); + } catch (e) { + print(e.toString()); + } + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: Text('DeviceEnumerationSample remote'), + actions: [ + PopupMenuButton( + onSelected: _selectAudioInput, + icon: Icon(Icons.settings_voice), + itemBuilder: (BuildContext context) { + return _devices + .where((device) => device.kind == 'audioinput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + }, + ), + if (!WebRTC.platformIsMobile) + PopupMenuButton( + onSelected: _selectAudioOutput, + icon: Icon(Icons.volume_down_alt), + itemBuilder: (BuildContext context) { + return _devices + .where((device) => device.kind == 'audiooutput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + }, + ), + if (!kIsWeb && WebRTC.platformIsMobile) + IconButton( + disabledColor: Colors.grey, + onPressed: _setSpeakerphoneOn, + icon: Icon(_speakerphoneOn + ? 
Icons.speaker_phone + : Icons.phone_android), + tooltip: 'Switch SpeakerPhone', + ), + // PopupMenuButton( + // onSelected: _selectVideoInput, + // icon: Icon(Icons.switch_camera), + // itemBuilder: (BuildContext context) { + // return _devices + // .where((device) => device.kind == 'videoinput') + // .map((device) { + // return PopupMenuItem( + // value: device.deviceId, + // child: Text(device.label), + // ); + // }).toList(); + // }, + // ), + // PopupMenuButton( + // onSelected: _selectVideoFps, + // icon: Icon(Icons.menu), + // itemBuilder: (BuildContext context) { + // return [ + // PopupMenuItem( + // value: _selectedVideoFPS, + // child: Text('Select FPS ($_selectedVideoFPS)'), + // ), + // PopupMenuDivider(), + // ...['8', '15', '30', '60'] + // .map((fps) => PopupMenuItem( + // value: fps, + // child: Text(fps), + // )) + // .toList() + // ]; + // }, + // ), + // PopupMenuButton( + // onSelected: _selectVideoSize, + // icon: Icon(Icons.screenshot_monitor), + // itemBuilder: (BuildContext context) { + // return [ + // PopupMenuItem( + // value: _selectedVideoSize.toString(), + // child: Text('Select Video Size ($_selectedVideoSize)'), + // ), + // PopupMenuDivider(), + // ...['320x180', '640x360', '1280x720', '1920x1080'] + // .map((fps) => PopupMenuItem( + // value: fps, + // child: Text(fps), + // )) + // .toList() + // ]; + // }, + // ), + ], + ), + body: OrientationBuilder( + builder: (context, orientation) { + return Center( + child: Container( + width: MediaQuery.of(context).size.width, + color: Colors.white10, + child: Row( + children: [ + Expanded( + child: Container( + margin: const EdgeInsets.fromLTRB(0, 0, 0, 0), + decoration: BoxDecoration(color: Colors.black54), + child: RTCVideoView(_localRenderer, + onRendererUpdated: (data) { + _localRenderer.surfaceId = data; + }), + ), + ), + Expanded( + child: Container( + margin: const EdgeInsets.fromLTRB(0, 0, 0, 0), + decoration: BoxDecoration(color: Colors.black54), + child: 
RTCVideoView(_remoteRenderer, + onRendererUpdated: (data) { + _remoteRenderer.surfaceId = data; + }), + ), + ), + ], + )), + ); + }, + ), + floatingActionButton: Row( + mainAxisAlignment: MainAxisAlignment.end, + children: [ + Text( + '本机id: ' + localId, + style: TextStyle(color: Colors.black, fontSize: 14), + ), + SizedBox(width: 8), + Container( + width: 105, + height: 30, + child: TextField( + controller: _textController, + decoration: InputDecoration( + border: OutlineInputBorder(), labelText: '输入对方id'))), + SizedBox(width: 8), + // 按钮 + ElevatedButton( + onPressed: () { + _channel.sink.add('SESSION ' + _textController.text); + }, + style: ElevatedButton.styleFrom(fixedSize: Size(88, 30)), + child: Text('加入房间'), + ), + SizedBox(width: 16), + FloatingActionButton( + onPressed: () { + _inCalling ? _stop() : _sendId(); + }, + tooltip: _inCalling ? 'Hangup' : 'Call', + child: Icon(_inCalling ? Icons.call_end : Icons.phone), + ), + ], + )); + } +} diff --git a/example/lib/src/get_display_media_sample.dart b/example/lib/src/get_display_media_sample.dart index 5fe193d08bc699b909792c66bc6383c287aa431d..d8b1a7a1b317448d7a7e27de8f52ea2b922027e7 100644 --- a/example/lib/src/get_display_media_sample.dart +++ b/example/lib/src/get_display_media_sample.dart @@ -155,14 +155,16 @@ class _GetDisplayMediaSampleState extends State { width: MediaQuery.of(context).size.width, color: Colors.white10, child: Stack(children: [ - if (_inCalling) - Container( - margin: EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0), - width: MediaQuery.of(context).size.width, - height: MediaQuery.of(context).size.height, - decoration: BoxDecoration(color: Colors.black54), - child: RTCVideoView(_localRenderer), - ) + // if (_inCalling) + Container( + margin: EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0), + width: MediaQuery.of(context).size.width, + height: MediaQuery.of(context).size.height, + decoration: BoxDecoration(color: Colors.black54), + child: RTCVideoView(_localRenderer, onRendererUpdated: (data) { + 
_localRenderer.surfaceId = data; + }), + ) ]), )); }, diff --git a/example/lib/src/get_user_media_sample.dart b/example/lib/src/get_user_media_sample.dart index d9c427a0de329373649553867ea130eb8ce65c73..18583842f88ed85d463d7aa0404f50b6660ef581 100644 --- a/example/lib/src/get_user_media_sample.dart +++ b/example/lib/src/get_user_media_sample.dart @@ -243,7 +243,10 @@ class _GetUserMediaSampleState extends State { setZoom(details.scale); } }, - child: RTCVideoView(_localRenderer, mirror: true), + child: RTCVideoView(_localRenderer, mirror: true, + onRendererUpdated: (data) { + _localRenderer.surfaceId = data; + }), ), )); }, diff --git a/example/lib/src/loopback_sample_unified_tracks.dart b/example/lib/src/loopback_sample_unified_tracks.dart index e5da4834ba8bf85d9f0c1b30cae9df4ac74fc426..8d37df7c0829a990069d073871b5197a9fd6290b 100644 --- a/example/lib/src/loopback_sample_unified_tracks.dart +++ b/example/lib/src/loopback_sample_unified_tracks.dart @@ -259,7 +259,7 @@ class _MyAppState extends State { _keySharedProvider ??= await _frameCyrptorFactory.createDefaultKeyProvider(keyProviderOptions); - await _keySharedProvider?.setSharedKey(key: aesKey); + // await _keySharedProvider?.setSharedKey(key: aesKey); acaps = await getRtpSenderCapabilities('audio'); print('sender audio capabilities: ${acaps!.toMap()}'); @@ -745,7 +745,10 @@ class _MyAppState extends State { ], ), Expanded( - child: RTCVideoView(_localRenderer, mirror: true), + child: RTCVideoView(_localRenderer, mirror: true, + onRendererUpdated: (data) { + _localRenderer.surfaceId = data; + }), ), ], )), @@ -778,7 +781,9 @@ class _MyAppState extends State { ], ), Expanded( - child: RTCVideoView(_remoteRenderer), + child: RTCVideoView(_remoteRenderer, onRendererUpdated: (data) { + _remoteRenderer.surfaceId = data; + }), ), ], )), diff --git a/example/ohos/.gitignore b/example/ohos/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..e285c291c1fc98ee161b135a0825e0222ac95438 --- 
/dev/null +++ b/example/ohos/.gitignore @@ -0,0 +1,21 @@ +/node_modules +/oh_modules +/local.properties +/.idea +**/build +/.hvigor +.cxx +/.clangd +/.clang-format +/.clang-tidy +**/.test +*.har +**/BuildProfile.ets +**/oh-package-lock.json5 + +**/src/main/resources/rawfile/flutter_assets/ +**/libs/arm64-v8a/libapp.so +**/libs/arm64-v8a/libflutter.so +**/libs/arm64-v8a/libvmservice_snapshot.so + +/har diff --git a/example/ohos/AppScope/app.json5 b/example/ohos/AppScope/app.json5 new file mode 100644 index 0000000000000000000000000000000000000000..d4905431a1daca1af7ebc24a065044ad82964f7d --- /dev/null +++ b/example/ohos/AppScope/app.json5 @@ -0,0 +1,10 @@ +{ + "app": { + "bundleName": "com.example.flutter_webrtc", + "vendor": "example", + "versionCode": 1000000, + "versionName": "1.0.0", + "icon": "$media:app_icon", + "label": "$string:app_name" + } +} \ No newline at end of file diff --git a/example/ohos/AppScope/resources/base/element/string.json b/example/ohos/AppScope/resources/base/element/string.json new file mode 100644 index 0000000000000000000000000000000000000000..f770e5cd4b83b1ac9ebc1e67e82376185aae293b --- /dev/null +++ b/example/ohos/AppScope/resources/base/element/string.json @@ -0,0 +1,8 @@ +{ + "string": [ + { + "name": "app_name", + "value": "flutter_webrtc" + } + ] +} diff --git a/example/ohos/AppScope/resources/base/media/app_icon.png b/example/ohos/AppScope/resources/base/media/app_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..ce307a8827bd75456441ceb57d530e4c8d45d36c Binary files /dev/null and b/example/ohos/AppScope/resources/base/media/app_icon.png differ diff --git a/example/ohos/build-profile.json5 b/example/ohos/build-profile.json5 new file mode 100644 index 0000000000000000000000000000000000000000..1d12140d202702d7c73d64f1b291fe5c45a660ce --- /dev/null +++ b/example/ohos/build-profile.json5 @@ -0,0 +1,27 @@ +{ + "app": { + "signingConfigs": [], + "products": [ + { + "name": "default", + "signingConfig": 
"default", + "compatibleSdkVersion": "5.0.0(12)", + "runtimeOS": "HarmonyOS" + } + ] + }, + "modules": [ + { + "name": "entry", + "srcPath": "./entry", + "targets": [ + { + "name": "default", + "applyToProducts": [ + "default" + ] + } + ] + } + ] +} \ No newline at end of file diff --git a/example/ohos/entry/.gitignore b/example/ohos/entry/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..2795a1c5b1fe53659dd1b71d90ba0592eaf7e043 --- /dev/null +++ b/example/ohos/entry/.gitignore @@ -0,0 +1,7 @@ + +/node_modules +/oh_modules +/.preview +/build +/.cxx +/.test \ No newline at end of file diff --git a/example/ohos/entry/build-profile.json5 b/example/ohos/entry/build-profile.json5 new file mode 100644 index 0000000000000000000000000000000000000000..f3c17539d7ce1a28bc932c0754a5c9981a21cd73 --- /dev/null +++ b/example/ohos/entry/build-profile.json5 @@ -0,0 +1,16 @@ +{ + "apiType": 'stageMode', + "buildOption": { + "externalNativeOptions": { + "abiFilters": [ + "arm64-v8a" + ] + }, + }, + "targets": [ + { + "name": "default", + "runtimeOS": "HarmonyOS" + } + ] +} \ No newline at end of file diff --git a/example/ohos/entry/hvigorfile.ts b/example/ohos/entry/hvigorfile.ts new file mode 100644 index 0000000000000000000000000000000000000000..80e4ec5b81689f238c34614b167a0b9e9c83e8d9 --- /dev/null +++ b/example/ohos/entry/hvigorfile.ts @@ -0,0 +1,2 @@ +// Script for compiling build behavior. It is built in the build plug-in and cannot be modified currently. 
+export { hapTasks } from '@ohos/hvigor-ohos-plugin'; diff --git a/example/ohos/entry/oh-package.json5 b/example/ohos/entry/oh-package.json5 new file mode 100644 index 0000000000000000000000000000000000000000..f87e260aa04b2675fb0c3f54d784bc9e51f19325 --- /dev/null +++ b/example/ohos/entry/oh-package.json5 @@ -0,0 +1,13 @@ +{ + "name": "entry", + "version": "1.0.0", + "description": "Please describe the basic information.", + "main": "", + "author": "", + "license": "", + "dependencies": { + "flutter_webrtc": "file:../har/flutter_webrtc.har", + "path_provider_ohos": "file:../har/path_provider_ohos.har", + "permission_handler_ohos": "file:../har/permission_handler_ohos.har" + } +} \ No newline at end of file diff --git a/example/ohos/entry/src/main/ets/entryability/EntryAbility.ets b/example/ohos/entry/src/main/ets/entryability/EntryAbility.ets new file mode 100644 index 0000000000000000000000000000000000000000..5287fd39197fe0394732777dceec4b02966cc516 --- /dev/null +++ b/example/ohos/entry/src/main/ets/entryability/EntryAbility.ets @@ -0,0 +1,9 @@ +import { FlutterAbility, FlutterEngine } from '@ohos/flutter_ohos'; +import { GeneratedPluginRegistrant } from '../plugins/GeneratedPluginRegistrant'; + +export default class EntryAbility extends FlutterAbility { + configureFlutterEngine(flutterEngine: FlutterEngine) { + super.configureFlutterEngine(flutterEngine) + GeneratedPluginRegistrant.registerWith(flutterEngine) + } +} diff --git a/example/ohos/entry/src/main/ets/pages/Index.ets b/example/ohos/entry/src/main/ets/pages/Index.ets new file mode 100644 index 0000000000000000000000000000000000000000..ef6205b904b661723d220a64ba31b805b38515b5 --- /dev/null +++ b/example/ohos/entry/src/main/ets/pages/Index.ets @@ -0,0 +1,23 @@ +import common from '@ohos.app.ability.common'; +import { FlutterPage } from '@ohos/flutter_ohos' + +let storage = LocalStorage.getShared() +const EVENT_BACK_PRESS = 'EVENT_BACK_PRESS' + +@Entry(storage) +@Component +struct Index { + private context 
= getContext(this) as common.UIAbilityContext + @LocalStorageLink('viewId') viewId: string = ""; + + build() { + Column() { + FlutterPage({ viewId: this.viewId }) + } + } + + onBackPress(): boolean { + this.context.eventHub.emit(EVENT_BACK_PRESS) + return true + } +} \ No newline at end of file diff --git a/example/ohos/entry/src/main/module.json5 b/example/ohos/entry/src/main/module.json5 new file mode 100644 index 0000000000000000000000000000000000000000..1907287490006721d809d991e51e0adcd0ccabfc --- /dev/null +++ b/example/ohos/entry/src/main/module.json5 @@ -0,0 +1,39 @@ +{ + "module": { + "name": "entry", + "type": "entry", + "description": "$string:module_desc", + "mainElement": "EntryAbility", + "deviceTypes": [ + "phone" + ], + "deliveryWithInstall": true, + "installationFree": false, + "pages": "$profile:main_pages", + "abilities": [ + { + "name": "EntryAbility", + "srcEntry": "./ets/entryability/EntryAbility.ets", + "description": "$string:EntryAbility_desc", + "icon": "$media:icon", + "label": "$string:EntryAbility_label", + "startWindowIcon": "$media:icon", + "startWindowBackground": "$color:start_window_background", + "exported": true, + "skills": [ + { + "entities": [ + "entity.system.home" + ], + "actions": [ + "action.system.home" + ] + } + ] + } + ], + "requestPermissions": [ + {"name" : "ohos.permission.INTERNET"}, + ] + } +} \ No newline at end of file diff --git a/example/ohos/entry/src/main/resources/base/element/color.json b/example/ohos/entry/src/main/resources/base/element/color.json new file mode 100644 index 0000000000000000000000000000000000000000..3c712962da3c2751c2b9ddb53559afcbd2b54a02 --- /dev/null +++ b/example/ohos/entry/src/main/resources/base/element/color.json @@ -0,0 +1,8 @@ +{ + "color": [ + { + "name": "start_window_background", + "value": "#FFFFFF" + } + ] +} \ No newline at end of file diff --git a/example/ohos/entry/src/main/resources/base/element/string.json b/example/ohos/entry/src/main/resources/base/element/string.json 
new file mode 100644 index 0000000000000000000000000000000000000000..77ca1a6ed2f3c96dacdd583a92adda3d127d1274 --- /dev/null +++ b/example/ohos/entry/src/main/resources/base/element/string.json @@ -0,0 +1,16 @@ +{ + "string": [ + { + "name": "module_desc", + "value": "module description" + }, + { + "name": "EntryAbility_desc", + "value": "description" + }, + { + "name": "EntryAbility_label", + "value": "flutter_webrtc" + } + ] +} \ No newline at end of file diff --git a/example/ohos/entry/src/main/resources/base/media/icon.png b/example/ohos/entry/src/main/resources/base/media/icon.png new file mode 100644 index 0000000000000000000000000000000000000000..ce307a8827bd75456441ceb57d530e4c8d45d36c Binary files /dev/null and b/example/ohos/entry/src/main/resources/base/media/icon.png differ diff --git a/example/ohos/entry/src/main/resources/base/profile/main_pages.json b/example/ohos/entry/src/main/resources/base/profile/main_pages.json new file mode 100644 index 0000000000000000000000000000000000000000..1898d94f58d6128ab712be2c68acc7c98e9ab9ce --- /dev/null +++ b/example/ohos/entry/src/main/resources/base/profile/main_pages.json @@ -0,0 +1,5 @@ +{ + "src": [ + "pages/Index" + ] +} diff --git a/example/ohos/entry/src/main/resources/en_US/element/string.json b/example/ohos/entry/src/main/resources/en_US/element/string.json new file mode 100644 index 0000000000000000000000000000000000000000..77ca1a6ed2f3c96dacdd583a92adda3d127d1274 --- /dev/null +++ b/example/ohos/entry/src/main/resources/en_US/element/string.json @@ -0,0 +1,16 @@ +{ + "string": [ + { + "name": "module_desc", + "value": "module description" + }, + { + "name": "EntryAbility_desc", + "value": "description" + }, + { + "name": "EntryAbility_label", + "value": "flutter_webrtc" + } + ] +} \ No newline at end of file diff --git a/example/ohos/entry/src/main/resources/zh_CN/element/string.json b/example/ohos/entry/src/main/resources/zh_CN/element/string.json new file mode 100644 index 
0000000000000000000000000000000000000000..8bed9a82aa5a2318317a066bba218b02df1ad5ee --- /dev/null +++ b/example/ohos/entry/src/main/resources/zh_CN/element/string.json @@ -0,0 +1,16 @@ +{ + "string": [ + { + "name": "module_desc", + "value": "模块描述" + }, + { + "name": "EntryAbility_desc", + "value": "description" + }, + { + "name": "EntryAbility_label", + "value": "flutter_webrtc" + } + ] +} \ No newline at end of file diff --git a/example/ohos/hvigor/hvigor-config.json5 b/example/ohos/hvigor/hvigor-config.json5 new file mode 100644 index 0000000000000000000000000000000000000000..5ed0b910cce4b7e776973a892bf714f89085978e --- /dev/null +++ b/example/ohos/hvigor/hvigor-config.json5 @@ -0,0 +1,5 @@ +{ + "modelVersion": "5.0.0", + "dependencies": { + } +} \ No newline at end of file diff --git a/example/ohos/hvigorfile.ts b/example/ohos/hvigorfile.ts new file mode 100644 index 0000000000000000000000000000000000000000..0a7e3d8d74177458336fa2f524032b873cbe9552 --- /dev/null +++ b/example/ohos/hvigorfile.ts @@ -0,0 +1,6 @@ +import { appTasks } from '@ohos/hvigor-ohos-plugin'; + +export default { + system: appTasks, /* Built-in plugin of Hvigor. It cannot be modified. */ + plugins:[] /* Custom plugin to extend the functionality of Hvigor. 
*/ +} \ No newline at end of file diff --git a/example/ohos/oh-package.json5 b/example/ohos/oh-package.json5 new file mode 100644 index 0000000000000000000000000000000000000000..199326ecf2fa2c760ab0ee07d0a0092674622b67 --- /dev/null +++ b/example/ohos/oh-package.json5 @@ -0,0 +1,22 @@ +{ + "modelVersion": "5.0.0", + "name": "flutter_webrtc", + "version": "1.0.0", + "description": "Please describe the basic information.", + "main": "", + "author": "", + "license": "", + "dependencies": { + "@ohos/flutter_ohos": "file:./har/flutter.har" + }, + "devDependencies": { + "@ohos/hypium": "1.0.6" + }, + "overrides": { + "@ohos/flutter_ohos": "file:./har/flutter.har", + "flutter_webrtc": "file:./har/flutter_webrtc.har", + "path_provider_ohos": "file:./har/path_provider_ohos.har", + "permission_handler_ohos": "file:./har/permission_handler_ohos.har", + "@ohos/flutter_module": "file:./entry" + } +} \ No newline at end of file diff --git a/example/pubspec.yaml b/example/pubspec.yaml index b77402497861699fc1debf9ad566748fb7b0f080..f4219bbea5049e31b6f75508403d2a020e94a398 100644 --- a/example/pubspec.yaml +++ b/example/pubspec.yaml @@ -15,9 +15,16 @@ dependencies: flutter_webrtc: path: ../ # Required for MediaRecorder example - path_provider: ^2.0.2 - permission_handler: ^10.2.0 + path_provider: + git: + url: "https://gitee.com/openharmony-sig/flutter_packages.git" + path: "packages/path_provider/path_provider" + permission_handler: + git: + url: "https://gitee.com/openharmony-sig/flutter_permission_handler.git" + path: "permission_handler" sdp_transform: ^0.3.2 + web_socket_channel: 2.4.0 dev_dependencies: @@ -26,6 +33,10 @@ dev_dependencies: pedantic: ^1.11.0 +dependency_overrides: + meta: ^1.11.0 # 临时解决dart-flutter1.2.0版本与flutter-SDK依赖mate版本冲突 + collection: 1.17.1 + # For information on the generic Dart part of this file, see the # following page: https://www.dartlang.org/tools/pub/pubspec diff --git a/lib/flutter_webrtc.dart b/lib/flutter_webrtc.dart index 
ef26b54a5d67a3f149aeaf404c1ecd589c33e3d0..d0358ea5977e7d160fbd91348ac7fe7fb96122aa 100644 --- a/lib/flutter_webrtc.dart +++ b/lib/flutter_webrtc.dart @@ -17,3 +17,4 @@ export 'src/native/utils.dart' if (dart.library.html) 'src/web/utils.dart'; export 'src/native/adapter_type.dart'; export 'src/native/android/audio_configuration.dart'; export 'src/native/ios/audio_configuration.dart'; +export 'src/native/ohos/audio_configuration.dart'; diff --git a/lib/src/helper.dart b/lib/src/helper.dart index 23d5f5a2f6829e016c8ccd079f96208183abecaa..0aacb527bccbdf4be9bb3a5088e2e47af9b55e5c 100644 --- a/lib/src/helper.dart +++ b/lib/src/helper.dart @@ -69,7 +69,9 @@ class Helper { static Future setZoom( MediaStreamTrack videoTrack, double zoomLevel) async { - if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + if (WebRTC.platformIsAndroid || + WebRTC.platformIsIOS || + WebRTC.platformIsOhos) { await WebRTC.invokeMethod( 'mediaStreamTrackSetZoom', {'trackId': videoTrack.id, 'zoomLevel': zoomLevel}, @@ -162,4 +164,10 @@ class Helper { AppleNativeAudioManagement.setAppleAudioConfiguration( AppleNativeAudioManagement.getAppleAudioConfigurationForMode(mode, preferSpeakerOutput: preferSpeakerOutput)); + + /// Set the audio configuration to for Ohos. 
+ static Future setOhosAudioConfiguration( + OhosAudioConfiguration ohosAudioConfiguration) => + OhosNativeAudioManagement.setOhosAudioConfiguration( + ohosAudioConfiguration); } diff --git a/lib/src/native/ohos/audio_configuration.dart b/lib/src/native/ohos/audio_configuration.dart new file mode 100644 index 0000000000000000000000000000000000000000..5caa5e3f5e734cfc38acc8cb5357fd1dd186363d --- /dev/null +++ b/lib/src/native/ohos/audio_configuration.dart @@ -0,0 +1,86 @@ +import 'package:flutter/foundation.dart'; + +import '../utils.dart'; + +enum OhosStreamUsage { + unknown, + music, + voiceCommunication, + voiceAssistant, + alarm, + voiceMessage, + ringtone, + notification, + accessibility, + movie, + game, + audioBook, + navigation, + videoCommunication, +} + +extension OhosStreamUsageExt on OhosStreamUsage { + String get value => describeEnum(this); +} + +extension OhosStreamUsageEnumEx on String { + OhosStreamUsage toOhosStreamUsage() => OhosStreamUsage.values + .firstWhere((d) => describeEnum(d) == toLowerCase()); +} + +enum OhosSourceType { + invalid, + mic, + voiceRecognition, + voiceCommunication, + voiceMessage, + camcorder +} + +extension OhosSourceTypeExt on OhosSourceType { + String get value => describeEnum(this); +} + +extension OhosSourceTypeEnumEx on String { + OhosSourceType toOhosSourceType() => + OhosSourceType.values.firstWhere((d) => describeEnum(d) == toLowerCase()); +} + +class OhosAudioConfiguration { + OhosAudioConfiguration({ + this.ohosStreamUsage, + this.ohosSourceType, + }); + + final OhosStreamUsage? ohosStreamUsage; + final OhosSourceType? ohosSourceType; + + Map toMap() => { + if (ohosStreamUsage != null) 'ohosStreamUsage': ohosStreamUsage!.value, + if (ohosSourceType != null) 'ohosSourceType': ohosSourceType!.value, + }; + + /// A pre-configured OhosAudioConfiguration for media playback. 
+ static final media = OhosAudioConfiguration( + ohosStreamUsage: OhosStreamUsage.music, + ohosSourceType: OhosSourceType.camcorder, + ); + + /// A pre-configured OhosAudioConfiguration for voice communication. + static final communication = OhosAudioConfiguration( + ohosStreamUsage: OhosStreamUsage.voiceCommunication, + ohosSourceType: OhosSourceType.mic, + ); +} + +class OhosNativeAudioManagement { + static Future setOhosAudioConfiguration( + OhosAudioConfiguration config) async { + if (WebRTC.platformIsOhos) { + await WebRTC.invokeMethod( + 'setOhosAudioConfiguration', + {'configuration': config.toMap()}, + ); + } + } +} diff --git a/lib/src/native/ohos/video_render.dart b/lib/src/native/ohos/video_render.dart new file mode 100644 index 0000000000000000000000000000000000000000..85ede2a54684f45f0e54a63acc5a701378ac351e --- /dev/null +++ b/lib/src/native/ohos/video_render.dart @@ -0,0 +1,74 @@ +import 'dart:async'; + +import 'package:flutter/material.dart'; +import 'package:flutter/services.dart'; + +typedef OnViewCreated = Function(OhosRTCVideoRenderController); + +///自定义OhosView +class OhosRTCVideoRender extends StatefulWidget { + + const OhosRTCVideoRender(this.onViewCreated, {Key? 
key}) : super(key: key); + final OnViewCreated onViewCreated; + + @override + State createState() => _OhosRTCVideoRender(); +} + +class _OhosRTCVideoRender extends State { + late MethodChannel _channel; + + @override + Widget build(BuildContext context) { + return _getPlatformFaceView(); + } + + Widget _getPlatformFaceView() { + return OhosView( + viewType: 'flutter.webrtc.ohos/RTCVideoRender', + onPlatformViewCreated: _onOhosRTCVideoRenderCreated, + creationParams: const {'initParams': 'hello world'}, + creationParamsCodec: const StandardMessageCodec(), + ); + } + + void _onOhosRTCVideoRenderCreated(int id) { + _channel = MethodChannel('flutter.webrtc.ohos/RTCVideoRender$id'); + final controller = OhosRTCVideoRenderController._( + _channel, + ); + widget.onViewCreated(controller); + } +} + +class OhosRTCVideoRenderController { + + OhosRTCVideoRenderController._( + this._channel, + ) { + _channel.setMethodCallHandler( + (call) async { + print('OhosRTCVideoRender method : ${call.method}'); + switch (call.method) { + case 'putSurfaceId': + // 从native端获取数据 + final result = call.arguments; + _controller.sink.add(result); + break; + } + }, + ); + } + final MethodChannel _channel; + final StreamController _controller = StreamController(); + + Stream get customDataStream => _controller.stream; + + // 发送数据给native + Future sendMessageToOhosView(String message) async { + await _channel.invokeMethod( + 'getMessageFromFlutterView', + message, + ); + } +} diff --git a/lib/src/native/rtc_video_renderer_impl.dart b/lib/src/native/rtc_video_renderer_impl.dart index 9607b3902bec40286fa08bd4aad4698fdc4efa57..057189e115337b0a3765e7286c606b31e30956e8 100644 --- a/lib/src/native/rtc_video_renderer_impl.dart +++ b/lib/src/native/rtc_video_renderer_impl.dart @@ -46,6 +46,8 @@ class RTCVideoRenderer extends ValueNotifier @override Function? onFirstFrameRendered; + String? surfaceId ; + @override set srcObject(MediaStream? 
stream) { if (_disposed) { @@ -55,6 +57,7 @@ class RTCVideoRenderer extends ValueNotifier _srcObject = stream; WebRTC.invokeMethod('videoRendererSetSrcObject', { 'textureId': textureId, + 'surfaceId': surfaceId ?? '', 'streamId': stream?.id ?? '', 'ownerTag': stream?.ownerTag ?? '' }).then((_) { @@ -76,6 +79,7 @@ class RTCVideoRenderer extends ValueNotifier try { await WebRTC.invokeMethod('videoRendererSetSrcObject', { 'textureId': _textureId, + 'surfaceId': surfaceId ?? '', 'streamId': stream?.id ?? '', 'ownerTag': stream?.ownerTag ?? '', 'trackId': trackId ?? '0' diff --git a/lib/src/native/rtc_video_view_impl.dart b/lib/src/native/rtc_video_view_impl.dart index c579466c14a3b3da611e1392f0af7a17be880954..841f64f171a7d11cfb43ed6eed95ca22547a875f 100644 --- a/lib/src/native/rtc_video_view_impl.dart +++ b/lib/src/native/rtc_video_view_impl.dart @@ -1,15 +1,15 @@ import 'dart:math'; - import 'package:flutter/material.dart'; - +import 'package:flutter_webrtc/flutter_webrtc.dart'; import 'package:webrtc_interface/webrtc_interface.dart'; - +import 'ohos/video_render.dart'; import 'rtc_video_renderer_impl.dart'; class RTCVideoView extends StatelessWidget { RTCVideoView( this._renderer, { Key? key, + this.onRendererUpdated = null, this.objectFit = RTCVideoViewObjectFit.RTCVideoViewObjectFitContain, this.mirror = false, this.filterQuality = FilterQuality.low, @@ -21,9 +21,35 @@ class RTCVideoView extends StatelessWidget { final bool mirror; final FilterQuality filterQuality; final WidgetBuilder? placeholderBuilder; + OhosRTCVideoRenderController? _controller; + final Function(String)? 
onRendererUpdated; // 外部传入的回调 RTCVideoRenderer get videoRenderer => _renderer; + void updateRenderer(String id) { + if (onRendererUpdated != null) { + onRendererUpdated!(id); // 调用外部回调 + } + } + + void _onOhosRTCVideoRenderCreated(OhosRTCVideoRenderController controller) { + _controller = controller; + _controller?.customDataStream.listen((data) { + //接收到来自OHOS端的数据 + print('FlutterWebRTCPlugin-来自ohos的数据:$data'); + updateRenderer(data['surfaceId']!); + }); + } + + Widget _buildOhosRTCVideoRender() { + return Center( + child: Container( + color: Colors.blueAccent.withAlpha(60), + child: OhosRTCVideoRender(_onOhosRTCVideoRenderCreated), + ), + ); + } + @override Widget build(BuildContext context) { return LayoutBuilder( @@ -55,12 +81,14 @@ class RTCVideoView extends StatelessWidget { child: Transform( transform: Matrix4.identity()..rotateY(mirror ? -pi : 0.0), alignment: FractionalOffset.center, - child: videoRenderer.renderVideo - ? Texture( - textureId: videoRenderer.textureId!, - filterQuality: filterQuality, - ) - : placeholderBuilder?.call(context) ?? Container(), + child: WebRTC.platformIsOhos + ? _buildOhosRTCVideoRender() + : (videoRenderer.renderVideo + ? Texture( + textureId: videoRenderer.textureId!, + filterQuality: filterQuality, + ) + : placeholderBuilder?.call(context) ?? 
Container()), ), ), ), diff --git a/lib/src/native/utils.dart b/lib/src/native/utils.dart index caa518f164fb5a5baa9fec467632fdccf6659cc8..b626e85a178ec6a1b1c880e37d2fd547e327738c 100644 --- a/lib/src/native/utils.dart +++ b/lib/src/native/utils.dart @@ -20,6 +20,8 @@ class WebRTC { static bool get platformIsAndroid => Platform.isAndroid; + static bool get platformIsOhos => Platform.operatingSystem == 'ohos'; + static bool get platformIsWeb => false; static Future invokeMethod(String methodName, diff --git a/lib/src/web/utils.dart b/lib/src/web/utils.dart index 966425d7249f9e41c33830084f1fd0caceaabd26..3e44aa6e7d00643847a6d01890deed68230f8261 100644 --- a/lib/src/web/utils.dart +++ b/lib/src/web/utils.dart @@ -13,6 +13,8 @@ class WebRTC { static bool get platformIsAndroid => false; + static bool get platformIsOhos => false; + static bool get platformIsWeb => true; static Future invokeMethod(String methodName, diff --git a/ohos/.gitignore b/ohos/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..18835ddceb5617477fc548ddb809854a947e435a --- /dev/null +++ b/ohos/.gitignore @@ -0,0 +1,8 @@ +/node_modules +/oh_modules +/.preview +/build +/.cxx +/.test +/oh-package-lock.json5 +/BuildProfile.ets diff --git a/ohos/Index.ets b/ohos/Index.ets new file mode 100644 index 0000000000000000000000000000000000000000..172c1fa49453ec9c5fa7ffc1a154f4f28b750586 --- /dev/null +++ b/ohos/Index.ets @@ -0,0 +1,28 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. 
+* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import { FlutterWebRTCPlugin } from './src/main/ets/FlutterWebRTCPlugin' +export default FlutterWebRTCPlugin + + + diff --git a/ohos/build-profile.json5 b/ohos/build-profile.json5 new file mode 100644 index 0000000000000000000000000000000000000000..2014fa25af9aa40a42ca16a454753f84cbcd8184 --- /dev/null +++ b/ohos/build-profile.json5 @@ -0,0 +1,25 @@ +{ + "apiType": "stageMode", + "buildOption": { + }, + "buildOptionSet": [ + { + "name": "release", + "arkOptions": { + "obfuscation": { + "ruleOptions": { + "enable": true, + "files": [ + "./obfuscation-rules.txt" + ] + } + } + }, + }, + ], + "targets": [ + { + "name": "default" + } + ] +} diff --git a/ohos/hvigorfile.ts b/ohos/hvigorfile.ts new file mode 100644 index 0000000000000000000000000000000000000000..42187071482d292588ad40babeda74f7b8d97a23 --- /dev/null +++ b/ohos/hvigorfile.ts @@ -0,0 +1,6 @@ +import { harTasks } from '@ohos/hvigor-ohos-plugin'; + +export default { + system: harTasks, /* Built-in plugin of Hvigor. It cannot be modified. */ + plugins:[] /* Custom plugin to extend the functionality of Hvigor. */ +} diff --git a/ohos/libs/arm64-v8a/libohos_webrtc.so b/ohos/libs/arm64-v8a/libohos_webrtc.so new file mode 100644 index 0000000000000000000000000000000000000000..8a54ff2e6fdfbb1eb92d83ad2d9f7e6a08da862e Binary files /dev/null and b/ohos/libs/arm64-v8a/libohos_webrtc.so differ diff --git a/ohos/obfuscation-rules.txt b/ohos/obfuscation-rules.txt new file mode 100644 index 0000000000000000000000000000000000000000..fdbb5b9852d7dd5f39bddaeb21ab5ee1f3346749 --- /dev/null +++ b/ohos/obfuscation-rules.txt @@ -0,0 +1,22 @@ +# Define project specific obfuscation rules here. +# You can include the obfuscation configuration files in the current module's build-profile.json5. 
+# +# For more details, see +# https://developer.huawei.com/consumer/cn/doc/harmonyos-guides-V5/source-obfuscation-V5 + +# Obfuscation options: +# -disable-obfuscation: disable all obfuscations +# -enable-property-obfuscation: obfuscate the property names +# -enable-toplevel-obfuscation: obfuscate the names in the global scope +# -compact: remove unnecessary blank spaces and all line feeds +# -remove-log: remove all console.* statements +# -print-namecache: print the name cache that contains the mapping from the old names to new names +# -apply-namecache: reuse the given cache file + +# Keep options: +# -keep-property-name: specifies property names that you want to keep +# -keep-global-name: specifies names that you want to keep in the global scope +-enable-property-obfuscation +-enable-toplevel-obfuscation +-enable-filename-obfuscation +-enable-export-obfuscation \ No newline at end of file diff --git a/ohos/oh-package.json5 b/ohos/oh-package.json5 new file mode 100644 index 0000000000000000000000000000000000000000..78cc628a49bdf505a0dbef8feb7f558a917fe84c --- /dev/null +++ b/ohos/oh-package.json5 @@ -0,0 +1,12 @@ +{ + "name": "flutter_webrtc", + "version": "1.0.0", + "description": "Please describe the basic information.", + "main": "Index.ets", + "author": "", + "license": "Apache-2.0", + "dependencies": { + "libohos_webrtc.so": "file:./src/main/libohos_webrtc", + "@ohos/flutter_ohos": "file:../libs/flutter.har" + } +} \ No newline at end of file diff --git a/ohos/src/main/ets/DataChannelInit.ets b/ohos/src/main/ets/DataChannelInit.ets new file mode 100644 index 0000000000000000000000000000000000000000..b06b7986ee1bcca61264b0e2f2e45e4333f8fc87 --- /dev/null +++ b/ohos/src/main/ets/DataChannelInit.ets @@ -0,0 +1,63 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. 
+* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import { RTCDataChannelInit } from 'libohos_webrtc.so'; + +const TAG = 'FlutterWebRTCPlugin-DataChannelInit' + +export class DataChannelInit implements RTCDataChannelInit { + public ordered: boolean = true; + public maxPacketLifeTime: number = -1; + public maxRetransmits: number = -1; + public protocol: string = ''; + public negotiated: boolean = false; + public id: number = -1; + + constructor() { + } + + getOrdered(): boolean { + return this.ordered; + } + + getMaxPacketLifeTime(): number { + return this.maxPacketLifeTime; + } + + getMaxRetransmits(): number { + return this.maxRetransmits; + } + + getProtocol(): string { + return this.protocol; + } + + getNegotiated(): boolean { + return this.negotiated; + } + + getId(): number { + return this.id; + } +} + diff --git a/ohos/src/main/ets/DataChannelObserver.ets b/ohos/src/main/ets/DataChannelObserver.ets new file mode 100644 index 0000000000000000000000000000000000000000..1768316cffa55bd7dd6047f327f4a2af4c713680 --- /dev/null +++ b/ohos/src/main/ets/DataChannelObserver.ets @@ -0,0 +1,142 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the 'Software'), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. +*/ + +import { Any, BinaryMessenger, Log } from '@ohos/flutter_ohos'; +import EventChannel, { EventSink, StreamHandler } from '@ohos/flutter_ohos/src/main/ets/plugin/common/EventChannel'; +import { RTCDataChannel, Event, MessageEvent, DataChannelState } from 'libohos_webrtc.so'; +import { ArrayList } from '@kit.ArkTS'; +import { ConstraintsMap } from './utils/ConstraintsMap'; +import { DataChannelStateCode } from './utils/ObjectType'; +import { util } from '@kit.ArkTS'; +import { isBinaryBuffer } from './utils/Utils'; + +const TAG = 'FlutterWebRTCPlugin-DataChannelObserver' +const CHANNEL_NAME = 'FlutterWebRTC/dataChannelEvent' + +export class DataChannelObserver implements StreamHandler { + private flutterId: string; + private dataChannel: RTCDataChannel; + private eventChannel: EventChannel; + private eventSink: EventSink | null = null + private eventQueue: ArrayList = new ArrayList(); + + constructor(messenger: BinaryMessenger, peerConnectionId: string, flutterId: string, dataChannel: RTCDataChannel) { + this.flutterId = flutterId; + this.dataChannel = dataChannel; + this.eventChannel = new EventChannel(messenger, `${CHANNEL_NAME}${peerConnectionId}${flutterId}`); + this.eventChannel.setStreamHandler(this); + this.dataChannel.onbufferedamountlow = (data) => this.onBufferedAmountChange(data); + this.dataChannel.onmessage = (data: MessageEvent) => this.onMessage(data); + this.dataChannel.onclosing = (data) => this.onClosing(data); + this.dataChannel.onopen = (data) => this.onOpen(data); + this.dataChannel.onclose = (data) => this.onClose(data); + this.dataChannel.onerror = (data) => this.onError(data); + } + + onClosing(ev: Event) { + this.onStateChange(DataChannelStateCode.CLOSING) + } + + 
onOpen(ev: Event) { + this.onStateChange(DataChannelStateCode.OPEN) + } + + onClose(ev: Event) { + this.onStateChange(DataChannelStateCode.CLOSED) + } + + onError(ev: Event) { + Log.d(TAG, 'onError' + JSON.stringify(ev)); + } + + onStateChange(state: DataChannelState) { + Log.d(TAG, 'onStateChange' + JSON.stringify(state)); + let params = new ConstraintsMap(); + params.putString('event', 'dataChannelStateChanged'); + params.putNumber('id', this.dataChannel?.id!); + params.putString('state', state); + this.sendEvent(params) + } + + onListen(args: Any, events: EventSink): void { + this.eventSink = events; + for (const event of this.eventQueue) { + this.eventSink.success(event); + } + this.eventQueue.clear(); + } + + onCancel(args: Any): void { + this.eventSink = null; + } + + onBufferedAmountChange(ev: Event): void { + Log.d(TAG, 'onBufferedAmountChange' + JSON.stringify(ev.type)); + let params: ConstraintsMap = new ConstraintsMap(); + params.putString('event', 'dataChannelBufferedAmountChange'); + params.putNumber('id', this.dataChannel?.id); + params.putNumber('bufferedAmount', this.dataChannel.bufferedAmount) + params.putNumber('changedAmount', this.dataChannel.bufferedAmount) + this.sendEvent(params); + } + + onMessage(ev: MessageEvent): void { + let params: ConstraintsMap = new ConstraintsMap(); + params.putString('event', 'dataChannelReceiveMessage'); + params.putNumber('id', this.dataChannel?.id); + let bytes: Uint8Array; + if (ev.data instanceof ArrayBuffer) { + bytes = new Uint8Array(ev.data); + } else if (ev.data instanceof Uint8Array) { + bytes = ev.data; + } else { + bytes = new Uint8Array(ev.data.length); + for (let i = 0; i < ev.data.length; i++) { + bytes[i] = ev.data[i]; + } + } + + if (isBinaryBuffer(bytes)) { + params.putString('type', 'binary'); + params.putByte('data', bytes); + } else { + let decoder = new util.TextDecoder(); + let decodeToStringOptions: util.DecodeToStringOptions = { + stream: false + } + let decodedString = 
decoder.decodeToString(bytes, decodeToStringOptions); + params.putString('type', 'text'); + params.putString('data', decodedString); + } + + this.sendEvent(params); + } + + sendEvent(params: ConstraintsMap): void { + if (this.eventSink) { + this.eventSink.success(params.toMap()); + } else { + this.eventQueue.add(params.toMap()); + } + } +} \ No newline at end of file diff --git a/ohos/src/main/ets/DependencyRelated.ets b/ohos/src/main/ets/DependencyRelated.ets new file mode 100644 index 0000000000000000000000000000000000000000..89e9b5745a09f66ed1ee67eb1eed30e7756f1483 --- /dev/null +++ b/ohos/src/main/ets/DependencyRelated.ets @@ -0,0 +1,35 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the 'Software'), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import { VideoRenderHandler } from './components/VideoRenderHandler'; +import PeerConnectionObserver from './PeerConnectionObserver'; + +/** + * @internal 解决文件循环依赖问题 + */ +export type ComponentHandler = VideoRenderHandler; + +/** + * @internal 解决文件循环依赖问题 + */ +export type Observer = PeerConnectionObserver; \ No newline at end of file diff --git a/ohos/src/main/ets/FlutterRTCFrameCryptor.ets b/ohos/src/main/ets/FlutterRTCFrameCryptor.ets new file mode 100644 index 0000000000000000000000000000000000000000..c0b55b01459daa00c78ba11bf0e8e1a082e006e3 --- /dev/null +++ b/ohos/src/main/ets/FlutterRTCFrameCryptor.ets @@ -0,0 +1,161 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the 'Software'), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import { MethodResult, EventChannel, Log, MethodCall } from '@ohos/flutter_ohos'; +import { CryptorMethodHandlers } from './MethodHandlers'; +import { ConstraintsMap } from './utils/ConstraintsMap'; +import { DataMap } from './utils/ObjectType'; +import { util } from '@kit.ArkTS'; +import StateProvider from './StateProvider'; + +const TAG: string = 'FlutterWebRTCPlugin-FlutterRTCFrameCryptor' + +export interface FrameCryptorKeyProvider {} + +export default class FlutterRTCFrameCryptor { + private keyProviders: DataMap = new Map(); + private stateProvider: StateProvider; + + constructor(stateProvider: StateProvider) { + this.stateProvider = stateProvider; + } + + cryptorMethodHandlers: CryptorMethodHandlers = { + 'frameCryptorFactoryCreateFrameCryptor': (call, result) => this.handleCreateFrameCryptor(call, result), + 'frameCryptorSetKeyIndex': (call, result) => this.handleUnknownMethod(call, result), + 'frameCryptorGetKeyIndex': (call, result) => this.handleUnknownMethod(call, result), + 'frameCryptorSetEnabled': (call, result) => this.handleUnknownMethod(call, result), + 'frameCryptorGetEnabled': (call, result) => this.handleUnknownMethod(call, result), + 'frameCryptorDispose': (call, result) => this.handleUnknownMethod(call, result), + 'frameCryptorFactoryCreateKeyProvider': (call, result) => this.handleCreateKeyProvider(call, result), + 'keyProviderSetSharedKey': (call, result) => this.keyProviderSetSharedKey(call, result), + 'keyProviderRatchetSharedKey': (call, result) => this.handleUnknownMethod(call, result), + 'keyProviderExportSharedKey': (call, result) => this.handleUnknownMethod(call, result), + 'keyProviderSetKey': (call, result) => this.handleUnknownMethod(call, result), + 'keyProviderRatchetKey': (call, result) => this.handleUnknownMethod(call, result), + 'keyProviderExportKey': (call, result) => this.handleUnknownMethod(call, result), + 'keyProviderSetSifTrailer': (call, result) => this.handleUnknownMethod(call, result), + 'keyProviderDispose': 
(call, result) => this.handleUnknownMethod(call, result) + }; + + resultError(method: string, error: string, result: MethodResult) { + let errorMsg: string = `${method}(): ${error}`; + Log.e(TAG, errorMsg); + result.error(method, error, null); + } + + onMethodCall(call: MethodCall, result: MethodResult): void { + let method: string = call.method; + try { + let fun = Reflect.get(this.cryptorMethodHandlers, method) as Function; + if (fun) { + Log.d(TAG, `onMethodCall: ${method}`); + fun(call, result); + } else { + Log.e(TAG, `onMethodCall Unknown method: ${method}`); + result.notImplemented(); + } + } catch (err) { + this.resultError(method || 'onMethodCall', `failed: ${err}`, result); + } + } + + handleCreateFrameCryptor(call: MethodCall, result: MethodResult): void { + Log.d(TAG, `handleCreateFrameCryptor in!`); + let keyProviderId: string = call.argument('keyProviderId'); + Log.d(TAG, `handleCreateFrameCryptor keyProviderId:${keyProviderId}`); + let keyProvider: FrameCryptorKeyProvider = this.keyProviders.get(keyProviderId); + if (!keyProvider) { + this.resultError('frameCryptorFactoryCreateFrameCryptorFailed', 'keyProvider not found', result); + return; + } + + let peerConnectionId: string = call.argument('peerConnectionId'); + Log.d(TAG, `handleCreateFrameCryptor peerConnectionId:${peerConnectionId}`); + let pco = this.stateProvider.getPeerConnectionObserver(peerConnectionId); + if (!pco) { + this.resultError('frameCryptorFactoryCreateFrameCryptorFailed', 'peerConnection not found', result); + return; + } + + let participantId: string = call.argument('participantId'); + let type: string = call.argument('type'); + let algorithm: number = call.argument('algorithm'); + let rtpSenderId: string = call.argument('rtpSenderId'); + let rtpReceiverId: string = call.argument('rtpReceiverId'); + + if (type === 'sender') { + let rtpSender = pco.getRtpSenderById(rtpSenderId); + // TODO 没有接口 + // FrameCryptorFactory.createFrameCryptorForRtpSender + 
this.resultError('frameCryptorFactoryCreateFrameCryptorFailed', 'no FrameCryptorFactory', result); + return; + + } else if (type === 'receiver') { + let rtpReceiver = pco.getRtpReceiverById(rtpReceiverId); + // TODO 没有接口 + // FrameCryptorFactory.createFrameCryptorForRtpReceiver + this.resultError('frameCryptorFactoryCreateFrameCryptorFailed', 'no FrameCryptorFactory', result); + return; + } else { + this.resultError('frameCryptorFactoryCreateFrameCryptorFailed', 'type must be sender or receiver', result); + return; + } + } + + handleCreateKeyProvider(call: MethodCall, result: MethodResult): void { + Log.d(TAG, `handleCreateKeyProvider in!`); + + let keyProviderId = util.generateRandomUUID(true); + Log.d(TAG, 'RFC 4122 Version 4 UUID:' + keyProviderId); + let configurationMap: ConstraintsMap = new ConstraintsMap(call.argument('keyProviderOptions')); + Log.d(TAG, `keyProviderOptions map: ${configurationMap.toString()}`); + + let resultMap: DataMap = new Map(); + resultMap.set('keyProviderId', keyProviderId); + result.success(resultMap); + } + + + handleUnknownMethod(call: MethodCall, result: MethodResult): void { + //TODO 帧加密功能暂未支持 + Log.d(TAG, `onMethodCall handleUnknownMethod!`); + this.resultError(TAG, `onMethodCall Unknown method: ${call.method}`, result); + } + + keyProviderSetSharedKey(call: MethodCall, result: MethodResult): void { + let keyProviderId: string = call.argument('keyProviderId') ?? 
''; + Log.d(TAG, `keyProviderSetSharedKey keyProviderId:${keyProviderId}`); + let keyProvider: FrameCryptorKeyProvider = this.keyProviders.get(keyProviderId); + if (!keyProvider) { + this.resultError('keyProviderSetKeySharedFailed', 'keyProvider not found', result); + return; + } + let keyIndex: number = call.argument('keyIndex'); + let key: Uint8Array[] = call.argument('key'); + // TODO keyProvider.setSharedKey + let paramsResult: ConstraintsMap = new ConstraintsMap(); + paramsResult.putBoolean('result', true); + result.success(paramsResult.toMap()); + } +} \ No newline at end of file diff --git a/ohos/src/main/ets/FlutterRTCVideoRenderer.ets b/ohos/src/main/ets/FlutterRTCVideoRenderer.ets new file mode 100644 index 0000000000000000000000000000000000000000..8dbcc4e6a55f7de1d938fe146ff1cb42232e6ef1 --- /dev/null +++ b/ohos/src/main/ets/FlutterRTCVideoRenderer.ets @@ -0,0 +1,183 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the 'Software'), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. +*/ + +import { MethodResult, EventChannel, Log } from '@ohos/flutter_ohos'; +import { EventSink, StreamHandler } from '@ohos/flutter_ohos/src/main/ets/plugin/common/EventChannel'; +import { ConstraintsMap } from './utils/ConstraintsMap'; +import webrtc, { MediaStream, MediaStreamTrack, NativeVideoRenderer, VideoTrack } from 'libohos_webrtc.so'; +import StateProvider from './StateProvider'; + +const TAG: string = 'FlutterWebRTCPlugin-FlutterRTCVideoRenderer'; + +export default class FlutterRTCVideoRenderer implements StreamHandler { + private id: number = -1 + private surfaceId: string = '' + private mediaStream: MediaStream | null = null + private ownerTag: string | null = '' + private videoTrack: VideoTrack | null = null; + eventChannel: EventChannel | null = null; + eventSink: EventSink | null = null; + _width: number = 0; + _height: number = 0; + _rotation: number = -1; + private nativeVideoRenderer: NativeVideoRenderer + + constructor(textureId: number, stateProvider: StateProvider) { + this.nativeVideoRenderer = new NativeVideoRenderer(); + this.nativeVideoRenderer.onFrameResolutionChanged = (data: ESObject) => this.frameResolutionChanged(data) + this.eventSink = null; + this.ownerTag = ''; + } + + frameResolutionChanged(data: ESObject) { + Log.d(TAG, 'onFrameResolutionChanged ' + JSON.stringify(data)); + if (this.eventSink) { + if (this._width != data.width || this._height != data.height) { + let params: ConstraintsMap = new ConstraintsMap(); + params.putString('event', 'didTextureChangeVideoSize') + params.putNumber('id', this.id); + params.putNumber('width', data.width); + params.putNumber('height', data.height); + this._width = data.width + this._height = data.height + 
this.eventSink.success(params.toMap()); + + } + if (data.rotation) { + let params: ConstraintsMap = new ConstraintsMap(); + params.putString('event', 'didTextureChangeRotation'); + params.putNumber('id', this.id); + params.putNumber('rotation', data.rotation); + this._rotation = data.rotation; + this.eventSink.success(params.toMap()); + } + } + } + + setId(id: number) { + this.id = id; + } + + getSurfaceId(): string { + return this.surfaceId + } + + init(id: string) { + this.surfaceId = id + this.nativeVideoRenderer.init(this.surfaceId) + } + + onListen(args: ESObject, events: EventSink): void { + this.eventSink = events; + } + + onCancel(args: ESObject): void { + this.eventSink = null; + } + + public setEventChannel(eventChannel: EventChannel): void { + this.eventChannel = eventChannel; + } + + resultError(method: string, error: string, result: MethodResult) { + let errorMsg: string = method + '(): ' + error; + result.error(method, errorMsg, null); + Log.d(TAG, errorMsg); + } + + public Dispose(): void { + if (this.eventChannel != null) { + this.eventChannel?.setStreamHandler(null); + } + this.eventSink = null; + this.videoTrack?.stop() + this.nativeVideoRenderer.release() + } + + public setStream(mediaStream: MediaStream | null, ownerTag: string | null, trackId?: string): void { + let videoTrack: MediaStreamTrack | null + this.mediaStream = mediaStream; + this.ownerTag = ownerTag; + Log.d(TAG, 'mediaStream id :' + this.mediaStream?.id); + Log.d(TAG, 'mediaStream surfaceId :' + this.surfaceId); + Log.d(TAG, 'mediaStream trackId :' + trackId); + Log.d(TAG, 'mediaStream ownerTag :' + ownerTag); + if (mediaStream == null) { + Log.d(TAG, 'mediaStream mediaStream null :'); + videoTrack = null + } else { + let videoTracks: MediaStreamTrack[] = mediaStream?.getVideoTracks() + videoTrack = videoTracks.length > 0 ? 
videoTracks[0] : null + } + Log.e(TAG, 'setStream setVideoTrack : ' + videoTrack?.id); + this.setVideoTrack(videoTrack); + } + + setVideoTrack(videoTrack: MediaStreamTrack | null) { + let oldValue: MediaStreamTrack | null = this.videoTrack; + Log.d(TAG, 'setStream setVideoTrack oldValue : ' + oldValue?.id); + Log.d(TAG, 'setStream setVideoTrack videoTrack : ' + videoTrack?.id); + if (oldValue?.id== videoTrack?.id) { + Log.w(TAG, 'FlutterRTCVideoRenderer VideoTrack no change'); + return; + } + if (oldValue) { + this.removeRendererFromVideoTrack(); + } + this.videoTrack = videoTrack; + if (!videoTrack) { + Log.e(TAG, 'FlutterRTCVideoRenderer.setVideoTrack, set video track to null'); + return; + } + try { + Log.d(TAG, 'FlutterRTCVideoRenderer.setVideoTrack, set video track to ' + videoTrack.id); + this.tryAddRendererToVideoTrack(); + } catch (e) { + Log.e(TAG, 'tryAddRendererToVideoTrack ' + e); + } + } + + removeRendererFromVideoTrack() { + this.videoTrack?.stop() + } + + tryAddRendererToVideoTrack() { + if (this.videoTrack) { + this.nativeVideoRenderer.setVideoTrack(this.videoTrack) + } + } + + public checkMediaStream(id: string, ownerTag: string): boolean { + if (null == id || null == this.mediaStream || ownerTag == null || ownerTag !== this.ownerTag) { + return false; + } + return id === this.mediaStream.id; + } + + public checkVideoTrack(id: string, ownerTag: string): boolean { + if (null == id || null == this.videoTrack || ownerTag == null || ownerTag !== this.ownerTag) { + return false; + } + return id === this.videoTrack.id; + } +} \ No newline at end of file diff --git a/ohos/src/main/ets/FlutterWebRTCPlugin.ets b/ohos/src/main/ets/FlutterWebRTCPlugin.ets new file mode 100644 index 0000000000000000000000000000000000000000..632adec39f9884fc125b237c33e5affdbff8b218 --- /dev/null +++ b/ohos/src/main/ets/FlutterWebRTCPlugin.ets @@ -0,0 +1,124 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. 
+* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/
+
+import {
+  AbilityAware,
+  AbilityPluginBinding,
+  Any,
+  BinaryMessenger,
+  EventChannel,
+  FlutterPlugin,
+  FlutterPluginBinding,
+  Log,
+  MethodChannel,
+  TextureRegistry
+} from '@ohos/flutter_ohos';
+import { EventSink, StreamHandler } from '@ohos/flutter_ohos/src/main/ets/plugin/common/EventChannel';
+import AudioSwitchManager from './audio/AudioSwitchManager';
+import MethodCallHandlerImpl from './MethodCallHandlerImpl';
+import { VideoRenderFactory } from './components/VideoRenderFactory';
+import { common } from '@kit.AbilityKit';
+import LifeCycleObserver from './LifeCycleObserver';
+import { PermissionsManager } from './utils/PermissionUtils';
+import { isNotEmpty } from './utils/Utils';
+
+const TAG: string = 'FlutterWebRTCPlugin';
+const CHANNEL_NAME: string = 'flutter.webrtc.ohos/RTCVideoRender';
+
+/**
+ * Plugin entry point: registers the video-render platform view, wires the
+ * 'FlutterWebRTC.Method' / 'FlutterWebRTC.Event' channels, and tracks
+ * ability lifecycle via LifeCycleObserver.
+ */
+export class FlutterWebRTCPlugin implements FlutterPlugin, AbilityAware, StreamHandler {
+  private context: common.Context | null = null;
+  private methodChannel: MethodChannel | null = null;
+  private methodCallHandler: MethodCallHandlerImpl | null = null;
+  private observer: LifeCycleObserver = new LifeCycleObserver();
+  private eventChannel: EventChannel | null = null;
+  private eventSink: EventSink | null = null;
+  // Handle returned by ApplicationContext.on('abilityLifecycle'); kept so it can be unregistered.
+  private callbackId?: number;
+
+  constructor() {
+  }
+
+  // Registers the platform-view factory and starts listening on both channels.
+  onAttachedToEngine(binding: FlutterPluginBinding): void {
+    this.context = binding.getApplicationContext();
+    binding.getPlatformViewRegistry()
+      .registerViewFactory(CHANNEL_NAME, new VideoRenderFactory(binding.getBinaryMessenger()));
+    this.startListening(this.context, binding.getBinaryMessenger(), binding.getTextureRegistry());
+  }
+
+  onDetachedFromEngine(binding: FlutterPluginBinding): void {
+    this.stopListening();
+  }
+
+  // Hooks the UIAbility: lifecycle observer registration + permission manager init.
+  onAttachedToAbility(binding: AbilityPluginBinding): void {
+    this.methodCallHandler?.setAbility(binding.getAbility());
+    this.callbackId = this.context?.getApplicationContext().on('abilityLifecycle', this.observer);
+    PermissionsManager.getInstance().init(binding.getAbility().context as common.UIAbilityContext);
+  }
+
+  onDetachedFromAbilityForConfigChanges(): void {
+    this.methodCallHandler?.setAbility(null);
+  }
+
+  // Mirror of onAttachedToAbility: drop the ability and unregister the lifecycle callback.
+  onDetachedFromAbility(): void {
+    this.methodCallHandler?.setAbility(null);
+    if (isNotEmpty(this.callbackId)) {
+      this.context?.getApplicationContext().off('abilityLifecycle', this.callbackId);
+      this.callbackId = undefined;
+    }
+  }
+
+  getUniqueClassName(): string {
+    return TAG;
+  }
+
+  // Creates the method/event channels and routes everything to MethodCallHandlerImpl.
+  private startListening(context: Context, messenger: BinaryMessenger, textureRegistry: TextureRegistry): void {
+    AudioSwitchManager.getInstance().setContext(context);
+    this.methodCallHandler = new MethodCallHandlerImpl(context, messenger, textureRegistry);
+    this.methodChannel = new MethodChannel(messenger, 'FlutterWebRTC.Method');
+    this.methodChannel.setMethodCallHandler(this.methodCallHandler);
+    this.eventChannel = new EventChannel(messenger, 'FlutterWebRTC.Event');
+    this.eventChannel.setStreamHandler(this);
+    this.observer.setMethodCallHandler(this.methodCallHandler);
+  }
+
+  // Tears down handlers in the reverse order of startListening.
+  private stopListening(): void {
+    this.methodCallHandler?.dispose();
+    this.methodCallHandler = null;
+    this.methodChannel?.setMethodCallHandler(null);
+    this.eventChannel?.setStreamHandler(null);
+    this.observer.setMethodCallHandler(null);
+    AudioSwitchManager.instance?.stop();
+  }
+
+  onListen(args: Any, events: EventSink): void {
+    this.eventSink = events;
+  }
+
+  onCancel(args: Any): void {
+    this.eventSink = null;
+  }
+
+  // Pushes an event to Dart if a listener is attached.
+  sendEvent(event: Any): void {
+    this.eventSink?.success(event);
+  }
+}
+
diff --git a/ohos/src/main/ets/GetUserMediaImpl.ets b/ohos/src/main/ets/GetUserMediaImpl.ets
new file mode 100644
index 0000000000000000000000000000000000000000..ff033de97a72fb05a5ca16cc66c39fabe4dbd0c1
--- /dev/null
+++ b/ohos/src/main/ets/GetUserMediaImpl.ets
@@ -0,0 +1,405 @@
+/* MIT License
+*
+* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD.
+* All rights reserved.
+* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import StateProvider from './StateProvider'; +import { Log, MethodResult, SurfaceTextureEntry } from '@ohos/flutter_ohos'; +import { ConstraintsMap } from './utils/ConstraintsMap'; +import { ConstraintsArray } from './utils/ConstraintsArray'; +import webrtc, { + AudioSource, + AudioTrack, + MediaStream, + PeerConnectionFactory, + VideoSource, + VideoTrack +} from 'libohos_webrtc.so'; +import { HashMap } from '@kit.ArkTS'; +import { ObjectType } from './utils/ObjectType'; +import AudioSwitchManager from './audio/AudioSwitchManager'; +import { + beginConfig, + getCameraDevices, + getCameraInput, + getCameraManager, + getCaptureSession, + getPreviewOutput, + getSupportedOutputCapability, + isTorchSupported, + setSessionCameraInput, + setSessionPreviewOutput, + setTorchMode, + setZoom, + startSession, + switchCamera +} from './utils/CameraUtil'; +import { camera } from '@kit.CameraKit'; +import { BusinessError } from '@kit.BasicServicesKit'; +import { common } from '@kit.AbilityKit'; + +const TAG: string = 'FlutterWebRTCPlugin-GetUserMediaImpl'; + +export default class GetUserMediaImpl { + private static DEFAULT_WIDTH: number = 1280; + private static DEFAULT_HEIGHT: number = 720; + private static DEFAULT_FPS: number = 30; + private static PERMISSION_AUDIO: string = ''; + private static PERMISSION_VIDEO: string = ''; + private static PERMISSION_SCREEN: string = ''; + private static CAPTURE_PERMISSION_REQUEST_CODE: number = 1; + private static GRANT_RESULTS: string = 'GRANT_RESULT'; + private static PERMISSIONS: string = 'PERMISSION'; + private static PROJECTION_DATA: string = 'PROJECTION_DATA'; + private static RESULT_RECEIVER: string = 'RESULT_RECEIVER'; + private static REQUEST_CODE: string = 'REQUEST_CODE'; + // private mVideoCapturers: HashMap = new HashMap(); + // private mSurfaceTextureHelpers: HashMap = new HashMap(); + private stateProvider: StateProvider | null = null; + private applicationContext: Context | null = null; + private isFacing: boolean = 
true; + private isTorchOn: boolean = false; + private surfaceTextureEntry: SurfaceTextureEntry | null = null; + + constructor(stateProvider: StateProvider, applicationContext: Context) { + this.stateProvider = stateProvider + this.applicationContext = applicationContext + getCameraManager(getContext() as common.BaseContext); + } + + setSurfaceTextureEntry(surfaceTextureEntry: SurfaceTextureEntry | null) { + this.surfaceTextureEntry = surfaceTextureEntry; + } + + resultError(method: string, error: string, result: MethodResult) { + let errorMsg: string = method + '(): ' + error; + result.error(method, errorMsg, null); + Log.d(TAG, errorMsg); + } + + getUserMedia(constraints: ConstraintsMap, result: MethodResult, mediaStream: MediaStream) { + Log.d(TAG, 'getUserMedia in!'); + + /* + * 这里的作用是获取media的 streamId audioTracks videoTracks + * + * */ + let trackParams: ConstraintsMap[] = [new ConstraintsMap(), new ConstraintsMap()]; + + trackParams[0] = this.getUserAudio(constraints, mediaStream) + trackParams[1] = this.getUserVideo(constraints, mediaStream) + + let audioTracks: ConstraintsArray = new ConstraintsArray(); + let videoTracks: ConstraintsArray = new ConstraintsArray(); + let successResult: ConstraintsMap = new ConstraintsMap(); + + for (let i = 0; i < trackParams.length; i++) { + if (trackParams[i].toMap() == null) { + continue; + } + if (trackParams[i].getString('kind') === 'audio') { + audioTracks.pushMap(trackParams[i].toMap()); + } else { + videoTracks.pushMap(trackParams[i].toMap()); + } + } + + let streamId: string = mediaStream.id; + Log.d(TAG, 'MediaStream id: ' + streamId); + this.stateProvider?.putLocalStream(streamId, mediaStream); + + successResult.putString('streamId', streamId); + successResult.putArray('audioTracks', audioTracks.toArrayList()); + successResult.putArray('videoTracks', videoTracks.toArrayList()); + result.success(successResult.toMap()); + } + + getDisplayMedia(constraints: ConstraintsMap, result: MethodResult, mediaStream: 
MediaStream) { + Log.d(TAG, 'getDisplayMedia in!'); + } + + getUserAudio(constraints: ConstraintsMap, stream: MediaStream): ConstraintsMap { + Log.d(TAG, 'getUserAudio in!'); + Log.d(TAG, 'getUserAudio in!' + JSON.stringify(constraints)); + AudioSwitchManager.instance.start() + let deviceId: string | null = null + Log.d(TAG, 'getUserAudio audio getType ' + constraints.getType('audio')); + if (constraints.getType('audio') == ObjectType.Null) { + + } else { + deviceId = this.getSourceIdConstraint(constraints.getMap('audio')); + } + + let trackId: string = this.stateProvider!.getNextTrackUUID(); + let pcFactory: PeerConnectionFactory = this.stateProvider!.getPeerConnectionFactory(); + let audioSource: AudioSource = pcFactory.createAudioSource(); + + if (deviceId != null) { + try { + } catch (e) { + Log.d(TAG, 'setPreferredInputDevice error ' + JSON.stringify(e)); + } + } + + let track: AudioTrack = pcFactory.createAudioTrack('audio', audioSource); + stream.addTrack(track); + + this.stateProvider!.putLocalTrack(track.id, track); + + let trackParams: ConstraintsMap = new ConstraintsMap(); + trackParams.putBoolean('enabled', track.enabled); + trackParams.putString('id', track.id); + trackParams.putString('kind', 'audio'); + trackParams.putString('label', track.id); + trackParams.putString('readyState', track.readyState.toString()); + trackParams.putBoolean('remote', false); + + if (deviceId == null) { + deviceId = '' + -1; + } + + let settings: ConstraintsMap = new ConstraintsMap(); + settings.putString('deviceId', deviceId); + settings.putString('kind', 'audioinput'); + settings.putBoolean('autoGainControl', true); + settings.putBoolean('echoCancellation', true); + settings.putBoolean('noiseSuppression', true); + settings.putNumber('channelCount', 1); + settings.putNumber('latency', 0); + trackParams.putMap('settings', settings.toMap()); + + Log.d(TAG, 'getUserAudio out!'); + return trackParams; + } + + getUserVideo(constraints: ConstraintsMap, stream: MediaStream): 
ConstraintsMap { + Log.d(TAG, 'getUserVideo in!'); + + let videoConstraintsMap: ConstraintsMap = new ConstraintsMap(); + let videoConstraintsMandatory: ConstraintsMap = new ConstraintsMap(); + if (constraints.getType('video') == ObjectType.Map) { + videoConstraintsMap = constraints.getMap('video')!; + if (videoConstraintsMap.getType('mandatory') == ObjectType.Map) { + videoConstraintsMandatory = videoConstraintsMap.getMap('mandatory')!; + } + } + // Log.d(TAG, 'getUserVideo videoConstraintsMandatory:转换为数组并打印 = ' + Array.from(videoConstraintsMandatory.toMap().entries())); + + let facingMode: string = this.getFacingMode(videoConstraintsMap); + this.isFacing = facingMode === '' || !(facingMode === 'environment'); + Log.d(TAG, 'getUserVideo facingMode=' + facingMode + ' isFacing=' + this.isFacing); + + let deviceId: string = ''; + Log.d(TAG, 'getUserVideo deviceId=' + deviceId); + + let videoWidth: number | null = this.getConstrainNumber(videoConstraintsMap, 'width'); + videoWidth = videoWidth != null ? + videoWidth : + videoConstraintsMandatory != null ? + videoConstraintsMandatory.getNumber('minWidth') : + GetUserMediaImpl.DEFAULT_WIDTH; + + let videoHeight = this.getConstrainNumber(videoConstraintsMap, 'height'); + videoHeight = videoHeight != null ? + videoHeight : + videoConstraintsMandatory != null ? + videoConstraintsMandatory.getNumber('minHeight') : + GetUserMediaImpl.DEFAULT_HEIGHT; + + let videoFrameRate = this.getConstrainNumber(videoConstraintsMap, 'frameRate'); + videoFrameRate = videoFrameRate != null ? + videoFrameRate : + videoConstraintsMandatory != null ? 
+ videoConstraintsMandatory.getNumber('minFrameRate') : + GetUserMediaImpl.DEFAULT_HEIGHT; + + let trackId: string = this.stateProvider!.getNextTrackUUID(); + let pcFactory: PeerConnectionFactory = this.stateProvider!.getPeerConnectionFactory(); + let videoSource: VideoSource = pcFactory.createVideoSource(); + + let track: VideoTrack = pcFactory.createVideoTrack('video', videoSource); + stream.addTrack(track); + + this.stateProvider!.putLocalTrack(track.id, track); + let trackParams: ConstraintsMap = new ConstraintsMap(); + trackParams.putBoolean('enabled', track.enabled); + trackParams.putString('id', track.id); + trackParams.putString('kind', 'video'); + trackParams.putString('label', track.id); + trackParams.putString('readyState', track.readyState.toString()); + trackParams.putBoolean('remote', false); + + if (deviceId == null) { + deviceId = '' + -1; + } + + let settings: ConstraintsMap = new ConstraintsMap(); + settings.putString('deviceId', deviceId); + settings.putString('kind', 'videoinput'); + settings.putNumber('width', videoWidth!); + settings.putNumber('height', videoHeight!); + settings.putNumber('frameRate', videoFrameRate!); + if (facingMode != null) { + settings.putString('facingMode', facingMode); + } + trackParams.putMap('settings', settings.toMap()); + + Log.d(TAG, 'getUserVideo trackParams:转换为数组并打印 = ' + Array.from(trackParams.toMap().entries())); + Log.d(TAG, 'getUserVideo trackParams settings:转换为数组并打印 = ' + Array.from(settings.toMap().entries())); + Log.d(TAG, 'getUserVideo out!'); + return trackParams; + } + + /** + * Retrieves 'facingMode' constraint value. + * + * @param mediaConstraints a ConstraintsMap which represents 'GUM' constraints argument. + * @return String value of 'facingMode' constraints in 'GUM' or null if not specified. + */ + private getFacingMode(mediaConstraints: ConstraintsMap): string { + return mediaConstraints == null ? 
'' : mediaConstraints.getString('facingMode') as string; + } + + /** + * Retrieves 'sourceId' constraint value. + * + * @param mediaConstraints a ConstraintsMap which represents 'GUM' constraints argument + * @return String value of 'sourceId' optional 'GUM' constraint or null if not specified. + */ + private getSourceIdConstraint(mediaConstraints: ConstraintsMap | null): string { + if (mediaConstraints && mediaConstraints.hasKey('deviceId')) { + return mediaConstraints.getString('deviceId'); + } + + if (mediaConstraints && mediaConstraints.getType('optional') == ObjectType.Array) { + let optional: ConstraintsArray = mediaConstraints.getArray('optional')!; + for (let i = 0; i < optional.size(); i++) { + if (optional.getType(i) == ObjectType.Map) { + let option = new ConstraintsMap(optional.getMap(i)); + if (option.getType('sourceId') == ObjectType.String) { + return option.getString('sourceId'); + } + } + } + } + + return ''; + } + + /** + * @return Returns the integer at the key, or the `ideal` property if it is a map. 
+ */ + private getConstrainNumber(constraintsMap: ConstraintsMap, key: string): number | null { + if (constraintsMap == null) { + return null; + } + + if (constraintsMap.getType(key) == ObjectType.Number) { + try { + return constraintsMap.getNumber(key); + } catch (e) { + // Could be a double instead + return Math.round(constraintsMap.getNumber(key)); + } + } + + if (constraintsMap.getType(key) == ObjectType.String) { + try { + return parseInt(constraintsMap.getString(key) as string); + } catch (e) { + // Could be a double instead + return Math.round(constraintsMap.getNumber(key)); + } + } + + if (constraintsMap.getType(key) == ObjectType.Map) { + let innerMap = constraintsMap.getMap(key)!; + if (innerMap.getType('ideal') == ObjectType.Number) { + return innerMap.getNumber('ideal'); + } + } + return null; + } + + + /** + * check torchMode is available + * */ + hasTorch(trackId: string, result: MethodResult): void { + /* + * 修改逻辑,通过trackID获取当前track + * 通过track获取当前相机是哪个, + * */ + Log.d(TAG, `hasTorch in`); + + let hasTorch = isTorchSupported(); + result.success(hasTorch ? hasTorch : false); + } + + setTorch(torch: boolean, result: MethodResult): void { + Log.d(TAG, `setTorch in`); + try { + setTorchMode(torch ? camera.FlashMode.FLASH_MODE_ALWAYS_OPEN : camera.FlashMode.FLASH_MODE_CLOSE); + result.success(null); + this.isTorchOn = torch; + } catch (error) { + // 失败返回错误码error.code并处理 + let err = error as BusinessError; + Log.e(TAG, `The setTorchMode call failed. error code: ${err.code}`); + this.resultError('setTorch', `The setTorchMode call failed. 
error code: ${err.code}`, result); + } + } + + private cameraPosition: boolean = false + + switchCamera(trackId: string, result: MethodResult): void { + Log.d(TAG, `switchCamera in`); + try { + this.cameraPosition = !this.cameraPosition; + switchCamera(Number(this.cameraPosition), this.surfaceTextureEntry!.getSurfaceId().toString(), 0); + result.success(true); + } catch (error) { + // 失败返回错误码error.code并处理 + let err = error as BusinessError; + Log.e(TAG, `The setTorchMode call failed. error code: ${err.code}`); + this.resultError('switchCamera', `The switchCamera call failed. error code: ${err.code}`, result); + } + } + + setZoom(trackId: string, zoomLevel: number, result: MethodResult): void { + Log.d(TAG, `setZoom in`); + try { + setZoom(zoomLevel); + result.success(null); + } catch (error) { + // 失败返回错误码error.code并处理 + let err = error as BusinessError; + Log.e(TAG, `The setTorchMode call failed. error code: ${err.code}`); + this.resultError('switchCamera', `The switchCamera call failed. error code: ${err.code}`, result); + } + } + + removeVideoCapturer(id: string): void { + + } +} + + diff --git a/ohos/src/main/ets/LifeCycleObserver.ets b/ohos/src/main/ets/LifeCycleObserver.ets new file mode 100644 index 0000000000000000000000000000000000000000..cae87159b1961aae7d5b4af0658d1ff75cd1ff49 --- /dev/null +++ b/ohos/src/main/ets/LifeCycleObserver.ets @@ -0,0 +1,60 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. 
+* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/
+
+import { AbilityLifecycleCallback, UIAbility } from '@kit.AbilityKit';
+import { Log } from '@ohos/flutter_ohos';
+import MethodCallHandlerImpl from './MethodCallHandlerImpl';
+
+const TAG: string = 'FlutterWebRTCPlugin-LifeCycleObserver'
+
+/**
+ * Ability lifecycle observer registered via ApplicationContext.on('abilityLifecycle').
+ * Currently the callbacks only log; the stored MethodCallHandlerImpl is kept
+ * for future lifecycle-driven behavior (e.g. pausing capture in background).
+ */
+export default class LifeCycleObserver extends AbilityLifecycleCallback {
+  private methodCallHandler: MethodCallHandlerImpl | null = null;
+
+  constructor() {
+    super();
+  }
+
+  // Set by FlutterWebRTCPlugin when the channel handler is (un)installed.
+  setMethodCallHandler(methodCallHandler: MethodCallHandlerImpl | null) {
+    this.methodCallHandler = methodCallHandler;
+  }
+
+  onAbilityCreate(ability: UIAbility) {
+    Log.d(TAG, 'onAbilityCreate')
+  }
+
+  onAbilityWillCreate?(ability: UIAbility) {
+    Log.d(TAG, 'onAbilityWillCreate')
+  }
+
+  onAbilityDestroy(ability: UIAbility) {
+    Log.d(TAG, 'onAbilityDestroy')
+  }
+
+  onAbilityWillDestroy?(ability: UIAbility) {
+    Log.d(TAG, 'onAbilityWillDestroy')
+  }
+
+  onAbilityBackground(ability: UIAbility) {
+    Log.d(TAG, 'onAbilityBackground')
+  }
+}
\ No newline at end of file
diff --git a/ohos/src/main/ets/MethodCallHandlerImpl.ets b/ohos/src/main/ets/MethodCallHandlerImpl.ets
new file mode 100644
index 0000000000000000000000000000000000000000..57c7bcbe785d7adb36a944a656bbc0da60ae4228
--- /dev/null
+++ b/ohos/src/main/ets/MethodCallHandlerImpl.ets
@@ -0,0 +1,1662 @@
+/* MIT License
+*
+* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD.
+* All rights reserved.
+* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import { + BinaryMessenger, + SurfaceTextureEntry, + Log, + MethodCall, + MethodCallHandler, + MethodResult, + TextureRegistry, + EventChannel, + Any +} from '@ohos/flutter_ohos'; +import StateProvider from './StateProvider'; +import { common, UIAbility } from '@kit.AbilityKit'; +import { ArrayList, HashMap, JSON, PlainArray } from '@kit.ArkTS'; +import util from '@ohos.util' +import PeerConnectionObserver from './PeerConnectionObserver'; +import FlutterRTCVideoRenderer from './FlutterRTCVideoRenderer'; +import { ConstraintsMap } from './utils/ConstraintsMap'; +import { DataArray, DataMap, ObjectType } from './utils/ObjectType'; +import { ConstraintsArray } from './utils/ConstraintsArray'; +import fs from '@ohos.file.fs'; +import { camera } from '@kit.CameraKit'; +import { audio } from '@kit.AudioKit'; +import buffer from '@ohos.buffer'; +import GetUserMediaImpl from './GetUserMediaImpl'; +import { MethodHandlers } from './MethodHandlers'; +import AudioSwitchManager from './audio/AudioSwitchManager'; +import FlutterRTCFrameCryptor from './FlutterRTCFrameCryptor'; +import { + getOptionsAudioConfiguration, + getOptionsForceSWCodec, + getOptionsForceSWCodecList, + getOptionsNetworkIgnoreMask +} from './utils/OptionsUtils'; +import { capabilitiesToMap } from './utils/PCOUtils'; +import { PermissionsManager } from './utils/PermissionUtils'; +import { isEmpty, isNotEmpty } from './utils/Utils'; +import { + MediaStreamTrack, + RTCPeerConnection, + RTCRtpSender, + PeerConnectionFactory, + AudioDeviceModule, + AudioDeviceModuleOptions, + VideoEncoderFactory, + VideoDecoderFactory, + MediaStream, + Event, + RTCDataChannel, + RTCRtpReceiver, + AudioErrorEvent, + AudioStateChangeEvent, + AudioCapturerSamplesReadyEvent, + RTCConfiguration, + RTCIceServer, + MediaStreamConstraints, + MediaDevices, + MediaTrackConstraints, + DisplayMediaStreamOptions, + VideoTrack, + AudioTrack, + RTCSdpType, + RTCSessionDescriptionInit, + RTCIceCandidateInit, + RTCRtpTransceiverDirection, 
} from 'libohos_webrtc.so'
import {
  getCandidateMap,
  getConfigurationMap,
  getConstraintsMap,
  getDataChannelDictMap,
  getDataChannelId,
  getDescriptionMap,
  getDirection,
  getDuration,
  getGap,
  getLabel,
  getOptionsMap,
  getPeerConnectionId,
  getRtpSenderId,
  getSenderId,
  getStreamId,
  getStreamIds,
  getTone,
  getTrackId,
  getTransceiverId,
  getType
} from './utils/CallUtils';
import { mediaStreamTrackToMap } from './utils/TrackUtils';

const TAG: string = 'FlutterWebRTCPlugin-MethodCallHandlerImpl';

/**
 * Bridges Flutter method-channel calls onto the native OHOS WebRTC binding
 * (libohos_webrtc.so). One handler instance owns all peer connections, local
 * streams/tracks and video renderers created through the channel.
 */
export default class MethodCallHandlerImpl implements MethodCallHandler, StateProvider {
  // NOTE(review): several fields below (ability, pc, adm, recvChannel,
  // sendChannel, videoEncoderFactory, videoDecoderFactory, audioDeviceModule,
  // mTextureId, entry) are not referenced in this portion of the file —
  // confirm they are used elsewhere or remove.
  private ability: UIAbility | null = null;
  private messenger: BinaryMessenger;
  private context: Context;
  private textures: TextureRegistry;
  // peerConnectionId -> PeerConnectionObserver for every live connection.
  private mPCO: HashMap = new HashMap();
  private pcf: PeerConnectionFactory | null = null;
  private pc?: RTCPeerConnection;
  private adm?: AudioDeviceModule;
  private recvChannel?: RTCDataChannel;
  private sendChannel?: RTCDataChannel;
  private videoEncoderFactory?: VideoEncoderFactory;
  private videoDecoderFactory?: VideoDecoderFactory;
  // streamId -> MediaStream / trackId -> MediaStreamTrack caches for lookups
  // by later channel calls.
  private localStreams: HashMap = new HashMap();
  private localTracks: HashMap = new HashMap();
  // textureId -> FlutterRTCVideoRenderer.
  private renders: PlainArray = new PlainArray();
  private stateProvider: StateProvider;
  private mFactory: PeerConnectionFactory;
  private getUserMediaImpl: GetUserMediaImpl | null = null;
  private audioDeviceModule: AudioDeviceModule | null = null;
  private frameCryptor: FlutterRTCFrameCryptor;
  // private videoEncoderFactory: CustomVideoEncoderFactory | null = null;
  private mTextureId: number = 0;
  private entry: SurfaceTextureEntry | null = null;

  constructor(context: Context, messenger: BinaryMessenger, textureRegistry: TextureRegistry) {
    this.context = context;
    this.textures = textureRegistry;
    this.messenger = messenger;
    // This object doubles as the StateProvider handed to sub-components.
    this.stateProvider = this;
    this.mFactory = new PeerConnectionFactory();
    this.frameCryptor = new FlutterRTCFrameCryptor(this);
  }

  // Dispatch table: Flutter method name -> bound handler. onMethodCall looks
  // methods up here; anything missing is forwarded to the frame cryptor.
  private methodHandlers: MethodHandlers = {
    'initialize': (call, result) => this.initialize(call, result),
    'createPeerConnection': (call, result) => this.createPeerConnection(call, result),
    'getUserMedia': (call, result) => this.getUserMedia(call, result),
    'createLocalMediaStream': (call, result) => this.createLocalMediaStream(call, result),
    'getSources': (call, result) => this.getSources(call, result),
    'createOffer': (call, result) => this.createOffer(call, result),
    'createAnswer': (call, result) => this.createAnswer(call, result),
    'mediaStreamGetTracks': (call, result) => this.mediaStreamGetTracks(call, result),
    'addStream': (call, result) => this.addStream(call, result),
    'removeStream': (call, result) => this.removeStream(call, result),
    'setLocalDescription': (call, result) => this.setLocalDescription(call, result),
    'setRemoteDescription': (call, result) => this.setRemoteDescription(call, result),
    'sendDtmf': (call, result) => this.sendDtmf(call, result),
    'addCandidate': (call, result) => this.addCandidate(call, result),
    'getStats': (call, result) => this.getStats(call, result),
    'createDataChannel': (call, result) => this.createDataChannel(call, result),
    'dataChannelSend': (call, result) => this.dataChannelSend(call, result),
    'dataChannelClose': (call, result) => this.dataChannelClose(call, result),
    'streamDispose': (call, result) => this.streamDispose(call, result),
    'mediaStreamTrackSetEnable': (call, result) => this.mediaStreamTrackSetEnable(call, result),
    'mediaStreamAddTrack': (call, result) => this.mediaStreamAddTrack(call, result),
    'mediaStreamRemoveTrack': (call, result) => this.mediaStreamRemoveTrack(call, result),
    'trackDispose': (call, result) => this.trackDispose(call, result),
    'restartIce': (call, result) => this.restartIce(call, result),
    'peerConnectionClose': (call, result) => this.peerConnectionClose(call, result),
    'peerConnectionDispose': (call, result) => this.peerConnectionDispose(call, result),
    'createVideoRenderer': (call, result) => this.createVideoRenderer(call, result),
    'videoRendererDispose': (call, result) => this.videoRendererDispose(call, result),
    'videoRendererSetSrcObject': (call, result) => this.videoRendererSetSrcObject(call, result),
    'mediaStreamTrackHasTorch': (call, result) => this.mediaStreamTrackHasTorch(call, result),
    'mediaStreamTrackSetTorch': (call, result) => this.mediaStreamTrackSetTorch(call, result),
    'mediaStreamTrackSetZoom': (call, result) => this.mediaStreamTrackSetZoom(call, result),
    'mediaStreamTrackSwitchCamera': (call, result) => this.mediaStreamTrackSwitchCamera(call, result),
    'setVolume': (call, result) => this.setVolume(call, result),
    'selectAudioOutput': (call, result) => this.selectAudioOutput(call, result),
    'setMicrophoneMute': (call, result) => this.setMicrophoneMute(call, result),
    'selectAudioInput': (call, result) => this.selectAudioInput(call, result),
    'setOhosAudioConfiguration': (call, result) => this.setOhosAudioConfiguration(call, result),
    'enableSpeakerphone': (call, result) => this.enableSpeakerphone(call, result),
    'enableSpeakerphoneButPreferBluetooth': (call, result) => this.enableSpeakerphoneButPreferBluetooth(call, result),
    'getDisplayMedia': (call, result) => this.getDisplayMedia(call, result),
    'startRecordToFile': (call, result) => this.startRecordToFile(call, result),
    'stopRecordToFile': (call, result) => this.stopRecordToFile(call, result),
    'captureFrame': (call, result) => this.captureFrame(call, result),
    'getLocalDescription': (call, result) => this.getLocalDescription(call, result),
    'getRemoteDescription': (call, result) => this.getRemoteDescription(call, result),
    'setConfiguration': (call, result) => this.setConfiguration(call, result),
    'addTrack': (call, result) => this.addTrack(call, result),
    'removeTrack': (call, result) => this.removeTrack(call, result),
    'addTransceiver': (call, result) => this.addTransceiver(call, result),
    'rtpTransceiverSetDirection': (call, result) => this.rtpTransceiverSetDirection(call, result),
    'rtpTransceiverGetDirection': (call, result) => this.rtpTransceiverGetDirection(call, result),
    'rtpTransceiverGetCurrentDirection': (call, result) => this.rtpTransceiverGetCurrentDirection(call, result),
    'rtpTransceiverStop': (call, result) => this.rtpTransceiverStop(call, result),
    'rtpSenderSetParameters': (call, result) => this.rtpSenderSetParameters(call, result),
    'rtpSenderReplaceTrack': (call, result) => this.rtpSenderReplaceTrack(call, result),
    'rtpSenderSetTrack': (call, result) => this.rtpSenderSetTrack(call, result),
    'rtpSenderSetStreams': (call, result) => this.rtpSenderSetStreams(call, result),
    'getSenders': (call, result) => this.getSenders(call, result),
    'getReceivers': (call, result) => this.getReceivers(call, result),
    'getTransceivers': (call, result) => this.getTransceivers(call, result),
    'setPreferredInputDevice': (call, result) => this.setPreferredInputDevice(call, result),
    'getRtpSenderCapabilities': (call, result) => this.getRtpSenderCapabilities(call, result),
    'getRtpReceiverCapabilities': (call, result) => this.getRtpReceiverCapabilities(call, result),
    'setCodecPreferences': (call, result) => this.setCodecPreferences(call, result),
    'getSignalingState': (call, result) => this.getSignalingState(call, result),
    'getIceGatheringState': (call, result) => this.getIceGatheringState(call, result),
    'getIceConnectionState': (call, result) => this.getIceConnectionState(call, result),
    'getConnectionState': (call, result) => this.getConnectionState(call, result)
  };

  /** Logs `method(): error` and, when a result is supplied, reports it back to Dart. */
  resultError(method: string, error: string, result?: MethodResult) {
    let errorMsg: string = `${method}(): ${error}`;
    Log.e(TAG, errorMsg);
    result?.error(method, error, null);
  }

  /**
   * Releases every local stream, local track and peer connection held by this
   * handler. Called when the plugin shuts down.
   */
  dispose(): void {
    for (let mediaStream of this.localStreams.values()) {
      this.handleStreamDispose(undefined, mediaStream);
    }
    this.localStreams.clear();
    this.localTracks.clear();
    // Dispose each observer, which tears down its underlying peer connection.
    for (let connection of this.mPCO.values()) {
      connection?.dispose();
    }
    this.mPCO.clear();
  }

  /**
   * Entry point for all Flutter method-channel calls. Looks the method up in
   * the dispatch table; unknown methods are forwarded to the frame cryptor,
   * which serves its own sub-protocol. Any thrown error is reported back to
   * Dart rather than crashing the platform thread.
   */
  onMethodCall(call: MethodCall, result: MethodResult): void {
    let method: string = call.method;
    try {
      let fun = Reflect.get(this.methodHandlers, method) as Function;
      if (fun) {
        Log.d(TAG, `onMethodCall: ${method}`);
        fun(call, result);
      } else {
        this.frameCryptor.onMethodCall(call, result);
      }
    } catch (err) {
      this.resultError(method || 'onMethodCall', `failed: ${err}`, result);
    }
  }

  /**
   * Resolves the PeerConnectionObserver for the call's peerConnectionId.
   * Reports an error on `result` and returns null when it (or its underlying
   * peer connection) does not exist.
   */
  getPCO(call: MethodCall, result: MethodResult): PeerConnectionObserver | null {
    let pco = this.getPeerConnectionObserver(getPeerConnectionId(call));
    if (!pco || !pco.getPeerConnection()) {
      this.resultError(call.method, 'peerConnection is null', result);
      return null;
    }
    return pco;
  }

  /** Like getPCO but unwraps the RTCPeerConnection itself (null on failure). */
  getPeerConnection(call: MethodCall, result: MethodResult): RTCPeerConnection | null {
    let pco = this.getPCO(call, result);
    return pco?.getPeerConnection() ??
null; + } + + getPeerConnectionObserver(peerConnectionId: string): PeerConnectionObserver | undefined { + return this.mPCO.get(peerConnectionId); + } + + getMediaStream(call: MethodCall, result: MethodResult, errorMsg?: string): MediaStream | undefined { + let mediaStream = this.getLocalStreams(getStreamId(call)); + if (!mediaStream) { + this.resultError(call.method, errorMsg || 'mediaStream is null', result); + } + return mediaStream; + } + + getLocalStreams(streamId: string): MediaStream | undefined { + return this.localStreams.get(streamId); + } + + getMediaStreamTrack(call: MethodCall, result: MethodResult): MediaStreamTrack | null { + let track = this.getLocalTrack(getTrackId(call)); + if (!track) { + this.resultError(call.method, 'track is null', result); + return null; + } + return track; + } + + getLocalTrack(trackId: string): MediaStreamTrack | undefined { + return this.localTracks.get(trackId); + } + + initialize(call: MethodCall, result: MethodResult): void { + let optionsMap: ConstraintsMap = getOptionsMap(call); + let networkIgnoreMask: number = getOptionsNetworkIgnoreMask(optionsMap); + let forceSWCodec: boolean = getOptionsForceSWCodec(optionsMap); + let forceSWCodecList: ArrayList = getOptionsForceSWCodecList(optionsMap); + let audioConfiguration: ConstraintsMap = getOptionsAudioConfiguration(optionsMap); + Log.d(TAG, `initialize optionsMap= ${optionsMap.toString()}`); + Log.d(TAG, `initialize networkIgnoreMask= ${networkIgnoreMask}`); + Log.d(TAG, `initialize forceSWCodec= ${forceSWCodec}`); + Log.d(TAG, `initialize forceSWCodecList= ${forceSWCodecList}`); + Log.d(TAG, `initialize audioConfiguration= ${audioConfiguration}`); + + if (this.pcf) { + return; + } + + /* 创建配对工厂 */ + AudioSwitchManager.instance.createAudioDeviceModule(); + this.createPeerConnectionFactory(); + + this.getUserMediaImpl = new GetUserMediaImpl(this.stateProvider, this.context); + // this.getUserMediaImpl.audioDeviceModule = this.adm!; + + // todo 待初始化其他部分接口,当前ohos-webrtc未满足 + 
result.success(null); + } + + createPeerConnection(call: MethodCall, result: MethodResult): void { + let configurationMap: ConstraintsMap = getConfigurationMap(call); + let constraintsMap: ConstraintsMap = getConstraintsMap(call); + Log.d(TAG, `createPeerConnection : configurationMap=${configurationMap.toString()}`); + Log.d(TAG, `createPeerConnection : constraints=${constraintsMap.toString()}`); + let peerConnectionId: string = this.peerConnectionInit(configurationMap, constraintsMap); + let res: ConstraintsMap = new ConstraintsMap(); + res.putString('peerConnectionId', peerConnectionId); + result.success(res.toMap()); + } + + getUserMedia(call: MethodCall, result: MethodResult): void { + let constraintsMap: ConstraintsMap = getConstraintsMap(call); + PermissionsManager.getInstance().processPermissions(constraintsMap, (code, err) => { + if (!code) { + this.handleGetUserMedia(constraintsMap, result); + } + }); + } + + createLocalMediaStream(call: MethodCall, result: MethodResult): void { + //TODO PeerConnectionFactory 没有接口, + } + + createOffer(call: MethodCall, result: MethodResult): void { + let constraintsMap: ConstraintsMap = getConstraintsMap(call); + const peerConnection = this.getPeerConnection(call, result); + if (!peerConnection) { + return; + } + peerConnection.createOffer() + .then((data) => { + let params: DataMap = new Map(); + params.set('sdp', data.sdp); + params.set('type', data.type); + result.success(params); + }) + .catch((data: Any) => { + this.resultError( + call.method, + 'WEBRTC_CREATE_OFFER_ERROR: peerConnection.createOffer catch: ' + JSON.stringify(data), + result + ) + }) + } + + createAnswer(call: MethodCall, result: MethodResult): void { + let constraintsMap: ConstraintsMap = getConstraintsMap(call); + const peerConnection = this.getPeerConnection(call, result); + if (!peerConnection) { + return; + } + peerConnection.createAnswer() + .then((data) => { + let map: DataMap = new Map(); + map.set('sdp', data.sdp); + map.set('type', 
          data.type);
        result.success(map);
      })
      .catch((data: Any) => {
        this.resultError(
          call.method,
          'WEBRTC_CREATE_ANSWER_ERROR: ' + JSON.stringify(data),
          result
        );
      })
  }

  /**
   * 'mediaStreamGetTracks': returns the audio/video tracks of a stream and
   * caches each track in localTracks so later calls can resolve it by id.
   */
  mediaStreamGetTracks(call: MethodCall, result: MethodResult): void {
    let streamId: string = getStreamId(call);
    Log.d(TAG, 'mediaStreamGetTracks streamId= ' + streamId);
    let stream: MediaStream = this.getStreamForId(streamId, '');
    let resultMap: DataMap = new Map();
    let audioTracks: DataArray = [];
    let videoTracks: DataArray = [];
    stream.getAudioTracks().forEach(track => {
      this.localTracks.set(track.id, track);
      audioTracks.push(mediaStreamTrackToMap(track));
    });
    stream.getVideoTracks().forEach(track => {
      this.localTracks.set(track.id, track);
      videoTracks.push(mediaStreamTrackToMap(track));
    });
    resultMap.set('audioTracks', audioTracks);
    resultMap.set('videoTracks', videoTracks);
    result.success(resultMap);
  }

  /**
   * 'addStream' (deprecated in recent WebRTC specs; addTrack is the
   * replacement).
   * NOTE(review): only the FIRST track of the stream is added, and the reply is
   * success(false) unconditionally — confirm this matches what the Dart side
   * expects.
   */
  addStream(call: MethodCall, result: MethodResult): void {
    let mediaStream = this.getMediaStream(call, result);
    if (!mediaStream) {
      return;
    }
    let peerConnection = this.getPeerConnection(call, result);
    if (!peerConnection) {
      return;
    }
    // addStream is deprecated in recent WebRTC specs; addTrack is the replacement.
    if (mediaStream.getTracks() != null && mediaStream.getTracks().length > 0) {
      let sender = peerConnection.addTrack(mediaStream.getTracks()[0], mediaStream);
    }
    Log.d(TAG, 'addStream' + result);
    result.success(false);
  }

  /**
   * 'removeStream' (deprecated; removeTrack is the replacement). Removes the
   * sender associated with the stream's first track.
   */
  removeStream(call: MethodCall, result: MethodResult): void {
    let mediaStream = this.getMediaStream(call, result);
    if (!mediaStream) {
      return;
    }
    let peerConnection = this.getPeerConnection(call, result);
    if (!peerConnection) {
      return;
    }
    // removeStream is deprecated in recent WebRTC specs; removeTrack is the replacement.
    if (mediaStream.getTracks() != null && mediaStream.getTracks().length > 0) {
      let tracks = mediaStream.getTracks()[0];
      let pco = this.getPCO(call, result);
      let sender = pco?.getRtpSenderById(tracks.id);
      if (!sender) {
        this.resultError('peerConnectionRemoveStream', ' removeTrack sender is null', result);
        return;
      }
      peerConnection.removeTrack(sender);
    }
    result.success(null);
  }

  /** 'setLocalDescription': applies {sdp, type} as the local description. */
  setLocalDescription(call: MethodCall, result: MethodResult): void {
    let peerConnection = this.getPeerConnection(call, result);
    if (!peerConnection) {
      return
    }
    let descriptionMap: ConstraintsMap = getDescriptionMap(call);
    let sdp: string = descriptionMap.getString('sdp');
    let type = descriptionMap.getString('type') as RTCSdpType;
    let sdpInit: RTCSessionDescriptionInit = {
      sdp: sdp, type: type
    }
    peerConnection.setLocalDescription(sdpInit)
      .then(() => {
        result.success(null)
      })
      .catch((data: Any) => {
        this.resultError(
          call.method,
          'WEBRTC_SET_LOCAL_DESCRIPTION_ERROR: peerConnection.setLocalDescription catch: ' + JSON.stringify(data),
          result)
      })
  }

  /** 'setRemoteDescription': applies {sdp, type} as the remote description. */
  setRemoteDescription(call: MethodCall, result: MethodResult): void {
    let peerConnection = this.getPeerConnection(call, result);
    if (!peerConnection) {
      return
    }
    let descriptionMap: ConstraintsMap = getDescriptionMap(call);
    let sdp: string = descriptionMap.getString('sdp');
    let type = descriptionMap.getString('type') as RTCSdpType;
    let sdpInit: RTCSessionDescriptionInit = {
      sdp: sdp, type: type
    }
    peerConnection.setRemoteDescription(sdpInit)
      .then(() => {
        result.success(null)
      })
      .catch((data: Any) => {
        this.resultError(
          call.method,
          'WEBRTC_SET_REMOTE_DESCRIPTION_ERROR: peerConnection.setRemoteDescription catch: ' + JSON.stringify(data),
          result)
      })
  }

  /**
   * 'sendDtmf': inserts DTMF tones via an audio sender's DTMF sender.
   * NOTE(review): `return` inside forEach only exits the callback, so the LAST
   * matching audio sender wins, not the first — confirm intent.
   */
  sendDtmf(call: MethodCall, result: MethodResult): void {
    const peerConnection = this.getPeerConnection(call, result);
    if (!peerConnection) {
      return
    }
    let audioSender: RTCRtpSender = new RTCRtpSender();
    peerConnection.getSenders().forEach(sender => {
      if (sender.track?.kind === 'audio') {
        audioSender = sender;
        return;
      }
    });
    if (audioSender.dtmf) {
      let dtmfSender =
        audioSender.dtmf;
      let tone: string = getTone(call);
      Log.d(TAG, 'sendDtmf tone= ' + tone);
      let duration: number | undefined = getDuration(call);
      Log.d(TAG, 'sendDtmf duration= ' + duration);
      let gap: number | undefined = getGap(call);
      Log.d(TAG, 'sendDtmf gap= ' + gap);
      dtmfSender.insertDTMF(tone, duration, gap);
    } else {
      Log.d(TAG, 'sendDtmf audioSender.dtmf is null');
    }
    result.success('success');
  }

  /**
   * 'addCandidate': adds a remote ICE candidate. Replies true on success and
   * false on failure; the failure reason is deliberately swallowed.
   */
  addCandidate(call: MethodCall, result: MethodResult): void {
    const peerConnection = this.getPeerConnection(call, result);
    if (!peerConnection) {
      return;
    }
    let candidateMap: ConstraintsMap = getCandidateMap(call);
    // Default the m-line index to 0 when the Dart side omitted it.
    let sdpMLineIndex: number = 0;
    if (candidateMap.getType('sdpMLineIndex') === ObjectType.Number) {
      sdpMLineIndex = candidateMap.getNumber('sdpMLineIndex');
    }
    let candidate: RTCIceCandidateInit = {
      sdpMid: candidateMap.getString('sdpMid'),
      sdpMLineIndex: sdpMLineIndex,
      candidate: candidateMap.getString('candidate')
    }
    peerConnection.addIceCandidate(candidate).then(() => {
      result.success(true);
    }).catch(() => {
      result.success(false);
    });
  }

  /**
   * 'getStats': whole-connection stats when no trackId is given, otherwise
   * stats restricted to that track. The PCO answers `result` itself.
   */
  getStats(call: MethodCall, result: MethodResult): void {
    let trackId: string = getTrackId(call);
    let pco = this.getPCO(call, result);
    if (!pco) {
      return;
    }
    if (trackId == '') {
      pco.getStats(result);
    } else {
      pco.getStatsForTrack(trackId, result);
    }
  }

  /** 'createDataChannel': creates a data channel; the PCO answers `result`. */
  createDataChannel(call: MethodCall, result: MethodResult): void {
    this.getPCO(call, result)?.createDataChannel(getLabel(call), getDataChannelDictMap(call), result);
  }

  /** 'dataChannelSend': sends a text (utf-8) or binary payload over a data channel. */
  dataChannelSend(call: MethodCall, result: MethodResult): void {
    const isBinary: boolean = getType(call) === 'binary';
    let byteBuffer: buffer.Buffer;
    if (isBinary) {
      byteBuffer = buffer.from(call.argument('data'));
    } else {
      let data: string = call.argument('data');
      byteBuffer = buffer.from(data, 'utf-8');
    }
    this.getPCO(call, result)?.dataChannelSend(getDataChannelId(call), byteBuffer.buffer,
isBinary); + result.success(null); + } + + dataChannelClose(call: MethodCall, result: MethodResult): void { + this.getPCO(call, result)?.dataChannelClose(getDataChannelId(call)); + result.success(null); + } + + streamDispose(call: MethodCall, result: MethodResult): void { + let disposeStreamId: string = getStreamId(call); + Log.d(TAG, 'streamDispose streamId= ' + disposeStreamId); + this.handleStreamDispose(disposeStreamId, undefined); + Log.d(TAG, 'streamDispose end'); + result.success(null); + Log.d(TAG, 'streamDispose result.success end'); + } + + mediaStreamTrackSetEnable(call: MethodCall, result: MethodResult): void { + let trackId: string = getTrackId(call); + Log.d(TAG, 'mediaStreamTrackSetEnable trackId= ' + trackId); + let enabled: boolean = call.argument('enabled'); + Log.d(TAG, 'mediaStreamTrackSetEnable enabled= ' + enabled); + let peerConnectionId: string = getPeerConnectionId(call); + Log.d(TAG, 'mediaStreamTrackSetEnable peerConnectionId= ' + peerConnectionId); + + let track = this.getTrackForId(trackId, peerConnectionId); + if (track == null) { + Log.d(TAG, 'mediaStreamTrackSetEnabled() track is null'); + return; + } else if (track.enabled == enabled) { + return; + } + track.enabled = enabled; + result.success(null); + } + + mediaStreamAddTrack(call: MethodCall, result: MethodResult): void { + let streamId: string = getStreamId(call); + Log.d(TAG, 'mediaStreamAddTrack streamId= ' + streamId); + let trackId: string = getTrackId(call); + Log.d(TAG, 'mediaStreamAddTrack trackId= ' + trackId); + let mediaStream = this.getMediaStream(call, result); + if (!mediaStream) { + this.resultError('mediaStreamAddTrack', 'mediaStreamAddTrack() stream [' + streamId + '] is null', result); + return + } + let track = this.getTrackForId(trackId); + if (!track) { + this.resultError('mediaStreamAddTrack', 'mediaStreamAddTrack() track [' + trackId + '] is null', result); + return + } + let kind = track.kind; + //TODO kind 无 audio,需要替换情况 + if (kind === 'audio') { + 
      mediaStream.addTrack(track);
      result.success(null);
    } else if (kind === 'video') {
      mediaStream.addTrack(track);
      result.success(null);
    } else {
      this.resultError('mediaStreamAddTrack',
        'mediaStreamAddTrack() track [' + trackId + '] has unsupported type: ' + kind, result);
    }

    // Re-bind the track on every renderer currently showing this local stream.
    for (let i = 0; i < this.renders.length; i++) {
      let renderer = this.renders.getValueAt(i);
      if (renderer.checkMediaStream(streamId, 'local')) {
        renderer.setVideoTrack(this.localTracks.get(trackId));
      }
    }
  }

  /** 'mediaStreamRemoveTrack': detaches a track from a stream and its renderers. */
  mediaStreamRemoveTrack(call: MethodCall, result: MethodResult): void {
    let streamId: string = getStreamId(call);
    Log.d(TAG, 'mediaStreamRemoveTrack streamId= ' + streamId);
    let trackId: string = getTrackId(call);
    Log.d(TAG, 'mediaStreamRemoveTrack trackId= ' + trackId);
    this.handleMediaStreamRemoveTrack(streamId, trackId, result);
    this.removeStreamForRendererById(streamId);
  }

  /**
   * 'trackDispose': disables and stops a local track, detaches it from any
   * renderer and forgets it.
   */
  trackDispose(call: MethodCall, result: MethodResult): void {
    let trackId: string = getTrackId(call);
    Log.d(TAG, 'trackDispose id : ' + trackId);
    // NOTE(review): this log fires unconditionally, even when the track exists.
    Log.d(TAG, 'trackDispose() track is null');
    let track = this.getMediaStreamTrack(call, result);
    if (!track) {
      return;
    }
    track.enabled = false
    track.stop()
    this.removeTrackForRendererById(trackId);

    if (track.kind == 'video') {
      // getUserMediaImpl.removeVideoCapturer(trackId); — no such step exists on OHOS
    }
    this.localTracks.remove(trackId)
    result.success(null);
  }

  /** 'restartIce': triggers an ICE restart on the peer connection. */
  restartIce(call: MethodCall, result: MethodResult): void {
    this.getPCO(call, result)?.restartIce();
    result.success(null);
  }

  /** 'peerConnectionClose': closes the peer connection (observer kept alive). */
  peerConnectionClose(call: MethodCall, result: MethodResult): void {
    this.getPCO(call, result)?.close();
    result.success(null);
  }

  /** 'peerConnectionDispose': disposes the peer connection and its observer. */
  peerConnectionDispose(call: MethodCall, result: MethodResult): void {
    this.getPCO(call, result)?.dispose();
    result.success(null);
  }

  /**
   * 'createVideoRenderer': allocates a texture-backed renderer plus its event
   * channel and returns the textureId to Dart.
   */
  createVideoRenderer(call: MethodCall, result: MethodResult): void {
    let textureId = this.textures.getTextureId();
    let render:
    FlutterRTCVideoRenderer =
      new FlutterRTCVideoRenderer(textureId, this.stateProvider);
    Log.d(TAG, 'createVideoRenderer create FlutterRTCVideoRenderer !')
    this.renders.add(textureId, render);
    // Each renderer gets its own event channel keyed by texture id.
    let eventChannel: EventChannel =
      new EventChannel(this.messenger, 'FlutterWebRTC/Texture' + textureId);
    eventChannel.setStreamHandler(render);
    render.setEventChannel(eventChannel);
    render.setId(textureId);
    let params: ConstraintsMap = new ConstraintsMap();
    params.putNumber('textureId', textureId);
    Log.d(TAG, 'createVideoRenderer result.success:' + Array.from(params.toMap().entries()))
    result.success(params.toMap());
  }

  /** 'videoRendererDispose': tears down the renderer for a texture id. */
  videoRendererDispose(call: MethodCall, result: MethodResult): void {
    let textureId: number = call.argument('textureId');
    Log.d(TAG, 'videoRendererDispose textureId= ' + textureId);
    let render: FlutterRTCVideoRenderer = this.renders.get(textureId);
    if (!render) {
      this.resultError('videoRendererDispose', `render [${textureId}] not found !`, result);
      return;
    }
    render.Dispose();
    this.renders.remove(textureId);
    result.success(null);
  }

  /**
   * 'videoRendererSetSrcObject': binds a (local or remote) stream — and
   * optionally one specific track — to a renderer surface.
   */
  videoRendererSetSrcObject(call: MethodCall, result: MethodResult): void {
    let textureId: number = call.argument('textureId');
    let surfaceId: string = call.argument('surfaceId');
    let streamId: string = getStreamId(call);
    let ownerTag: string = call.argument('ownerTag');
    let trackId: string = getTrackId(call);

    Log.d(TAG, 'videoRendererSetSrcObject textureId= ' + textureId);
    Log.d(TAG, 'videoRendererSetSrcObject streamId= ' + streamId);
    Log.d(TAG, 'videoRendererSetSrcObject ownerTag= ' + ownerTag);
    Log.d(TAG, 'videoRendererSetSrcObject trackId= ' + trackId);
    Log.d(TAG, 'videoRendererSetSrcObject surfaceId= ' + surfaceId);

    let videoRender: FlutterRTCVideoRenderer = this.renders.get(textureId);
    if (!videoRender) {
      this.resultError('videoRendererSetSrcObject', `videoRender [${textureId}] not found !`, result);
      return;
    }
    if (!surfaceId) {
      this.resultError('videoRendererSetSrcObject', `videoRender surfaceId not found !`, result);
      return;
    }
    // Lazily attach the native surface the first time a real surfaceId arrives.
    if (videoRender.getSurfaceId() == '' && surfaceId != '') {
      videoRender.init(surfaceId);
    }
    let stream: MediaStream;
    if (ownerTag === 'local') {
      stream = this.localStreams.get(streamId);
    } else {
      stream = this.getStreamForId(streamId, ownerTag);
    }
    // trackId '0' means "no specific track" — bind the whole stream.
    if (trackId != null && !(trackId === '0')) {
      videoRender.setStream(stream, ownerTag, trackId);
    } else {
      videoRender.setStream(stream, ownerTag);
    }
    result.success(null);
  }

  /** 'mediaStreamTrackHasTorch': asks whether the capture device has a torch. */
  mediaStreamTrackHasTorch(call: MethodCall, result: MethodResult): void {
    let trackId: string = getTrackId(call);
    Log.d(TAG, 'mediaStreamTrackHasTorch trackId= ' + trackId);
    if (this.getUserMediaImpl) {
      Log.d(TAG, 'mediaStreamTrackHasTorch hasTorch in!')
      this.getUserMediaImpl.hasTorch(trackId, result);
    } else {
      this.resultError('mediaStreamTrackHasTorch', 'getUserMediaImpl null !', result);
    }
  }

  /** 'mediaStreamTrackSetTorch': switches the torch on/off. */
  mediaStreamTrackSetTorch(call: MethodCall, result: MethodResult): void {
    let trackId: string = getTrackId(call);
    Log.d(TAG, 'mediaStreamTrackSetTorch trackId= ' + trackId);
    let torch: boolean = call.argument('torch');
    Log.d(TAG, 'mediaStreamTrackSetTorch torch= ' + torch);
    if (this.getUserMediaImpl) {
      // TODO: cannot resolve the camera from the trackId yet; needs a native hook.
      this.getUserMediaImpl.setTorch(torch, result);
    } else {
      this.resultError('mediaStreamTrackSetTorch', 'getUserMediaImpl null !', result);
    }
  }

  /** 'mediaStreamTrackSetZoom': sets the camera zoom level for the track. */
  mediaStreamTrackSetZoom(call: MethodCall, result: MethodResult): void {
    let trackId: string = getTrackId(call);
    Log.d(TAG, 'mediaStreamTrackSetZoom trackId= ' + trackId);
    let zoomLevel: number = call.argument('zoomLevel');
    Log.d(TAG, 'mediaStreamTrackSetZoom zoomLevel= ' + zoomLevel);
    if (this.getUserMediaImpl) {
      // TODO: cannot resolve the camera from the trackId yet; needs a native hook.
      this.getUserMediaImpl.setZoom(trackId, zoomLevel, result);
    } else {
      this.resultError('mediaStreamTrackSetZoom',
'getUserMediaImpl null !', result); + } + } + + mediaStreamTrackSwitchCamera(call: MethodCall, result: MethodResult): void { + let trackId: string = getTrackId(call); + Log.d(TAG, 'mediaStreamTrackSetTorch trackId= ' + trackId); + if (this.getUserMediaImpl) { + // TODO 无法通过trackID获取到camera进行相机切换,需要添加接口调用底层功能进行切换。 + /* + * 修改实现方式,切换摄像头,await sender.replaceTrack(newTrack); 通过替换videoTrack + * 首先得判断当前相机是否打开,既已经获取过 getUserMedia + * 从track获取到 mediaStream + * 最终获取到 rtpSender + * */ + this.getUserMediaImpl.switchCamera(trackId, result); + } else { + this.resultError('mediaStreamTrackSwitchCamera', 'getUserMediaImpl null !', result); + } + } + + setVolume(call: MethodCall, result: MethodResult): void { + let trackId: string = getTrackId(call); + let volume: number = call.argument('volume') ?? -1; + let peerConnectionId: string = getPeerConnectionId(call); + this.mediaStreamTrackSetVolume(trackId, volume, peerConnectionId); + result.success(null); + } + + selectAudioOutput(call: MethodCall, result: MethodResult): void { + let deviceId: string = call.argument('deviceId'); + Log.d(TAG, 'selectAudioOutput deviceId= ' + deviceId); + AudioSwitchManager.instance.selectAudioOutput(parseInt(deviceId)); + result.success(null); + } + + setMicrophoneMute(call: MethodCall, result: MethodResult): void { + let mute: boolean = call.argument('mute'); + AudioSwitchManager.instance.setMicrophoneMute(mute) + result.success(null); + } + + selectAudioInput(call: MethodCall, result: MethodResult): void { + let deviceId: string = call.argument('deviceId') + let audioManager = audio.getAudioManager() + let inputDevices = audioManager.getDevices(audio.DeviceFlag.INPUT_DEVICES_FLAG) + //TODO 暂不支持 setPreferredInputDevice 适配偏好设备的方法 + } + + setOhosAudioConfiguration(call: MethodCall, result: MethodResult): void { + let configuration: ConstraintsMap = getConfigurationMap(call); + AudioSwitchManager.instance.setAudioConfiguration(configuration); + result.success(null); + } + + enableSpeakerphone(call: 
MethodCall, result: MethodResult): void { + let mute: boolean = call.argument('enable'); + AudioSwitchManager.instance.enableSpeakerphone(mute); + result.success(null); + } + + enableSpeakerphoneButPreferBluetooth(call: MethodCall, result: MethodResult): void { + AudioSwitchManager.instance.enableSpeakerButPreferBluetooth(); + } + + async getDisplayMedia(call: MethodCall, result: MethodResult): Promise { + let constraints: ConstraintsMap = getConstraintsMap(call); + let mediaOption: DisplayMediaStreamOptions = {} + if (constraints.getType('audio') === ObjectType.Boolean) { + mediaOption.audio = constraints.getBoolean('audio') + } else if (constraints.getType('audio') == ObjectType.Map) { + let audioOption: ConstraintsMap = constraints.getMap('audio')! + mediaOption.audio = this.getMediaTrackConstraints(audioOption.toMap()) + } + + if (constraints.getType('video') === ObjectType.Boolean) { + mediaOption.video = constraints.getBoolean('video') + } else if (constraints.getType('video') == ObjectType.Map) { + let videoOptionMap: ConstraintsMap = constraints.getMap('video')! + mediaOption.video = this.getMediaTrackConstraints(videoOptionMap.toMap()) + } + + Log.e(TAG, `mediaOption get . ${JSON.stringify(mediaOption)}`); + let mediaDevices = new MediaDevices(); + let mediaStream: MediaStream = await mediaDevices.getDisplayMedia(mediaOption); + let streamId: string = mediaStream.id ?? 
this.getNextStreamUUID(); + Log.d(TAG, 'getDisplayMedia mediaStream.id streamId= ' + streamId) + + if (!mediaStream) { + this.resultError('getDisplayMedia', 'Failed to create new media stream', result); + return; + } + + let params: DataMap = new Map() + params.set('streamId', mediaStream.id) + Log.d(TAG, 'getDisplayMedia userMedia.id= ' + mediaStream.id) + + let userVideoTracks: DataMap[] = [] + for (let videoTrack of mediaStream.getVideoTracks()) { + let map: DataMap = new Map() + map.set('enabled', videoTrack.enabled) + map.set('id', videoTrack.id) + map.set('kind', videoTrack.kind) + map.set('label', videoTrack.kind) + map.set('readyState', videoTrack.readyState) + map.set('remote', false) + userVideoTracks.push(map) + this.localTracks.set(videoTrack.id, videoTrack) + } + + let userAudioTracks: DataMap[] = [] + for (let audioTrack of mediaStream.getAudioTracks()) { + let map: DataMap = new Map() + map.set('enabled', audioTrack.enabled) + map.set('id', audioTrack.id) + map.set('kind', audioTrack.kind) + map.set('label', audioTrack.kind) + map.set('readyState', audioTrack.readyState) + map.set('remote', false) + this.localTracks.set(audioTrack.id, audioTrack) + userAudioTracks.push(map) + } + params.set('videoTracks', userVideoTracks) + params.set('audioTracks', userAudioTracks) + this.localStreams.set(mediaStream.id, mediaStream) + result.success(params) + } + + startRecordToFile(call: MethodCall, result: MethodResult): void { + //TODO startRecordToFile + } + + stopRecordToFile(call: MethodCall, result: MethodResult): void { + //TODO stopRecordToFile + } + + captureFrame(call: MethodCall, result: MethodResult): void { + /*暂不支持截图 录像*/ + let videoTrackId: string = getTrackId(call); + let peerConnectionId: string = getPeerConnectionId(call); + let path: string = call.argument('path'); + Log.d(TAG, 'path:' + path + ', videoTrackId:' + videoTrackId + ', peerConnectionId:' + peerConnectionId); + if (isNotEmpty(videoTrackId)) { + let track: MediaStreamTrack = 
this.getTrackForId(videoTrackId, peerConnectionId); + if (track.kind === 'video') { + } else { + this.resultError('captureFrame', 'It\'s not video track', result); + } + } else { + this.resultError('captureFrame', 'Track is null', result); + } + } + + getLocalDescription(call: MethodCall, result: MethodResult): void { + const peerConnection = this.getPeerConnection(call, result); + if (!peerConnection) { + return + } + let sdp = peerConnection.localDescription; + let params: ConstraintsMap = new ConstraintsMap() + params.putString('sdp', sdp!.sdp) + params.putString('type', sdp!.type); + result.success(params.toMap()); + } + + getRemoteDescription(call: MethodCall, result: MethodResult): void { + const peerConnection = this.getPeerConnection(call, result); + if (!peerConnection) { + return + } + let sdp = peerConnection.remoteDescription; + if (!sdp) { + result.success(null); + return + } + let params: ConstraintsMap = new ConstraintsMap(); + params.putString('sdp', sdp.sdp); + params.putString('type', sdp.type); + result.success(params.toMap()); + } + + setConfiguration(call: MethodCall, result: MethodResult): void { + const peerConnection = this.getPeerConnection(call, result); + if (!peerConnection) { + return + } + let configurationMap: ConstraintsMap = getConfigurationMap(call); + peerConnection.setConfiguration(this.parseRTCConfiguration(configurationMap)); + result.success(null); + } + + addTrack(call: MethodCall, result: MethodResult): void { + let pco = this.getPCO(call, result); + if (!pco) { + return; + } + let track = this.getMediaStreamTrack(call, result); + if (!track) { + return; + } + let streamIds: string[] = getStreamIds(call); + let streams: MediaStream[] = []; + for (let streamId of streamIds) { + if (this.localStreams.hasKey(streamId)) { + streams.push(this.localStreams.get(streamId)); + } + } + pco.addTrack(track, streams, result); + } + + removeTrack(call: MethodCall, result: MethodResult): void { + this.getPCO(call, 
result)?.removeTrack(getSenderId(call), result); + } + + addTransceiver(call: MethodCall, result: MethodResult): void { + const pco = this.getPCO(call, result); + if (!pco) { + return; + } + let transceiverInit: DataMap = call.argument('transceiverInit'); + if (call.hasArgument('trackId')) { + let track = this.getMediaStreamTrack(call, result); + if (!track) { + return; + } + let streams: MediaStream[] = [] + //获取flutter层传下来的流 + if (transceiverInit.has('streamIds')){ + let streamIds:string[] = transceiverInit.get('streamIds') + streamIds.forEach((id) => { + if (this.localStreams.hasKey(id)) { + streams.push(this.localStreams.get(id)) + } + }) + } + //寻找附加的track的视频流 + this.localStreams.forEach((stream) => { + if (stream?.getTracks().find((item) => item.id == track?.id)) { + if (streams.find((item) => item.id == stream.id)) { + return + } + streams.push(stream) + } + }) + if (streams.length > 0) { + transceiverInit.set("streams", streams) + } + pco.addTransceiver(track, result, transceiverInit); + } else if (call.hasArgument('mediaType')) { + let mediaType: string = call.argument('mediaType') + pco.addTransceiver(mediaType, result, transceiverInit); + } else { + this.resultError(call.method, 'Incomplete parameters', result); + } + } + + rtpTransceiverSetDirection(call: MethodCall, result: MethodResult): void { + this.getPCO(call, result)?.rtpTransceiverSetDirection(getDirection(call), getTransceiverId(call), result); + } + + rtpTransceiverGetDirection(call: MethodCall, result: MethodResult): void { + this.getPCO(call, result)?.rtpTransceiverGetDirection(getTransceiverId(call), result); + } + + rtpTransceiverGetCurrentDirection(call: MethodCall, result: MethodResult): void { + this.getPCO(call, result)?.rtpTransceiverGetCurrentDirection(getTransceiverId(call), result); + } + + rtpTransceiverStop(call: MethodCall, result: MethodResult): void { + this.getPCO(call, result)?.rtpTransceiverStop(getTransceiverId(call), result); + } + + rtpSenderSetParameters(call: 
MethodCall, result: MethodResult): void { + let parameters: DataMap = call.argument('parameters'); + this.getPCO(call, result)?.rtpSenderSetParameters(getRtpSenderId(call), parameters, result); + } + + rtpSenderReplaceTrack(call: MethodCall, result: MethodResult): void { + this.handleRtpSenderSetTrack(call, result, true); + } + + rtpSenderSetTrack(call: MethodCall, result: MethodResult): void { + this.handleRtpSenderSetTrack(call, result, false); + } + + rtpSenderSetStreams(call: MethodCall, result: MethodResult): void { + let pco = this.getPCO(call, result); + if (!pco) { + return; + } + let rtpSenderId: string = getRtpSenderId(call); + let streamIds: string[] = getStreamIds(call); + let streams: MediaStream[] = []; + for (let streamId of streamIds) { + if (this.localStreams.hasKey(streamId)) { + streams.push(this.localStreams.get(streamId)); + } + pco.rtpSenderSetStreams(rtpSenderId, streams, result); + } + } + + getSenders(call: MethodCall, result: MethodResult): void { + this.getPCO(call, result)?.getSenders(result); + } + + getReceivers(call: MethodCall, result: MethodResult): void { + this.getPCO(call, result)?.getReceivers(result); + } + + getTransceivers(call: MethodCall, result: MethodResult): void { + this.getPCO(call, result)?.getTransceivers(result); + } + + setPreferredInputDevice(call: MethodCall, result: MethodResult): void { + let deviceId: string = call.argument('deviceId') ?? ''; + let audioManager = audio.getAudioManager() + let inputDevices = audioManager.getDevices(audio.DeviceFlag.INPUT_DEVICES_FLAG) + //TODO 暂不支持 setPreferredInputDevice 适配偏好设备方法 + } + + getRtpSenderCapabilities(call: MethodCall, result: MethodResult): void { + let kind: string = call.argument('kind') ?? ''; + let mediaType: string = kind === 'video' ? 
'video' : 'audio'; + let capabilities = RTCRtpSender.getCapabilities(mediaType) + if (capabilities) { + let params: DataMap = capabilitiesToMap(capabilities); + result.success(params) + } else { + this.resultError('getRtpReceiverCapabilities', 'peerConnection is null', result) + } + } + + getRtpReceiverCapabilities(call: MethodCall, result: MethodResult): void { + let kind: string = call.argument('kind') ?? ''; + let mediaType: string = kind === 'video' ? 'video' : 'audio'; + let capabilities = RTCRtpReceiver.getCapabilities(mediaType) + if (capabilities) { + let params: DataMap = capabilitiesToMap(capabilities); + result.success(params) + } else { + this.resultError('getRtpReceiverCapabilities', 'peerConnection is null', result) + } + } + + setCodecPreferences(call: MethodCall, result: MethodResult): void { + let codecs: DataMap[] = call.argument('codecs'); + this.getPCO(call, result)?.rtpTransceiverSetCodecPreferences(getTransceiverId(call), codecs, result); + } + + getSignalingState(call: MethodCall, result: MethodResult): void { + const peerConnection = this.getPeerConnection(call, result); + if (!peerConnection) { + return; + } + let params: DataMap = new Map(); + params.set('state', peerConnection.signalingState); + result.success(params); + } + + getIceGatheringState(call: MethodCall, result: MethodResult): void { + const peerConnection = this.getPeerConnection(call, result); + if (!peerConnection) { + return; + } + let params: DataMap = new Map(); + params.set('state', peerConnection.iceGatheringState); + result.success(params); + } + + getIceConnectionState(call: MethodCall, result: MethodResult): void { + const peerConnection = this.getPeerConnection(call, result); + if (!peerConnection) { + return + } + let params: DataMap = new Map() + params.set('state', peerConnection.iceConnectionState) + result.success(params) + } + + getConnectionState(call: MethodCall, result: MethodResult): void { + const peerConnection = this.getPeerConnection(call, result); + 
if (!peerConnection) { + return + } + let params: DataMap = new Map() + params.set('state', peerConnection.connectionState) + result.success(params) + } + + async createVideoEncoderFactory() { + Log.d(TAG, 'this.videoEncoderFactory: '); + } + + async createPeerConnectionFactory() { + /**/ + this.pcf = new PeerConnectionFactory({ + adm: AudioSwitchManager.instance.getAudioDeviceModule() + }); + Log.d(TAG, 'Log this.pcf: ' + JSON.stringify(this.pcf)); + + let filesDir = getContext().filesDir; + let file = fs.openSync(filesDir + '/test.txt', fs.OpenMode.READ_WRITE | fs.OpenMode.CREATE); + this.pcf.startAecDump(file.fd, -1); + } + + peerConnectionInit(configurationMap: ConstraintsMap, constraintsMap: ConstraintsMap) { + let peerConnectionId: string = this.getNextStreamUUID(); + Log.d(TAG, 'peerConnectionInit peerConnectionId= ' + peerConnectionId) + let conf: RTCConfiguration = this.parseRTCConfiguration(configurationMap); + let peerConnection: RTCPeerConnection = this.mFactory.createPeerConnection(conf); + let observer: PeerConnectionObserver = + new PeerConnectionObserver(conf, this, this.messenger, peerConnectionId, peerConnection); + this.mPCO.set(peerConnectionId, observer); + return peerConnectionId; + } + + parseRTCConfiguration(map: ConstraintsMap): RTCConfiguration { + let iceServersArray = new ConstraintsArray(); + if (map.getType('iceServers') === ObjectType.Array) { + iceServersArray = map.getArray('iceServers')!; + Log.d(TAG, 'parseRTCConfiguration iceServersArray= ' + iceServersArray) + } + let conf: RTCConfiguration = { + iceServers: this.createIceServers(iceServersArray) + }; + Log.d(TAG, 'parseRTCConfiguration conf= ' + JSON.stringify(conf)) + return conf; + } + + createIceServers(iceServersArray: ConstraintsArray): RTCIceServer[] { + Log.d(TAG, 'createIceServers in') + let iceServers: RTCIceServer[] = [] + for (let i = 0; i < iceServersArray.size(); i++) { + if (iceServersArray.getType(i) !== ObjectType.Map) { + continue; + } + let iceServerMap: 
ConstraintsMap = new ConstraintsMap(iceServersArray.getMap(i)); + let hasUsernameAndCredential: boolean = + iceServerMap.hasKey('username') && iceServerMap.hasKey('credential'); + if (iceServerMap.hasKey('url')) { + if (hasUsernameAndCredential) { + iceServers.push({ + urls: iceServerMap.getString('url'), + username: iceServerMap.getString('username'), + credential: iceServerMap.getString('credential') + }) + } + } else if (iceServerMap.hasKey('urls')) { + if (hasUsernameAndCredential) { + iceServers.push({ + urls: iceServerMap.getString('urls'), + username: iceServerMap.getString('username'), + credential: iceServerMap.getString('credential') + }) + } else { + iceServers.push({ + urls: iceServerMap.getString('urls'), + }) + } + } + } + return iceServers + } + + putLocalStream(streamId: string, stream: MediaStream): boolean { + this.localStreams.set(streamId, stream); + return true; + } + + putLocalTrack(trackId: string, track: MediaStreamTrack): boolean { + this.localTracks.set(trackId, track); + return true; + } + + getNextTrackUUID(): string { + let uuid: string = ''; + do { + uuid = util.generateRandomUUID(); + } while (this.getTrackForId(uuid) != null); + + return uuid; + } + + getPeerConnectionFactory(): PeerConnectionFactory { + if (this.pcf) { + return this.pcf; + } + return new PeerConnectionFactory({ + adm: AudioSwitchManager.instance.getAudioDeviceModule() + }); + } + + getAbility(): UIAbility | null { + return this.ability; + } + + getApplicationContext(): common.ApplicationContext | undefined { + return this.context.getApplicationContext(); + } + + getMessenger(): BinaryMessenger { + return this.messenger; + } + + public async handleGetUserMedia(constraints: ConstraintsMap, result: MethodResult) { + let mediaOption: MediaStreamConstraints = {} + if (constraints.getType('audio') === ObjectType.Boolean) { + mediaOption.audio = constraints.getBoolean('audio') + } else if (constraints.getType('audio') == ObjectType.Map) { + let audioOption: ConstraintsMap 
= constraints.getMap('audio')! + mediaOption.audio = this.getMediaTrackConstraints(audioOption.toMap()) + } + + if (constraints.getType('video') === ObjectType.Boolean) { + mediaOption.video = constraints.getBoolean('video') + } else if (constraints.getType('video') == ObjectType.Map) { + let videoOptionMap: ConstraintsMap = constraints.getMap('video')! + mediaOption.video = this.getMediaTrackConstraints(videoOptionMap.toMap()) + } + + Log.e(TAG, `mediaOption get . ${JSON.stringify(mediaOption)}`); + + let params: DataMap = new Map() + let mediaDevices = new MediaDevices(); + let userMedia: MediaStream = await mediaDevices.getUserMedia(mediaOption); + params.set('streamId', userMedia.id) + + let userVideoTracks: DataMap[] = [] + for (let userVideoTrack of userMedia.getVideoTracks()) { + let map: DataMap = new Map() + map.set('enabled', userVideoTrack.enabled) + map.set('id', userVideoTrack.id) + map.set('kind', userVideoTrack.kind) + map.set('label', userVideoTrack.kind) + map.set('readyState', userVideoTrack.readyState) + map.set('remote', false) + userVideoTracks.push(map) + this.localTracks.set(userVideoTrack.id, userVideoTrack) + } + + let userAudioTracks: DataMap[] = [] + for (let userAudioTrack of userMedia.getAudioTracks()) { + let map: DataMap = new Map() + map.set('enabled', userAudioTrack.enabled) + map.set('id', userAudioTrack.id) + map.set('kind', userAudioTrack.kind) + map.set('label', userAudioTrack.kind) + map.set('readyState', userAudioTrack.readyState) + map.set('remote', false) + this.localTracks.set(userAudioTrack.id, userAudioTrack) + userAudioTracks.push(map) + } + + params.set('videoTracks', userVideoTracks) + params.set('audioTracks', userAudioTracks) + this.localStreams.set(userMedia.id, userMedia) + Log.e(TAG, `mediaOption get . 
${JSON.stringify(mediaOption)}`); + result.success(params) + } + + getMediaTrackConstraints(videoOptionMap: DataMap): MediaTrackConstraints { + let videoOption: MediaTrackConstraints = {} + if (videoOptionMap.has('width')) { + videoOption.width = videoOptionMap.get('width') + } + if (videoOptionMap.has('height')) { + videoOption.height = videoOptionMap.get('height') + } + if (videoOptionMap.has('aspectRatio')) { + videoOption.aspectRatio = videoOptionMap.get('aspectRatio') + } + if (videoOptionMap.has('frameRate')) { + videoOption.frameRate = videoOptionMap.get('frameRate') + } + if (videoOptionMap.has('facingMode')) { + videoOption.facingMode = videoOptionMap.get('facingMode') + } + if (videoOptionMap.has('resizeMode')) { + videoOption.resizeMode = videoOptionMap.get('resizeMode') + } + if (videoOptionMap.has('sampleRate')) { + videoOption.sampleRate = videoOptionMap.get('sampleRate') + } + if (videoOptionMap.has('sampleSize')) { + videoOption.sampleSize = videoOptionMap.get('sampleSize') + } + if (videoOptionMap.has('echoCancellation')) { + videoOption.echoCancellation = videoOptionMap.get('echoCancellation') + } + if (videoOptionMap.has('autoGainControl')) { + videoOption.autoGainControl = videoOptionMap.get('autoGainControl') + } + if (videoOptionMap.has('noiseSuppression')) { + videoOption.noiseSuppression = videoOptionMap.get('noiseSuppression') + } + if (videoOptionMap.has('latency')) { + videoOption.latency = videoOptionMap.get('latency') + } + if (videoOptionMap.has('channelCount')) { + videoOption.channelCount = videoOptionMap.get('channelCount') + } + if (videoOptionMap.has('groupId')) { + videoOption.groupId = videoOptionMap.get('groupId') + } + if (videoOptionMap.has('deviceId')) { + videoOption.deviceId = videoOptionMap.get('deviceId') + } + if (videoOptionMap.has('optional')) { + let optionalList: DataMap[] = videoOptionMap.get('optional') + for (let data of optionalList) { + if (data.has('sourceId')) { + videoOption.deviceId = data.get('sourceId') + 
} + } + } + return videoOption + } + + /* 获取相机设备信息 */ + getSources(call: MethodCall, result: MethodResult): void { + let resultMap: DataMap = new Map() + let mediaDevices = new MediaDevices(); + mediaDevices.enumerateDevices().then((data) => { + Log.i(TAG, 'getUserMediaImpl.getSources : ' + JSON.stringify(data)) + let list: DataArray = [] + for (let info of data) { + let map: DataMap = new Map() + map.set('deviceId', info.deviceId) + map.set('groupId', info.deviceId) + map.set('kind', info.kind) + map.set('label', info.label) + list.push(map) + } + resultMap.set('sources', list); + result.success(resultMap); + }) + } + + getCameraInfo(index: number, camera: camera.CameraDevice): ConstraintsMap { + let params: ConstraintsMap = new ConstraintsMap(); + let facing: string = camera.cameraPosition == 2 ? 'front' : 'back'; + params.putString('label', + 'Camera ' + index + ', Facing ' + facing + ', Orientation ' + camera.cameraOrientation); + params.putString('deviceId', '' + index); + params.putString('facing', facing); + params.putString('kind', 'videoinput'); + + return params; + } + + getAudioInputInfo(audioDeviceDescriptor: audio.AudioDeviceDescriptor): ConstraintsMap { + let type = audioDeviceDescriptor.deviceType.valueOf() + let label = audioDeviceDescriptor.name; + let address = audioDeviceDescriptor.address; + // ohos 麦克风 对应安卓 设备内置麦克风 AudioDeviceInfo.TYPE_BUILTIN_MIC + if (audioDeviceDescriptor.deviceType == audio.DeviceType.MIC) { + label = 'Built-in Microphone (' + address + ')'; + } + + // ohos 有线耳机,带麦克风。 对应安卓 有线耳机类型的音频设备 AudioDeviceInfo.TYPE_WIRED_HEADSET + if (audioDeviceDescriptor.deviceType == audio.DeviceType.WIRED_HEADSET) { + label = 'Wired Headset'; + } + // ohos 蓝牙设备SCO连接 对应安卓 蓝牙SCO协议连接的音频设备 AudioDeviceInfo.TYPE_BLUETOOTH_SCO + // 在ohos中还有A2DP协议连接,这里先放一起 TODO 后面看是否需要修改 + if (audioDeviceDescriptor.deviceType == audio.DeviceType.BLUETOOTH_SCO || + audioDeviceDescriptor.deviceType == audio.DeviceType.BLUETOOTH_A2DP) { + label = 'Bluetooth SCO (' + 
audioDeviceDescriptor.name + ')'; + } + + let audios: ConstraintsMap = new ConstraintsMap(); + audios.putString('label', label); + audios.putString('deviceId', '' + audioDeviceDescriptor.id); + audios.putString('groupId', '' + type); + audios.putString('facing', ''); + audios.putString('kind', 'audioinput'); + + return audios; + } + + getAudioOutputInfo(audioDeviceDescriptor: audio.AudioDeviceDescriptor): ConstraintsMap { + + let audioOutputMap: ConstraintsMap = new ConstraintsMap(); + + audioOutputMap.putString('label', audioDeviceDescriptor.name); + audioOutputMap.putString('deviceId', '' + audioDeviceDescriptor.id); + audioOutputMap.putString('facing', ''); + audioOutputMap.putString('kind', 'audiooutput'); + + return audioOutputMap; + } + + setAbility(ability: UIAbility | null) { + this.ability = ability + } + + public getNextStreamUUID(): string { + let uuid: string = ''; + do { + uuid = util.generateRandomUUID(); + } while (this.getStreamForId(uuid, '') != null); + + return uuid; + } + + /* 20240913 当前获取不到-MediaStream,依照安卓逻辑适配先*/ + getStreamForId(id: string, peerConnectionId: string): MediaStream { + let stream: MediaStream | null = null; + if (peerConnectionId.length > 0) { + let pco: PeerConnectionObserver = this.mPCO.get(peerConnectionId); + if (pco != null) { + stream = pco.remoteStreams.get(id); + } + } else { + for (let entriesElement of this.mPCO.entries()) { + stream = entriesElement[1].remoteStreams.get(id); + if (stream != null) { + break; + } + } + } + if (stream == null) { + stream = this.localStreams.get(id); + } + return stream; + } + + /* 20240913 当前获取不到-MediaStreamTrack,依照安卓逻辑适配先*/ + getTrackForId(trackId: string, peerConnectionId?: string): MediaStreamTrack { + let track: MediaStreamTrack = this.localTracks.get(trackId) + if (track == null) { + for (let key of this.mPCO.keys()) { + if (peerConnectionId != null && key.localeCompare(peerConnectionId) != 0) { + continue; + } + let pco: PeerConnectionObserver = this.mPCO.get(key); + track = 
pco.remoteTracks.get(trackId); + if (track == null) { + track = pco.getTransceiversTrack(trackId)!; // 这里并不能强制确定 获取的track不为空。 + } + if (track != null) { + break; + } + } + } + return track + } + + onNegotiationNeeded(event: Event) { + Log.d(TAG, 'onNegotiationNeeded: this=' + JSON.stringify(this)); + Log.d(TAG, 'onNegotiationNeeded: event=' + JSON.stringify(event)); + } + + handleStreamDispose(streamId?: string, stream?: MediaStream) { + let streamL: MediaStream; + Log.d(TAG, 'function streamDispose streamId= ' + streamId); + if (streamId) { + streamL = this.localStreams.get(streamId); + if (streamL) { + Log.i(TAG, 'function streamDispose streamL= ' + stream); + this.handleStreamDispose(undefined, streamL); + this.localStreams.remove(streamId); + this.removeStreamForRendererById(streamId); + return; + } else { + Log.d(TAG, 'streamDispose() streamL is null'); + } + } else { + Log.d(TAG, 'function streamDispose() streamId is null'); + } + + if (stream) { + Log.d(TAG, 'function streamDispose stream= ' + stream); + let videoTracks: VideoTrack[] = stream.getVideoTracks(); + Log.d(TAG, 'function streamDispose videoTracks.length= ' + videoTracks.length); + for (let track of videoTracks) { + this.localTracks.remove(track.id); + stream.removeTrack(track); + } + + let audioTracks: AudioTrack[] = stream.getAudioTracks(); + Log.d(TAG, 'function streamDispose audioTracks.length= ' + audioTracks.length); + for (let track of audioTracks) { + this.localTracks.remove(track.id); + stream.removeTrack(track); + } + } else { + Log.d(TAG, 'function streamDispose() stream is null'); + } + + } + + removeStreamForRendererById(streamId: string): void { + Log.d(TAG, 'streamDispose removeStreamForRendererById '); + if (this.mTextureId) { + Log.d(TAG, 'streamDispose removeStreamForRendererById textureId:' + this.mTextureId); + Log.d(TAG, 'streamDispose removeStreamForRendererById this.renders.length:' + this.renders.length); + let videoRender: FlutterRTCVideoRenderer = 
this.renders.get(this.mTextureId); + if (videoRender == null) { + Log.e(TAG, 'streamDispose removeStreamForRendererById videoRender [' + this.mTextureId + '] not found !'); + return; + } + } + + for (let i = 0; i < this.renders.length; i++) { + let renderer: FlutterRTCVideoRenderer = this.renders.getValueAt(i); + if (renderer.checkMediaStream(streamId, 'local')) { + renderer.setStream(null, ''); + } + } + } + + removeTrackForRendererById(trackId: string) { + for (let i = 0; i < this.renders.length; i++) { + let renderer: FlutterRTCVideoRenderer = this.renders.getValueAt(i) + if (renderer.checkVideoTrack(trackId, 'local')) { + renderer.setStream(null, null) + } + } + } + + handleRtpSenderSetTrack(call: MethodCall, result: MethodResult, replace: boolean) { + const pco = this.getPCO(call, result); + if (!pco) { + return; + } + let track: MediaStreamTrack | null = null; + if (getTrackId(call).length > 0) { + track = this.getMediaStreamTrack(call, result); + if (!track) { + return; + } + } + pco.rtpSenderSetTrack(getRtpSenderId(call), track, result, replace); + } + + handleMediaStreamRemoveTrack(streamId: string, trackId: string, result: MethodResult) { + let mediaStream = this.localStreams.get(streamId); + if (!mediaStream) { + this.resultError('mediaStreamRemoveTrack', 'mediaStreamRemoveTrack() stream [' + streamId + '] is null', result); + return + } + let track = this.getTrackForId(trackId); + if (!track) { + this.resultError('mediaStreamRemoveTrack', 'mediaStreamRemoveTrack() track [' + trackId + '] is null', result); + return + } + let kind = track.kind; + //TODO kind 无 audio,需要替换情况 + if (kind === 'audio') { + mediaStream.removeTrack(track); + result.success(null); + } else if (kind === 'video') { + mediaStream.removeTrack(track); + result.success(null); + } else { + this.resultError('mediaStreamRemoveTrack', + 'mediaStreamRemoveTrack() track [' + trackId + '] has unsupported type: ' + kind, result); + } + } + + mediaStreamTrackSetVolume(id: string, volume: 
number, peerConnectionId: string) { + let track: MediaStreamTrack = this.getTrackForId(id, peerConnectionId); + // TODO MediaStream getAudioTracks addTrack + if (track.kind !== 'video') { + Log.d(TAG, 'setVolume(): ' + id + ',' + volume); + } else { + Log.w(TAG, 'setVolume(): track not found: ' + id); + } + } +} \ No newline at end of file diff --git a/ohos/src/main/ets/MethodHandlers.ets b/ohos/src/main/ets/MethodHandlers.ets new file mode 100644 index 0000000000000000000000000000000000000000..5178cd3dcbb015edcf3f097e860f5ac80c7c9435 --- /dev/null +++ b/ohos/src/main/ets/MethodHandlers.ets @@ -0,0 +1,114 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import { MethodCall, MethodResult } from '@ohos/flutter_ohos'; + +export interface MethodHandlers { + initialize: (call: MethodCall, result: MethodResult) => void, + createPeerConnection: (call: MethodCall, result: MethodResult) => void, + getUserMedia: (call: MethodCall, result: MethodResult) => void, + createLocalMediaStream: (call: MethodCall, result: MethodResult) => void, + getSources: (call: MethodCall, result: MethodResult) => void, + createOffer: (call: MethodCall, result: MethodResult) => void, + createAnswer: (call: MethodCall, result: MethodResult) => void, + mediaStreamGetTracks: (call: MethodCall, result: MethodResult) => void, + addStream: (call: MethodCall, result: MethodResult) => void, + removeStream: (call: MethodCall, result: MethodResult) => void, + setLocalDescription: (call: MethodCall, result: MethodResult) => void, + setRemoteDescription: (call: MethodCall, result: MethodResult) => void, + sendDtmf: (call: MethodCall, result: MethodResult) => void, + addCandidate: (call: MethodCall, result: MethodResult) => void, + getStats: (call: MethodCall, result: MethodResult) => void, + createDataChannel: (call: MethodCall, result: MethodResult) => void, + dataChannelSend: (call: MethodCall, result: MethodResult) => void, + dataChannelClose: (call: MethodCall, result: MethodResult) => void, + streamDispose: (call: MethodCall, result: MethodResult) => void, + mediaStreamTrackSetEnable: (call: MethodCall, result: MethodResult) => void, + mediaStreamAddTrack: (call: MethodCall, result: MethodResult) => void, + mediaStreamRemoveTrack: (call: MethodCall, result: MethodResult) => void, + trackDispose: (call: MethodCall, result: MethodResult) => void, + restartIce: (call: MethodCall, result: MethodResult) => void, + peerConnectionClose: (call: MethodCall, result: MethodResult) => void, + peerConnectionDispose: (call: MethodCall, result: MethodResult) => void, + createVideoRenderer: (call: MethodCall, result: MethodResult) => void, + 
videoRendererDispose: (call: MethodCall, result: MethodResult) => void, + videoRendererSetSrcObject: (call: MethodCall, result: MethodResult) => void, + mediaStreamTrackHasTorch: (call: MethodCall, result: MethodResult) => void, + mediaStreamTrackSetTorch: (call: MethodCall, result: MethodResult) => void, + mediaStreamTrackSetZoom: (call: MethodCall, result: MethodResult) => void, + mediaStreamTrackSwitchCamera: (call: MethodCall, result: MethodResult) => void, + setVolume: (call: MethodCall, result: MethodResult) => void, + selectAudioOutput: (call: MethodCall, result: MethodResult) => void, + setMicrophoneMute: (call: MethodCall, result: MethodResult) => void, + selectAudioInput: (call: MethodCall, result: MethodResult) => void, + setOhosAudioConfiguration: (call: MethodCall, result: MethodResult) => void, + enableSpeakerphone: (call: MethodCall, result: MethodResult) => void, + enableSpeakerphoneButPreferBluetooth: (call: MethodCall, result: MethodResult) => void, + getDisplayMedia: (call: MethodCall, result: MethodResult) => void, + startRecordToFile: (call: MethodCall, result: MethodResult) => void, + stopRecordToFile: (call: MethodCall, result: MethodResult) => void, + captureFrame: (call: MethodCall, result: MethodResult) => void, + getLocalDescription: (call: MethodCall, result: MethodResult) => void, + getRemoteDescription: (call: MethodCall, result: MethodResult) => void, + setConfiguration: (call: MethodCall, result: MethodResult) => void, + addTrack: (call: MethodCall, result: MethodResult) => void, + removeTrack: (call: MethodCall, result: MethodResult) => void, + addTransceiver: (call: MethodCall, result: MethodResult) => void, + rtpTransceiverSetDirection: (call: MethodCall, result: MethodResult) => void, + rtpTransceiverGetDirection: (call: MethodCall, result: MethodResult) => void, + rtpTransceiverGetCurrentDirection: (call: MethodCall, result: MethodResult) => void, + rtpTransceiverStop: (call: MethodCall, result: MethodResult) => void, + 
rtpSenderSetParameters: (call: MethodCall, result: MethodResult) => void, + rtpSenderReplaceTrack: (call: MethodCall, result: MethodResult) => void, + rtpSenderSetTrack: (call: MethodCall, result: MethodResult) => void, + rtpSenderSetStreams: (call: MethodCall, result: MethodResult) => void, + getSenders: (call: MethodCall, result: MethodResult) => void, + getReceivers: (call: MethodCall, result: MethodResult) => void, + getTransceivers: (call: MethodCall, result: MethodResult) => void, + setPreferredInputDevice: (call: MethodCall, result: MethodResult) => void, + getRtpSenderCapabilities: (call: MethodCall, result: MethodResult) => void, + getRtpReceiverCapabilities: (call: MethodCall, result: MethodResult) => void, + setCodecPreferences: (call: MethodCall, result: MethodResult) => void, + getSignalingState: (call: MethodCall, result: MethodResult) => void, + getIceGatheringState: (call: MethodCall, result: MethodResult) => void, + getIceConnectionState: (call: MethodCall, result: MethodResult) => void, + getConnectionState: (call: MethodCall, result: MethodResult) => void, +} + +export interface CryptorMethodHandlers { + frameCryptorFactoryCreateFrameCryptor: (call: MethodCall, result: MethodResult) => void, + frameCryptorSetKeyIndex: (call: MethodCall, result: MethodResult) => void, + frameCryptorGetKeyIndex: (call: MethodCall, result: MethodResult) => void, + frameCryptorSetEnabled: (call: MethodCall, result: MethodResult) => void, + frameCryptorGetEnabled: (call: MethodCall, result: MethodResult) => void, + frameCryptorDispose: (call: MethodCall, result: MethodResult) => void, + frameCryptorFactoryCreateKeyProvider: (call: MethodCall, result: MethodResult) => void, + keyProviderSetSharedKey: (call: MethodCall, result: MethodResult) => void, + keyProviderRatchetSharedKey: (call: MethodCall, result: MethodResult) => void, + keyProviderExportSharedKey: (call: MethodCall, result: MethodResult) => void, + keyProviderSetKey: (call: MethodCall, result: MethodResult) 
=> void, + keyProviderRatchetKey: (call: MethodCall, result: MethodResult) => void, + keyProviderExportKey: (call: MethodCall, result: MethodResult) => void, + keyProviderSetSifTrailer: (call: MethodCall, result: MethodResult) => void, + keyProviderDispose: (call: MethodCall, result: MethodResult) => void, +} \ No newline at end of file diff --git a/ohos/src/main/ets/PeerConnectionObserver.ets b/ohos/src/main/ets/PeerConnectionObserver.ets new file mode 100644 index 0000000000000000000000000000000000000000..cf68c776d9084aa61e39c71e3cdc24e2a27ce1d2 --- /dev/null +++ b/ohos/src/main/ets/PeerConnectionObserver.ets @@ -0,0 +1,750 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import { BinaryMessenger, Log, MethodResult } from '@ohos/flutter_ohos'; +import EventChannel, { EventSink, StreamHandler } from '@ohos/flutter_ohos/src/main/ets/plugin/common/EventChannel'; +import webrtc, { + MediaStreamTrack, + RTCDataChannel, + RTCRtpReceiver, + RTCRtpSender, + RTCRtpCapabilities, + RTCRtpCodec, + RTCRtpTransceiverDirection, + RTCPeerConnection, + MediaStream, + RTCRtpTransceiver, + RTCConfiguration, + RTCPeerConnectionIceEvent, + Event, + RTCRtpEncodingParameters, + RTCRtpSendParameters, + RTCStatsReport, + RTCStats, + RTCRtpTransceiverInit, + RTCDataChannelEvent, + RTCTrackEvent, + RTCIceCandidate, +} from 'libohos_webrtc.so' +import StateProvider from './StateProvider'; +import { HashMap, util } from '@kit.ArkTS'; +import { ConstraintsMap } from './utils/ConstraintsMap'; +import { ConstraintsArray } from './utils/ConstraintsArray'; +import AudioSwitchManager from './audio/AudioSwitchManager'; +import { DataChannelObserver } from './DataChannelObserver'; +import { DataArray, DataMap, ObjectType } from './utils/ObjectType'; +import { DataChannelInit } from './DataChannelInit'; +import { candidateToMap, mediaTrackToMap, rtpReceiverToMap, rtpSenderToMap, transceiverToMap } from './utils/PCOUtils'; +import { dataMapToString } from './utils/Utils'; +import { mediaStreamTrackToMap } from './utils/TrackUtils'; + +const TAG = 'FlutterWebRTCPlugin-PeerConnectionObserver'; +const CHANNEL_NAME = 'FlutterWebRTC/peerConnectionEvent'; + +export default class PeerConnectionObserver implements StreamHandler { + private peerConnection: RTCPeerConnection; + private configuration: RTCConfiguration; + private stateProvider: StateProvider; + private messenger: BinaryMessenger; + private id: string; + private eventChannel: EventChannel; + private eventSink: EventSink | null = null; + private dataChannels: HashMap = new HashMap(); + remoteStreams: HashMap = new HashMap(); + remoteTracks: HashMap = new HashMap(); + transceivers: HashMap = new HashMap(); + + 
constructor(configuration: RTCConfiguration, stateProvider: StateProvider, messenger: BinaryMessenger, + id: string, peerConnection: RTCPeerConnection) { + this.configuration = configuration; + this.stateProvider = stateProvider; + this.messenger = messenger; + this.id = id; + this.peerConnection = peerConnection; + this.setPeerConnectionCallback(); + this.eventChannel = new EventChannel(messenger, `${CHANNEL_NAME}${id}`); + this.eventChannel.setStreamHandler(this); + } + + registerDataChannelObserver(dcId: string, dataChannel: RTCDataChannel) { + Log.d(TAG, 'registerDataChannelObserver dcId: ' + JSON.stringify(dcId)); + new DataChannelObserver(this.messenger, this.id, dcId, dataChannel); + } + + resultError(method: string, error: string, result: MethodResult) { + let errorMsg: string = `${method}(): ${error}`; + Log.e(TAG, errorMsg); + result.error(method, error, null); + } + + onListen(args: Object, events: EventSink): void { + this.eventSink = events; + } + + onCancel(args: ESObject): void { + this.eventSink = null; + } + + sendEvent(event: DataMap) { + this.eventSink?.success(event); + } + + getPeerConnection(): RTCPeerConnection { + return this.peerConnection; + } + + setPeerConnection(peerConnection: RTCPeerConnection): void { + this.peerConnection = peerConnection; + this.setPeerConnectionCallback(); + } + + setPeerConnectionCallback(): void { + this.peerConnection.onicecandidate = (data) => this.onIceCandidate(data); + this.peerConnection.oniceconnectionstatechange = (data) => this.onIceconnectionstatechange(data); + this.peerConnection.onicegatheringstatechange = (data) => this.onIcegatheringstatechange(data); + this.peerConnection.ontrack = (data) => this.onTrack(data); + this.peerConnection.onnegotiationneeded = (data) => this.onNegotiationneeded(data); + this.peerConnection.onsignalingstatechange = (data) => this.onSignalingstatechange(data); + this.peerConnection.onconnectionstatechange = (data) => this.onConnectionstatechange(data); + 
this.peerConnection.ondatachannel = (data) => this.onDatachannel(data); + } + + onIceCandidate(event: RTCPeerConnectionIceEvent) { + Log.d(TAG, `onIceCandidate: ${event.candidate?.toJSON()}`); + let params: DataMap = new Map(); + params.set('event', 'onCandidate'); + params.set('candidate', candidateToMap(event.candidate)); + this.sendEvent(params); + } + + onIceconnectionstatechange(event: Event) { + Log.d(TAG, `onIceconnectionstatechange: ${this.peerConnection.iceConnectionState}`); + let params: DataMap = new Map(); + params.set('event', 'iceConnectionState'); + params.set('state', this.peerConnection.iceConnectionState); + this.sendEvent(params); + } + + onIcegatheringstatechange(event: Event) { + Log.d(TAG, `onIcegatheringstatechange: ${this.peerConnection.iceGatheringState}`); + let params: DataMap = new Map(); + params.set('event', 'iceGatheringState'); + params.set('state', this.peerConnection.iceGatheringState); + this.sendEvent(params); + } + + onTrack(event: RTCTrackEvent) { + Log.d(TAG, 'onTrack : ' + JSON.stringify(event.track)); + for (let stream of event.streams) { + let track = event.receiver.track; + let params = new ConstraintsMap(); + params.putString('event', 'onAddTrack'); + params.putString('streamId', stream.id); + params.putString('ownerTag', this.id); + params.putString('trackId', track.id); + params.putMap('track', mediaStreamTrackToMap(track, true)); + Log.d(TAG, `onTrack params: ${params.toString()}`); + this.sendEvent(params.toMap()); + if ('audio' == track.kind) { + AudioSwitchManager.instance.start(); + } + } + + let params = new ConstraintsMap(); + let streams: DataMap[] = []; + for (let stream of event.streams) { + streams.push(this.mediaStreamToMap(stream)); + this.remoteStreams.set(stream.id, stream); + } + params.putString('event', 'onTrack'); + params.putArray('streams', streams); + params.putMap('track', mediaTrackToMap(event.receiver.track)); + params.putMap('receiver', rtpReceiverToMap(event.receiver)); + // TODO 没有属性 + // if 
(this.configuration.sdpSemantics == PeerConnection.SdpSemantics.UNIFIED_PLAN) { + // List transceivers = peerConnection.getTransceivers(); + // for (RtpTransceiver transceiver : transceivers) { + // if (transceiver.getReceiver() != null && receiver.id().equals(transceiver.getReceiver().id())) { + // String transceiverId = transceiver.getMid(); + // if (null == transceiverId) { + // transceiverId = stateProvider.getNextStreamUUID(); + // } + // params.putMap('transceiver', transceiverToMap(transceiverId, transceiver)); + // } + // } + // } + Log.d(TAG, `onTrack params: ${params.toString()}`); + this.sendEvent(params.toMap()); + } + + onNegotiationneeded(event: Event) { + Log.d(TAG, 'onNegotiationneeded'); + let params: DataMap = new Map(); + params.set('event', 'onRenegotiationNeeded'); + this.sendEvent(params); + } + + onSignalingstatechange(event: Event) { + Log.d(TAG, `onSignalingstatechange: ${this.peerConnection.signalingState}`); + let params: DataMap = new Map(); + params.set('event', 'signalingState'); + params.set('state', this.peerConnection.signalingState); + this.sendEvent(params); + } + + onConnectionstatechange(event: Event) { + Log.d(TAG, `onConnectionstatechange: ${this.peerConnection.connectionState}`); + let params: DataMap = new Map(); + params.set('event', 'peerConnectionState'); + params.set('state', this.peerConnection.connectionState); + this.sendEvent(params); + } + + onDatachannel(event: RTCDataChannelEvent) { + let flutterId = this.stateProvider.getNextStreamUUID(); + let params: DataMap = new Map(); + params.set('event', 'didOpenDataChannel'); + params.set('id', event.channel.id!); + params.set('label', event.channel.label); + params.set('flutterId', flutterId); + Log.d(TAG, `onDatachannel params: ${dataMapToString(params)}`); + this.dataChannels.set(flutterId, event.channel); + //订阅事件 + this.registerDataChannelObserver(flutterId, event.channel); + this.sendEvent(params); + } + + mediaStreamToMap(stream: MediaStream): DataMap { + let 
params: DataMap = new Map() + params.set('streamId', stream.id); + params.set('ownerTag', this.id); + let audioTracks: DataArray = []; + let videoTracks: DataArray = [] + for (let track of stream.getAudioTracks()) { + audioTracks.push(mediaTrackToMap(track)) + } + for (let track of stream.getVideoTracks()) { + videoTracks.push(mediaTrackToMap(track)) + } + params.set('audioTracks', audioTracks); + params.set('videoTracks', videoTracks); + return params; + } + + restartIce(): void { + this.peerConnection.restartIce(); + } + + close(): void { + this.peerConnection.close(); + this.remoteStreams.clear(); + this.remoteTracks.clear(); + this.dataChannels.clear(); + } + + dispose() { + this.close(); + // this.peerConnection.dispose(); + this.eventChannel.setStreamHandler(null); + } + + createDataChannel(label: string, config: ConstraintsMap, result: MethodResult) { + let init: DataChannelInit = new DataChannelInit(); + if (config.getType('id') === ObjectType.Number) { + init.id = config.getNumber('id'); + } + if (config.getType('ordered') === ObjectType.Boolean) { + init.ordered = config.getBoolean('ordered'); + } + if (config.getType('maxRetransmits') === ObjectType.Number) { + init.maxRetransmits = config.getNumber('maxRetransmits'); + } + if (config.getType('protocol') === ObjectType.String) { + init.protocol = config.getString('protocol'); + } + if (config.getType('negotiated') === ObjectType.Boolean) { + init.negotiated = config.getBoolean('negotiated'); + } + Log.d(TAG, 'createDataChannel init = ' + JSON.stringify(init)); + let dataChannel: RTCDataChannel | undefined = this.peerConnection.createDataChannel(label, init); + let flutterId: string = this.getNextDataChannelUUID(); + Log.d(TAG, 'createDataChannel flutterId = ' + flutterId); + if (!dataChannel) { + this.resultError('createDataChannel', 'Can\'t create data-channel for id: ' + init.id, result); + return; + } + this.dataChannels.set(flutterId, dataChannel); + this.registerDataChannelObserver(flutterId, 
dataChannel); + let params: ConstraintsMap = new ConstraintsMap(); + params.putNumber('id', dataChannel.id); + params.putString('label', dataChannel.label); + params.putString('flutterId', flutterId); + result.success(params.toMap()); + Log.d(TAG, 'createDataChannel params = ' + params.toString()); + } + + dataChannelClose(dataChannelId: string) { + let dataChannel: RTCDataChannel = this.dataChannels.get(dataChannelId); + if (dataChannel) { + dataChannel.close(); + this.dataChannels.remove(dataChannelId); + } else { + Log.d(TAG, 'dataChannelClose() dataChannel is null'); + } + } + + dataChannelSend(dataChannelId: string, byteBuffer: ArrayBuffer, isBinary: boolean) { + let dataChannel: RTCDataChannel = this.dataChannels.get(dataChannelId); + if (dataChannel) { + // TODO isBinary 二进制判断暂未处理,看后续是否需要 + dataChannel.send(byteBuffer); + } else { + Log.d(TAG, 'dataChannelSend() dataChannel is null'); + } + } + + getRtpSenderById(id: string): RTCRtpSender | null { + let senders: Array = this.peerConnection.getSenders(); + Log.d(TAG, 'senders length : ' + senders.length); + for (let sender of senders) { + Log.d(TAG, 'sender id : ' + sender.id); + Log.d(TAG, ' id : ' + id); + if (id === sender.id) { + return sender; + } + } + return null; + } + + getRtpReceiverById(id: string): RTCRtpReceiver | null { + let receivers: Array = this.peerConnection.getReceivers(); + Log.d(TAG, 'receivers length : ' + receivers.length); + for (let receiver of receivers) { + Log.d(TAG, 'receiver id : ' + receiver.id); + Log.d(TAG, ' id : ' + id); + if (id === receiver.id) { + return receiver; + } + } + return null; + } + + /* TODO 不能添加 protected 外部调用不了 20240914 依照安卓逻辑适配先 */ + getTransceiversTrack(trackId: string): MediaStreamTrack | null { + // if (this.configuration.sdpSemantics != this.peerConnection!.SdpSemantics.UNIFIED_PLAN) { + // return null; + // } + let track: MediaStreamTrack | null = null; + let transceivers: RTCRtpTransceiver[] = this.peerConnection.getTransceivers(); + for (let i = 0; 
i < transceivers.length; i++) { + let receiver: RTCRtpReceiver = transceivers[i].receiver; + if (receiver != null) { + if (receiver.track != null && receiver.track.id === trackId) { + track = receiver.track; + break; + } + } + } + + return track; + } + + getTransceivers(result: MethodResult) { + let transceivers = this.peerConnection.getTransceivers(); + let transceiversParams: ConstraintsArray = new ConstraintsArray(); + for (let transceiver of transceivers) { + let transceiverId = transceiver.mid; + if (!transceiverId) { + transceiverId = this.stateProvider.getNextStreamUUID(); + } + transceiversParams.pushMap(transceiverToMap(transceiverId, transceiver)); + } + let params = new ConstraintsMap(); + params.putArray('transceivers', transceiversParams.toArrayList()); + result.success(params.toMap()); + } + + getSenders(result: MethodResult) { + let senders = this.peerConnection.getSenders(); + let sendersParams = new ConstraintsArray(); + for (let sender of senders) { + sendersParams.pushMap(rtpSenderToMap(sender)); + } + let params = new ConstraintsMap(); + params.putArray('senders', sendersParams.toArrayList()); + result.success(params.toMap()); + } + + getReceivers(result: MethodResult) { + let receivers = this.peerConnection.getReceivers(); + let receiversParams = new ConstraintsArray(); + for (let receiver of receivers) { + receiversParams.pushMap(rtpReceiverToMap(receiver)); + } + let params = new ConstraintsMap(); + params.putArray('receivers', receiversParams.toArrayList()); + result.success(params.toMap()); + } + + addTransceiver(trackOrKind: MediaStreamTrack | string, result: MethodResult, transceiverInit?: DataMap) { + let transceiver: RTCRtpTransceiver | undefined = undefined + transceiver = this.peerConnection.addTransceiver(trackOrKind, this.mapToRtpTransceiverInit(transceiverInit)) + let transceiverId = transceiver?.mid ?? 
this.stateProvider.getNextStreamUUID(); + this.transceivers.set(transceiverId, transceiver) + result.success(transceiverToMap(transceiverId!, transceiver!)) + } + + mapToRtpTransceiverInit(parameters?: DataMap): RTCRtpTransceiverInit | undefined { + if (!parameters) { + return undefined; + } + let streams: MediaStream[] = parameters.get('streams') ?? []; + let encodingsParams: DataMap[] = parameters.get('sendEncodings'); + let direction: RTCRtpTransceiverDirection = parameters.get('direction') ?? 'sendrecv'; + let sendEncodings: RTCRtpEncodingParameters[] = []; + let init: RTCRtpTransceiverInit | undefined = undefined; + if (encodingsParams != null) { + for (let i = 0; i < encodingsParams.length; i++) { + let params: DataMap = encodingsParams[i]; + sendEncodings.push(this.mapToEncoding(params)); + } + //sendEncodings会造成音频流失效 先注释, sendEncodings: sendEncodings + init = { + direction: direction, streams: streams + } + } else { + init = { + direction: direction, + } + } + return init; + } + + addTrack(track: MediaStreamTrack, streams: MediaStream[], result: MethodResult) { + let sender: RTCRtpSender = this.peerConnection.addTrack(track, ...streams); + result.success(rtpSenderToMap(sender)); + } + + removeTrack(senderId: string, result: MethodResult) { + let sender: RTCRtpSender | null = this.getRtpSenderById(senderId); + if (!sender) { + this.resultError('removeTrack', 'sender is null', result); + return; + } + let res: boolean = false; + if (this.peerConnection) { + this.peerConnection.removeTrack(sender) + res = true + } + let params: DataMap = new Map(); + params.set('result', res); + result.success(params); + } + + async rtpSenderSetParameters(rtpSenderId: string, parameters: DataMap, result: MethodResult) { + Log.d(TAG, 'rtpSenderSetParameters 1111 '); + let sender: RTCRtpSender | null = this.getRtpSenderById(rtpSenderId); + if (!sender) { + this.resultError('rtpSenderSetParameters', 'sender is null', result); + return; + } + Log.d(TAG, 'rtpSenderSetParameters 
2222 '); + let updatedParameters: RTCRtpSendParameters = this.updateRtpParameters(sender.getParameters(), parameters) + let params: DataMap = new Map() + Log.d(TAG, 'rtpSenderSetParameters 3333 '); + try { + await sender.setParameters(updatedParameters) + params.set('result', true) + } catch (e) { + params.set('result', false) + } + Log.d(TAG, 'rtpSenderSetParameters 4444 '); + result.success(params) + } + + async rtpSenderSetTrack(rtpSenderId: string, track: MediaStreamTrack | null, result: MethodResult, replace: boolean) { + let sender: RTCRtpSender | null = this.getRtpSenderById(rtpSenderId); + if (!sender) { + this.resultError('rtpSenderSetTrack', 'sender is null', result); + return; + } + try { + await sender.replaceTrack(track) + result.success(null) + } catch (e) { + this.resultError('rtpSenderSetTrack', JSON.stringify(e), result); + } + } + + mapToEncoding(parameters: DataMap): RTCRtpEncodingParameters { + let encoding: RTCRtpEncodingParameters = {} + if (parameters.get('active')) { + encoding.active = parameters.get('active'); + } + if (parameters.get('maxBitrate')) { + encoding.maxBitrate = parameters.get('maxBitrate'); + } + if (parameters.get('maxFramerate')) { + encoding.maxBitrate = parameters.get('maxFramerate'); + } + if (parameters.get('scaleResolutionDownBy')) { + encoding.maxBitrate = parameters.get('scaleResolutionDownBy'); + } + return encoding + } + + getStats(result: MethodResult, selector?: MediaStreamTrack) { + if (selector) { + this.peerConnection.getStats(selector).then((rtcStatsReport) => { + this.handleStatsReport(rtcStatsReport, result); + }) + } else { + this.peerConnection.getStats().then((rtcStatsReport) => { + this.handleStatsReport(rtcStatsReport, result); + }) + } + } + + private handleStatsReport(rtcStatsReport: RTCStatsReport, result: MethodResult): void { + let reports: Map = rtcStatsReport.stats; + let params: ConstraintsMap = new ConstraintsMap(); + let stats: ConstraintsArray = new ConstraintsArray(); + reports.forEach((v: 
ESObject, k: string) => { + let report_map: DataMap = new Map(); + report_map.set('id', v.id) + report_map.set('type', v.type) + report_map.set('timestamp', v.timestamp) + let v_map: DataMap = new Map(); + Object.keys(v).forEach((value, key) => { + Log.i(TAG, 'reports.forEach : ' + JSON.stringify(value)) + v_map.set(value, v[value]); + }) + report_map.set('values', v_map); + }) + params.putArray('stats', stats.toArrayList()); + Log.i(TAG, 'handleStatsReport stats = ' + Array.from(params.toMap().entries())) + result.success(params.toMap()); + } + + getStatsForTrack(trackId: string, result: MethodResult): void { + if (trackId == null || trackId === '') { + this.resultError('peerConnectionGetStats', 'MediaStreamTrack not found for id: ' + trackId, result); + return; + } + if (this.peerConnection == null) { + this.resultError('peerConnectionGetStats', 'peerConnection is null!', result); + return; + } + + let sender: RTCRtpSender | undefined = undefined + let receiver: RTCRtpReceiver | undefined = undefined + this.peerConnection.getSenders().forEach((s) => { + if (s.track !== null && trackId === s.track.id) { + sender = s; + return; // 退出循环 + } + }); + this.peerConnection.getReceivers().forEach((r) => { + if (r.track !== null && trackId === r.track.id) { + receiver = r; + return; // 退出循环 + } + }); + if (sender !== undefined) { + (sender as RTCRtpSender).getStats().then((rtcStatsReport) => { + this.handleStatsReport(rtcStatsReport, result); + }) + } else if (receiver != undefined) { + (receiver as RTCRtpReceiver).getStats().then((rtcStatsReport) => { + this.handleStatsReport(rtcStatsReport, result); + }) + } else { + this.resultError('peerConnectionGetStats', 'MediaStreamTrack not found for id: ' + trackId, result); + } + } + + getNextDataChannelUUID() { + let uuid: string; + do { + uuid = util.generateRandomUUID(false); + } while (this.dataChannels.get(uuid) != null); + + return uuid; + } + + updateRtpParameters(parameters: RTCRtpSendParameters, newParameters: DataMap): 
RTCRtpSendParameters { + let encodings: DataMap[] = newParameters.get('encodings') + let nativeEncodings: RTCRtpEncodingParameters[] = parameters.encodings + let degradationPreference: string = newParameters.get('degradationPreference') + if (degradationPreference != null) { + //此属性被注掉掉了 + // parameters.degradationPreference = RtpParameters.DegradationPreference.valueOf(degradationPreference.toUpperCase().replace('-', '_')); + } + + for (let encoding of encodings) { + let currentParams: RTCRtpEncodingParameters | null = null + let rid: string = encoding.get('rid') + if (rid != null) { + for (let x of nativeEncodings) { + if (x.rid == rid) { + currentParams = x + break + } + } + } + + if (currentParams == null) { + let idx: number = encodings.indexOf(encoding) + if (idx < nativeEncodings.length) { + currentParams = nativeEncodings[idx] + } + } else { + if (encoding.has('active')) { + currentParams.active = encoding.get('active') + } + if (encoding.has('maxBitrate')) { + currentParams.maxBitrate = encoding.get('maxBitrate') + } + // if (encoding.has('minBitrate')) { + // currentParams.minBitrate = encoding.get('minBitrate') + // } + if (encoding.has('maxFramerate')) { + currentParams.maxFramerate = encoding.get('maxFramerate') + } + // if (encoding.has('numTemporalLayers')) { + // currentParams.numTemporalLayers = encoding.get('numTemporalLayers') + // } + if (encoding.has('scaleResolutionDownBy')) { + currentParams.scaleResolutionDownBy = encoding.get('scaleResolutionDownBy') + } + } + } + return parameters + } + + rtpSenderSetStreams(rtpSenderId: string, streams: MediaStream[], result: MethodResult) { + let sender = this.getRtpSenderById(rtpSenderId) + if (sender == null) { + this.resultError('rtpSenderSetStream', 'sender is null', result) + return + } + sender.setStreams(...streams) + result.success(null) + } + + rtpTransceiverGetCurrentDirection(transceiverId: string, result: MethodResult): void { + const transceiver = this.getRtpTransceiverById(transceiverId); + 
if (!transceiver) { + this.resultError('rtpTransceiverGetCurrentDirection', 'transceiver is null', result); + return; + } + const direction = transceiver.direction; + if (!direction) { + result.success(null); + } else { + const params = new ConstraintsMap(); + params.putString('result', direction); + result.success(params.toMap()); + } + } + + rtpTransceiverStop(transceiverId: string, result: MethodResult): void { + const transceiver = this.getRtpTransceiverById(transceiverId); + if (!transceiver) { + this.resultError('rtpTransceiverStop', 'transceiver is null', result); + return; + } + transceiver.stop(); + result.success(null); + } + + private getRtpTransceiverById(id: string): RTCRtpTransceiver | undefined { + let transceiver: RTCRtpTransceiver | undefined = this.transceivers.get(id); + if (!transceiver) { + transceiver = + this.peerConnection.getTransceivers().filter((transceiver: RTCRtpTransceiver) => { + return transceiver.mid === id + })[0] + } + return transceiver; + } + + rtpTransceiverSetDirection(direction: RTCRtpTransceiverDirection, transceiverId: string, result: MethodResult) { + let transceiver = this.getRtpTransceiverById(transceiverId); + if (!transceiver) { + this.resultError('rtpTransceiverSetDirection', 'transceiver is null', result); + return; + } + transceiver.direction = direction + result.success(null); + } + + rtpTransceiverGetDirection(transceiverId: string, result: MethodResult) { + let transceiver = this.getRtpTransceiverById(transceiverId); + if (!transceiver) { + this.resultError('rtpTransceiverGetDirection', 'transceiver is null', result); + return; + } + let params: ConstraintsMap = new ConstraintsMap(); + params.putString('result', transceiver.direction); + result.success(params.toMap()); + } + + /* + * Sets the priority of the encoder (codec) used in this transmitter for the sender and receiver. 
+ * */ + rtpTransceiverSetCodecPreferences(transceiverId: string, codecs: DataArray, result: MethodResult): void { + const transceiver = this.getRtpTransceiverById(transceiverId); + if (!transceiver) { + this.resultError('rtpTransceiverSetCodecPreferences', 'transceiver is null', result); + return; + } + let preferedCodecs: RTCRtpCodec[] = []; + + codecs.forEach((element: DataMap) => { + let codecMap = new ConstraintsMap(element); + let codec: RTCRtpCodec = { + mimeType: '', clockRate: 0 + }; + // TODO 缺少 name、kind + codec.mimeType = codecMap.getString('mimeType'); + codec.clockRate = codecMap.getNumber('clockRate'); + if (codecMap.getType('payloadType') === ObjectType.Number) { + codec.payloadType = codecMap.getNumber('payloadType'); + } + if (codecMap.getType('numChannels') === ObjectType.Number) { + codec.channels = codecMap.getNumber('numChannels'); + } + if (codecMap.getType('sdpFmtpLine') === ObjectType.String) { + codec.sdpFmtpLine = codecMap.getString('sdpFmtpLine'); + } else { + codec.sdpFmtpLine = ''; + } + preferedCodecs.push(codec); + }); + + transceiver.setCodecPreferences(preferedCodecs); + result.success(null); + Log.d(TAG, 'rtpTransceiverSetCodecPreferences success! '); + } +} \ No newline at end of file diff --git a/ohos/src/main/ets/StateProvider.ets b/ohos/src/main/ets/StateProvider.ets new file mode 100644 index 0000000000000000000000000000000000000000..382715f403091e39d68732233dbe8803adcf3b6a --- /dev/null +++ b/ohos/src/main/ets/StateProvider.ets @@ -0,0 +1,49 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. 
+* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import { MediaStream, MediaStreamTrack, PeerConnectionFactory } from 'libohos_webrtc.so' +import { BinaryMessenger, } from '@ohos/flutter_ohos'; +import type { Observer } from './DependencyRelated'; +import { UIAbility, common } from '@kit.AbilityKit'; + +export default interface StateProvider { + putLocalStream(streamId: string, stream: MediaStream): boolean; + + putLocalTrack(trackId: string, track: MediaStreamTrack): boolean; + + getLocalTrack(trackId: string): MediaStreamTrack | undefined; + + getNextStreamUUID(): string; + + getNextTrackUUID(): string; + + getPeerConnectionFactory(): PeerConnectionFactory; + + getPeerConnectionObserver(peerConnectionId: string): Observer | undefined; + + getAbility(): UIAbility | null; + + getApplicationContext(): common.ApplicationContext | undefined; + + getMessenger(): BinaryMessenger; +} \ No newline at end of file diff --git a/ohos/src/main/ets/SurfaceTextureRenderer.ets b/ohos/src/main/ets/SurfaceTextureRenderer.ets new file mode 100644 index 0000000000000000000000000000000000000000..d3b1ac01eb6a41f2509856f1174d8daa08391180 --- /dev/null +++ b/ohos/src/main/ets/SurfaceTextureRenderer.ets @@ -0,0 +1,95 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. 
+* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. +*/ + +import { TextureRegistry, SurfaceTextureEntry, Log } from '@ohos/flutter_ohos'; +import { MediaStream, NativeVideoRenderer, MediaStreamTrack } from 'libohos_webrtc.so'; +import { + beginConfig, + getCameraDevices, + getCameraInput, + getCameraManager, + getCaptureSession, + getPreviewOutput, + getSupportedOutputCapability, + setSessionCameraInput, + setSessionPreviewOutput, + startSession, + stopSession, + releaseCamera, + isTorchSupported, + setTorchMode, + switchCamera +} from './utils/CameraUtil'; +import { common } from '@kit.AbilityKit'; +import StateProvider from './StateProvider'; + +const TAG: string = 'FlutterWebRTCPlugin-SurfaceTextureRenderer'; + +export default class SurfaceTextureRenderer extends NativeVideoRenderer { + private textures: TextureRegistry | null = null + private id: number = -1 + private mediaStream: MediaStream | null = null + private ownerTag: string = '' + private stateProvider: StateProvider | null = null; + private surfaceTextureEntry: SurfaceTextureEntry | null = null + private renderer: NativeVideoRenderer | null = null; + + constructor(name?: string) { + super(); + } + + setStateProvider(stateProvider: StateProvider) { + this.stateProvider = stateProvider; + } + + surfaceCreated(entry: SurfaceTextureEntry) { + this.surfaceTextureEntry = entry; + let surfaceId = this.surfaceTextureEntry!.getSurfaceId().toString(); + Log.d(TAG, 'surfaceCreated surfaceId: ' + surfaceId); + } + + setVideoTrack(track: MediaStreamTrack | null): void { + Log.d(TAG, 
'setVideoTrack track.id: ' + track?.id); + this.renderer?.setVideoTrack(track); + } + + startRender() { + this.startSession(); + } + + stopRender() { + this.stopSession(); + this.renderer?.release(); + } + + private startSession() { + Log.d(TAG, `startSession`); + getCameraManager(getContext() as common.BaseContext); + switchCamera(0, this.surfaceTextureEntry!.getSurfaceId().toString(), 0); + } + + private stopSession() { + Log.d(TAG, `stopSession`); + releaseCamera(); + } +} \ No newline at end of file diff --git a/ohos/src/main/ets/audio/AudioSwitchManager.ets b/ohos/src/main/ets/audio/AudioSwitchManager.ets new file mode 100644 index 0000000000000000000000000000000000000000..d954b84d49e794cc2170352c36105318c60268b9 --- /dev/null +++ b/ohos/src/main/ets/audio/AudioSwitchManager.ets @@ -0,0 +1,294 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. +*/ + +import { audio } from '@kit.AudioKit'; +import { Any, Log } from '@ohos/flutter_ohos'; +import { JSON } from '@kit.ArkTS'; +import { DataMap, ObjectType } from '../utils/ObjectType'; +import { + AudioCapturerSamplesReadyEvent, + AudioDeviceModule, + AudioDeviceModuleOptions, + AudioErrorEvent, + AudioStateChangeEvent +} from 'libohos_webrtc.so'; +import { isNotEmpty } from '../utils/Utils'; +import { ConstraintsMap } from '../utils/ConstraintsMap'; +import { BusinessError } from '@kit.BasicServicesKit'; + +const TAG = 'AudioSwitchManager' + +export default class AudioSwitchManager { + public static instance: AudioSwitchManager; + private audioManager: audio.AudioManager; + private audioRoutingManager: audio.AudioRoutingManager; + private audioRenderer: audio.AudioRenderer | null = null; + private audioCapturer: audio.AudioCapturer | null = null; + private isActive: boolean = false; + private context?: Context; + private adm?: AudioDeviceModule; + private audioStreamInfo: audio.AudioStreamInfo = { + samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000, + channels: audio.AudioChannel.CHANNEL_2, + sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, + encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW + }; + private audioRendererInfo: audio.AudioRendererInfo = { + usage: audio.StreamUsage.STREAM_USAGE_UNKNOWN, + rendererFlags: 0 + }; + private audioCapturerInfo: audio.AudioCapturerInfo = { + source: audio.SourceType.SOURCE_TYPE_MIC, + capturerFlags: 0 + }; + + constructor() { + this.audioManager = audio.getAudioManager(); + this.audioRoutingManager = this.audioManager.getRoutingManager(); + this.createAudioRenderer(); + this.createAudioCapturer(); + } + + public static 
getInstance(): AudioSwitchManager {
+    if (!AudioSwitchManager.instance) {
+      AudioSwitchManager.instance = new AudioSwitchManager();
+    }
+    return AudioSwitchManager.instance;
+  }
+
+  setContext(context: Context) {
+    // FIX: was `this.context = this.context;` — a self-assignment that left the
+    // stored context permanently undefined. Store the incoming parameter instead.
+    this.context = context;
+  }
+
+  // Returns the native audio device module, or undefined before
+  // createAudioDeviceModule() has run.
+  getAudioDeviceModule(): AudioDeviceModule | undefined {
+    return this.adm;
+  }
+
+  // (Re)creates the renderer from the current stream/renderer info and attaches
+  // the audio-interrupt listener. Any previous renderer reference is dropped.
+  async createAudioRenderer() {
+    this.audioRenderer = null;
+    let audioRendererOptions: audio.AudioRendererOptions = {
+      streamInfo: this.audioStreamInfo,
+      rendererInfo: this.audioRendererInfo
+    };
+    try {
+      this.audioRenderer = await audio.createAudioRenderer(audioRendererOptions);
+      this.onAudioInterrupt();
+    } catch (e) {
+      Log.e(TAG, 'createAudioRenderer error , msg: ' + JSON.stringify(e));
+    }
+  }
+
+  // (Re)creates the capturer from the current stream/capturer info.
+  async createAudioCapturer() {
+    this.audioCapturer = null;
+    let audioCapturerOptions: audio.AudioCapturerOptions = {
+      streamInfo: this.audioStreamInfo,
+      capturerInfo: this.audioCapturerInfo
+    };
+    try {
+      this.audioCapturer = await audio.createAudioCapturer(audioCapturerOptions);
+    } catch (e) {
+      Log.e(TAG, 'createAudioCapturer error , msg: ' + JSON.stringify(e));
+    }
+  }
+
+  // Builds the WebRTC AudioDeviceModule with stereo + hardware AEC/NS enabled
+  // and wires logging callbacks for capturer/renderer state and errors.
+  async createAudioDeviceModule() {
+    this.adm = new AudioDeviceModule({
+      useStereoInput: true,
+      useStereoOutput: true,
+      useHardwareAcousticEchoCanceler: true,
+      useHardwareNoiseSuppressor: true
+    } as AudioDeviceModuleOptions);
+    this.adm.oncapturererror = (event: AudioErrorEvent) => {
+      Log.e(TAG, 'oncapturererror: ' + JSON.stringify(event));
+    };
+    this.adm.oncapturerstatechange = (event: AudioStateChangeEvent) => {
+      Log.e(TAG, 'oncapturerstatechange: ' + JSON.stringify(event));
+    };
+    this.adm.oncapturersamplesready = (event: AudioCapturerSamplesReadyEvent) => {
+      Log.e(TAG, 'oncapturersamplesready: ' + JSON.stringify(event));
+    };
+    this.adm.onrenderererror = (event: AudioErrorEvent) => {
+      Log.e(TAG, 'onrenderererror: ' + JSON.stringify(event));
+    };
+    this.adm.onrendererstatechange = (event: AudioStateChangeEvent) => {
+      Log.e(TAG,
'onrendererstatechange: ' + JSON.stringify(event)); + }; + Log.d(TAG, 'this.adm: ' + JSON.stringify(this.adm)); + } + + start() { + this.audioRenderer?.start().then(() => { + Log.d(TAG, 'Renderer started'); + this.isActive = true; + }).catch((err: BusinessError) => { + Log.e(TAG, `Renderer started ERROR: ${err}`); + }); + } + + stop() { + Log.d(TAG, 'Stopping the audio manager...'); + this.audioRenderer?.stop().then(() => { + Log.d(TAG, 'Renderer stopped'); + this.isActive = false; + }).catch((err: BusinessError) => { + Log.e(TAG, `Renderer stopped ERROR: ${err}`); + }); + } + + setMicrophoneMute(mute: boolean) { + this.adm?.setMicrophoneMute(mute); + } + + getAudioOutputDevices() { + return this.audioRoutingManager?.getDevicesSync(audio.DeviceFlag.OUTPUT_DEVICES_FLAG); + } + + getAllAudioDevices() { + return this.audioRoutingManager?.getDevicesSync(audio.DeviceFlag.ALL_DEVICES_FLAG); + } + + selectAudioOutput(deviceId: number) { + let device = this.getAudioOutputDevices()?.find(d => d.id === deviceId); + if (!device) { + Log.w(TAG, 'selectAudioOutput, Device with id ' + deviceId + ' not found'); + return; + } + Log.d(TAG, 'selectAudioOutput deviceType = ' + device.deviceType); + this.setCommunicationDevice(device.deviceType); + } + + setCommunicationDevice(deviceType: number) { + this.audioRoutingManager?.setCommunicationDevice(deviceType, this.isActive).then(() => { + Log.d(TAG, 'setCommunicationDevice then deviceType=' + deviceType); + }).catch(() => { + Log.e(TAG, 'setCommunicationDevice catch deviceType=' + deviceType); + }) + } + + enableSpeakerphone(enable: boolean) { + if (enable) { + this.setCommunicationDevice(audio.DeviceType.SPEAKER); + } else { + let devices = this.getAudioOutputDevices(); + if (!devices || devices.length === 0) { + Log.e(TAG, 'enableSpeakerphone, no devices found'); + return; + } + let device = devices?.find(device => device.deviceType !== audio.DeviceType.SPEAKER); + this.setCommunicationDevice(device?.deviceType ?? 
devices[0].deviceType);
+    }
+  }
+
+  // Prefers a Bluetooth SCO or wired headset as the output device when one is
+  // connected; otherwise leaves routing unchanged.
+  enableSpeakerButPreferBluetooth() {
+    let devices = this.getAllAudioDevices();
+    if (!devices || devices.length === 0) {
+      Log.w(TAG, 'enableSpeakerButPreferBluetooth, No audio devices available');
+      return;
+    }
+    let audioDevice = devices.find(data => data.deviceType === audio.DeviceType.BLUETOOTH_SCO ||
+      data.deviceType === audio.DeviceType.WIRED_HEADSET);
+    if (audioDevice) {
+      // FIX: selectAudioOutput() takes a device *id* and looks the device up via
+      // `d.id === deviceId`; the original passed `deviceType`, so the lookup
+      // failed for virtually every device and the selection was silently dropped.
+      this.selectAudioOutput(audioDevice.id);
+    }
+  }
+
+  // Applies optional 'ohosStreamUsage' / 'ohosSourceType' constraints, then
+  // rebuilds the renderer and capturer so the new info takes effect.
+  setAudioConfiguration(configuration: ConstraintsMap) {
+    const setConfig = (key: string, type: ObjectType, setter: (value: Any) => void) => {
+      let value: Any = null;
+      if (configuration.getType(key) === type) {
+        value = configuration.get(key);
+      }
+      if (isNotEmpty(value)) {
+        setter(value);
+      }
+    };
+
+    setConfig('ohosStreamUsage', ObjectType.String, (value: string) => this.setStreamUsage(value));
+    setConfig('ohosSourceType', ObjectType.String, (value: string) => this.setSourceType(value));
+
+    this.createAudioRenderer();
+    this.createAudioCapturer();
+  }
+
+  // Maps a constraint string to audio.StreamUsage; unknown strings are ignored.
+  setStreamUsage(value: string): void {
+    // Restored the generic type arguments (bare `Map` does not satisfy Map<K, V>).
+    let streamUsageMap: Map<string, audio.StreamUsage> = new Map([
+      ['unknown', audio.StreamUsage.STREAM_USAGE_UNKNOWN],
+      ['music', audio.StreamUsage.STREAM_USAGE_MUSIC],
+      ['voiceCommunication', audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION],
+      ['voiceAssistant', audio.StreamUsage.STREAM_USAGE_VOICE_ASSISTANT],
+      ['alarm', audio.StreamUsage.STREAM_USAGE_ALARM],
+      ['voiceMessage', audio.StreamUsage.STREAM_USAGE_VOICE_MESSAGE],
+      ['ringtone', audio.StreamUsage.STREAM_USAGE_RINGTONE],
+      ['notification', audio.StreamUsage.STREAM_USAGE_NOTIFICATION],
+      ['accessibility', audio.StreamUsage.STREAM_USAGE_ACCESSIBILITY],
+      ['movie', audio.StreamUsage.STREAM_USAGE_MOVIE],
+      ['game', audio.StreamUsage.STREAM_USAGE_GAME],
+      ['audioBook', audio.StreamUsage.STREAM_USAGE_AUDIOBOOK],
+      ['navigation', audio.StreamUsage.STREAM_USAGE_NAVIGATION],
+      ['videoCommunication', audio.StreamUsage.STREAM_USAGE_VIDEO_COMMUNICATION]
+    ]);
+    let
streamUsage = streamUsageMap.get(value); + if (streamUsage != undefined) { + this.audioRendererInfo.usage = streamUsage; + } + } + + setSourceType(value: string): void { + let sourceTypeMap: Map = new Map([ + ['invalid', audio.SourceType.SOURCE_TYPE_INVALID], + ['mic', audio.SourceType.SOURCE_TYPE_MIC], + ['voiceRecognition', audio.SourceType.SOURCE_TYPE_VOICE_RECOGNITION], + ['voiceCommunication', audio.SourceType.SOURCE_TYPE_VOICE_COMMUNICATION], + ['voiceMessage', audio.SourceType.SOURCE_TYPE_VOICE_MESSAGE], + ['camcorder', audio.SourceType.SOURCE_TYPE_CAMCORDER] + ]); + let sourceType = sourceTypeMap.get(value); + if (sourceType != undefined) { + this.audioCapturerInfo.source = sourceType; + } + } + + /** + * 音频打断事件监听 + * AudioRenderer对象在start事件发生时会主动获取焦点,在pause、stop等事件发生时会主动释放焦点 + */ + async onAudioInterrupt() { + this.audioRenderer?.on('audioInterrupt', (interruptEvent: audio.InterruptEvent) => { + // 在发生音频打断事件时,audioRenderer收到interruptEvent回调,此处可根据其内容做相应处理。 + // 1、可选:读取interruptEvent.forceType的类型,判断系统是否已强制执行相应操作。 + // 2、必选:读取interruptEvent.hintType的类型,做出相应的处理。 + Log.d(TAG, `onAudioInterrupt: interruptEvent.forceType=${interruptEvent.forceType}`); + Log.d(TAG, `onAudioInterrupt: interruptEvent.hintType=${interruptEvent.hintType}`); + }); + } + + /** + * 当前发声设备切换回调监听 + */ + async onPreferOutputDeviceChangeForRendererInfo() { + this.audioRoutingManager?.on('preferOutputDeviceChangeForRendererInfo', this.audioRendererInfo, + (desc: audio.AudioDeviceDescriptors) => { + Log.d(TAG, `onPreferOutputDeviceChangeForRendererInfo device change To : ${desc[0].deviceType}`); + }); + } +} \ No newline at end of file diff --git a/ohos/src/main/ets/components/VideoRender.ets b/ohos/src/main/ets/components/VideoRender.ets new file mode 100644 index 0000000000000000000000000000000000000000..b0fd48314926b801404a85ef48fc0bb34b9dcbee --- /dev/null +++ b/ohos/src/main/ets/components/VideoRender.ets @@ -0,0 +1,58 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) 
Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. +*/ + +import type { ComponentHandler } from '../DependencyRelated'; +import { Params } from '@ohos/flutter_ohos/src/main/ets/plugin/platform/PlatformView'; +import { VideoRenderController } from './VideoRenderController'; + +const TAG: string = 'FlutterWebRTCPlugin-[VideoRender]'; + +@Component +export struct VideoRender { + @Prop params: Params + videoRenderHandler: ComponentHandler = this.params.platformView as ComponentHandler + private mXComponentController: VideoRenderController = new VideoRenderController(); + + aboutToAppear(): void { + + } + + build() { + Column() { + XComponent({ + type: XComponentType.SURFACE, + controller: this.mXComponentController + }) + .width('100%') + .height('100%') + .onLoad((context?) 
=> { + this.videoRenderHandler.sendMessage(this.mXComponentController.getXComponentSurfaceId()) + }) + .onDestroy(() => { + console.log(TAG, 'onDestroy'); + }) + } + .height('100%') + .width('100%') + } +} diff --git a/ohos/src/main/ets/components/VideoRenderController.ets b/ohos/src/main/ets/components/VideoRenderController.ets new file mode 100644 index 0000000000000000000000000000000000000000..1248970069ed5b6cd730febf2087ad559a083ee7 --- /dev/null +++ b/ohos/src/main/ets/components/VideoRenderController.ets @@ -0,0 +1,40 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import { Log } from '@ohos/flutter_ohos'; + +const TAG: string = 'FlutterWebRTCPlugin-[VideoRenderController]'; + +export class VideoRenderController extends XComponentController { + onSurfaceCreated(surfaceId: string): void { + Log.d(TAG, `onSurfaceCreated surfaceId: ${surfaceId}`); + } + + onSurfaceChanged(surfaceId: string, rect: SurfaceRect): void { + Log.d(TAG, `onSurfaceChanged surfaceId: ${surfaceId}, rect: ${JSON.stringify(rect)}`); + } + + onSurfaceDestroyed(surfaceId: string): void { + Log.d(TAG, `onSurfaceDestroyed surfaceId: ${surfaceId}`); + } +} \ No newline at end of file diff --git a/ohos/src/main/ets/components/VideoRenderFactory.ets b/ohos/src/main/ets/components/VideoRenderFactory.ets new file mode 100644 index 0000000000000000000000000000000000000000..40e8599db206de49f7981a31e0ac7ab30acdc430 --- /dev/null +++ b/ohos/src/main/ets/components/VideoRenderFactory.ets @@ -0,0 +1,39 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. +*/ + +import common from '@ohos.app.ability.common'; +import { BinaryMessenger, PlatformView, PlatformViewFactory, StandardMessageCodec } from '@ohos/flutter_ohos'; +import { VideoRenderHandler } from './VideoRenderHandler'; + +export class VideoRenderFactory extends PlatformViewFactory { + private messenger: BinaryMessenger; + + constructor(messenger: BinaryMessenger) { + super(StandardMessageCodec.INSTANCE); + this.messenger = messenger; + } + + public create(context: common.Context, viewId: number, args: object): PlatformView { + return new VideoRenderHandler(context, this.messenger, viewId, args); + } +} diff --git a/ohos/src/main/ets/components/VideoRenderHandler.ets b/ohos/src/main/ets/components/VideoRenderHandler.ets new file mode 100644 index 0000000000000000000000000000000000000000..aea3d73e7ade103ddf406b738235545afa5fe029 --- /dev/null +++ b/ohos/src/main/ets/components/VideoRenderHandler.ets @@ -0,0 +1,82 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. 
+* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. +*/ + +import { + MethodCall, + MethodCallHandler, + MethodResult, + PlatformView, + MethodChannel, + BinaryMessenger, + StandardMethodCodec, + Log, +} from '@ohos/flutter_ohos'; +import { Params } from '@ohos/flutter_ohos/src/main/ets/plugin/platform/PlatformView'; +import { VideoRender } from './VideoRender'; +import { common } from '@kit.AbilityKit'; + +const TAG: string = 'FlutterWebRTCPlugin-[VideoRenderHandler]'; +const CHANNEL_NAME = 'flutter.webrtc.ohos/RTCVideoRender' + +export enum ScalingMode { Fill, AspectFill, AspectFit } + +@Observed +export class VideoRenderHandler extends PlatformView implements MethodCallHandler { + methodChannel: MethodChannel; + + constructor(context: common.Context, message: BinaryMessenger, viewId: number, args: ESObject) { + super(); + this.methodChannel = + new MethodChannel(message, `${CHANNEL_NAME}${viewId}`, StandardMethodCodec.INSTANCE); + this.methodChannel.setMethodCallHandler(this) + } + + onMethodCall(call: MethodCall, result: MethodResult): void { + let method: string = call.method; + Log.d(TAG, 'VideoRenderController method :' + method); + switch (method) { + case 'xxx': + break; + default: + break + } + } + + public sendMessage = (surfaceId: string) => { + Log.d(TAG, 'VideoRenderController sendMessage') + this.methodChannel.invokeMethod('putSurfaceId', { surfaceId: surfaceId }); + } + + getView(): WrappedBuilder<[Params]> { + return new WrappedBuilder(RenderBuilder) + } + + dispose(): void { + 
this.methodChannel?.setMethodCallHandler(null); + } +} + +@Builder +function RenderBuilder(params: Params) { + VideoRender({ params: params }) +} \ No newline at end of file diff --git a/ohos/src/main/ets/record/AudioChannel.ets b/ohos/src/main/ets/record/AudioChannel.ets new file mode 100644 index 0000000000000000000000000000000000000000..d57a62eca332120255f728ca39a1da939267fa32 --- /dev/null +++ b/ohos/src/main/ets/record/AudioChannel.ets @@ -0,0 +1,27 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +export enum AudioChannel { + INPUT, + OUTPUT +} \ No newline at end of file diff --git a/ohos/src/main/ets/utils/AdapterType.ets b/ohos/src/main/ets/utils/AdapterType.ets new file mode 100644 index 0000000000000000000000000000000000000000..2c62cae4919f9be17c31bc1ca571b801c9731239 --- /dev/null +++ b/ohos/src/main/ets/utils/AdapterType.ets @@ -0,0 +1,32 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +export enum AdapterType { + ADAPTER_TYPE_UNKNOWN = 0, + ADAPTER_TYPE_ETHERNET = 1, + ADAPTER_TYPE_WIFI = 2, + ADAPTER_TYPE_CELLULAR = 4, + ADAPTER_TYPE_VPN = 8, + ADAPTER_TYPE_LOOPBACK = 16, + ADAPTER_TYPE_ANY = 32 +} \ No newline at end of file diff --git a/ohos/src/main/ets/utils/CallUtils.ets b/ohos/src/main/ets/utils/CallUtils.ets new file mode 100644 index 0000000000000000000000000000000000000000..6fbf59df9de08fbf242c1395479edb0c7cfb01e6 --- /dev/null +++ b/ohos/src/main/ets/utils/CallUtils.ets @@ -0,0 +1,111 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import { MethodCall } from "@ohos/flutter_ohos"; +import { RTCRtpTransceiverDirection } from "libohos_webrtc.so"; +import { ConstraintsMap } from "./ConstraintsMap"; +import { DataMap } from "./ObjectType"; + +export function getPeerConnectionId(call: MethodCall): string { + return call.argument('peerConnectionId') ?? ''; +} + +export function getStreamId(call: MethodCall): string { + return call.argument('streamId') ?? ''; +} + +export function getLabel(call: MethodCall): string { + return call.argument('label') ?? ''; +} + +export function getTrackId(call: MethodCall): string { + return call.argument('trackId') ?? ''; +} + +export function getDataChannelId(call: MethodCall): string { + return call.argument('dataChannelId') ?? ''; +} + +export function getTransceiverId(call: MethodCall): string { + return call.argument('transceiverId') ?? ''; +} + +export function getRtpSenderId(call: MethodCall): string { + return call.argument('rtpSenderId') ?? ''; +} + +export function getSenderId(call: MethodCall): string { + return call.argument('senderId') ?? ''; +} + +export function getType(call: MethodCall): string { + return call.argument('type') ?? ''; +} + +export function getTone(call: MethodCall): string { + return call.argument('tone') ?? ''; +} + +export function getDirection(call: MethodCall): RTCRtpTransceiverDirection { + return call.argument('direction') ?? 'sendrecv'; +} + +export function getStreamIds(call: MethodCall): string[] { + return call.argument('streamIds') ?? 
[]; +} + +export function getDuration(call: MethodCall): number | undefined { + return call.argument('duration'); +} + +export function getGap(call: MethodCall): number | undefined { + return call.argument('gap'); +} + +export function getConstraintsMap(call: MethodCall): ConstraintsMap { + return constructMap(call.argument('constraints')); +} + +export function getOptionsMap(call: MethodCall): ConstraintsMap { + return constructMap(call.argument('options')); +} + +export function getConfigurationMap(call: MethodCall): ConstraintsMap { + return constructMap(call.argument('configuration')); +} + +export function getDescriptionMap(call: MethodCall): ConstraintsMap { + return constructMap(call.argument('description')); +} + +export function getCandidateMap(call: MethodCall): ConstraintsMap { + return constructMap(call.argument('candidate')); +} + +export function getDataChannelDictMap(call: MethodCall): ConstraintsMap { + return constructMap(call.argument('dataChannelDict')); +} + +export function constructMap(map: DataMap): ConstraintsMap { + return new ConstraintsMap(map); +} \ No newline at end of file diff --git a/ohos/src/main/ets/utils/Callback.ets b/ohos/src/main/ets/utils/Callback.ets new file mode 100644 index 0000000000000000000000000000000000000000..47e2bc2efae8b1f6ace5386eece9b50208d3147d --- /dev/null +++ b/ohos/src/main/ets/utils/Callback.ets @@ -0,0 +1,28 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. 
+* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. +*/ + +import { Any } from '@ohos/flutter_ohos'; + +export default interface Callback { + invoke(...args: Any[]): void; +} \ No newline at end of file diff --git a/ohos/src/main/ets/utils/CameraUtil.ets b/ohos/src/main/ets/utils/CameraUtil.ets new file mode 100644 index 0000000000000000000000000000000000000000..f77f358dbe1ca29d1aa902a7bb2837556748db00 --- /dev/null +++ b/ohos/src/main/ets/utils/CameraUtil.ets @@ -0,0 +1,389 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. 
+* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the 'Software'), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import camera from '@ohos.multimedia.camera'; +import { BusinessError } from '@ohos.base'; +import common from '@ohos.app.ability.common'; +import { Log } from '@ohos/flutter_ohos'; +import { display } from '@kit.ArkUI'; +const TAG: string = 'FlutterWebRTCPlugin'; + +let cameraManager: camera.CameraManager +let previewOutput: camera.PreviewOutput; +let cameraInput: camera.CameraInput; +let captureSession: camera.PhotoSession; +let photoOutPut: camera.PhotoOutput; +let cameraArray: Array; +let currentContext: common.BaseContext; + +/** + * 如果获取对象失败,说明相机可能被占用或无法使用。如果被占用,须等到相机被释放后才能重新获取。 + * @param context + * @returns + */ +export function getCameraManager(context: common.BaseContext): camera.CameraManager { + currentContext = context + if (!cameraManager) { + cameraManager = camera.getCameraManager(context); + } + return cameraManager; +} + +/** + * 通过cameraManager类中的getSupportedCameras()方法,获取当前设备支持的相机列表,列表中存储了设备支持的所有相机ID。 + * 若列表不为空,则说明列表中的每个ID都支持独立创建相机对象;否则,说明当前设备无可用相机,不可继续后续操作。 + * @param cameraManager + * @returns + */ +export function getCameraDevices(cameraManager: camera.CameraManager): Array { + cameraArray = cameraManager.getSupportedCameras(); + if (cameraArray != undefined && cameraArray.length <= 0) { + Log.e(TAG + '-CameraUtil', 'cameraManager.getSupportedCameras error'); + return []; + } + for (let index = 0; index < cameraArray.length; index++) { + Log.i(TAG + '-CameraUtil', 'cameraId : ' + cameraArray[index].cameraId); // 获取相机ID + Log.i(TAG + '-CameraUtil', 'cameraPosition : ' + cameraArray[index].cameraPosition); // 获取相机位置 + Log.i(TAG + '-CameraUtil', 'cameraType : ' + cameraArray[index].cameraType); // 获取相机类型 + Log.i(TAG + '-CameraUtil', 'connectionType : ' + cameraArray[index].connectionType); // 获取相机连接类型 + } + return cameraArray; +} + +/** + * 调用cameraManager类中的createCaptureSession()方法创建一个会话 + * @param cameraManager + * @returns + */ +export function getCaptureSession(cameraManager: camera.CameraManager): camera.Session | undefined { + try 
{ + if (!captureSession) { + captureSession = cameraManager.createSession(camera.SceneMode.NORMAL_VIDEO) as camera.PhotoSession; + } + } catch (error) { + let err = error as BusinessError; + Log.e(TAG + '-CameraUtil', 'Failed to create the CaptureSession instance. error: ${JSON.stringify(err)}'); + } + return captureSession; +} + +/** + * 调用captureSession类中的beginConfig()方法配置会话 + * @param captureSession + */ +export function beginConfig(captureSession: camera.Session): void { + try { + captureSession.beginConfig(); + } catch (error) { + let err = error as BusinessError; + Log.e(TAG + '-CameraUtil', 'Failed to beginConfig. error: ${JSON.stringify(err)}'); + } +} + +export function getCameraInput(cameraDevice: camera.CameraDevice, + cameraManager: camera.CameraManager): camera.CameraInput | null { + // 创建相机输入流 + try { + cameraInput = cameraManager.createCameraInput(cameraDevice); + // 监听cameraInput错误信息 + cameraInput.on('error', cameraDevice, (error: BusinessError) => { + Log.i(TAG + '-CameraUtil', 'Camera input error code: ${error.code}'); + }); + } catch (error) { + let err = error as BusinessError; + Log.e(TAG + '-CameraUtil', 'Failed to createCameraInput errorCode = ' + err.code); + } + + return cameraInput; +} + +/** + * 通过getSupportedOutputCapability()方法,获取当前设备支持的所有输出流 + * 输出流在CameraOutputCapability中的各个profile字段中 + * @param cameraDevice + * @param cameraManager + * @returns + */ +export async function getSupportedOutputCapability(cameraDevice: camera.CameraDevice, + cameraManager: camera.CameraManager, + cameraInput: camera.CameraInput): Promise { + // 打开相机 + await cameraInput.open(); + // 获取相机设备支持的输出流能力 + let cameraOutputCapability = cameraManager.getSupportedOutputCapability(cameraDevice, camera.SceneMode.NORMAL_PHOTO); + if (!cameraOutputCapability) { + Log.e(TAG + '-CameraUtil', 'cameraManager.getSupportedOutputCapability error'); + return undefined; + } + Log.i(TAG + '-CameraUtil', 'outputCapability: ' + JSON.stringify(cameraOutputCapability)); + return 
cameraOutputCapability; +} + +/** + * 获取预览流 + * @param cameraManager + * @param cameraOutputCapability + * @param surfaceId + * @returns + */ +export function getPreviewOutput(cameraManager: camera.CameraManager, + cameraOutputCapability: camera.CameraOutputCapability, + surfaceId: string, cameraPosition: number): camera.PreviewOutput | undefined { + let previewProfilesArray: Array = cameraOutputCapability.previewProfiles; + try { + previewOutput = cameraManager.createPreviewOutput(previewProfilesArray[0], surfaceId); + } catch (error) { + let err = error as BusinessError; + Log.e(TAG + '-CameraUtil', 'Failed to create the PreviewOutput instance. error code: ' + err.code); + } + return previewOutput; +} + +export function setSessionCameraInput(captureSession: camera.Session, cameraInput: camera.CameraInput): void { + try { + captureSession.addInput(cameraInput); + } catch (error) { + let err = error as BusinessError; + Log.e(TAG + '-CameraUtil', 'Failed to addInput. error: ${JSON.stringify(err)}'); + } +} + +export function setSessionPreviewOutput(captureSession: camera.Session, previewOutput: camera.PreviewOutput): void { + try { + captureSession.addOutput(previewOutput); + } catch (error) { + let err = error as BusinessError; + Log.e(TAG + '-CameraUtil', 'Failed to add previewOutput. error: ${JSON.stringify(err)}'); + } +} + +/** + * 调用captureSession类中的commitConfig()和start()方法提交相关配置,并启动会话 + * @param captureSession + * @returns + */ +export async function startSession(captureSession: camera.Session): Promise { + try { + await captureSession.commitConfig(); + } catch (error) { + let err = error as BusinessError; + Log.e(TAG + '-CameraUtil', 'Failed to commitConfig. error: ${JSON.stringify(err)}'); + } + + try { + await captureSession.start() + } catch (error) { + let err = error as BusinessError; + Log.e(TAG + '-CameraUtil', 'Failed to start. 
error: ${JSON.stringify(err)}'); + } +} + +export async function stopSession(captureSession: camera.Session): Promise { + + try { + await captureSession.stop().then(() => { + Log.i(TAG + '-CameraUtil', 'Promise returned to indicate the session stop success.'); + }).catch((error: BusinessError) => { + Log.e(TAG + '-CameraUtil', 'Failed to stop the session, error code: ${error.code}.'); + }); + } catch (error) { + let err = error as BusinessError; + Log.e(TAG + '-CameraUtil', 'Failed to stop. error: ${JSON.stringify(err)}'); + } +} + +export async function releaseCamera(): Promise { + Log.i(TAG + '-CameraUtil', 'releaseCamera in!'); + if (captureSession) { + captureSession.stop(); + } + if (cameraInput) { + cameraInput.close(); + } + if (previewOutput) { + previewOutput.release(); + } + if (captureSession) { + captureSession.release(); + } + if (photoOutPut) { + photoOutPut.release(); + } + Log.i(TAG + '-CameraUtil', 'releaseCamera out!'); +} + +export async function setPhotoFlashMode(flashMode: number): Promise { + if (captureSession) { + captureSession.setFlashMode(flashMode); + } +} + +export function getFlashMode(): camera.FlashMode | undefined { + let flashMode: camera.FlashMode | undefined = undefined; + try { + getCaptureSession(cameraManager); + flashMode = captureSession.getFlashMode(); + } catch (error) { + // 失败返回错误码error.code并处理 + let err = error as BusinessError; + Log.e(TAG + '-CameraUtil', 'The getFlashMode call failed.error code: ${err.code}'); + } + return flashMode; +} + +export function isTorchSupported(): boolean { + let torchSupport: boolean = false; + try { + torchSupport = cameraManager.isTorchSupported(); + + } catch (error) { + let err = error as BusinessError; + Log.e(TAG + '-CameraUtil', 'Failed to torch. 
errorCode = ' + err.code); + } + Log.i(TAG + '-CameraUtil', 'Returned with the torch support status:' + torchSupport); + + /* + * 当使用后置摄像头并设置闪光灯模式FlashMode关闭时,手电筒功能无法启用。 + * 这里添加闪光灯模式查询判断,如果是关闭则开启。 + * */ + let isTorchModeSupport: boolean = false; + try { + isTorchModeSupport = cameraManager.isTorchModeSupported(camera.TorchMode.ON); + } catch (error) { + let err = error as BusinessError; + Log.e(TAG + '-CameraUtil', 'Failed to set the torch mode. errorCode = ' + err.code); + } + Log.i(TAG + '-CameraUtil', 'Returned with the isTorchModeSupport :' + isTorchModeSupport); + + return torchSupport; +} + +export function setTorchMode(torchMode: camera.FlashMode): boolean { + try { + getCaptureSession(cameraManager); + captureSession.setFlashMode(torchMode); + return true; + } catch (error) { + // 失败返回错误码error.code并处理 + let err = error as BusinessError; + Log.e(TAG + '-CameraUtil', 'The setFlashMode call failed. error code: ${err.code}'); + return false; + } +} + +export async function switchCamera(cameraPosition: number, surfaceId: string, foldAbleStatus: number): Promise { + Log.i(TAG + '-CameraUtil', 'cameraPosition :' + cameraPosition); + Log.i(TAG + '-CameraUtil', 'surfaceId :' + surfaceId); + try { + /*第一步先释放相机,再通过选择的相机position重新创建相机预览窗口*/ + releaseCamera(); + if (!cameraManager) { + Log.e(TAG + '-CameraUtil', 'cameraManager error'); + return; + } + cameraArray = cameraManager.getSupportedCameras(); + if (cameraArray.length <= 0) { + Log.e(TAG + '-CameraUtil', 'cameraManager.getSupportedCameras error'); + return; + } + cameraInput = cameraManager.createCameraInput(cameraArray[cameraPosition]); + await cameraInput.open(); + let sceneModes: camera.SceneMode[] = cameraManager.getSupportedSceneModes(cameraArray[cameraPosition]); + let cameraOutputCap: camera.CameraOutputCapability = + cameraManager.getSupportedOutputCapability(cameraArray[cameraPosition], camera.SceneMode.NORMAL_PHOTO); + let isSupportPhotoMode: boolean = 
sceneModes.indexOf(camera.SceneMode.NORMAL_PHOTO) >= 0; + if (!isSupportPhotoMode) { + Log.e(TAG + '-CameraUtil', 'isSupportPhotoMode error'); + return; + } + if (!cameraOutputCap) { + Log.e(TAG + '-CameraUtil', 'cameraManager.getSupportedOutputCapability error'); + return; + } + let previewProfilesArray: camera.Profile[] = cameraOutputCap.previewProfiles; + let photoProfilesArray: camera.Profile[] = cameraOutputCap.photoProfiles; + let previewProfile: undefined | camera.Profile = previewProfilesArray.find((profile: camera.Profile) => { + let screen = display.getDefaultDisplaySync(); + if (screen.width <= 1080) { + return profile.size.height === 1080 && profile.size.width === 1440; + } else if (screen.width <= 1440 && screen.width > 1080) { + return profile.size.height === 1440 && profile.size.width === 1920; + } + return profile.size.height <= screen.width && + profile.size.height >= 1080 && + (profile.size.width / profile.size.height) < (screen.height / screen.width) && + (profile.size.width / profile.size.height) > + (foldAbleStatus === display.FoldStatus.FOLD_STATUS_EXPANDED ? 1 : 4 / 3); + }); + let photoProfile: undefined | camera.Profile = photoProfilesArray.find((profile: camera.Profile) => { + if (previewProfile) { + return profile.size.width <= 4096 && + profile.size.width >= 2448 && + profile.size.height === (foldAbleStatus === display.FoldStatus.FOLD_STATUS_EXPANDED ? 
1 : + (previewProfile.size.height / previewProfile.size.width)) * profile.size.width; + } + return undefined; + }); + previewOutput = cameraManager.createPreviewOutput(previewProfile, surfaceId); + if (previewOutput === undefined) { + Log.e(TAG + '-CameraUtil', 'cameraManager.createPreviewOutput error'); + return; + } + photoOutPut = cameraManager.createPhotoOutput(photoProfile); + if (photoOutPut === undefined) { + Log.e(TAG + '-CameraUtil', 'cameraManager.createPhotoOutput error'); + return; + } + captureSession = cameraManager.createSession(camera.SceneMode.NORMAL_PHOTO) as camera.PhotoSession; + if (captureSession === undefined) { + Log.e(TAG + '-CameraUtil', 'cameraManager.createSession error'); + return; + } + Log.i(TAG + '-CameraUtil', 'captureSession.beginConfig()!'); + captureSession.beginConfig(); + captureSession.addInput(cameraInput); + captureSession.addOutput(previewOutput); + captureSession.addOutput(photoOutPut); + Log.i(TAG + '-CameraUtil', 'begin captureSession.commitConfig()!'); + await captureSession.commitConfig(); + await captureSession.start(); + Log.i(TAG + '-CameraUtil', 'after captureSession.start()!'); + + } catch (error) { + // 失败返回错误码error.code并处理 + let err = error as BusinessError; + Log.e(TAG + '-CameraUtil', 'The setFlashMode call failed. 
error code: ${err.code}'); + } +} + +export async function setZoom(zoomLevel: number): Promise { + Log.i(TAG + '-CameraUtil', 'setZoom in!'); + getCaptureSession(cameraManager); + if (captureSession === undefined) { + Log.e(TAG + '-CameraUtil', 'captureSession is bull'); + return false; + } + captureSession.setZoomRatio(zoomLevel); + Log.i(TAG + '-CameraUtil', 'setZoom out!'); + return true; +} \ No newline at end of file diff --git a/ohos/src/main/ets/utils/ConstraintsArray.ts b/ohos/src/main/ets/utils/ConstraintsArray.ts new file mode 100644 index 0000000000000000000000000000000000000000..c3b9a6fe2afafb1a0fb7e105a127fec369f164c2 --- /dev/null +++ b/ohos/src/main/ets/utils/ConstraintsArray.ts @@ -0,0 +1,127 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import { DataMap } from './ObjectType'; +import { DataArray, ObjectType } from './ObjectType'; + +class ConstraintsArray { + private mArray: DataArray; + + constructor(array?: DataArray) { + if (array) { + this.mArray = array; + } else { + this.mArray = new Array(); + } + } + + public size(): number { + return this.mArray.length; + } + + public isNull(index: number): boolean { + return this.mArray[index] === null; + } + + public getBoolean(index: number): boolean { + return this.mArray[index] as boolean; + } + + public getNumber(index: number): number { + return this.mArray[index] as number; + } + + public getString(index: number): string { + return this.mArray[index] as string; + } + + public getByte(index: number): Uint8Array { + return this.mArray[index] as Uint8Array; + } + + public getArray(index: number): ConstraintsArray { + return new ConstraintsArray(this.mArray[index] as DataArray); + } + + public getMap(index: number): DataMap { + return this.mArray[index] as DataMap; + } + + public getType(index: number): ObjectType { + const object = this.mArray[index]; + if (object === null) { + return ObjectType.Null; + } else if (object instanceof Boolean || typeof object === 'boolean') { + return ObjectType.Boolean; + } else if (object instanceof Number || typeof object === 'number') { + return ObjectType.Number; + } else if (object instanceof String || typeof object === 'string') { + return ObjectType.String; + } else if (object instanceof Array) { + return ObjectType.Array; + } else if (object instanceof Map) { + return ObjectType.Map; + } else if (object instanceof Uint8Array) { + return ObjectType.Byte; + } + return ObjectType.Null; + } + + public toArrayList(): DataArray { + return this.mArray; + } + + public pushNull(): void { + this.mArray.push(null); + } + + public pushBoolean(value: boolean): void { + this.mArray.push(value); + } + + public pushNumber(value: number): void { + this.mArray.push(value); + } + + public pushString(value: string): void { 
+ this.mArray.push(value); + } + + public pushArray(array: ConstraintsArray): void { + this.mArray.push(array.toArrayList()); + } + + public pushByte(value: Uint8Array): void { + this.mArray.push(value); + } + + public pushMap(map: DataMap): void { + this.mArray.push(map); + } + + public toString(): string { + return `${JSON.stringify(this.mArray)}`; + } +} + +export { ConstraintsArray }; \ No newline at end of file diff --git a/ohos/src/main/ets/utils/ConstraintsMap.ts b/ohos/src/main/ets/utils/ConstraintsMap.ts new file mode 100644 index 0000000000000000000000000000000000000000..ce082df50b0f90eba742e07aaa5d1b394153e553 --- /dev/null +++ b/ohos/src/main/ets/utils/ConstraintsMap.ts @@ -0,0 +1,151 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import { ConstraintsArray } from './ConstraintsArray'; +import { DataMap, DataArray, ObjectType } from './ObjectType'; + +class ConstraintsMap { + private mMap: DataMap; + + constructor(map?: DataMap) { + if (map) { + this.mMap = map; + } else { + this.mMap = new Map(); + } + } + + public toMap(): DataMap { + return this.mMap; + } + + public hasKey(name: string): boolean { + return this.mMap.has(name); + } + + public isNull(name: string): boolean { + return this.mMap.get(name) === null; + } + + public get(name: string): ESObject { + return this.mMap.get(name); + } + + public getBoolean(name: string): boolean { + return this.mMap.get(name) as boolean; + } + + public getDouble(name: string): number { + return this.mMap.get(name) as number; + } + + public getNumber(name: string): number { + if (this.getType(name) === ObjectType.String) { + return parseInt(this.mMap.get(name) as string, 10); + } + return this.mMap.get(name) as number; + } + + public getString(name: string): string { + return this.mMap.get(name) as string; + } + + public getMap(name: string): ConstraintsMap | null { + const value = this.mMap.get(name); + if (!value) { + return null; + } + return new ConstraintsMap(value as DataMap); + } + + public getType(name: string): ObjectType { + const value = this.mMap.get(name); + if (value === null) { + return ObjectType.Null; + } else if (value instanceof Number || typeof value === 'number') { + return ObjectType.Number; + } else if (value instanceof String || typeof value === 'string') { + return ObjectType.String; + } else if (value instanceof Boolean || typeof value === 'boolean') { + return ObjectType.Boolean; + } else if (value instanceof Map) { + return ObjectType.Map; + } else if (value instanceof Array) { + return ObjectType.Array; + } else if (value instanceof Uint8Array) { + return ObjectType.Byte; + } else { + return ObjectType.Null; + } + } + + public putBoolean(key: string, value: boolean): void { + this.mMap.set(key, value); + } + + public 
putNumber(key: string, value: number): void { + this.mMap.set(key, value); + } + + public putString(key: string, value: string): void { + this.mMap.set(key, value); + } + + public putByte(key: string, value: Uint8Array): void { + this.mMap.set(key, value); + } + + public putNull(key: string): void { + this.mMap.set(key, null); + } + + public putMap(key: string, value: DataMap): void { + this.mMap.set(key, value); + } + + public merge(value: DataMap): void { + value.forEach((v, k) => this.mMap.set(k, v)); + } + + public putArray(key: string, value: DataArray): void { + this.mMap.set(key, value); + } + + public getArray(name: string): ConstraintsArray | null { + const value = this.mMap.get(name); + if (!value) { + return null; + } + return new ConstraintsArray(value as DataArray); + } + + public getListArray(name: string): DataArray { + return this.mMap.get(name) as DataArray; + } + + public toString(): string { + return `${JSON.stringify(Object.fromEntries(this.mMap))}`; + } +} + +export { ConstraintsMap }; \ No newline at end of file diff --git a/ohos/src/main/ets/utils/ObjectType.ts b/ohos/src/main/ets/utils/ObjectType.ts new file mode 100644 index 0000000000000000000000000000000000000000..99d38310fd10e432077f57195606b7378702ee31 --- /dev/null +++ b/ohos/src/main/ets/utils/ObjectType.ts @@ -0,0 +1,43 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. 
+* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. +*/ + +export type DataMap = Map + +export type DataArray = Array + +export enum ObjectType { + Null = 'Null', + Number = 'Number', + String = 'String', + Boolean = 'Boolean', + Map = 'Map', + Array = 'Array', + Byte = 'Byte' +} + +export enum DataChannelStateCode { + CONNECTING = 'connecting', + OPEN = 'open', + CLOSING = 'closing', + CLOSED = 'closed' +} \ No newline at end of file diff --git a/ohos/src/main/ets/utils/OptionsUtils.ets b/ohos/src/main/ets/utils/OptionsUtils.ets new file mode 100644 index 0000000000000000000000000000000000000000..26678cfe84dd33add80225c4b5b1e11051aefe8b --- /dev/null +++ b/ohos/src/main/ets/utils/OptionsUtils.ets @@ -0,0 +1,88 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. 
+* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the 'Software'), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import { Any } from '@ohos/flutter_ohos'; +import { AdapterType } from './AdapterType'; +import { ConstraintsMap } from './ConstraintsMap'; +import { DataArray, DataMap, ObjectType } from './ObjectType'; +import { ArrayList } from '@kit.ArkTS'; + +export function getOptionsNetworkIgnoreMask(optionsMap: ConstraintsMap): number { + let networkIgnoreMask: number = AdapterType.ADAPTER_TYPE_UNKNOWN; + if (optionsMap.getType('networkIgnoreMask') === ObjectType.Array) { + let ignoredAdapters = optionsMap.getArray('networkIgnoreMask')!; + ignoredAdapters.toArrayList().forEach((adapter: Any) => { + switch (adapter?.toString()) { + case 'adapterTypeEthernet': + networkIgnoreMask += AdapterType.ADAPTER_TYPE_ETHERNET; + break; + case 'adapterTypeWifi': + networkIgnoreMask += AdapterType.ADAPTER_TYPE_WIFI; + break; + case 'adapterTypeCellular': + networkIgnoreMask += AdapterType.ADAPTER_TYPE_CELLULAR; + break; + case 'adapterTypeVpn': + networkIgnoreMask += AdapterType.ADAPTER_TYPE_VPN; + break; + case 'adapterTypeLoopback': + networkIgnoreMask += AdapterType.ADAPTER_TYPE_LOOPBACK; + break; + case 'adapterTypeAny': + networkIgnoreMask += AdapterType.ADAPTER_TYPE_ANY; + break; + } + }) + } + return networkIgnoreMask; +} + +export function getOptionsForceSWCodec(optionsMap: ConstraintsMap): boolean { + let forceSWCodec: boolean = false; + if (optionsMap.getType('forceSWCodec') === ObjectType.Boolean) { + forceSWCodec = optionsMap.getBoolean('forceSWCodec'); + } + return forceSWCodec; +} + +export function getOptionsForceSWCodecList(optionsMap: ConstraintsMap): ArrayList { + let forceSWCodecList: ArrayList = new ArrayList(); + if (optionsMap.getType('forceSWCodecList') === ObjectType.Array) { + let array: DataArray = optionsMap.getListArray('forceSWCodecList'); + array.forEach((v: Any) => { + forceSWCodecList.add(v?.toString()); + }) + } else { + // disable HW Codec for VP9 by default. 
+ forceSWCodecList.add('VP9'); + } + return forceSWCodecList; +} + +export function getOptionsAudioConfiguration(optionsMap: ConstraintsMap): ConstraintsMap { + let audioConfiguration: ConstraintsMap = new ConstraintsMap() + if (optionsMap.getType('androidAudioConfiguration') === ObjectType.Map) { + audioConfiguration = optionsMap.getMap('androidAudioConfiguration')!; + } + return audioConfiguration; +} diff --git a/ohos/src/main/ets/utils/PCOUtils.ets b/ohos/src/main/ets/utils/PCOUtils.ets new file mode 100644 index 0000000000000000000000000000000000000000..61385b14e66b5d745818152e1edb4c2765e8b493 --- /dev/null +++ b/ohos/src/main/ets/utils/PCOUtils.ets @@ -0,0 +1,161 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import { ConstraintsMap } from './ConstraintsMap'; +import { DataMap } from './ObjectType'; +import { + MediaStreamTrack, + RTCDTMFSender, + RTCIceCandidate, + RTCRtpCapabilities, + RTCRtpReceiver, + RTCRtpSender, + RTCRtpSendParameters, + RTCRtpTransceiver +} from 'libohos_webrtc.so'; +import { ConstraintsArray } from './ConstraintsArray'; +import { dataMapToString, isNotEmpty } from './Utils'; + +export function capabilitiesToMap(capabilities: RTCRtpCapabilities): DataMap { + let params: DataMap = new Map(); + let fecMechanismsArr: ConstraintsArray = new ConstraintsArray(); + params.set('codecs', capabilities.codecs); + params.set('headerExtensions', capabilities.headerExtensions); + params.set('fecMechanisms', fecMechanismsArr.toArrayList()); + return params; +} + +export function candidateToMap(candidate?: RTCIceCandidate): DataMap { + let params: DataMap = new Map(); + params.set('sdpMLineIndex', candidate?.sdpMLineIndex); + params.set('sdpMid', candidate?.sdpMid); + params.set('candidate', candidate?.candidate); + return params; +} + +export function transceiverToMap(transceiverId: string, transceiver: RTCRtpTransceiver): DataMap { + let params: DataMap = new Map(); + params.set('transceiverId', transceiverId); + params.set('mid', transceiver.mid ?? 
''); + params.set('direction', transceiver.direction); + params.set('sender', rtpSenderToMap(transceiver.sender)); + params.set('receiver', rtpReceiverToMap(transceiver.receiver)); + return params; +} + +export function rtpParametersToMap(rtpParameters: RTCRtpSendParameters): DataMap { + let info: ConstraintsMap = new ConstraintsMap(); + info.putString('transactionId', rtpParameters.transactionId); + let rtcp: ConstraintsMap = new ConstraintsMap(); + // TODO 缺少 degradationPreference + if (isNotEmpty(rtpParameters.rtcp.cname)) { + rtcp.putString('cname', rtpParameters.rtcp.cname); + } + if (isNotEmpty(rtpParameters.rtcp.reducedSize)) { + rtcp.putBoolean('reducedSize', rtpParameters.rtcp.reducedSize); + } + info.putMap('rtcp', rtcp.toMap()); + + let headerExtensions: ConstraintsArray = new ConstraintsArray(); + for (let extension of rtpParameters.headerExtensions) { + let map: DataMap = new Map(); + map.set('uri', extension.uri); + map.set('id', extension.id); + map.set('encrypted', extension.encrypted ?? 
false); + headerExtensions.pushMap(map); + } + info.putArray('headerExtensions', headerExtensions.toArrayList()); + + let encodings: ConstraintsArray = new ConstraintsArray(); + for (let encoding of rtpParameters.encodings) { + let map: DataMap = new Map(); + // TODO 缺少 minBitrate、numTemporalLayers、ssrc + if (isNotEmpty(encoding.active)) { + map.set('active', encoding.active); + } + if (isNotEmpty(encoding.rid)) { + map.set('rid', encoding.rid); + } + if (isNotEmpty(encoding.maxBitrate)) { + map.set('maxBitrate', encoding.maxBitrate); + } + if (isNotEmpty(encoding.maxFramerate)) { + map.set('maxFramerate', encoding.maxFramerate); + } + if (isNotEmpty(encoding.scaleResolutionDownBy)) { + map.set('scaleResolutionDownBy', encoding.scaleResolutionDownBy); + } + encodings.pushMap(map); + } + info.putArray('encodings', encodings.toArrayList()); + + let codecs: ConstraintsArray = new ConstraintsArray(); + rtpParameters.codecs.forEach(codec => { + let map: DataMap = new Map(); + // TODO 缺少 name、numChannels、parameters、kind + map.set('payloadType', codec.payloadType); + map.set('clockRate', codec.clockRate); + codecs.pushMap(map); + }) + + info.putArray('codecs', codecs.toArrayList()); + return info.toMap(); +} + +export function rtpSenderToMap(sender: RTCRtpSender): DataMap { + let params: DataMap = new Map(); + params.set('senderId', sender.id ?? 
'senderId'); + params.set('ownsTrack', true); + params.set('dtmfSender', dtmfSenderToMap(sender.dtmf, sender.id)); + params.set('rtpParameters', rtpParametersToMap(sender.getParameters())); + params.set('track', mediaTrackToMap(sender.track)); + return params; +} + +export function rtpReceiverToMap(receiver: RTCRtpReceiver): DataMap { + let params: DataMap = new Map(); + params.set('receiverId', receiver.id); + params.set('track', mediaTrackToMap(receiver.track)); + params.set('rtpParameters', rtpParametersToMap(receiver.getParameters())); + return params; +} + +export function mediaTrackToMap(track: MediaStreamTrack | null): DataMap { + let params: DataMap = new Map(); + if (!track) { + return params; + } + params.set('id', track.id); + params.set('label', track.kind === 'video' ? 'video' : 'audio'); + params.set('kind', track.kind); + params.set('enabled', track.enabled); + params.set('readyState', track.readyState); + return params; +} + +export function dtmfSenderToMap(dtmfSender: RTCDTMFSender | null, id: string | null): DataMap { + let params: DataMap = new Map(); + params.set('dtmfSenderId', id ?? ''); + // TODO 与鸿蒙不对应,返回空map + return params; +} diff --git a/ohos/src/main/ets/utils/PermissionUtils.ets b/ohos/src/main/ets/utils/PermissionUtils.ets new file mode 100644 index 0000000000000000000000000000000000000000..2bc95a7f44660f6510d277418631d7ea970b7c0a --- /dev/null +++ b/ohos/src/main/ets/utils/PermissionUtils.ets @@ -0,0 +1,128 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. 
+* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import bundleManager from '@ohos.bundle.bundleManager'; +import abilityAccessCtrl, { PermissionRequestResult, Permissions } from '@ohos.abilityAccessCtrl'; +import { BusinessError } from '@ohos.base'; +import { Log } from '@ohos/flutter_ohos'; +import common from '@ohos.app.ability.common'; +import { ConstraintsMap } from './ConstraintsMap'; + +const TAG: string = 'FlutterWebRTCPlugin-PermissionUtils'; + +export type ResultCallback = (errCode: string | null, errDesc: string | null) => void; + +export class PermissionsManager { + private static instance: PermissionsManager; + private commonContext: common.UIAbilityContext = {} as common.UIAbilityContext + private Microphone: Permissions = 'ohos.permission.MICROPHONE'; + private Camera: Permissions = 'ohos.permission.CAMERA'; + + constructor() { + } + + static getInstance(): PermissionsManager { + if (!PermissionsManager.instance) { + PermissionsManager.instance = new PermissionsManager(); + } + return PermissionsManager.instance; + } + + public init(context: common.UIAbilityContext) { + this.commonContext = context; + } + + public async requestPermissions(permissionsList: Array, callback: ResultCallback) { + Log.i(TAG, `requestPermissions start:${JSON.stringify(permissionsList)}`); + const hasCameraPermission: boolean = await this.checkPermissions(permissionsList); + Log.i(TAG, `hasCameraPermission:${hasCameraPermission}`); + if (hasCameraPermission) { + callback(null, null); + return + } + let atManager: abilityAccessCtrl.AtManager = abilityAccessCtrl.createAtManager(); + // requestPermissionsFromUser会判断权限的授权状态来决定是否唤起弹窗 + abilityAccessCtrl.createAtManager() + .requestPermissionsFromUser(this.commonContext, permissionsList) + .then((data: PermissionRequestResult) => { + Log.i(TAG, `grantStatus:${JSON.stringify(data)}`); + let grantStatus: Array = data.authResults; + let length: number = grantStatus.length; + for (let i = 0; i < length; i++) { + if (grantStatus[i] !== 0) { + // 
用户拒绝授权,提示用户必须授权才能访问当前页面的功能,并引导用户到系统设置中打开相应的权限 + callback('-1', '未授权相机权限'); + return; + } + } + callback(null, null); + }) + .catch((error: BusinessError) => { + Log.e(TAG, `Failed to request permissions from user. error: ${JSON.stringify(error)}`) + callback(String(error.code), error.message); + }) + } + + public async checkPermissions(permissionsList: Array): Promise { + let grantStatus: abilityAccessCtrl.GrantStatus = await this.checkAccessToken(permissionsList[0]); + return grantStatus === abilityAccessCtrl.GrantStatus.PERMISSION_GRANTED; + } + + public async checkAccessToken(permission: Permissions): Promise { + // 获取应用程序的accessTokenID + let tokenId: number = 0; + let grantStatus: abilityAccessCtrl.GrantStatus = abilityAccessCtrl.GrantStatus.PERMISSION_DENIED; + try { + let bundleInfo: bundleManager.BundleInfo = await bundleManager.getBundleInfoForSelf( + bundleManager.BundleFlag.GET_BUNDLE_INFO_WITH_APPLICATION); + let appInfo: bundleManager.ApplicationInfo = bundleInfo.appInfo; + tokenId = appInfo.accessTokenId; + Log.i(TAG, `checkAccessToken tokenId: ${tokenId}`) + } catch (error) { + Log.e(TAG, `checkAccessToken Failed to get bundle info for self. error: ${JSON.stringify(error)}`) + return abilityAccessCtrl.GrantStatus.PERMISSION_DENIED; + } + const atManager: abilityAccessCtrl.AtManager = abilityAccessCtrl.createAtManager(); + // 校验应用是否被授予权限 + try { + grantStatus = await atManager.checkAccessToken(tokenId, permission); + Log.i(TAG, `checkAccessToken grantStatus: ${grantStatus}`) + } catch (error) { + Log.e(TAG, `checkAccessToken Failed to check access token. 
error: ${JSON.stringify(error)}`) + } + return grantStatus; + } + + public async processPermissions(constraints: ConstraintsMap, callback: ResultCallback) { + let permissionsList: Array = [] + if (constraints.hasKey('audio')) { + permissionsList.push(this.Microphone) + } + if (constraints.hasKey('video')) { + permissionsList.push(this.Camera) + } + this.requestPermissions(permissionsList, (code, err) => { + callback(code, err); + }) + } +} \ No newline at end of file diff --git a/ohos/src/main/ets/utils/TrackUtils.ets b/ohos/src/main/ets/utils/TrackUtils.ets new file mode 100644 index 0000000000000000000000000000000000000000..2eefef9e9d7c23486e7778dbea5e8434d4144d52 --- /dev/null +++ b/ohos/src/main/ets/utils/TrackUtils.ets @@ -0,0 +1,36 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the 'Software'), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ + +import { DataMap } from './ObjectType'; +import { MediaStreamTrack } from 'libohos_webrtc.so'; + +export function mediaStreamTrackToMap(track: MediaStreamTrack, remote: boolean = false): DataMap { + let trackMap: DataMap = new Map(); + trackMap.set('enabled', track.enabled); + trackMap.set('id', track.id); + trackMap.set('kind', track.kind); + trackMap.set('label', remote ? track.kind : track.id); + trackMap.set('readyState', remote ? track.readyState : 'live'); + trackMap.set('remote', remote); + return trackMap; +} diff --git a/ohos/src/main/ets/utils/Utils.ts b/ohos/src/main/ets/utils/Utils.ts new file mode 100644 index 0000000000000000000000000000000000000000..714663a699fa6bd9d28083d49e8a0d5adfa801ef --- /dev/null +++ b/ohos/src/main/ets/utils/Utils.ts @@ -0,0 +1,46 @@ +/* MIT License +* +* Copyright (c) 2024 SwanLink (Jiangsu) Technology Development Co., LTD. +* All rights reserved. +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ +import { DataMap } from "./ObjectType"; + +export function isBinaryBuffer(buffer: Uint8Array): boolean { + for (let i = 0; i < buffer.length; i++) { + const byte = buffer[i]; + // 检查是否为 ASCII 控制字符(0-31)或删除字符(127) + if ((byte >= 0 && byte <= 31) || byte === 127) { + return true; + } + } + return false; +} + +export function isEmpty(obj: ESObject): boolean { + return obj === null || obj === undefined; +} + +export function isNotEmpty(obj: ESObject): boolean { + return !isEmpty(obj); +} + +export function dataMapToString(map: DataMap): string { + return `${JSON.stringify(Object.fromEntries(map))}`; +} diff --git a/ohos/src/main/libohos_webrtc/.gitignore b/ohos/src/main/libohos_webrtc/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..eadab4e1522296628f32a70228b2c758ecab4759 --- /dev/null +++ b/ohos/src/main/libohos_webrtc/.gitignore @@ -0,0 +1,7 @@ +/node_modules +/oh_modules +/.preview +/build +/.cxx +/.test +/oh-package-lock.json5 \ No newline at end of file diff --git a/ohos/src/main/libohos_webrtc/index.d.ts b/ohos/src/main/libohos_webrtc/index.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..9164e828c019ed25c6325b8554e9e130e5e330b3 --- /dev/null +++ b/ohos/src/main/libohos_webrtc/index.d.ts @@ -0,0 +1,16 @@ +/** + * Copyright (c) 2024 Archermind Technology (Nanjing) Co. Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +export * from './webrtc' diff --git a/ohos/src/main/libohos_webrtc/oh-package.json5 b/ohos/src/main/libohos_webrtc/oh-package.json5 new file mode 100644 index 0000000000000000000000000000000000000000..044495fa4a5f44e2a15bd45808c46899ed91e8ad --- /dev/null +++ b/ohos/src/main/libohos_webrtc/oh-package.json5 @@ -0,0 +1,6 @@ +{ + "name": "libohos_webrtc.so", + "types": "./index.d.ts", + "version": "1.0.0", + "description": "Exported ArkTS interface for native c++ webrtc." +} \ No newline at end of file diff --git a/ohos/src/main/libohos_webrtc/webrtc.d.ts b/ohos/src/main/libohos_webrtc/webrtc.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..e1169df823a74588d42ddf41cfb7c7fdf1099e01 --- /dev/null +++ b/ohos/src/main/libohos_webrtc/webrtc.d.ts @@ -0,0 +1,1030 @@ +/** + * Copyright (c) 2024 Archermind Technology (Nanjing) Co. Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +export type RTCErrorDetailType = "data-channel-failure" | "dtls-failure" | "fingerprint-failure" | "hardware-encoder-error" | "hardware-encoder-not-available" | "sctp-failure" | "sdp-syntax-error"; +export type RTCIceProtocol = "tcp" | "udp"; +export type RTCIceCandidateType = "host" | "prflx" | "relay" | "srflx"; +export type RTCIceTcpCandidateType = "active" | "passive" | "so"; +export type RTCIceComponent = "rtp" | "rtcp"; +export type RTCIceGathererState = "complete" | "gathering" | "new"; +export type RTCIceTransportState = "checking" | "closed" | "completed" | "connected" | "disconnected" | "failed" | "new"; +export type RTCIceRole = "unknown" | "controlling" | "controlled"; +export type RTCSdpType = 'offer' | 'answer' | "pranswer" | "rollback"; +export type BinaryType = "blob" | "arraybuffer"; +export type DataChannelState = "closed" | "closing" | "connecting" | "open"; +export type RTCDtlsTransportState = "new" | "connecting" | "connected" | "closed" | "failed"; +export type RTCIceGatheringState = "new" | "gathering" | "complete"; +export type RTCIceConnectionState = "checking" | "closed" | "completed" | "connected" | "disconnected" | "failed" | "new"; +export type RTCSignalingState = "closed" | "have-local-offer" | "have-local-pranswer" | "have-remote-offer" | "have-remote-pranswer" | "stable"; +export type RTCPeerConnectionState = "closed" | "connected" | "connecting" | "disconnected" | "failed" | "new"; +export type RTCBundlePolicy = "balanced" | "max-bundle" | "max-compat"; +export type RTCRtcpMuxPolicy = "require"; +export type RTCIceTransportPolicy = "all" | "relay"; +export type DegradationPreference = "balanced" | "maintain-framerate" | "maintain-resolution"; +// export type RTCPriorityType = "high" | "low" | "medium" | "very-low"; +export type RTCRtpTransceiverDirection = "inactive" | "recvonly" | "sendonly" | "sendrecv" | "stopped"; +export type RTCSctpTransportState = "connecting" | "connected" | "closed"; +export type RTCStatsType = 
"candidate-pair" | "certificate" | "codec" | "data-channel" | "inbound-rtp" | "local-candidate" | "media-playout" | "media-source" | "outbound-rtp" | "peer-connection" | "remote-candidate" | "remote-inbound-rtp" | "remote-outbound-rtp" | "transport"; +export type RTCStatsIceCandidatePairState = "failed" | "frozen" | "in-progress" | "inprogress" | "succeeded" | "waiting"; +export type MediaStreamTrackState = 'live' | 'ended'; +export type MediaDeviceKind = 'audioinput' | 'audiooutput' | 'videoinput'; +export type MediaSourceState = 'initializing' | 'live' | 'ended' | 'muted'; +export type VideoFacingModeEnum = "user" | "environment" | "left" | "right"; +export type VideoResizeModeEnum = "none" | "crop-and-scale"; +export type AudioErrorType = 'init' | 'start-exception' | 'start-state-mismatch' | 'general'; +export type AudioState = 'start' | 'stop'; + +export type AlgorithmIdentifier = Algorithm | string; +export type HighResTimeStamp = number; +export type EpochTimeStamp = number; +export type ConstrainBoolean = boolean | ConstrainBooleanParameters; +export type ConstrainULong = number | ConstrainULongRange; +export type ConstrainDouble = number | ConstrainDoubleRange; +export type ConstrainString = string | string[] | ConstrainStringParameters; + +export interface ULongRange { + max?: number; + min?: number; +} + +export interface DoubleRange { + max?: number; + min?: number; +} + +export interface ConstrainBooleanParameters { + exact?: boolean; + ideal?: boolean; +} + +export interface ConstrainStringParameters { + exact?: string | string[]; + ideal?: string | string[]; +} + +export interface ConstrainDoubleRange extends DoubleRange { + exact?: number; + ideal?: number; +} + +export interface ConstrainULongRange extends ULongRange { + exact?: number; + ideal?: number; +} + +// event +// base class of events +export interface Event { + readonly type: string; +} + +// declare var Event: { +// prototype: Event; +// new(): Event; +// }; + +export interface 
EventTarget { + // empty for now +} + +// error +// https://www.w3.org/TR/webrtc/#rtcerrorinit-dictionary +export interface RTCErrorInit { + errorDetail: RTCErrorDetailType; + sdpLineNumber?: number; + sctpCauseCode?: number; + receivedAlert?: number; + sentAlert?: number; +} + +// https://www.w3.org/TR/webrtc/#rtcerror-interface +export interface RTCError extends /*DOMException*/ Error { + readonly errorDetail: RTCErrorDetailType; + readonly sdpLineNumber?: number; + readonly sctpCauseCode?: number; + readonly receivedAlert?: number; + readonly sentAlert?: number; +} + +// https://www.w3.org/TR/webrtc/#rtcerrorevent-interface +export interface RTCErrorEvent extends Event { + readonly error: RTCError; +} + +// https://www.w3.org/TR/webrtc/#rtctrackevent +export interface RTCTrackEvent extends Event { + readonly receiver: RTCRtpReceiver; + readonly track: MediaStreamTrack; + readonly streams: ReadonlyArray; + readonly transceiver: RTCRtpTransceiver; +} + +// https://www.w3.org/TR/webrtc/#rtcpeerconnectioniceevent +export interface RTCPeerConnectionIceEvent extends Event { + readonly candidate?: RTCIceCandidate; + readonly url?: string | null; +} + +// https://www.w3.org/TR/webrtc/#rtcpeerconnectioniceerrorevent +export interface RTCPeerConnectionIceErrorEvent extends Event { + readonly address?: string; + readonly port?: number; + readonly url?: string; + readonly errorCode?: number; + readonly errorText?: string; +} + +// https://www.w3.org/TR/webrtc/#rtcdatachannelevent +export interface RTCDataChannelEvent extends Event { + readonly channel: RTCDataChannel; +} + +// https://www.w3.org/TR/webrtc/#rtcdtmftonechangeevent +export interface RTCDTMFToneChangeEvent extends Event { + readonly tone: string; +} + +// https://html.spec.whatwg.org/multipage/comms.html#the-messageevent-interface +export interface MessageEvent extends Event { + readonly data: T; +} + +// https://www.w3.org/TR/mediacapture-streams/#mediastreamtrackevent +export interface MediaStreamTrackEvent 
extends Event { + readonly track: MediaStreamTrack; +} + +export interface VideoCapturerStartedEvent extends Event { + readonly success: boolean; +} + +// https://www.w3.org/TR/WebCryptoAPI/#algorithm +export interface Algorithm { + name: string; +} + +// https://www.w3.org/TR/webrtc/#dom-rtcrtptransceiverinit +export interface RTCRtpTransceiverInit { + direction?: RTCRtpTransceiverDirection; + streams?: MediaStream[]; + sendEncodings?: RTCRtpEncodingParameters[]; +} + +// https://www.w3.org/TR/webrtc/#dom-rtcsessiondescriptioninit +export interface RTCSessionDescriptionInit { + sdp?: string; + type: RTCSdpType; +} + +// https://www.w3.org/TR/webrtc/#rtcsessiondescription-class +export interface RTCSessionDescription { + readonly sdp: string; + readonly type: RTCSdpType; + + toJSON(): RTCSessionDescriptionInit; +} + +declare var RTCSessionDescription: { + prototype: RTCSessionDescription; + new(descriptionInitDict: RTCSessionDescriptionInit): RTCSessionDescription; +}; + +// https://www.w3.org/TR/webrtc/#dom-rtcicecandidateinit +export interface RTCIceCandidateInit { + candidate: string; + sdpMLineIndex?: number; + sdpMid?: string; + usernameFragment?: string; +} + +// https://www.w3.org/TR/webrtc/#rtcicecandidate-interface +export interface RTCIceCandidate { + readonly candidate: string; + readonly sdpMid?: string; + readonly sdpMLineIndex?: number; + readonly foundation?: string; + readonly component?: RTCIceComponent; + readonly priority?: number; + readonly address?: string; + readonly protocol?: RTCIceProtocol; + readonly port?: number; + readonly type?: RTCIceCandidateType; + readonly tcpType?: RTCIceTcpCandidateType; + readonly relatedAddress?: string; + readonly relatedPort?: number; + readonly usernameFragment?: string; + + toJSON(): RTCIceCandidateInit; +} + +declare var RTCIceCandidate: { + prototype: RTCIceCandidate; + new(candidateInitDict?: RTCIceCandidateInit): RTCIceCandidate; +}; + +// https://www.w3.org/TR/webrtc/#rtcdatachannel +export interface 
RTCDataChannel { + readonly label: string; + readonly ordered: boolean; + readonly maxPacketLifeTime?: number; + readonly maxRetransmits?: number; + readonly protocol: string; + readonly negotiated: boolean; + readonly id?: number; + readonly readyState: DataChannelState; + readonly bufferedAmount: number; + bufferedAmountLowThreshold: number; + binaryType: BinaryType; + + onbufferedamountlow: ((this: RTCDataChannel, ev: Event) => any) | null; + onclose: ((this: RTCDataChannel, ev: Event) => any) | null; + onclosing: ((this: RTCDataChannel, ev: Event) => any) | null; + onopen: ((this: RTCDataChannel, ev: Event) => any) | null; + onmessage: ((this: RTCDataChannel, ev: MessageEvent) => any) | null; + onerror: ((this: RTCDataChannel, ev: RTCErrorEvent) => any) | null; + + close(): void; + send(data: string): void; + send(data: ArrayBuffer): void; +} + +declare var RTCDataChannel: { + prototype: RTCDataChannel; + new(): RTCDataChannel; +}; + +// https://www.w3.org/TR/webrtc/#dom-rtcdatachannelinit +export interface RTCDataChannelInit { + ordered?: boolean; + maxPacketLifeTime?: number; + maxRetransmits?: number; + protocol?: string; + negotiated?: boolean; + id?: number; +} + +// https://www.w3.org/TR/webrtc/#configuration +export interface RTCConfiguration { + iceServers?: RTCIceServer[]; + iceTransportPolicy?: RTCIceTransportPolicy; + bundlePolicy?: RTCBundlePolicy; + rtcpMuxPolicy?: RTCRtcpMuxPolicy; + certificates?: RTCCertificate[]; + iceCandidatePoolSize?: number; + // tcpCandidatePolicy + // continualGatheringPolicy +} + +// https://www.w3.org/TR/webrtc/#dom-rtciceserver +export interface RTCIceServer { + urls: string | string[]; + username?: string; + credential?: string; +} + +// https://www.w3.org/TR/webrtc/#dom-rtcofferansweroptions +export interface RTCOfferAnswerOptions { +} + +// https://www.w3.org/TR/webrtc/#dom-rtcofferoptions +export interface RTCOfferOptions extends RTCOfferAnswerOptions { + iceRestart?: boolean; +} + +// 
https://www.w3.org/TR/webrtc/#dom-rtcansweroptions +export interface RTCAnswerOptions extends RTCOfferAnswerOptions { +} + +// https://www.w3.org/TR/webrtc/#rtcpeerconnection-interface +// https://www.w3.org/TR/webrtc/#rtcpeerconnection-interface-extensions +// https://www.w3.org/TR/webrtc/#rtcpeerconnection-interface-extensions-0 +// https://www.w3.org/TR/webrtc/#rtcpeerconnection-interface-extensions-1 +export interface RTCPeerConnection extends EventTarget { + readonly canTrickleIceCandidates?: boolean; + readonly signalingState: RTCSignalingState; + readonly iceGatheringState: RTCIceGatheringState; + readonly iceConnectionState: RTCIceConnectionState; + readonly connectionState: RTCPeerConnectionState; + readonly localDescription?: RTCSessionDescription; + readonly remoteDescription?: RTCSessionDescription; + readonly currentLocalDescription?: RTCSessionDescription; + readonly currentRemoteDescription?: RTCSessionDescription; + readonly pendingLocalDescription?: RTCSessionDescription; + readonly pendingRemoteDescription?: RTCSessionDescription; + readonly sctp?: RTCSctpTransport; + + onnegotiationneeded: ((this: RTCPeerConnection, ev: Event) => any) | null; + onicecandidate: ((this: RTCPeerConnection, ev: RTCPeerConnectionIceEvent) => any) | null; + onicecandidateerror: ((this: RTCPeerConnection, ev: RTCPeerConnectionIceErrorEvent) => any) | null; + oniceconnectionstatechange: ((this: RTCPeerConnection, ev: Event) => any) | null; + onicegatheringstatechange: ((this: RTCPeerConnection, ev: Event) => any) | null; + onsignalingstatechange: ((this: RTCPeerConnection, ev: Event) => any) | null; + onconnectionstatechange: ((this: RTCPeerConnection, ev: Event) => any) | null; + ontrack: ((this: RTCPeerConnection, ev: RTCTrackEvent) => any) | null; + ondatachannel: ((this: RTCPeerConnection, ev: RTCDataChannelEvent) => any) | null; + + addTrack(track: MediaStreamTrack, ...streams: MediaStream[]): RTCRtpSender; + removeTrack(sender: RTCRtpSender): void; + 
setLocalDescription(description?: RTCSessionDescriptionInit): Promise; + setRemoteDescription(description: RTCSessionDescriptionInit): Promise; + createOffer(options?: RTCOfferOptions): Promise; + createAnswer(options?: RTCAnswerOptions): Promise; + createDataChannel(label: string, dataChannelDict?: RTCDataChannelInit): RTCDataChannel; + addIceCandidate(candidate?: RTCIceCandidateInit): Promise; + getSenders(): RTCRtpSender[]; + getReceivers(): RTCRtpReceiver[]; + getTransceivers(): RTCRtpTransceiver[]; + getConfiguration(): RTCConfiguration; + restartIce(): void; + setConfiguration(configuration?: RTCConfiguration): void; + addTransceiver(trackOrKind: MediaStreamTrack | string, init?: RTCRtpTransceiverInit): RTCRtpTransceiver; + close(): void; + getStats(selector?: MediaStreamTrack): Promise; +} + +declare var RTCPeerConnection: { + prototype: RTCPeerConnection; + new(configuration?: RTCConfiguration): RTCPeerConnection; + // https://www.w3.org/TR/webrtc/#sec.cert-mgmt + generateCertificate(keygenAlgorithm: AlgorithmIdentifier): Promise; +}; + +// https://www.w3.org/TR/webrtc/#rtcrtpreceiver-interface +export interface RTCRtpReceiver { + readonly track: MediaStreamTrack; + readonly transport: RTCDtlsTransport | null; + readonly id: string | null; + + getParameters(): RTCRtpReceiveParameters; + getStats(): Promise; + getContributingSources(): RTCRtpContributingSource[]; + getSynchronizationSources(): RTCRtpSynchronizationSource[]; +} + +declare var RTCRtpReceiver: { + prototype: RTCRtpReceiver; + new(): RTCRtpReceiver; + getCapabilities(kind: string): RTCRtpCapabilities | null; +}; + +// https://www.w3.org/TR/webrtc/#dom-rtcrtpcodingparameters +export interface RTCRtpCodingParameters { + rid?: string; +} + +// https://www.w3.org/TR/webrtc/#dom-rtcrtpencodingparameters +export interface RTCRtpEncodingParameters extends RTCRtpCodingParameters { + active?: boolean; + maxBitrate?: number; + maxFramerate?: number; + scaleResolutionDownBy?: number; + // networkPriority?: 
RTCPriorityType; + // priority?: RTCPriorityType; +} + +// https://www.w3.org/TR/webrtc/#dom-rtcrtpcodecparameters +export interface RTCRtpCodecParameters { + clockRate: number; + channels?: number; + mimeType: string; + sdpFmtpLine: string; + payloadType: number; +} + +// https://www.w3.org/TR/webrtc/#dom-rtcrtpheaderextensionparameters +export interface RTCRtpHeaderExtensionParameters { + id: number; + uri: string; + encrypted?: boolean; +} + +// https://www.w3.org/TR/webrtc/#dom-rtcrtcpparameters +export interface RTCRtcpParameters { + cname?: string; + reducedSize?: boolean; +} + +// https://www.w3.org/TR/webrtc/#dom-rtcrtpparameters +export interface RTCRtpParameters { + codecs: RTCRtpCodecParameters[]; + headerExtensions: RTCRtpHeaderExtensionParameters[]; + rtcp: RTCRtcpParameters; +} + +// https://www.w3.org/TR/webrtc/#dom-rtcrtpsendparameters +export interface RTCRtpSendParameters extends RTCRtpParameters { + // degradationPreference?: DegradationPreference; + encodings: RTCRtpEncodingParameters[]; + transactionId: string; +} + +// https://www.w3.org/TR/webrtc/#dom-rtcrtpreceiveparameters +export interface RTCRtpReceiveParameters extends RTCRtpParameters { + encodings: RTCRtpEncodingParameters[]; + transactionId: string; +} + +// https://www.w3.org/TR/webrtc/#dom-rtcrtpcontributingsource +export interface RTCRtpContributingSource { + timestamp: HighResTimeStamp; + source: number; + audioLevel?: number; + rtpTimestamp: number; +} + +// https://www.w3.org/TR/webrtc/#dom-rtcrtpsynchronizationsource +export interface RTCRtpSynchronizationSource extends RTCRtpContributingSource { +} + +// https://www.w3.org/TR/webrtc/#rtcrtpsender-interface +// https://www.w3.org/TR/webrtc/#rtcrtpsender-interface-extensions +export interface RTCRtpSender { + readonly track: MediaStreamTrack | null; + readonly transport: RTCDtlsTransport | null; + readonly dtmf: RTCDTMFSender | null; + readonly id: string | null; + + setParameters(parameters: RTCRtpSendParameters): Promise; + 
getParameters(): RTCRtpSendParameters; + replaceTrack(withTrack: MediaStreamTrack | null): Promise; + setStreams(...streams: MediaStream[]): void; + getStats(): Promise; +} + +declare var RTCRtpSender: { + prototype: RTCRtpSender; + new(): RTCRtpSender; + /** + * get the most optimistic view of the capabilities of the system for sending media of the given kind. + * @param kind 'audio' or 'video'. + * @returns instance of RTCRtpCapabilities, or null if has no capabilities corresponding to the value of the kind argument. + */ + getCapabilities(kind: string): RTCRtpCapabilities | null; +}; + +// https://www.w3.org/TR/webrtc/#dom-rtcrtpcodec +export interface RTCRtpCodec { + payloadType?: number; + mimeType: string; + clockRate: number; + channels?: number; + sdpFmtpLine?: string; +} + +// https://www.w3.org/TR/webrtc/#rtcrtpheaderextensioncapability +export interface RTCRtpHeaderExtensionCapability { + uri: string; +} + +// https://www.w3.org/TR/webrtc/#rtcrtpcapabilities +export interface RTCRtpCapabilities { + codecs: RTCRtpCodec[]; + headerExtensions: RTCRtpHeaderExtensionCapability[]; +} + +// https://www.w3.org/TR/webrtc/#rtcdtmfsender +export interface RTCDTMFSender extends EventTarget { + readonly canInsertDTMF: boolean; + readonly toneBuffer: string; + + ontonechange: ((this: RTCDTMFSender, ev: RTCDTMFToneChangeEvent) => any) | null; + + insertDTMF(tones: string, duration?: number, interToneGap?: number): void; +} + +declare var RTCDTMFSender: { + prototype: RTCDTMFSender; + new(): RTCDTMFSender; +}; + +// https://www.w3.org/TR/webrtc/#rtcrtptransceiver-interface +export interface RTCRtpTransceiver { + readonly mid: string | null; + readonly sender: RTCRtpSender; + readonly receiver: RTCRtpReceiver; + direction: RTCRtpTransceiverDirection; + readonly currentDirection: RTCRtpTransceiverDirection | null; + + stop(): void; + setCodecPreferences(codecs: RTCRtpCodec[]): void; +} + +declare var RTCRtpTransceiver: { + prototype: RTCRtpTransceiver; + new(): 
RTCRtpTransceiver; +}; + +// https://www.w3.org/TR/webrtc/#rtcdtlstransport-interface +export interface RTCDtlsTransport extends EventTarget { + readonly iceTransport: RTCIceTransport; + readonly state: RTCDtlsTransportState; + + onstatechange: ((this: RTCDtlsTransport, ev: Event) => any) | null; + onerror: ((this: RTCDtlsTransport, ev: RTCErrorEvent) => any) | null; + + getRemoteCertificates(): ArrayBuffer[]; +} + +declare var RTCDtlsTransport: { + prototype: RTCDtlsTransport; + new(): RTCDtlsTransport; +}; + +// https://www.w3.org/TR/webrtc/#rtcdtlsfingerprint +export interface RTCDtlsFingerprint { + algorithm?: string; + value?: string; +} + +// https://www.w3.org/TR/webrtc/#rtccertificate-interface +export interface RTCCertificate { + readonly expires: EpochTimeStamp; + + getFingerprints(): RTCDtlsFingerprint[]; +} + +declare var RTCCertificate: { + prototype: RTCCertificate; + new(): RTCCertificate; +}; + +// https://www.w3.org/TR/webrtc/#rtcicetransport +export interface RTCIceTransport extends EventTarget { + readonly role: RTCIceRole; + readonly component: RTCIceComponent; + readonly state: RTCIceTransportState; + readonly gatheringState: RTCIceGathererState; + + onstatechange: ((this: RTCIceTransport, ev: Event) => any) | null; + ongatheringstatechange: ((this: RTCIceTransport, ev: Event) => any) | null; + onselectedcandidatepairchange: ((this: RTCIceTransport, ev: Event) => any) | null; + + getSelectedCandidatePair(): RTCIceCandidatePair | null; +} + +declare var RTCIceTransport: { + prototype: RTCIceTransport; + new(): RTCIceTransport; +}; + +// https://www.w3.org/TR/webrtc/#rtcicecandidatepair +export interface RTCIceCandidatePair { + local?: RTCIceCandidate; + remote?: RTCIceCandidate; +} + +// https://www.w3.org/TR/webrtc/#rtcsctptransport-interface +export interface RTCSctpTransport extends EventTarget { + readonly maxChannels?: number; + readonly maxMessageSize: number; + readonly state: RTCSctpTransportState; + readonly transport: RTCDtlsTransport; 
+ + onstatechange: ((this: RTCSctpTransport, ev: Event) => any) | null; +} + +declare var RTCSctpTransport: { + prototype: RTCSctpTransport; + new(): RTCSctpTransport; +}; + +// https://www.w3.org/TR/webrtc/#rtcstats-dictionary +export interface RTCStats { + timestamp: HighResTimeStamp; + type: RTCStatsType; + id: string; +} + +// https://www.w3.org/TR/webrtc/#rtcstatsreport-object +export interface RTCStatsReport { + readonly stats: Map; + // readonly timestamp: HighResTimeStamp; // android + // forEach(callback: (value: RTCStats, key: string, parent: RTCStatsReport) => void): void; +} + +// https://www.w3.org/TR/webrtc-stats/#dom-rtctransportstats +export interface RTCTransportStats extends RTCStats { + packetsSent?: number; + packetsReceived?: number; + bytesSent?: number; + bytesReceived?: number; + iceRole?: RTCIceRole; + iceLocalUsernameFragment?: string; + dtlsState: RTCDtlsTransportState; + iceState?: RTCIceTransportState; + selectedCandidatePairId?: string; + localCertificateId?: string; + remoteCertificateId?: string; + tlsVersion?: string; + dtlsCipher?: string; + // dtlsRole?: RTCDtlsRole; + srtpCipher?: string; + selectedCandidatePairChanges: number; +} + +// https://www.w3.org/TR/webrtc-stats/#dom-rtcrtpstreamstats +export interface RTCRtpStreamStats extends RTCStats { + ssrc: number; + kind: string; + transportId?: string; + codecId?: string; +} + +// https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatepairstats +export interface RTCIceCandidatePairStats extends RTCStats { + transportId: string; + localCandidateId: string; + remoteCandidateId: string; + state: RTCStatsIceCandidatePairState; + nominated?: boolean; + packetsSent?: number; + packetsReceived?: number; + bytesSent?: number; + bytesReceived?: number; + lastPacketSentTimestamp?: HighResTimeStamp; + lastPacketReceivedTimestamp?: HighResTimeStamp; + totalRoundTripTime?: number; + currentRoundTripTime?: number; + availableOutgoingBitrate?: number; + availableIncomingBitrate?: number; + 
requestsReceived?: number; + requestsSent?: number; + responsesReceived?: number; + responsesSent?: number; + consentRequestsSent?: number; + packetsDiscardedOnSend?: number; + bytesDiscardedOnSend?: number; +} + +// https://www.w3.org/TR/mediacapture-streams/#media-track-capabilities +export interface MediaTrackCapabilities { + width?: ULongRange; + height?: ULongRange; + aspectRatio?: DoubleRange; + frameRate?: DoubleRange; + facingMode?: string[]; + resizeMode?: string[]; + sampleRate?: ULongRange; + sampleSize?: ULongRange; + echoCancellation?: boolean[]; + autoGainControl?: boolean[]; + noiseSuppression?: boolean[]; + latency?: DoubleRange; + channelCount?: ULongRange; + deviceId?: string; + groupId?: string; +} + +export interface MediaTrackConstraintSet { + width?: ConstrainULong; + height?: ConstrainULong; + aspectRatio?: ConstrainDouble; + frameRate?: ConstrainDouble; + facingMode?: ConstrainString; + resizeMode?: ConstrainString; + sampleRate?: ConstrainULong; + sampleSize?: ConstrainULong; + echoCancellation?: ConstrainBoolean; + autoGainControl?: ConstrainBoolean; + noiseSuppression?: ConstrainBoolean; + latency?: ConstrainDouble; + channelCount?: ConstrainULong; + deviceId?: ConstrainString; + groupId?: ConstrainString; +} + +// https://www.w3.org/TR/mediacapture-streams/#media-track-constraints +export interface MediaTrackConstraints extends MediaTrackConstraintSet { + advanced?: MediaTrackConstraintSet[]; +} + +// https://www.w3.org/TR/mediacapture-streams/#media-track-settings +export interface MediaTrackSettings { + width?: number; + height?: number; + aspectRatio?: number; + frameRate?: number; + facingMode?: string; + resizeMode?: string; + sampleRate?: number; + sampleSize?: number; + echoCancellation?: boolean; + autoGainControl?: boolean; + noiseSuppression?: boolean; + latency?: number; + channelCount?: number; + deviceId?: string; + groupId?: string; +} + +// https://www.w3.org/TR/mediacapture-streams/#media-track-supported-constraints 
+export interface MediaTrackSupportedConstraints { + width?: boolean; + height?: boolean; + aspectRatio?: boolean; + frameRate?: boolean; + facingMode?: boolean; + resizeMode?: boolean; + sampleRate?: boolean; + sampleSize?: boolean; + echoCancellation?: boolean; + autoGainControl?: boolean; + noiseSuppression?: boolean; + latency?: boolean; + channelCount?: boolean; + deviceId?: boolean; + groupId?: boolean; +} + +export interface MediaStreamConstraints { + // default is false + video?: boolean | MediaTrackConstraints; + // default is false + audio?: boolean | MediaTrackConstraints; +} + +// https://www.w3.org/TR/screen-capture/#displaymediastreamoptions +export interface DisplayMediaStreamOptions { + video?: boolean | MediaTrackConstraints; + audio?: boolean | MediaTrackConstraints; +} + +export interface MediaSource { + readonly state: MediaSourceState; +} + +export interface AudioSource extends MediaSource { + setVolume(volume: number); +} + +export interface VideoSource extends MediaSource { + oncapturerstarted: ((this: VideoSource, ev: VideoCapturerStartedEvent) => any) | null; + oncapturerstopped: ((this: VideoSource, ev: Event) => any) | null; +} + +// https://www.w3.org/TR/mediacapture-streams/#mediastreamtrack +export interface MediaStreamTrack extends EventTarget { + readonly kind: string; + readonly id: string; + enabled: boolean; + readonly readyState: MediaStreamTrackState; + + stop(): void; +} + +export declare var MediaStreamTrack: { + prototype: MediaStreamTrack; + new(): MediaStreamTrack; +}; + +export interface AudioTrack extends MediaStreamTrack { +} + +export interface VideoTrack extends MediaStreamTrack { +} + +// https://www.w3.org/TR/mediacapture-streams/#mediastream +export interface MediaStream extends EventTarget { + readonly id: string; + readonly active: boolean; + + addTrack(track: MediaStreamTrack): void; + removeTrack(track: MediaStreamTrack): void; + getTrackById(trackId: string): MediaStreamTrack | null; + getTracks(): 
MediaStreamTrack[]; + getAudioTracks(): MediaStreamTrack[]; + getVideoTracks(): MediaStreamTrack[]; +} + +declare var MediaStream: { + prototype: MediaStream; + new(): MediaStream; + new(stream: MediaStream): MediaStream; + new(tracks: MediaStreamTrack[]): MediaStream; +}; + +export interface MediaDeviceInfo { + readonly deviceId: string; + readonly kind:MediaDeviceKind; + readonly label: string; + readonly groupId: string; +} + +export interface DeviceChangeEvent extends Event { + readonly devices: ReadonlyArray; + readonly userInsertedDevices: ReadonlyArray; +} + +// https://www.w3.org/TR/mediacapture-streams/#mediadevices +export interface MediaDevices extends EventTarget { + enumerateDevices(): Promise; + getSupportedConstraints(): MediaTrackSupportedConstraints; + getUserMedia(constraints?: MediaStreamConstraints): Promise; + getDisplayMedia(options?: DisplayMediaStreamOptions): Promise; +} + +declare var MediaDevices: { + prototype: MediaDevices; + new(): MediaDevices; +}; + +export interface NativeVideoRenderer { + readonly surfaceId?: string; + readonly videoTrack?: MediaStreamTrack; + + init(surfaceId: string): void; + setVideoTrack(videoTrack: MediaStreamTrack | null): void; + setMirror(mirrorHorizontally: boolean): void; + setMirrorVertically(mirrorVertically: boolean): void; + setScalingMode(mode: number): void; + release(): void; + + onFrameResolutionChanged: ((this: NativeVideoRenderer, ev: Event) => any) | null; +} + +declare var NativeVideoRenderer: { + prototype: NativeVideoRenderer; + new(): NativeVideoRenderer; +}; + +export interface AudioError extends Error { + readonly type: AudioErrorType; +} + +export interface AudioErrorEvent extends Event { + readonly error: AudioError; +} + +export interface AudioStateChangeEvent extends Event { + readonly state: AudioState; +} + +export interface AudioCapturerSamplesReadyEvent extends Event { + readonly samples: AudioSamples; +} + +export interface AudioDeviceModuleOptions { + // input source. 
see ohos.multimedia.audio.SourceType, default is SOURCE_TYPE_VOICE_COMMUNICATION. + audioSource?: number; + + // input format. see ohos.multimedia.audio.AudioSampleFormat, default SAMPLE_FORMAT_S16LE. + audioFormat?: number; + + // input sample rate, default is 48000. + inputSampleRate?: number; + + // Control if stereo input should be used or not. The default is mono. + useStereoInput?: boolean; + + // output sample rate, default is 48000. + outputSampleRate?: number; + + // Control if stereo output should be used or not. The default is mono. + useStereoOutput?: boolean; + + // output audio usage. see ohos.multimedia.audio.StreamUsage, default is STREAM_USAGE_VOICE_COMMUNICATION + rendererUsage?: number; + + // enable low latency capturing and rendering, default is false + useLowLatency?: boolean; + + // Control if the built-in HW acoustic echo canceler should be used or not, default is false. + // It is possible to query support by calling AudioDeviceModule.isBuiltInAcousticEchoCancelerSupported() + useHardwareAcousticEchoCanceler?: boolean; + + // Control if the built-in HW noise suppressor should be used or not, default is false. + // It is possible to query support by calling AudioDeviceModule.isBuiltInNoiseSuppressorSupported() + useHardwareNoiseSuppressor?: boolean; +} + +export interface AudioSamples { + // See ohos.multimedia.audio.AudioSampleFormat + readonly audioFormat: number; + + // See ohos.multimedia.audio.AudioChannel + readonly channelCount: number; + + // See ohos.multimedia.audio.AudioSamplingRate + readonly sampleRate: number; + + // audio data + readonly data: ArrayBuffer; +} + +export interface AudioDeviceModule { + oncapturererror: ((this: any, event: AudioErrorEvent) => void) | null; + oncapturerstatechange: ((this: any, event: AudioStateChangeEvent) => void) | null; + // Called when new audio samples are ready. 
This should only be set for debug purposes + oncapturersamplesready: ((this: any, event: AudioCapturerSamplesReadyEvent) => void) | null; + onrenderererror: ((this: any, event: AudioErrorEvent) => void) | null; + onrendererstatechange: ((this: any, event: AudioStateChangeEvent) => void) | null; + + setSpeakerMute(mute: boolean): void; + setMicrophoneMute(mute: boolean): void; + setNoiseSuppressorEnabled(enabled: boolean): boolean; +} + +declare var AudioDeviceModule: { + prototype: AudioDeviceModule; + new(options?: AudioDeviceModuleOptions): AudioDeviceModule; + isBuiltInAcousticEchoCancelerSupported(): boolean; + isBuiltInNoiseSuppressorSupported(): boolean; +}; + +// Hold a native webrtc.AudioProcessing instance +export interface AudioProcessing {} + +export interface AudioProcessingFactory { + create(): AudioProcessing; +} + +declare var AudioProcessingFactory: { + prototype: AudioProcessingFactory; + new(): AudioProcessingFactory; +}; + +export interface VideoEncoderFactory {} + +export interface VideoDecoderFactory {} + +export interface HardwareVideoEncoderFactory extends VideoEncoderFactory { + readonly enableH264HighProfile: boolean; +} + +declare var HardwareVideoEncoderFactory: { + prototype: HardwareVideoEncoderFactory; + new(enableH264HighProfile?: boolean): HardwareVideoEncoderFactory; +}; + +export interface SoftwareVideoEncoderFactory extends VideoEncoderFactory { +} + +declare var SoftwareVideoEncoderFactory: { + prototype: SoftwareVideoEncoderFactory; + new(): SoftwareVideoEncoderFactory; +}; + +export interface HardwareVideoDecoderFactory extends VideoDecoderFactory { +} + +declare var HardwareVideoDecoderFactory: { + prototype: HardwareVideoDecoderFactory; + new(): HardwareVideoDecoderFactory; +}; + +export interface SoftwareVideoDecoderFactory extends VideoDecoderFactory { +} + +declare var SoftwareVideoDecoderFactory: { + prototype: SoftwareVideoDecoderFactory; + new(): SoftwareVideoDecoderFactory; +}; + +export interface 
PeerConnectionFactoryOptions { + adm?: AudioDeviceModule; + videoEncoderFactory?: VideoEncoderFactory; + videoDecoderFactory?: VideoDecoderFactory; + audioProcessing?: AudioProcessing; +} + +export interface PeerConnectionFactory { + createPeerConnection(config: RTCConfiguration): RTCPeerConnection; + createAudioSource(constraints?: MediaTrackConstraints): AudioSource; + createAudioTrack(id: string, source: AudioSource): AudioTrack; + createVideoSource(constraints?: MediaTrackConstraints, isScreencast?: boolean): VideoSource; + createVideoTrack(id: string, source: VideoSource): VideoTrack; + startAecDump(fd: number, max_size_bytes: number): boolean; + stopAecDump(): void; +} + +declare var PeerConnectionFactory: { + prototype: PeerConnectionFactory; + new(options?: PeerConnectionFactoryOptions): PeerConnectionFactory; + getDefault(): PeerConnectionFactory; + setDefault(factory: PeerConnectionFactory): void; +}; + +export interface Loggable { + logMessage(message: string, severity: number, tag: string): void; +} + +export class NativeLogging { + static injectLoggable(loggable: Loggable, severity: number): void; + static deleteLoggable(): void; + static enableLogToDebugOutput(severity): void; + static enableLogThreads(): void; + static enableLogTimeStamps(): void; + static log(message: string, severity: number, tag: string): void; +} diff --git a/ohos/src/main/module.json5 b/ohos/src/main/module.json5 new file mode 100644 index 0000000000000000000000000000000000000000..a635a9a3726beff264cdd90a532fe6987f8a1578 --- /dev/null +++ b/ohos/src/main/module.json5 @@ -0,0 +1,24 @@ +{ + "module": { + "name": "flutter_webrtc", + "type": "har", + "deviceTypes": [ + "default", + "tablet" + ], + "requestPermissions": [ + { + "name" : "ohos.permission.INTERNET", + "reason": "$string:internet" + }, + { + "name" : "ohos.permission.CAMERA", + "reason": "$string:camera" + }, + { + "name": "ohos.permission.MICROPHONE", + "reason": "$string:microphone" + } + ] + } +} \ No newline at end 
of file diff --git a/ohos/src/main/resources/base/element/string.json b/ohos/src/main/resources/base/element/string.json new file mode 100644 index 0000000000000000000000000000000000000000..b445cdb4712d69fcd45f832a20ed8c4d71fb2a79 --- /dev/null +++ b/ohos/src/main/resources/base/element/string.json @@ -0,0 +1,32 @@ +{ + "string": [ + { + "name": "page_show", + "value": "page from package" + }, + { + "name": "internet", + "value": "internet" + }, + { + "name": "camera", + "value": "camera" + }, + { + "name": "microphone", + "value": "microphone" + }, + { + "name": "reason_media_location", + "value": "For third-party camera to access media information scenarios" + }, + { + "name": "reason_write_imagevideo", + "value": "For third-party camera to save media files" + }, + { + "name": "reason_read_imagevideo", + "value": "For third-party camera to read media files" + } + ] +} diff --git a/ohos/src/main/resources/en_US/element/string.json b/ohos/src/main/resources/en_US/element/string.json new file mode 100644 index 0000000000000000000000000000000000000000..f51a9c8461a55f6312ef950344e3145b7f82d607 --- /dev/null +++ b/ohos/src/main/resources/en_US/element/string.json @@ -0,0 +1,8 @@ +{ + "string": [ + { + "name": "page_show", + "value": "page from package" + } + ] +} diff --git a/ohos/src/main/resources/zh_CN/element/string.json b/ohos/src/main/resources/zh_CN/element/string.json new file mode 100644 index 0000000000000000000000000000000000000000..f51a9c8461a55f6312ef950344e3145b7f82d607 --- /dev/null +++ b/ohos/src/main/resources/zh_CN/element/string.json @@ -0,0 +1,8 @@ +{ + "string": [ + { + "name": "page_show", + "value": "page from package" + } + ] +} diff --git a/ohos/src/test/List.test.ets b/ohos/src/test/List.test.ets new file mode 100644 index 0000000000000000000000000000000000000000..bb5b5c3731e283dd507c847560ee59bde477bbc7 --- /dev/null +++ b/ohos/src/test/List.test.ets @@ -0,0 +1,5 @@ +import localUnitTest from './LocalUnit.test'; + +export default function 
testsuite() { + localUnitTest(); +} \ No newline at end of file diff --git a/ohos/src/test/LocalUnit.test.ets b/ohos/src/test/LocalUnit.test.ets new file mode 100644 index 0000000000000000000000000000000000000000..165fc1615ee8618b4cb6a622f144a9a707eee99f --- /dev/null +++ b/ohos/src/test/LocalUnit.test.ets @@ -0,0 +1,33 @@ +import { describe, beforeAll, beforeEach, afterEach, afterAll, it, expect } from '@ohos/hypium'; + +export default function localUnitTest() { + describe('localUnitTest', () => { + // Defines a test suite. Two parameters are supported: test suite name and test suite function. + beforeAll(() => { + // Presets an action, which is performed only once before all test cases of the test suite start. + // This API supports only one parameter: preset action function. + }); + beforeEach(() => { + // Presets an action, which is performed before each unit test case starts. + // The number of execution times is the same as the number of test cases defined by **it**. + // This API supports only one parameter: preset action function. + }); + afterEach(() => { + // Presets a clear action, which is performed after each unit test case ends. + // The number of execution times is the same as the number of test cases defined by **it**. + // This API supports only one parameter: clear action function. + }); + afterAll(() => { + // Presets a clear action, which is performed after all test cases of the test suite end. + // This API supports only one parameter: clear action function. + }); + it('assertContain', 0, () => { + // Defines a test case. This API supports three parameters: test case name, filter parameter, and test case function. + let a = 'abc'; + let b = 'b'; + // Defines a variety of assertion methods, which are used to declare expected boolean conditions. 
+ expect(a).assertContain(b); + expect(a).assertEqual(a); + }); + }); +} \ No newline at end of file diff --git a/pubspec.yaml b/pubspec.yaml index 8eafa254daf0224ccfb2b025079700cca49e1a48..a9cd27c04cc64a4d35bc8c8aca832f1d718a1274 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -6,13 +6,18 @@ environment: sdk: '>=2.12.0 <4.0.0' flutter: '>=1.22.0' +publish_to: none dependencies: - collection: ^1.17.0 - dart_webrtc: ^1.2.0 +# collection: ^1.17.0 + dart_webrtc: 1.2.0 + flutter: sdk: flutter - path_provider: ^2.0.2 - webrtc_interface: ^1.1.2 + path_provider: + git: + url: "https://gitee.com/openharmony-sig/flutter_packages.git" + path: "packages/path_provider/path_provider" + webrtc_interface: 1.1.2 # 最新1.2.1版本移除了VideoRenderer会导致报错,故锁定版本 dev_dependencies: flutter_test: @@ -22,6 +27,10 @@ dev_dependencies: pedantic: ^1.11.1 test: any +dependency_overrides: + meta: ^1.11.0 # 临时解决dart-flutter1.2.0版本与flutter-SDK依赖mate版本冲突 + collection: 1.17.1 + flutter: plugin: platforms: @@ -36,3 +45,5 @@ flutter: pluginClass: FlutterWebRTCPlugin linux: pluginClass: FlutterWebRTCPlugin + ohos: + pluginClass: FlutterWebRTCPlugin