import 'dart:io';
import 'package:flutter/foundation.dart';
import 'package:flutter/gestures.dart';
import 'package:flutter/material.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'trtc_cloud_def.dart';
/// @nodoc
/// MethodChannel identifier
String channelType = TRTCCloudDef.TRTC_VideoView_TextureView;
/// @nodoc
/// Flutter on Android supports two integration modes: Virtual Display and Hybrid Composition.
String viewMode = TRTCCloudDef.TRTC_VideoView_Model_Virtual;
/// Video view window, which displays the local video, remote video, or substream
///
/// Parameters:
///
/// onViewCreated: `viewId` generated by callback for view creation
///
/// key: Widget key, which can be left empty
///
/// viewType: this parameter takes effect only on Android and can be left empty
///
/// The type of view component used for Android video rendering. There are two components to choose from: `SurfaceView` and `TextureView` (the default)
///
/// If you want to use `TextureView` for rendering, pass in `TRTCCloudDef.TRTC_VideoView_TextureView` for `viewType`
///
/// If you want to use `SurfaceView` for rendering, pass in `TRTCCloudDef.TRTC_VideoView_SurfaceView` for `viewType`
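///
/// A minimal usage sketch (the view is paired with a `TRTCCloudVideoViewController`
/// created from the `viewId` delivered in `onViewCreated`; what you then render on
/// it is up to the caller):
///
/// ```dart
/// TRTCCloudVideoView(
///   viewType: TRTCCloudDef.TRTC_VideoView_TextureView,
///   onViewCreated: (viewId) {
///     // Wrap the platform view in a controller and start the local preview.
///     final controller = TRTCCloudVideoViewController(viewId);
///     controller.startLocalPreview(true);
///   },
/// )
/// ```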
class TRTCCloudVideoView extends StatefulWidget {
final ValueChanged<int>? onViewCreated;
final String? viewType;
final String? viewMode;
final CustomRender? textureParam;
final Set<Factory<OneSequenceGestureRecognizer>>? gestureRecognizers;
final PlatformViewHitTestBehavior? hitTestBehavior;
const TRTCCloudVideoView(
{Key? key,
this.viewType,
this.viewMode,
this.textureParam,
this.onViewCreated,
this.hitTestBehavior,
this.gestureRecognizers})
: super(key: key);
@override
State<StatefulWidget> createState() =>
TRTCCloudVideoViewState(this.viewType, this.viewMode, this.textureParam);
}
/// @nodoc
class TRTCCloudVideoViewState extends State<TRTCCloudVideoView> {
/// Texture ID of the main video stream
int? _textureId;
/// Texture ID of the sub video stream
int? _subTextureId;
CustomRender? _textureParam;
/// Drag offset of the sub window
Offset _offset = Offset(0, 0);
TRTCCloudVideoViewState(
String? viewType, String? mode, CustomRender? textureParam) {
_textureParam = textureParam;
if (viewType != null) {
channelType = viewType;
}
if (mode != null) {
viewMode = mode;
}
if (kIsWeb ||
(Platform.isIOS &&
viewType == TRTCCloudDef.TRTC_VideoView_SurfaceView)) {
// iOS does not support TRTC_VideoView_SurfaceView; fall back to TextureView.
channelType = TRTCCloudDef.TRTC_VideoView_TextureView;
}
if (!kIsWeb && (Platform.isWindows || Platform.isMacOS)) {
// macOS / Windows only support texture rendering.
channelType = TRTCCloudDef.TRTC_VideoView_Texture;
}
}
@override
void didUpdateWidget(TRTCCloudVideoView oldWidget) {
super.didUpdateWidget(oldWidget);
if (widget.textureParam != null &&
oldWidget.textureParam != null &&
Platform.isAndroid) {
if (widget.textureParam!.width != oldWidget.textureParam!.width ||
widget.textureParam!.height != oldWidget.textureParam!.height) {
if (widget.textureParam!.isLocal) {
// Update the render size when the width or height changes. To keep the image undistorted there may be black borders; if you do not want black borders, call `setVideoEncoderParam` to set a resolution close to the view's width and height.
MethodChannel('trtcCloudChannel').invokeMethod(
'updateLocalVideoRender', {
"width": widget.textureParam!.width,
"height": widget.textureParam!.height
});
} else {
MethodChannel('trtcCloudChannel')
.invokeMethod('updateRemoteVideoRender', {
"textureID": _textureId,
"width": widget.textureParam!.width,
"height": widget.textureParam!.height
});
}
}
}
}
@override
void initState() {
super.initState();
print("🚀 initState TRTCCloudVideoView viewType : [${widget.viewType}]");
if (channelType == TRTCCloudDef.TRTC_VideoView_Texture &&
_textureParam != null) {
print("🚀 初始化 Texture 数据通道 用于 macOS/Windows");
print(
"🚀 纹理参数: ${_textureParam!.width}X${_textureParam!.height} type: ${_textureParam!.streamType}");
if (_textureParam!.isLocal) {
MethodChannel('trtcCloudChannel')
.invokeMethod('setLocalVideoRenderListener', {
"userId": _textureParam!.userId,
"isFront":
_textureParam!.isFront == null ? true : _textureParam!.isFront,
"streamType": _textureParam!.streamType,
"width": _textureParam!.width,
"height": _textureParam!.height
}).then((value) => {
print(
"🚀 setLocalVideoRenderListener: registered the custom render callback for the local video, got texture $value, resolution ${_textureParam!.width}x${_textureParam!.height}"),
setState(() {
_textureId = value;
})
});
} else {
/// Start rendering the mixed-stream sub-window video
MethodChannel('trtcCloudChannel')
.invokeMethod('setRemoteMixVideoRenderListener', {
"userId": _textureParam!.userId,
"streamType": _textureParam!.streamType,
"width": _textureParam!.width,
"height": _textureParam!.height
}).then((value) {
print(
"🚀 setRemoteMixVideoRenderListener: registered the custom render callback for the remote sub-window video, got texture $value, resolution ${_textureParam!.width}x${_textureParam!.height}");
/// `value` is a string of two ints joined by "&", e.g. "23&24"
/// Split `value` into the two texture IDs
int firstId = int.parse(value.split("&")[0]);
int secondId = int.parse(value.split("&")[1]);
setState(() {
_textureId = firstId;
_subTextureId = secondId;
});
});
}
return;
}
}
@override
void dispose() {
super.dispose();
if (channelType == TRTCCloudDef.TRTC_VideoView_Texture &&
_textureId != null) {
MethodChannel('trtcCloudChannel')
.invokeMethod('unregisterTexture', {"textureID": _textureId});
}
if (channelType == TRTCCloudDef.TRTC_VideoView_Texture &&
_subTextureId != null) {
MethodChannel('trtcCloudChannel')
.invokeMethod('unregisterTexture', {"textureID": _subTextureId});
}
}
@override
Widget build(BuildContext context) {
print(
"🚀 build TRTCCloudVideoView channelType: [$channelType] viewMode: [$viewMode]");
// Texture rendering. In this mode the push/pull flow starts by default and the onViewCreated callback is not triggered. macOS and Windows only support this rendering mode.
if (channelType == TRTCCloudDef.TRTC_VideoView_Texture) {
if (_textureId != null) {
return Stack(children: [
Texture(textureId: _textureId!),
if (_subTextureId != null)
Positioned(
left: _offset.dx,
top: _offset.dy,
child: Draggable(
child: _buildSubWindow(_textureId!),
childWhenDragging: Container(),
feedback: _buildSubWindow(_textureId!),
onDragUpdate: (details) {
setState(() {
_offset = Offset(_offset.dx + details.delta.dx,
_offset.dy + details.delta.dy);
});
},
),
)
]);
}
return Container();
}
if (kIsWeb) {
print("🚀 桌面浏览器 使用 PlatformViewLink");
return PlatformViewLink(
viewType: channelType,
surfaceFactory:
(BuildContext context, PlatformViewController controller) {
return PlatformViewSurface(
controller: controller,
hitTestBehavior: PlatformViewHitTestBehavior.transparent,
gestureRecognizers: widget.gestureRecognizers != null
? widget.gestureRecognizers!
: const <Factory<OneSequenceGestureRecognizer>>{},
);
},
onCreatePlatformView: (PlatformViewCreationParams params) {
final controller =
_HtmlElementViewController(params.id, params.viewType);
controller._initialize().then((_) {
params.onPlatformViewCreated(params.id);
_onPlatformViewCreated(params.id);
});
return controller;
},
);
} else if (Platform.isAndroid) {
if (viewMode == TRTCCloudDef.TRTC_VideoView_Model_Virtual) {
print("🚀 安卓 使用 AndroidView");
return AndroidView(
hitTestBehavior: widget.hitTestBehavior == null
? PlatformViewHitTestBehavior.opaque
: widget.hitTestBehavior!,
viewType: channelType,
onPlatformViewCreated: _onPlatformViewCreated,
gestureRecognizers: widget.gestureRecognizers,
);
} else {
print("🚀 使用 PlatformViewLink");
return PlatformViewLink(
viewType: channelType,
surfaceFactory:
(BuildContext context, PlatformViewController controller) {
return PlatformViewSurface(
controller: controller as AndroidViewController,
hitTestBehavior: PlatformViewHitTestBehavior.transparent,
gestureRecognizers: widget.gestureRecognizers != null
? widget.gestureRecognizers!
: const <Factory<OneSequenceGestureRecognizer>>{},
);
},
onCreatePlatformView: (PlatformViewCreationParams params) {
return PlatformViewsService.initSurfaceAndroidView(
id: params.id,
viewType: channelType,
layoutDirection: TextDirection.ltr,
creationParamsCodec: StandardMessageCodec(),
)
..addOnPlatformViewCreatedListener((id) {
params.onPlatformViewCreated(id);
_onPlatformViewCreated(id);
})
..create();
},
);
}
} else if (Platform.isIOS) {
return UiKitView(
hitTestBehavior: widget.hitTestBehavior == null
? PlatformViewHitTestBehavior.opaque
: widget.hitTestBehavior!,
viewType: channelType,
onPlatformViewCreated: _onPlatformViewCreated,
gestureRecognizers: widget.gestureRecognizers,
);
} else {
return Center(
child: Text(
"This platform does not support `Platform View`",
style: TextStyle(color: Colors.red, fontWeight: FontWeight.bold),
),
);
}
}
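/// Builds the draggable sub-window: the same texture is rendered twice, each
/// copy clipped to one rounded half, with the second copy translated so the
/// two clipped regions line up as a single picture-in-picture view.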
Widget _buildSubWindow(int textureId) {
return Container(
child: SizedBox(
width: 360,
height: 780,
child: Column(
children: [
Expanded(
child: ClipRRect(
clipper: LeftClipper(),
child: Texture(textureId: textureId),
),
),
Expanded(
child: Transform.translate(
offset: Offset(-180, -465),
child: ClipRRect(
clipper: RightClipper(),
child: Texture(textureId: textureId),
),
),
),
],
)),
);
}
void _onPlatformViewCreated(int id) {
widget.onViewCreated?.call(id);
}
}
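/// Clips the left half of the sub-window texture into a rounded rectangle
/// (used by `_buildSubWindow` for the mixed-stream picture-in-picture layout).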
class LeftClipper extends CustomClipper<RRect> {
@override
RRect getClip(Size size) {
final rrect = RRect.fromLTRBAndCorners(
0.0,
250,
size.width / 2,
size.height - 50,
bottomRight: Radius.circular(16.0),
bottomLeft: Radius.circular(16.0),
);
return rrect;
}
@override
bool shouldReclip(CustomClipper<RRect> oldClipper) => false;
}
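/// Clips the right half of the sub-window texture into a rounded rectangle
/// (counterpart of `LeftClipper` in `_buildSubWindow`).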
class RightClipper extends CustomClipper<RRect> {
@override
RRect getClip(Size size) {
final rrect = RRect.fromLTRBAndCorners(
size.width / 2,
230,
size.width,
size.height - 60,
topRight: Radius.circular(16.0),
topLeft: Radius.circular(16.0),
);
return rrect;
}
@override
bool shouldReclip(CustomClipper<RRect> oldClipper) => false;
}
class _HtmlElementViewController extends PlatformViewController
with WidgetsBindingObserver {
_HtmlElementViewController(
this.viewId,
this.viewType,
);
@override
final int viewId;
/// The unique identifier for the HTML view type to be embedded by this widget.
///
/// A PlatformViewFactory for this type must have been registered.
final String viewType;
bool _initialized = false;
Future<void> _initialize() async {
final args = <String, dynamic>{
'id': viewId,
'viewType': viewType,
};
await SystemChannels.platform_views.invokeMethod<void>('create', args);
_initialized = true;
}
@override
Future<void> clearFocus() async {
// Currently this does nothing on Flutter Web.
// Implement this. See https://github.com/flutter/flutter/issues/39496
}
@override
Future<void> dispatchPointerEvent(PointerEvent event) async {
// We do not dispatch pointer events to HTML views because they may contain
// cross-origin iframes, which only accept user-generated events.
}
@override
Future<void> dispose() async {
if (_initialized) {
await SystemChannels.platform_views.invokeMethod<void>('dispose', viewId);
}
}
}
/// @nodoc
class TRTCCloudVideoViewController {
TRTCCloudVideoViewController(int id)
: _channel = MethodChannel(channelType + '_$id');
final MethodChannel _channel;
/// Enable the preview image of local video
///
/// When the first camera video frame starts to be rendered, you will receive the `onFirstVideoFrame(null)` callback in `TRTCCloudListener`.
///
/// Parameters:
///
/// frontCamera true: front camera; false: rear camera.
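///
/// Sketch (assumes `controller` is the `TRTCCloudVideoViewController` obtained
/// in `onViewCreated`):
///
/// ```dart
/// await controller.startLocalPreview(true); // true = front camera
/// ```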
Future<void> startLocalPreview(
bool frontCamera, // true: front camera; false: rear camera.
) {
print("🚀 本地视频预览开启");
return _channel.invokeMethod('startLocalPreview', {
"frontCamera": frontCamera,
});
}
/// Update the preview image of local video
///
/// Parameters:
///
/// viewId Control that carries the video image
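///
/// Sketch (`newViewId` is a placeholder for a `viewId` from another view's
/// `onViewCreated` callback; `controller` is this view's `TRTCCloudVideoViewController`):
///
/// ```dart
/// await controller.updateLocalView(newViewId);
/// ```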
Future<void> updateLocalView(int viewId) {
return _channel.invokeMethod('updateLocalView', {
"viewId": viewId,
});
}
/// Update the window of remote video image
///
/// Parameters:
///
/// userId `userId` of the specified remote user
///
/// streamType: video stream type of the `userId` specified for watching
///
/// viewId Control that carries the video image
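///
/// Sketch (`remoteUserId` and `newViewId` are placeholders; `controller` is the
/// view's `TRTCCloudVideoViewController`):
///
/// ```dart
/// await controller.updateRemoteView(
///     remoteUserId, TRTCCloudDef.TRTC_VIDEO_STREAM_TYPE_BIG, newViewId);
/// ```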
Future<void> updateRemoteView(String userId, int streamType, int viewId) {
return _channel.invokeMethod(
'updateRemoteView',
{"viewId": viewId, "streamType": streamType, "userId": userId},
);
}
/// Display remote video image or substream
///
/// Parameters:
///
/// userId `userId` of the specified remote user
///
/// streamType: video stream type of the `userId` specified for watching:
///
/// * HD big image: TRTCCloudDef.TRTC_VIDEO_STREAM_TYPE_BIG
///
/// * Smooth small image: TRTCCloudDef.TRTC_VIDEO_STREAM_TYPE_SMALL
///
/// * Substream (screen sharing): TRTCCloudDef.TRTC_VIDEO_STREAM_TYPE_SUB
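///
/// Sketch (`remoteUserId` is a placeholder for a user already in the room;
/// `controller` is the view's `TRTCCloudVideoViewController`):
///
/// ```dart
/// await controller.startRemoteView(
///     remoteUserId, TRTCCloudDef.TRTC_VIDEO_STREAM_TYPE_BIG);
/// ```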
Future<void> startRemoteView(String userId, int streamType) {
print("🚀 远程视频预览开启 通道: $channelType");
return _channel.invokeMethod(
'startRemoteView', {"userId": userId, "streamType": streamType});
}
}