Created
August 29, 2024 19:40
-
-
Save tqk2811/29b4a8567d11d5e4cad0d41f1bfcf2ae to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Render pipeline: builds a single ffmpeg filter-graph command that
//   1) extracts the text/overlay region from the source "text" video (HSV mask images or YUV geq filtering),
//   2) composites it over a concatenation of background videos (speed-changed, cropped, padded),
//   3) optionally overlays a border color, logo and "MC" image,
//   4) renders to a temp .mp4 in the output dir, then renames on success.
// Status is reported through VideoTextVM.RenderWorkStatus; all failures land in the catch below.
try
{
VideoTextVM.RenderWorkStatus = RenderWorkStatus.Working;
// Output directory must be chosen up front (message is user-facing, Vietnamese UI).
if (string.IsNullOrWhiteSpace(VideoOutputConfigure.SaveDir))
throw new InvalidOperationException("Chưa chọn thư mục đầu ra");
Directory.CreateDirectory(VideoTextVM.TmpDir);
// Root ffmpeg argument builder; cfr vsync keeps constant frame rate for the concat/overlay graph.
FFmpegArg ffmpegArg = new FFmpegArg().OverWriteOutput().VSync(VSyncMethod.cfr);
FileInfo fileInfo = new FileInfo(this.VideoTextVM.Data.VideoPath);
// Probe the source "text" video; both audio and video streams are required.
IMediaAnalysis mediaAnalysis = await FFProbe.AnalyseAsync(this.VideoTextVM.Data.VideoPath);
if (mediaAnalysis.PrimaryAudioStream is null || mediaAnalysis.PrimaryVideoStream is null)
throw new InvalidOperationException($"Đầu vào không có audio hoặc video");
VideoFileInput baseVideoFileInput = new VideoFileInput(this.VideoTextVM.Data.VideoPath);
TimeSpan totalTime = mediaAnalysis.PrimaryVideoStream.Duration;
// Seek past the configured skip plus any non-zero stream start time.
// NOTE(review): totalTime is reduced only by VideoSkip, not by StartTime, even though
// both are seeked past — confirm Duration already excludes StartTime, otherwise the
// output may run slightly long.
if (VideoTextConfigure.VideoSkip != TimeSpan.Zero || mediaAnalysis.PrimaryVideoStream.StartTime != TimeSpan.Zero)
{
totalTime -= VideoTextConfigure.VideoSkip;
//fix video duration
baseVideoFileInput.Ss(VideoTextConfigure.VideoSkip + mediaAnalysis.PrimaryVideoStream.StartTime);
}
// Demo mode: clamp output length to min(DemoTime, totalTime).
if (_isDemo)
{
totalTime = Setting.DemoTime > totalTime ? totalTime : Setting.DemoTime;
baseVideoFileInput.T(totalTime);
}
VideoMap videoMap = ffmpegArg.AddVideoInput(baseVideoFileInput);
AudioMap audioMap = videoMap.AudioMaps.First();
// Optional pitch shift: pre-render the shifted audio with NAudio to a temp file,
// then use that file as the audio input instead of the source stream.
// NOTE(review): WaveFileWriter writes WAV data but the file is named "PitchFactor.mp3" —
// ffmpeg sniffs the real format so it works, but the extension is misleading; confirm intended.
if (VideoTextConfigure.PitchFactor != 1.0)
{
using AudioFileReader audioFileReader = new AudioFileReader(this.VideoTextVM.Data.VideoPath);
SMBPitchShiftingSampleProvider smb = new SMBPitchShiftingSampleProvider(audioFileReader);
smb.PitchFactor = (float)VideoTextConfigure.PitchFactor;
string filePath = Path.Combine(VideoTextVM.TmpDir, "PitchFactor.mp3");
WaveFileWriter.CreateWaveFile(filePath, new SampleToWaveProvider16(smb));
AudioFileInput audioFileInput = new AudioFileInput(filePath);
audioMap = ffmpegArg.AddAudiosInput(audioFileInput).First();
// Reset timestamps so the replacement audio starts at t=0.
audioMap = audioMap
.AsetptsFilter("PTS-STARTPTS")
.MapOut;
}
// Crop rectangle for the text region; width/height are clamped to the frame and
// rounded down to even numbers (required by yuv420p/most encoders).
Rectangle cropSize = new Rectangle(
VideoTextConfigure.CropPositionX,
VideoTextConfigure.CropPositionY,
Math.Min(VideoTextConfigure.CropPositionW, mediaAnalysis.PrimaryVideoStream.Width - VideoTextConfigure.CropPositionX) / 2 * 2,
Math.Min(VideoTextConfigure.CropPositionH, mediaAnalysis.PrimaryVideoStream.Height - VideoTextConfigure.CropPositionY) / 2 * 2
);
string WorkingDir;           // ffmpeg working dir — must match where relative input paths (concat txt) live
ImageMap alphaImageMap;      // alpha mask of the extracted text
ImageMap? borderImageMap = null; // optional pre-colored border layer (HSV mode only)
switch (base._configureVM.VideoTextConfigure.FilterColorType)
{
case FilterColorType.Hsv:
{
// HSV mode consumes per-frame mask PNGs produced by an earlier OpenCV pass
// in TmpDir ("o*.png" text masks, "b*.png" border masks), concatenated via
// ffmpeg's concat demuxer. Relative names in the txt file require WorkingDir = TmpDir.
WorkingDir = VideoTextVM.TmpDir;
{
var gray_files = Directory.GetFiles(VideoTextVM.TmpDir, "o*.png");
if (gray_files.Length == 0)
throw new InvalidOperationException("Không thấy file ảnh, hãy chạy xử lý opencv trước");
string txtName = "VideoConcatDemux.txt";
StringBuilder stringBuilder = new StringBuilder();
gray_files
.Select(x => new FileInfo(x).Name)
.ForEach(x => stringBuilder.AppendLine($"file '{x}'"));
File.WriteAllText(Path.Combine(VideoTextVM.TmpDir, txtName), stringBuilder.ToString());
ImageFilesConcatInput opencvVideoCropFileInput = ImageFilesConcatInput.ConcatDemuxer(txtName);
// Mask frames were generated 1:1 with source frames, so reuse the source frame rate.
opencvVideoCropFileInput.ImageInputAVStream.R(mediaAnalysis.PrimaryVideoStream.FrameRate);
if (VideoTextConfigure.VideoSkip != TimeSpan.Zero)
{
opencvVideoCropFileInput.Ss(VideoTextConfigure.VideoSkip);
}
opencvVideoCropFileInput.T(totalTime);
// Black pixels in the mask become transparent (colorkey), then the alpha
// channel is extracted as a standalone grayscale mask stream.
alphaImageMap = ffmpegArg.AddImagesInput(opencvVideoCropFileInput).First()
.FormatFilter(PixFmt.bgra)
.MapOut
.ColorKeyFilter()
.Color(Color.Black)
.MapOut
.AlphaextractFilter()
.MapOut;
}
if (VideoTextConfigure.IsHsvBorderColor)
{
// Same mask-extraction pipeline for the border images ("b*.png").
var gray_border_files = Directory.GetFiles(VideoTextVM.TmpDir, "b*.png");
if (gray_border_files.Length == 0)
throw new InvalidOperationException("Không thấy file ảnh border, hãy chạy xử lý opencv lại");
string txtName = "BorderConcatDemux.txt";
StringBuilder stringBuilder = new StringBuilder();
gray_border_files
.Select(x => new FileInfo(x).Name)
.ForEach(x => stringBuilder.AppendLine($"file '{x}'"));
File.WriteAllText(Path.Combine(VideoTextVM.TmpDir, txtName), stringBuilder.ToString());
ImageFilesConcatInput borderVideoCropFileInput = ImageFilesConcatInput.ConcatDemuxer(txtName);
borderVideoCropFileInput.ImageInputAVStream.R(mediaAnalysis.PrimaryVideoStream.FrameRate);
if (VideoTextConfigure.VideoSkip != TimeSpan.Zero)
{
borderVideoCropFileInput.Ss(VideoTextConfigure.VideoSkip);
}
borderVideoCropFileInput.T(totalTime);
ImageMap borderAlphaImageMap = ffmpegArg.AddImagesInput(borderVideoCropFileInput).First()
.FormatFilter(PixFmt.bgra)
.MapOut
.ColorKeyFilter()
.Color(Color.Black)
.MapOut
.AlphaextractFilter()
.MapOut;
// Solid border-color frame merged with the border alpha mask → colored border layer.
borderImageMap = ffmpegArg.FilterGraph
.ColorFilter()
.Color(VideoTextConfigure.HsvBorderColor)
.Sar(1)
.Duration(totalTime)
.Size(new Size(cropSize.Width, cropSize.Height))
.Rate(mediaAnalysis.PrimaryVideoStream.FrameRate)
.MapOut
.FormatFilter(PixFmt.bgra)
.MapOut
.AlphamergeFilter(borderAlphaImageMap)
.MapOut;
}
}
break;
case FilterColorType.Yuv:
{
// YUV mode derives the mask directly from the source video: crop the text
// region, apply user geq expressions per plane, then desaturate (hue s=0).
WorkingDir = Singleton.ExeDir;
alphaImageMap = videoMap.ImageMaps.First()
.FormatFilter(PixFmt.yuv420p)
.MapOut
.CropFilter()
.X(cropSize.X)
.Y(cropSize.Y)
.W(cropSize.Width)
.H(cropSize.Height)
.MapOut
.GeqFilter()
// Only apply a plane expression when the user configured one.
.AndSetIf(!string.IsNullOrWhiteSpace(VideoTextConfigure.GeqConfigure.Luminance), x => x.Lum(VideoTextConfigure.GeqConfigure.Luminance))
.AndSetIf(!string.IsNullOrWhiteSpace(VideoTextConfigure.GeqConfigure.ChrominanceBlue), x => x.Cb(VideoTextConfigure.GeqConfigure.ChrominanceBlue))
.AndSetIf(!string.IsNullOrWhiteSpace(VideoTextConfigure.GeqConfigure.ChrominanceRed), x => x.Cr(VideoTextConfigure.GeqConfigure.ChrominanceRed))
.MapOut
.HueFilter()
.S(0)
.MapOut;
}
break;
default: throw new NotSupportedException(base._configureVM.VideoTextConfigure.FilterColorType.ToString());
}
// Build the visible text layer: either recolor the text (solid color + alpha mask)
// or keep the original pixels from the cropped source region.
ImageMap cropImageMap;
if (VideoTextConfigure.IsChangeTextColor)
{
cropImageMap = ffmpegArg.FilterGraph
.ColorFilter()
.Color(VideoTextConfigure.NewTextColor)
.Sar(1)
.Duration(totalTime)
.Size(new Size(cropSize.Width, cropSize.Height))
.Rate(mediaAnalysis.PrimaryVideoStream.FrameRate)
.MapOut
.FormatFilter(PixFmt.bgra)
.MapOut
.AlphamergeFilter(alphaImageMap)
.MapOut;
}
else
{
cropImageMap = videoMap.ImageMaps.First()
.CropFilter()
.X(cropSize.X)
.Y(cropSize.Y)
.W(cropSize.Width)
.H(cropSize.Height)
.MapOut
.FormatFilter(PixFmt.bgra)
.MapOut
.AlphamergeFilter(alphaImageMap)
.MapOut;
}
// Apply speed change to the text layer (and border layer, if any), audio included.
// ChangeSpeed is a project helper — presumably setpts/atempo based; it also rescales totalTime.
if (VideoTextConfigure.VideoSpeed != 1.0)
{
if (borderImageMap is not null)
{
(borderImageMap, _, _) = ChangeSpeed(VideoTextConfigure.VideoSpeed, borderImageMap, null, null);
}
#pragma warning disable CS8600 // Converting null literal or possible null value to non-nullable type.
(cropImageMap, audioMap, TimeSpan? _totalTime) = ChangeSpeed(VideoTextConfigure.VideoSpeed, cropImageMap, audioMap, totalTime);
#pragma warning restore CS8600 // Converting null literal or possible null value to non-nullable type.
// _totalTime is non-null here because a non-null totalTime was passed in.
totalTime = _totalTime!.Value;
}
cropImageMap = cropImageMap.FixImage(mediaAnalysis.PrimaryVideoStream.FrameRate, PixFmt.rgba);
// ---- Background selection & assembly ----
List<VideoBackgroundVM> videoBackgroundVMs = new List<VideoBackgroundVM>();
// Tracks how much of each background gets consumed this render; committed to the VMs
// only after a successful render (see exitcode == 0 branch).
Dictionary<VideoBackgroundVM, TimeSpan> dict_UsedDuration = new();
ImageMap backgroundImagemap;
IMediaAnalysis? videoBackgroundAnalysis = null; // first background's analysis, used for bitrate option
{
//background
// Pick backgrounds (random or sequential) until their remaining (speed-adjusted)
// duration covers totalTime; loop-one-file mode needs only a single pick.
while (videoBackgroundVMs.Sum(x => x.TotalDuration - x.UsedDuration) / VideoBackgroundConfigure.VideoSpeed < totalTime)
{
var backgrounds = _configureVM.VideoBackgrounds
.Except(videoBackgroundVMs)
.Where(x => x.TotalDuration > TimeSpan.Zero && x.UsedDuration + TimeSpan.FromSeconds(1) < x.TotalDuration);
VideoBackgroundVM? videoBackgroundVM = VideoBackgroundConfigure.IsSelectRandom ? backgrounds.GetRandomItem() : backgrounds.FirstOrDefault();
if (videoBackgroundVM is null)
throw new Exception($"Video nền không đủ thời lượng");
if (VideoBackgroundConfigure.VideoSkip != TimeSpan.Zero && videoBackgroundVM.UsedDuration == TimeSpan.Zero)
{
videoBackgroundVM.UsedDuration = VideoBackgroundConfigure.VideoSkip;//skip at first background
}
videoBackgroundVMs.Add(videoBackgroundVM);
if (VideoBackgroundConfigure.IsLoopOneFile)
break;
}
Dictionary<VideoBackgroundVM, IMediaAnalysis> dict_MediaAnalysis = new();
Dictionary<VideoBackgroundVM, ImageMap> dict_imageMap = new();
TimeSpan useDuration = TimeSpan.Zero;
// For each chosen background: probe it, trim to the slice being consumed,
// zoom/crop to the text video's frame, and pad (blurred) if smaller.
// NOTE(review): useDuration/dict mutations assume sequential execution of this
// ForEachAsync — confirm the helper iterates in order, not concurrently.
await videoBackgroundVMs.ForEachAsync(async x =>
{
dict_MediaAnalysis[x] = await FFProbe.AnalyseAsync(x.Data.VideoPath);
if (videoBackgroundAnalysis is null)
{
videoBackgroundAnalysis = dict_MediaAnalysis[x];
}
if (dict_MediaAnalysis[x].PrimaryVideoStream is null)
{
// Zero its duration so it is never selected again, then abort the render.
x.TotalDuration = TimeSpan.Zero;
throw new InvalidOperationException($"File {x.Data.VideoPath} không có video");
}
ImageFileInput imageFileInput = new ImageFileInput(x.Data.VideoPath);
if (VideoBackgroundConfigure.IsLoopOneFile)
{
// Infinite loop of a single file; ffmpeg output duration is bounded elsewhere (Shortest overlays).
imageFileInput.StreamLoop(-1);
dict_UsedDuration[x] = x.TotalDuration;
}
else
{
TimeSpan currentUse = x.TotalDuration - x.UsedDuration;
if ((useDuration + currentUse) / VideoBackgroundConfigure.VideoSpeed > totalTime)
{
//trim
currentUse -= ((useDuration + currentUse) / VideoBackgroundConfigure.VideoSpeed - totalTime) * VideoBackgroundConfigure.VideoSpeed;
}
imageFileInput.Ss(x.UsedDuration).T(currentUse);
useDuration += currentUse;
dict_UsedDuration[x] = currentUse;
}
dict_imageMap[x] = ffmpegArg.AddImagesInput(imageFileInput).First();
// Zoom, keeping even dimensions.
Size backgroundNewSize = new Size(
(int)(dict_MediaAnalysis[x].PrimaryVideoStream!.Width * VideoBackgroundConfigure.ZoomValue) / 2 * 2,
(int)(dict_MediaAnalysis[x].PrimaryVideoStream!.Height * VideoBackgroundConfigure.ZoomValue) / 2 * 2);
dict_imageMap[x] = dict_imageMap[x]
.ScaleFilter()
.W(backgroundNewSize.Width)
.H(backgroundNewSize.Height)
.MapOut;
// Center-crop to no larger than the text video's frame.
Size backgroundCropSize = new Size(
Math.Min(mediaAnalysis.PrimaryVideoStream.Width, backgroundNewSize.Width),
Math.Min(mediaAnalysis.PrimaryVideoStream.Height, backgroundNewSize.Height)
);
dict_imageMap[x] = dict_imageMap[x]
.CropFilter()
.X((backgroundNewSize.Width - backgroundCropSize.Width) / 2)
.Y((backgroundNewSize.Height - backgroundCropSize.Height) / 2)
.W(backgroundCropSize.Width)
.H(backgroundCropSize.Height)
.MapOut;
if (backgroundCropSize.Width < mediaAnalysis.PrimaryVideoStream.Width ||
backgroundCropSize.Height < mediaAnalysis.PrimaryVideoStream.Height)//add padding
{
dict_imageMap[x] = dict_imageMap[x].MakeBlurredBackground(
new Size(mediaAnalysis.PrimaryVideoStream.Width, mediaAnalysis.PrimaryVideoStream.Height),
mediaAnalysis.PrimaryVideoStream.FrameRate
);
}
else
{
dict_imageMap[x] = dict_imageMap[x]
.FixImage(mediaAnalysis.PrimaryVideoStream.FrameRate);
}
});
// Concatenate all background slices into one stream, then apply background speed.
var concatGroups = videoBackgroundVMs.Select(x => new ConcatGroup(dict_imageMap[x])).ToList();
backgroundImagemap = concatGroups
.ConcatFilter()
.ImageMapsOut
.First();
(backgroundImagemap, _, _) = ChangeSpeed(VideoBackgroundConfigure.VideoSpeed, backgroundImagemap, null, useDuration);
backgroundImagemap = backgroundImagemap.FixImage(mediaAnalysis.PrimaryVideoStream.FrameRate, PixFmt.rgba);
}
// Draw a filled box on the background where the text layer will sit.
// OverlayPosX == -1 means "right-aligned / full width" throughout this section.
backgroundImagemap = backgroundImagemap
.DrawBoxFilter()
.X(VideoBackgroundConfigure.OverlayPosX == -1 ? 0 : VideoBackgroundConfigure.OverlayPosX)
.Y($"min({VideoBackgroundConfigure.OverlayPosY}, ih-{cropSize.Height})")
.W(VideoBackgroundConfigure.OverlayPosX == -1 ? "iw" : cropSize.Width)
.H(cropSize.Height)
.Color(VideoOutputConfigure.DrawBoxColor)
.ThicknessFill()
.MapOut;
// Composite the text layer over the background; Shortest(true) bounds the looping background.
ImageMap imageMap = cropImageMap
.OverlayFilterOn(backgroundImagemap)
.X(VideoBackgroundConfigure.OverlayPosX == -1 ? "main_w-overlay_w" : VideoBackgroundConfigure.OverlayPosX)
.Y($"min({VideoBackgroundConfigure.OverlayPosY},main_h-overlay_h)")
.Eval(OverlayEval.Frame)
.Shortest(true)
.MapOut
;
// Border layer goes on top of the composited result, at the same position.
if (borderImageMap is not null)
{
imageMap = borderImageMap
.OverlayFilterOn(imageMap.FixImage(null, PixFmt.bgra))
.X(VideoBackgroundConfigure.OverlayPosX == -1 ? "main_w-overlay_w" : VideoBackgroundConfigure.OverlayPosX)
.Y($"min({VideoBackgroundConfigure.OverlayPosY},main_h-overlay_h)")
.Eval(OverlayEval.Frame)
.Shortest(true)
.MapOut;
}
imageMap = imageMap
.FixImage(mediaAnalysis.PrimaryVideoStream.FrameRate, PixFmt.bgra);
// Optional watermark/logo: scaled, alpha-faded via geq, overlaid and repeated to the end.
if (File.Exists(VideoOutputConfigure.LogoConfigure.FilePath))
{
ImageFileInput logoInput = new(VideoOutputConfigure.LogoConfigure.FilePath);
imageMap = ffmpegArg.AddImagesInput(logoInput).First()
.FixImage(null, PixFmt.argb)
.ScaleFilter()
.W($"round({VideoOutputConfigure.LogoConfigure.Scale}*iw)")
.H($"round({VideoOutputConfigure.LogoConfigure.Scale}*ih)")
.MapOut
.GeqFilter()
.R("r(X,Y)")
.A($"{1.0 * VideoOutputConfigure.LogoConfigure.Alpha / 255}*alpha(X,Y)")
.MapOut
.OverlayFilterOn(imageMap)
.X($"min({VideoOutputConfigure.LogoConfigure.OverlayPosX},main_w-overlay_w)")
.Y($"min({VideoOutputConfigure.LogoConfigure.OverlayPosY},main_h-overlay_h)")
.Shortest(false)
.Eval(OverlayEval.Frame)
.Repeatlast(true)
.MapOut;
}
// Optional "MC" overlay (animated image/video looped forever, optionally color-keyed).
if (File.Exists(VideoOutputConfigure.McConfigure.FilePath))
{
ImageFileInput mcInput = new(VideoOutputConfigure.McConfigure.FilePath);
mcInput.StreamLoop(-1);
ImageMap mcImageMap = ffmpegArg.AddImagesInput(mcInput).First()
.FixImage(mediaAnalysis.PrimaryVideoStream.FrameRate, PixFmt.argb);
if (VideoOutputConfigure.McConfigure.IsColorKey)
{
mcImageMap = mcImageMap
.ColorKeyFilter()
.Color(VideoOutputConfigure.McConfigure.ColorKey)
.Similarity((float)VideoOutputConfigure.McConfigure.ColorKeySimilarity)
.MapOut;
}
imageMap = mcImageMap
.ScaleFilter()
.W($"round({VideoOutputConfigure.McConfigure.Scale}*iw)")
.H($"round({VideoOutputConfigure.McConfigure.Scale}*ih)")
.MapOut
.OverlayFilterOn(imageMap)
.X($"min({VideoOutputConfigure.McConfigure.OverlayPosX},main_w-overlay_w)")
.Y($"min({VideoOutputConfigure.McConfigure.OverlayPosY},main_h-overlay_h)")
.Shortest(true)
.Eval(OverlayEval.Frame)
.MapOut;
}
// ---- Output naming & encoding ----
// Derive a unique output name ("name (1).mp4", "name (2).mp4", ...) and render to a
// GUID temp name first so a crashed render never leaves a half-written final file.
string fileName = fileInfo.Name.Substring(0, fileInfo.Name.Length - fileInfo.Extension.Length);
string newName = (_isDemo ? $"[Demo] {fileName}" : fileName) + ".mp4";
int fileCount = 1;
while (File.Exists(Path.Combine(VideoOutputConfigure.SaveDir!, newName)))
{
newName = $"{(_isDemo ? "[Demo] " : string.Empty)}{fileName} ({fileCount++}).mp4";
}
string tmpName = $"{Guid.NewGuid().ToString()}.mp4";
imageMap = imageMap
.FixImage(mediaAnalysis.PrimaryVideoStream.FrameRate, PixFmt.yuv420p);
VideoFileOutput videoFileOutput = new VideoFileOutput(Path.Combine(VideoOutputConfigure.SaveDir!, tmpName), imageMap, audioMap);
videoFileOutput.ImageOutputAVStream
.Codec("libx264")
.Fps(mediaAnalysis.PrimaryVideoStream.FrameRate);
// Output bitrate: copy from text video, copy from first background, or user-specified.
switch (VideoOutputConfigure.OutputBitrateSource)
{
case OutputBitrateSource.VideoText:
videoFileOutput.ImageOutputAVStream.B((int)mediaAnalysis.PrimaryVideoStream.BitRate);
break;
case OutputBitrateSource.VideoBackgound:
// videoBackgroundAnalysis was set for the first background above; null-forgiven here.
videoFileOutput.ImageOutputAVStream.B((int)videoBackgroundAnalysis!.PrimaryVideoStream!.BitRate);
break;
case OutputBitrateSource.Custom:
videoFileOutput.ImageOutputAVStream.B((int)VideoOutputConfigure.Birate);
break;
default:
throw new NotSupportedException(VideoOutputConfigure.OutputBitrateSource.ToString());
}
ffmpegArg.AddOutput(videoFileOutput);
// Run ffmpeg via the project's RenderService; Time drives progress reporting,
// WorkingDirectory must match where the concat txt files were written (HSV mode).
RenderService renderService = new RenderService();
int exitcode = await renderService.StartRun(new RenderData()
{
FFmpegPath = Singleton.FFmpegPath,
RenderItems = new List<RenderItem>()
{
new RenderItem()
{
Arguments = ffmpegArg.GetFullCommandline(),
// Log name is capped at 100 chars of the output name to keep paths valid.
LogPath = Path.Combine(Singleton.LogDir,$"{DateTime.Now:yyyy-MM-dd HH-mm-ss}_{newName.Substring(0,Math.Min(newName.Length,100))}.log"),
Time = totalTime,
WorkingDirectory = WorkingDir,
}
}
});
if (exitcode == 0)
{
// Success: promote the temp file to its final name, then commit bookkeeping.
new FileInfo(Path.Combine(VideoOutputConfigure.SaveDir!, tmpName))
.MoveTo(Path.Combine(VideoOutputConfigure.SaveDir!, newName));
VideoTextVM.RenderWorkStatus = RenderWorkStatus.Success;
if (!VideoBackgroundConfigure.IsReUse)
{
// Commit consumed durations; delete backgrounds that are (nearly) fully used.
dict_UsedDuration.ForEach(x => x.Key.UsedDuration += x.Value);
if (VideoBackgroundConfigure.IsDeleteAfterUsed && !_isDemo)
{
await dict_UsedDuration.Keys.ForEachAsync(async x =>
{
// 1-second tolerance matches the selection filter above.
if (x.UsedDuration + TimeSpan.FromSeconds(1) >= x.TotalDuration)
{
File.Delete(x.Data.VideoPath);
await _configureVM.VideoBackgrounds.RemoveAsync(x);
}
});
}
}
if (VideoTextConfigure.IsDeleteSourceAfterUsed && !_isDemo)
{
File.Delete(VideoTextVM.Data.VideoPath);
VideoTextVM.DeleteTmpDir();
await _configureVM.VideoTexts.RemoveAsync(VideoTextVM);
}
}
else
{
VideoTextVM.RenderWorkStatus = RenderWorkStatus.Failed;
}
}
catch (Exception ex)
{
// Any failure (probe, missing masks, render error) marks the item Failed; details go to the log.
_logger.LogError(ex, "");
VideoTextVM.RenderWorkStatus = RenderWorkStatus.Failed;
}
finally
{
// Best-effort temp cleanup; deletion failures are deliberately ignored.
if (VideoTextConfigure.IsDeleteTmpAfterUsed)
{
try { VideoTextVM.DeleteTmpDir(); } catch { }
}
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment