I'm trying to make an app that uses the camera to record a video and then processes the video's frames. Here is what I want: first, my app records a 10-second video with the torch on; second, I use a method to play back the video to see what I recorded.
I'm stuck on three things.
How can I convert my video into individual frames (images)?
Is it possible to asynchronously convert the video while it is being recorded?
When I do convert the video into individual frames, how do I work with them? Are they JPEGs? Can I simply display them as images? Etc.
Main code:
using System;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Navigation;
namespace App3
{
public sealed partial class MainPage : Page
{
DispatcherTimer D;
double basetimer = 0;
public MainPage()
{
this.InitializeComponent();
this.NavigationCacheMode = NavigationCacheMode.Required;
D = new DispatcherTimer();
D.Interval = new TimeSpan(0, 0, 1);
D.Tick += timer_Tick;
txt.Text = basetimer.ToString();
Play.IsEnabled = false;
}
public Library Library = new Library();
public object PreviewImage { get; private set; }
void timer_Tick(object sender, object e)
{
basetimer = basetimer - 1;
txt.Text = basetimer.ToString();
if (basetimer == 0)
{
D.Stop();
Preview.Source = null;
Library.Stop();
Record.IsEnabled = false;
Play.IsEnabled = true;
Clear.IsEnabled = true;
if (Library._tc.Enabled)
{
Library._tc.Enabled = false;
}
}
}
private void Record_Click(object sender, RoutedEventArgs e)
{
if (Library.Recording)
{
Preview.Source = null;
Library.Stop();
Record.Icon = new SymbolIcon(Symbol.Video);
}
else
{
basetimer = 11;
D.Start();
//D.Tick += timer_Tick;
Display.Source = null;
Library.Record(Preview);
Record.Icon = new SymbolIcon(Symbol.VideoChat);
Record.IsEnabled = false;
Play.IsEnabled = false;
}
}
private async void Play_Click(object sender, RoutedEventArgs e)
{
await Library.Play(Dispatcher, Display);
//Extract_Image_From_Video(Library.buffer);
}
private void Clear_Click(object sender, RoutedEventArgs e)
{
Display.Source = null;
Record.Icon = new SymbolIcon(Symbol.Video);
txt.Text = "0";
basetimer = 0;
Play.IsEnabled = false;
Record.IsEnabled = true;
if (Library.capture != null)
{
D.Stop();
Library.Recording = false;
Preview.Source = null;
Library.capture.Dispose();
Library.capture = null;
basetimer = 11;
}
}
}
}
Library Class:
using System;
using System.Diagnostics;
using System.Linq;
using System.Threading.Tasks;
using Windows.Devices.Enumeration;
using Windows.Media.Capture;
using Windows.Media.Devices;
using Windows.Media.MediaProperties;
using Windows.Storage;
using Windows.Storage.Streams;
using Windows.UI.Core;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Media.Imaging;
using Windows.Graphics.Imaging;
using Emgu.CV.Structure;
using Emgu.CV;
using System.Collections.Generic;
public class Library
{
private const string videoFilename = "video.mp4";
private string filename;
public MediaCapture capture;
public InMemoryRandomAccessStream buffer;
public static bool Recording;
public TorchControl _tc;
public int basetimer ;
public async Task<bool> init()
{
if (buffer != null)
{
buffer.Dispose();
}
buffer = new InMemoryRandomAccessStream();
if (capture != null)
{
capture.Dispose();
}
try
{
if (capture == null)
{
var allVideoDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
DeviceInformation cameraDevice =
allVideoDevices.FirstOrDefault(x => x.EnclosureLocation != null &&
x.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Back);
capture = new MediaCapture();
var mediaInitSettings = new MediaCaptureInitializationSettings { VideoDeviceId = cameraDevice.Id };
// Initialize
try
{
await capture.InitializeAsync(mediaInitSettings);
var videoDev = capture.VideoDeviceController;
_tc = videoDev.TorchControl;
Recording = false;
_tc.Enabled = false;
}
catch (UnauthorizedAccessException)
{
Debug.WriteLine("UnauthorizedAccessExeption>>");
}
catch (Exception ex)
{
Debug.WriteLine("Exception when initializing MediaCapture with {0}: {1}", cameraDevice.Id, ex.ToString());
}
}
capture.Failed += (MediaCapture sender, MediaCaptureFailedEventArgs errorEventArgs) =>
{
Recording = false;
_tc.Enabled = false;
throw new Exception(string.Format("Code: {0}. {1}", errorEventArgs.Code, errorEventArgs.Message));
};
}
catch (Exception ex)
{
if (ex.InnerException != null && ex.InnerException.GetType() == typeof(UnauthorizedAccessException))
{
throw ex.InnerException;
}
throw;
}
return true;
}
public async void Record(CaptureElement preview)
{
await init();
preview.Source = capture;
await capture.StartPreviewAsync();
await capture.StartRecordToStreamAsync(MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Auto), buffer);
if (Recording) throw new InvalidOperationException("cannot execute two recordings at the same time");
Recording = true;
_tc.Enabled = true;
}
public async void Stop()
{
await capture.StopRecordAsync();
Recording = false;
_tc.Enabled = false;
}
public async Task Play(CoreDispatcher dispatcher, MediaElement playback)
{
IRandomAccessStream video = buffer.CloneStream();
if (video == null) throw new ArgumentNullException("buffer");
StorageFolder storageFolder = Windows.ApplicationModel.Package.Current.InstalledLocation;
if (!string.IsNullOrEmpty(filename))
{
StorageFile original = await storageFolder.GetFileAsync(filename);
await original.DeleteAsync();
}
await dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
{
StorageFile storageFile = await storageFolder.CreateFileAsync(videoFilename, CreationCollisionOption.GenerateUniqueName);
filename = storageFile.Name;
using (IRandomAccessStream fileStream = await storageFile.OpenAsync(FileAccessMode.ReadWrite))
{
await RandomAccessStream.CopyAndCloseAsync(video.GetInputStreamAt(0), fileStream.GetOutputStreamAt(0));
await video.FlushAsync();
video.Dispose();
}
IRandomAccessStream stream = await storageFile.OpenAsync(FileAccessMode.Read);
playback.SetSource(stream, storageFile.FileType);
playback.Play();
});
}
}
I ended up using MediaToolkit to solve a similar problem after having a ton of trouble with Accord.
I needed to save an image for every second of a video:
using (var engine = new Engine())
{
var mp4 = new MediaFile { Filename = mp4FilePath };
engine.GetMetadata(mp4);
var i = 0;
while (i < mp4.Metadata.Duration.TotalSeconds)
{
var options = new ConversionOptions { Seek = TimeSpan.FromSeconds(i) };
var outputFile = new MediaFile { Filename = string.Format("{0}\\image-{1}.jpeg", outputPath, i) };
engine.GetThumbnail(mp4, outputFile, options);
i++;
}
}
Hope this helps someone some day.
UPDATE for .NET 5:
Recently, I needed to update this code to work in .NET 5. To do so, I'm using MediaToolkit.NetCore, which has been in preview for over a year. Also note: you will need to make the latest ffmpeg build, including all three executables (ffmpeg, ffplay, ffprobe), available to your app.
Without further ado, here is the updated code:
// _env is the injected IWebHostEnvironment
// _tempPath is temporary file storage
var ffmpegPath = Path.Combine(_env.ContentRootPath, "<path-to-ffmpeg.exe>");
var mediaToolkitService = MediaToolkitService.CreateInstance(ffmpegPath);
var metadataTask = new FfTaskGetMetadata(_tempFile);
var metadata = await mediaToolkitService.ExecuteAsync(metadataTask);
var i = 0;
while (i < metadata.Metadata.Streams.First().DurationTs)
{
var outputFile = string.Format("{0}\\image-{1:0000}.jpeg", _imageDir, i);
var thumbTask = new FfTaskSaveThumbnail(_tempFile, outputFile, TimeSpan.FromSeconds(i));
_ = await mediaToolkitService.ExecuteAsync(thumbTask);
i++;
}
I figured this out just yesterday.
Here is a full and easy-to-understand example that picks a video file and saves a snapshot from the first second of the video.
You can take the parts that fit your project and change some of them (e.g. getting the video resolution from the camera instead).
For 1) and 3):
TimeSpan timeOfFrame = new TimeSpan(0, 0, 1);
//pick mp4 file
var picker = new Windows.Storage.Pickers.FileOpenPicker();
picker.SuggestedStartLocation = Windows.Storage.Pickers.PickerLocationId.VideosLibrary;
picker.FileTypeFilter.Add(".mp4");
StorageFile pickedFile = await picker.PickSingleFileAsync();
if (pickedFile == null)
{
return;
}
///
//Get video resolution
List<string> encodingPropertiesToRetrieve = new List<string>();
encodingPropertiesToRetrieve.Add("System.Video.FrameHeight");
encodingPropertiesToRetrieve.Add("System.Video.FrameWidth");
IDictionary<string, object> encodingProperties = await pickedFile.Properties.RetrievePropertiesAsync(encodingPropertiesToRetrieve);
uint frameHeight = (uint)encodingProperties["System.Video.FrameHeight"];
uint frameWidth = (uint)encodingProperties["System.Video.FrameWidth"];
///
//Use Windows.Media.Editing to get ImageStream
var clip = await MediaClip.CreateFromFileAsync(pickedFile);
var composition = new MediaComposition();
composition.Clips.Add(clip);
var imageStream = await composition.GetThumbnailAsync(timeOfFrame, (int)frameWidth, (int)frameHeight, VideoFramePrecision.NearestFrame);
///
//generate bitmap
var writableBitmap = new WriteableBitmap((int)frameWidth, (int)frameHeight);
writableBitmap.SetSource(imageStream);
//generate some random name for file in PicturesLibrary
var saveAsTarget = await KnownFolders.PicturesLibrary.CreateFileAsync("IMG" + Guid.NewGuid().ToString().Substring(0, 4) + ".jpg");
//get stream from bitmap
Stream stream = writableBitmap.PixelBuffer.AsStream();
byte[] pixels = new byte[(uint)stream.Length];
await stream.ReadAsync(pixels, 0, pixels.Length);
using (var writeStream = await saveAsTarget.OpenAsync(FileAccessMode.ReadWrite))
{
var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, writeStream);
encoder.SetPixelData(
BitmapPixelFormat.Bgra8,
BitmapAlphaMode.Premultiplied,
(uint)writableBitmap.PixelWidth,
(uint)writableBitmap.PixelHeight,
96,
96,
pixels);
await encoder.FlushAsync();
using (var outputStream = writeStream.GetOutputStreamAt(0))
{
await outputStream.FlushAsync();
}
}
If you want to display frames in a XAML Image, you should use the imageStream:
BitmapImage bitmapImage = new BitmapImage();
bitmapImage.SetSource(imageStream);
XAMLImage.Source = bitmapImage;
If you want to extract more frames at once, there is also composition.GetThumbnailsAsync.
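Here is a minimal sketch of grabbing one frame per second with GetThumbnailsAsync, reusing the clip, composition, frameWidth and frameHeight variables from the example above (a sketch only, not tested on every SDK version):
//build one timestamp per second of the clip
var frameTimes = new List<TimeSpan>();
for (int s = 0; s < clip.OriginalDuration.TotalSeconds; s++)
{
    frameTimes.Add(TimeSpan.FromSeconds(s));
}
//one ImageStream comes back per requested timestamp
IReadOnlyList<ImageStream> frames = await composition.GetThumbnailsAsync(
    frameTimes, (int)frameWidth, (int)frameHeight, VideoFramePrecision.NearestFrame);
foreach (ImageStream frame in frames)
{
    //each ImageStream can be shown via a WriteableBitmap/BitmapImage
    //or saved with a BitmapEncoder, exactly like the single-frame code above
}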
2) Use your MediaCapture while your timer is ticking.
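For example, on Windows 10 UWP (this API is not available on Windows Phone 8.1) you could grab a preview frame on every tick with MediaCapture.GetPreviewFrameAsync. A rough sketch, assuming capture is your initialized MediaCapture, the preview runs at 640x480, and frameImage is a XAML Image control (both names are placeholders):
//copy the current preview frame into a BGRA8 VideoFrame
var videoFrame = new VideoFrame(BitmapPixelFormat.Bgra8, 640, 480);
await capture.GetPreviewFrameAsync(videoFrame);
SoftwareBitmap frameBitmap = videoFrame.SoftwareBitmap;
//SoftwareBitmapSource needs BGRA8 with premultiplied alpha for display
var displayable = SoftwareBitmap.Convert(frameBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
var source = new SoftwareBitmapSource();
await source.SetBitmapAsync(displayable);
frameImage.Source = source;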
EDIT:
used includes:
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices.WindowsRuntime;
using System.Threading;
using System.Threading.Tasks;
using Windows.Graphics.Imaging;
using Windows.Media.Editing;
using Windows.Storage;
using Windows.UI.Xaml.Media.Imaging;
Use FFmpeg by installing the Accord.Video.FFMPEG package:
using (var vFReader = new VideoFileReader())
{
vFReader.Open("video.mp4");
for (int i = 0; i < vFReader.FrameCount; i++)
{
Bitmap bmpBaseOriginal = vFReader.ReadVideoFrame();
}
vFReader.Close();
}
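If you also want the frames written to disk, a small variation of the same loop saves each one as a JPEG (a sketch; it assumes an existing "images" output folder and that System.Drawing and System.IO are referenced):
using (var vFReader = new VideoFileReader())
{
    vFReader.Open("video.mp4");
    for (int i = 0; i < vFReader.FrameCount; i++)
    {
        //ReadVideoFrame returns the next frame as a System.Drawing.Bitmap, which must be disposed
        using (Bitmap frame = vFReader.ReadVideoFrame())
        {
            frame.Save(Path.Combine("images", string.Format("frame-{0:00000}.jpg", i)),
                System.Drawing.Imaging.ImageFormat.Jpeg);
        }
    }
    vFReader.Close();
}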
Another way to do it:
I used FFMpegCore with the official .NET Core 3.1 + Ubuntu Docker image (see the list of available images).
Dockerfile :
FROM mcr.microsoft.com/dotnet/runtime:3.1-bionic
RUN apt-get update && apt-get install -y ffmpeg libgdiplus
COPY bin/Release/netcoreapp3.1/publish/ App/
WORKDIR /App
ENTRYPOINT ["dotnet", "YouConsoleAppNameHere.dll"]
short code version:
GlobalFFOptions.Configure(new FFOptions { BinaryFolder = "/usr/bin", TemporaryFilesFolder = "/tmp" }); //configuring ffmpeg location
string filePath = AppContext.BaseDirectory + "sample.mp4";
FFMpegArguments.FromFileInput(filePath).OutputToFile("tmp/Video/Frame%05d.png", true, Options => { Options.WithVideoCodec(VideoCodec.Png); }).ProcessSynchronously();
extended version (with some console logs):
using FFMpegCore;
using FFMpegCore.Enums;
...
GlobalFFOptions.Configure(new FFOptions { BinaryFolder = "/usr/bin", TemporaryFilesFolder = "/tmp" }); //configuring ffmpeg location
string filePath = AppContext.BaseDirectory + "sample.mp4";
Console.WriteLine(filePath) ;
Console.WriteLine(File.Exists(filePath));
var mediaInfo = FFProbe.Analyse(filePath);
Console.WriteLine("mp4 duration : " + mediaInfo.Duration);
Directory.CreateDirectory("tmp");
Directory.CreateDirectory("tmp/Video");
Console.WriteLine("started " + DateTime.Now.ToLongTimeString());
FFMpegArguments.FromFileInput(filePath).OutputToFile("tmp/Video/Frame%05d.png", true, Options => { Options.WithVideoCodec(VideoCodec.Png); }).ProcessSynchronously();
Console.WriteLine("processed " + DateTime.Now.ToLongTimeString());
Console.WriteLine(string.Join(", ", Directory.EnumerateFiles("tmp/Video/")));
As a result, PNG files will be extracted to the tmp/Video folder. Of course, you can do the same without Docker if you need to.
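FFMpegCore also has an awaitable variant of the same pipeline, so the extraction does not have to block the calling thread (a sketch based on the short version above):
//same extraction as above, but awaited instead of blocking
await FFMpegArguments
    .FromFileInput(filePath)
    .OutputToFile("tmp/Video/Frame%05d.png", true, Options => { Options.WithVideoCodec(VideoCodec.Png); })
    .ProcessAsynchronously();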
Related
I'm stuck with an app that is running on Windows 10 IoT Core. All classes are working fine, except for the one that creates a CSV file from JSON and is supposed to send it as an email.
When the code reaches the ReturnToMainPage() function, the exception "System.Exception: The application called an interface that was marshalled for a different thread" is thrown.
The "funny" thing is that the mail is being sent and I receive it, but the program won't switch back to the main page as intended after sending the email.
Here is the Code of the Class:
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Text;
using Windows.Storage;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using EASendMail;
namespace PratschZahlstation
{
public sealed partial class MailChoice : Page
{
private TextBlock _headerText;
private ComboBox _mailComboBox;
private Button _execute;
private Button _abort;
private EnDecode _coder = EnDecode.get_EnDecodeSingleton();
private string _mailto = null;
public MailChoice()
{
this.InitializeComponent();
Init();
}
private void Init()
{
_headerText = HeaderText;
_mailComboBox = MailAdresses;
_mailComboBox.Items.Add("---");
_mailComboBox.Items.Add("dummy#mail.com");
_mailComboBox.SelectedIndex = 0;
_execute = DoFunction;
_abort = DoExit;
}
private void DoFunction_Click(object sender, RoutedEventArgs e)
{
string selectedMail = this._mailComboBox.SelectedItem.ToString();
if(selectedMail == "---")
{
_headerText.Text = "Bitte eine Emailadresse aus der Liste auswählen.";
}
else
{
_headerText.Text = "CSV wird erstellt und per Mail versendet!";
_execute.IsEnabled = false;
_abort.IsEnabled = false;
_mailComboBox.IsEnabled = false;
_mailto = selectedMail;
DateTime date = DateTime.Now;
string strippedDate = date.ToString("yyyy-MM-dd") + " 00:00:01";
GetDataForCSV(strippedDate);
}
}
private async void GetDataForCSV(string dateAsString)
{
string correctedDate = "2019-07-01 00:00:01";//dateAsString;
string date = _coder.Base64Encode(correctedDate);
HttpClient _client = new HttpClient();
Uri _uri = new Uri("URI TO JSON-API");
_client.BaseAddress = _uri;
var request = new HttpRequestMessage(HttpMethod.Post, _uri);
var keyValues = new List<KeyValuePair<string, string>>();
keyValues.Add(new KeyValuePair<string, string>("mode", "10"));
keyValues.Add(new KeyValuePair<string, string>("date", date));
request.Content = new FormUrlEncodedContent(keyValues);
var response = await _client.SendAsync(request);
string sContent = await response.Content.ReadAsStringAsync();
keyValues = null;
if (sContent != null)
{
byte[] bytes = Encoding.UTF8.GetBytes(sContent);
string json = Encoding.UTF8.GetString(bytes);
if (!json.Contains("success"))
{
List<CSV_SQL_Json_Object> _Json = JsonConvert.DeserializeObject<List<CSV_SQL_Json_Object>>(json);
response.Dispose();
request.Dispose();
_client.Dispose();
if (_Json.Count == 0)
{
}
else
{
CreateCSV(_Json);
}
}
else
{
List<JSON_Status> _Json = JsonConvert.DeserializeObject<List<JSON_Status>>(json);
_headerText.Text = "Es ist der Folgender Fehler aufgetreten - Errorcode: \"" + _coder.Base64Decode(_Json[0].success) + "\"\r\nFehlermeldung: \"" + _coder.Base64Decode(_Json[0].message) + "\"";
_Json.Clear();
response.Dispose();
request.Dispose();
_client.Dispose();
}
}
}
private async void CreateCSV(List<CSV_SQL_Json_Object> contentForCSV)
{
DateTime date = DateTime.Now;
string csvName = date.ToString("yyyy-MM-dd") + ".csv";
StorageFolder storageFolder = KnownFolders.MusicLibrary;
StorageFile csvFile = await storageFolder.CreateFileAsync(csvName, CreationCollisionOption.OpenIfExists).AsTask().ConfigureAwait(false);
await FileIO.WriteTextAsync(csvFile, "Column1;Column2;Column3;Column4;\n");
foreach (var item in contentForCSV)
{
await FileIO.AppendTextAsync(csvFile, _coder.Base64Decode(item.Object1) + ";" + _coder.aesDecrypt(_coder.Base64Decode(item.Object2)) + ";" + _coder.aesDecrypt(_coder.Base64Decode(item.Object3)) + ";" + _coder.aesDecrypt(_coder.Base64Decode(item.Object4)) + "\n");
}
SendEmail(_mailto, csvName);
}
private async void SendEmail(string mailto, string csvName)
{
try
{
SmtpMail oMail = new SmtpMail("Mail");
SmtpClient oSmtp = new SmtpClient();
oMail.From = new MailAddress("noreply@dummy.com");
oMail.To.Add(new MailAddress(mailto));
oMail.Subject = "The Subject";
oMail.HtmlBody = "<font size=5>MailText</font>";
StorageFile file = await KnownFolders.MusicLibrary.GetFileAsync(csvName).AsTask().ConfigureAwait(false);
string attfile = file.Path;
Attachment oAttachment = await oMail.AddAttachmentAsync(attfile);
SmtpServer oServer = new SmtpServer("mail.dummy.com");
oServer.User = "dummyuser";
oServer.Password = "dummypass";
oServer.Port = 587;
oServer.ConnectType = SmtpConnectType.ConnectSSLAuto;
await oSmtp.SendMailAsync(oServer, oMail);
}
catch (Exception ex)
{
string error = ex.ToString();
_abort.IsEnabled = true;
}
ReturnToMainPage(); //This is where the Error Happens
}
private void ReturnToMainPage()
{
this.Frame.Navigate(typeof(MainPage));
}
private void DoExit_Click(object sender, RoutedEventArgs e)
{
this.Frame.Navigate(typeof(MainPage));
}
}
}
This could be a threading issue. Navigation is only possible on the UI thread.
You may want to try marshalling the call like this:
Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(CoreDispatcherPriority.Normal,
() =>
{
// Your UI update code goes here!
}
);
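Applied to the code in the question, a minimal sketch would be to marshal the navigation itself (assuming ReturnToMainPage may be reached from a non-UI thread):
//run the navigation back on the UI thread
await Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(
    CoreDispatcherPriority.Normal,
    () => ReturnToMainPage());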
Source:
The application called an interface that was marshalled for a different thread - Windows Store App
As Tobonaut said, you can use Dispatcher.RunAsync to call the navigation, and that works.
But that may not be the actual cause of your problem.
I copied your code, reproduced your problem, and found that the issue lies in how you read and write the files:
// Your code
StorageFile csvFile = await storageFolder.CreateFileAsync(csvName, CreationCollisionOption.OpenIfExists).AsTask().ConfigureAwait(false);
StorageFile file = await KnownFolders.MusicLibrary.GetFileAsync(csvName).AsTask().ConfigureAwait(false);
Navigation will work if you delete the .AsTask().ConfigureAwait(false) from both calls.
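In other words, let the awaits resume on the original (UI) synchronization context, roughly:
// Without ConfigureAwait(false), code after the await resumes on the UI thread,
// so the later Frame.Navigate no longer hits the marshalling exception.
StorageFile csvFile = await storageFolder.CreateFileAsync(csvName, CreationCollisionOption.OpenIfExists);
StorageFile file = await KnownFolders.MusicLibrary.GetFileAsync(csvName);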
Best regards.
There is a simple video editor; saving the video to a file is implemented in the background, following the documentation at https://learn.microsoft.com/en-us/windows/uwp/audio-video-camera/process-media-files-in-the-Background. The program works, but there is a nuance: the video is only saved in the background while the main view is inactive, that is, when the application is minimized to the taskbar or closed. If the application is restored (expanded from the taskbar), the background video-save task is suspended. How can I make the background task run while the main application is active? Thank you!
Class background tasks:
using Windows.ApplicationModel.Background;
using Windows.Storage;
using Windows.UI.Notifications;
using Windows.Data.Xml.Dom;
using Windows.Media.MediaProperties;
using Windows.Media.Transcoding;
using System.Threading;
using System;
using System.Diagnostics;
using System.Threading.Tasks;
using Windows.Media.Editing;
using Windows.Foundation;
using Windows.UI.Core;
using Lumia.Imaging;
using Lumia.Imaging.Adjustments;
using Lumia.Imaging.Artistic;
using System.Collections.Generic;
using Windows.Foundation.Collections;
using VideoEffectComponent;
namespace MediaProcessingBackgroundTask
{
public sealed class MediaProcessingTask : IBackgroundTask
{
IBackgroundTaskInstance backgroundTaskInstance;
BackgroundTaskDeferral deferral;
CancellationTokenSource cancelTokenSource = new CancellationTokenSource();
MediaTranscoder transcoder;
MediaComposition composition;
MediaClip clip;
EffectList effList = new EffectList();
PropertySet configurationPropertySet = new PropertySet();
PropertySet DustPropertySet = new PropertySet();
PropertySet ScretcchPropertySet = new PropertySet();
Windows.Media.Effects.VideoEffectDefinition videoEffect;
BrightnessEffect brightnessEff = new BrightnessEffect();
ContrastEffect contrastEff = new ContrastEffect();
HueSaturationEffect saturationEff = new HueSaturationEffect();
public async void Run(IBackgroundTaskInstance taskInstance)
{
Debug.WriteLine("In background task Run method");
backgroundTaskInstance = taskInstance;
taskInstance.Canceled += new BackgroundTaskCanceledEventHandler(OnCanceled);
taskInstance.Progress = 0;
deferral = taskInstance.GetDeferral();
Debug.WriteLine("Background " + taskInstance.Task.Name + " is called # " + (DateTime.Now).ToString());
try
{
await TranscodeFileAsync();
ApplicationData.Current.LocalSettings.Values["TranscodingStatus"] = "Completed Successfully";
SendToastNotification("File transcoding complete.");
}
catch (Exception e)
{
Debug.WriteLine("Exception type: {0}", e.ToString());
ApplicationData.Current.LocalSettings.Values["TranscodingStatus"] = "Error ocurred: " + e.ToString();
}
deferral.Complete();
}
private async Task TranscodeFileAsync()
{
transcoder = new MediaTranscoder();
try
{
var settings = ApplicationData.Current.LocalSettings;
settings.Values["TranscodingStatus"] = "Started";
var inputFileName = ApplicationData.Current.LocalSettings.Values["InputFileName"] as string;
var outputFileName = ApplicationData.Current.LocalSettings.Values["OutputFileName"] as string;
var redCurve = ApplicationData.Current.LocalSettings.Values["CurvRed"] as Point[];
var greenCurve = ApplicationData.Current.LocalSettings.Values["CurvGreen"] as Point[];
var blueCurve = ApplicationData.Current.LocalSettings.Values["CurvBlue"] as Point[];
var sat = ApplicationData.Current.LocalSettings.Values["SatVal"];
var brid = ApplicationData.Current.LocalSettings.Values["BridVal"];
var con = ApplicationData.Current.LocalSettings.Values["ContrVal"];
var dust = ApplicationData.Current.LocalSettings.Values["dustCVal"];
var scetch = ApplicationData.Current.LocalSettings.Values["scetchCVal"];
saturationEff.Saturation = (double)sat;
brightnessEff.Level = (double)brid;
contrastEff.Level = (double)con;
CurvesEffect curves = new CurvesEffect();
Curve RedC = new Curve();
Curve GreenC = new Curve();
Curve BlueC = new Curve();
RedC.Points = redCurve;
GreenC.Points = greenCurve;
BlueC.Points = blueCurve;
curves.Blue = BlueC;
curves.Green = GreenC;
curves.Red = RedC;
if (inputFileName == null || outputFileName == null)
{
return;
}
var inputFile = await Windows.Storage.StorageFile.GetFileFromPathAsync(inputFileName);
var outputFile = await Windows.Storage.StorageFile.GetFileFromPathAsync(outputFileName);
composition = await MediaComposition.LoadAsync(inputFile);
clip = composition.Clips[0];
effList.Add(saturationEff);
effList.Add(brightnessEff);
effList.Add(contrastEff);
effList.Add(curves);
configurationPropertySet.Add(new KeyValuePair<string, object>("Effect", effList));
DustPropertySet = new PropertySet();
DustPropertySet["DustCount"] = dust;
ScretcchPropertySet = new PropertySet();
ScretcchPropertySet["ScetchAmount"] = scetch;
videoEffect = new Windows.Media.Effects.VideoEffectDefinition("Lumia.Imaging.VideoEffect", configurationPropertySet);
clip.VideoEffectDefinitions.Add(new Windows.Media.Effects.VideoEffectDefinition(typeof(Vignet).FullName, VignetPropertySet));
clip.VideoEffectDefinitions.Add(new Windows.Media.Effects.VideoEffectDefinition(typeof(ExampleVideoEffect).FullName, ScretcchPropertySet));
clip.VideoEffectDefinitions.Add(new Windows.Media.Effects.VideoEffectDefinition(typeof(Dust).FullName, DustPropertySet));
clip.VideoEffectDefinitions.Add(videoEffect);
MediaEncodingProfile mp = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.HD1080p);
Debug.WriteLine("PrepareFileTranscodeAsync");
settings.Values["TranscodingStatus"] = "Preparing to transcode ";
var startTime = TimeSpan.FromMilliseconds(DateTime.Now.Millisecond);
Debug.WriteLine("Starting transcoding #" + startTime);
var progressT = new Progress<double>(TranscodeProgress);
settings.Values["TranscodingStatus"] = "Transcoding ";
settings.Values["ProcessingFileName"] = inputFileName;
var saveOperation = composition.RenderToFileAsync(outputFile, MediaTrimmingPreference.Precise, mp);// AsTask(cancelTokenSource.Token, progressT);
saveOperation.Completed = (info, status) =>
{
SendToastNotification("Video saved.");
clip.VideoEffectDefinitions.Clear();
composition = null;
deferral.Complete();
if (status != AsyncStatus.Completed)
{
// ShowErrorMessage("Error saving composition");
}
};
await saveOperation.AsTask(cancelTokenSource.Token, progressT);
Debug.WriteLine("Source content could not be transcoded.");
var endTime = TimeSpan.FromMilliseconds(DateTime.Now.Millisecond);
Debug.WriteLine("End time = " + endTime);
}
catch (Exception e)
{
Debug.WriteLine("Exception type: {0}", e.ToString());
throw;
}
}
void TranscodeProgress(double percent)
{
Debug.WriteLine("Transcoding progress: " + percent.ToString().Split('.')[0] + "%");
backgroundTaskInstance.Progress = (uint)percent;
}
private void SendToastNotification(string toastMessage)
{
ToastTemplateType toastTemplate = ToastTemplateType.ToastText01;
XmlDocument toastXml = ToastNotificationManager.GetTemplateContent(toastTemplate);
XmlNodeList toastTextElements = toastXml.GetElementsByTagName("text");
toastTextElements[0].AppendChild(toastXml.CreateTextNode(toastMessage));
ToastNotification toast = new ToastNotification(toastXml);
ToastNotificationManager.CreateToastNotifier().Show(toast);
}
private void OnCanceled(IBackgroundTaskInstance sender, BackgroundTaskCancellationReason reason)
{
Debug.WriteLine("Background " + sender.Task.Name + " Cancel Requested..." + reason.ToString());
}
}
}
Register and launch the background task
MediaProcessingTrigger mediaProcessingTrigger;
string backgroundTaskBuilderName = "TranscodingBackgroundTask";
BackgroundTaskRegistration taskRegistration;
private void RegisterBackgroundTask()
{
// Create a new MediaProcessingTrigger
mediaProcessingTrigger = new MediaProcessingTrigger();
var builder = new BackgroundTaskBuilder();
builder.Name = backgroundTaskBuilderName;
builder.TaskEntryPoint = "MediaProcessingBackgroundTask.MediaProcessingTask";
builder.SetTrigger(mediaProcessingTrigger);
// unregister old ones
foreach (var cur in BackgroundTaskRegistration.AllTasks)
{
if (cur.Value.Name == backgroundTaskBuilderName)
{
cur.Value.Unregister(true);
}
}
taskRegistration = builder.Register();
taskRegistration.Progress += new BackgroundTaskProgressEventHandler(OnProgress);
taskRegistration.Completed += new BackgroundTaskCompletedEventHandler(OnCompleted);
return;
}
private void OnProgress(IBackgroundTaskRegistration task, BackgroundTaskProgressEventArgs args)
{
string progress = "Progress: " + args.Progress + "%";
Debug.WriteLine(progress);
var ignored = Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
{
TextSave.Text = progress;
ProgressSave.Value = args.Progress;
});
}
private void OnCompleted(IBackgroundTaskRegistration task, BackgroundTaskCompletedEventArgs args)
{
Debug.WriteLine(" background task complete");
var ignored = Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
{
TasckCompleted();
});
}
private async void LaunchBackgroundTask()
{
var success = true;
if (mediaProcessingTrigger != null)
{
MediaProcessingTriggerResult activationResult;
activationResult = await mediaProcessingTrigger.RequestAsync();
switch (activationResult)
{
case MediaProcessingTriggerResult.Allowed:
// Task starting successfully
break;
case MediaProcessingTriggerResult.CurrentlyRunning:
// Already Triggered
case MediaProcessingTriggerResult.DisabledByPolicy:
// Disabled by system policy
case MediaProcessingTriggerResult.UnknownError:
// All other failures
success = false;
break;
}
if (!success)
{
// Unregister the media processing trigger background task
taskRegistration.Unregister(true);
}
}
}
saving the video in the background occurs only while the main view is inactive, that is, when the application is minimized to the taskbar or closed.
In your code snippet, the background task is triggered by the LaunchBackgroundTask() method. When the background task runs depends on where you invoke this method, and you didn't show that part of the code. Based on your description that the background task is only triggered when the main view is inactive, I suspect you invoke this method inside an event handler that fires once the app becomes inactive, for example the EnteredBackground event. In that case, you need to invoke LaunchBackgroundTask() while the app is in the foreground, for example in a button click event:
private void btnlaunch_Click(object sender, RoutedEventArgs e)
{
LaunchBackgroundTask();
}
MediaProcessingTask is an out-of-process background task; once it is triggered, it will keep running whether the app is active or inactive. But if you mean re-deploying the app: in my testing, re-deployment uninstalls the app first, which un-registers the background task and forces it to stop.
I'm currently trying to create a very basic test app which should:
1) Broadcast "sometext" on port "1234"
2) Wait a second for answers
3) Return all answers
While the code posted below works fine the first time, every subsequent call blocks forever at:
stream = await socket.GetOutputStreamAsync(...)
So far I have tried every possible way of cleaning up (since that's where I suspect the failure is), even wrapping everything in using(...) statements.
The problem occurs on the emulator as well as on a hardware device running Windows Phone 8.1.
Thanks in advance!
The code to start the "discovery":
private void Button_Click(object sender, RoutedEventArgs e)
{
PluginUDP pudp = new PluginUDP();
var task = pudp.scan("asf");
task.Wait();
foreach (string s in task.Result)
output.Text += s + "\r\n";
}
The code for the "discovery" itself:
using System;
using Windows.Networking;
using Windows.Networking.Sockets;
using Windows.Storage.Streams;
using System.Text;
using System.IO;
using System.Threading.Tasks;
using System.Collections.Concurrent;
namespace whatever
{
public class PluginUDP
{
private static readonly HostName BroadcastAddress = new HostName("255.255.255.255");
private static readonly string BroadcastPort = "1234";
private static readonly byte[] data = Encoding.UTF8.GetBytes("00wlan-ping00");
ConcurrentBag<string> receivers;
public async System.Threading.Tasks.Task<string[]> scan(string options)
{
receivers = new ConcurrentBag<string>();
receivers.Add("ok");
DatagramSocket socket = null;
IOutputStream stream = null;
DataWriter writer = null;
try
{
socket = new DatagramSocket();
socket.MessageReceived += MessageReceived;
await socket.BindServiceNameAsync("");
stream = await socket.GetOutputStreamAsync(BroadcastAddress, BroadcastPort);
writer = new DataWriter(stream);
writer.WriteBytes(data);
await writer.StoreAsync();
Task.Delay(1000).Wait();
}
catch (Exception exception)
{
receivers.Add(exception.Message);
}
finally
{
if (writer != null)
{
writer.DetachStream();
writer.Dispose();
}
if(stream != null)
stream.Dispose();
if(socket != null)
socket.Dispose();
}
return receivers.ToArray();
}
private async void MessageReceived(DatagramSocket socket, DatagramSocketMessageReceivedEventArgs args)
{
try
{
var result = args.GetDataStream();
var resultStream = result.AsStreamForRead(1024);
using (var reader = new StreamReader(resultStream))
{
var text = await reader.ReadToEndAsync();
if (text.Contains("pong"))
{
receivers.Add(args.RemoteAddress.ToString());
}
}
}
catch (Exception exception)
{
receivers.Add("ERRCV");
}
}
}
}
Your problem starts here:
task.Wait();
You're blocking on async code, which leads to a deadlock.
You want:
private async void Button_Click(object sender, RoutedEventArgs e)
{
PluginUDP pudp = new PluginUDP();
string[] result = await pudp.scan("asf");
foreach (string s in result)
output.Text += s + "\r\n";
}
You also want to do:
await Task.Delay(1000);
Instead of:
Task.Delay(1000).Wait();
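So inside scan, the send-and-wait part would look roughly like this (only a sketch; the rest of the method stays as it is):
writer = new DataWriter(stream);
writer.WriteBytes(data);
await writer.StoreAsync();
// wait a second for answers without blocking the thread (and without risking another deadlock)
await Task.Delay(1000);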
I'm currently writing my first app for Windows Phone 8.1. The app just needs to scan a barcode.
public async void ScanBarcodeAsync(Windows.Storage.StorageFile Afile)
{
WriteableBitmap bitmap;
BitmapDecoder decoder;
using (IRandomAccessStream str = await Afile.OpenReadAsync())
{
decoder = await BitmapDecoder.CreateAsync(str);
bitmap = new WriteableBitmap(Convert.ToInt32(decoder.PixelWidth),
Convert.ToInt32(decoder.PixelHeight));
await bitmap.SetSourceAsync(str);
}
ZXing.BarcodeReader reader = new BarcodeReader();
/*reader.Options.PossibleFormats = new ZXing.BarcodeFormat[]
{
ZXing.BarcodeFormat.CODE_128,
ZXing.BarcodeFormat.CODE_39
};*/
reader.Options.TryHarder = true;
reader.AutoRotate = true;
var results = reader.Decode(bitmap);
if (results != null)
{
edtBarcode.Text = results.Text;
}
else
{
edtBarcode.Text = "Error";
}
}
The file for this method is created this way
async void StartCapture()
{
var cameraID = await GetCameraID(Windows.Devices.Enumeration.Panel.Back);
MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings();
settings.PhotoCaptureSource = Windows.Media.Capture.PhotoCaptureSource.VideoPreview;
settings.StreamingCaptureMode = Windows.Media.Capture.StreamingCaptureMode.Video;
settings.AudioDeviceId = string.Empty;
settings.VideoDeviceId = cameraID.Id;
captureManager = new MediaCapture(); //Define MediaCapture object
await captureManager.InitializeAsync(settings); //Initialize MediaCapture and
var focusSettings = new FocusSettings();
focusSettings.AutoFocusRange = AutoFocusRange.Macro;
focusSettings.Mode = FocusMode.Auto;
focusSettings.WaitForFocus = true;
focusSettings.DisableDriverFallback = false;
captureManager.VideoDeviceController.FocusControl.Configure(focusSettings);
captureManager.SetPreviewRotation(VideoRotation.Clockwise90Degrees);
capturePreview.Source = captureManager; //Start previewing on CaptureElement
await captureManager.StartPreviewAsync(); //Start camera capturing
}
async private void Capture_Photo_Click(object sender, RoutedEventArgs e)
{
//Create JPEG image Encoding format for storing image in JPEG type
ImageEncodingProperties imgFormat = ImageEncodingProperties.CreateJpeg();
// create storage file in local app storage
if (ImageIndex > 0)
{
StorageFile delFile = await ApplicationData.Current.LocalFolder.GetFileAsync("Photo" + Convert.ToString(ImageIndex - 1) + ".jpg");
await delFile.DeleteAsync();
}
file = await ApplicationData.Current.LocalFolder.CreateFileAsync("Photo" + Convert.ToString(ImageIndex++) + ".jpg", CreationCollisionOption.ReplaceExisting);
// take photo and store it on file location.
await captureManager.CapturePhotoToStorageFileAsync(imgFormat, file);
//// create storage file in Picture Library
//StorageFile file = await KnownFolders.PicturesLibrary.CreateFileAsync("Photo.jpg",CreationCollisionOption.GenerateUniqueName);
// Get photo as a BitmapImage using storage file path.
bmpImage = new BitmapImage();
bmpImage.CreateOptions = BitmapCreateOptions.None;
bmpImage.UriSource = new Uri(file.Path);
ImageTaken = true;
Frame.Navigate(typeof(MainPage));
Frame.Navigate(typeof(MainPage));
}
I don't get an error running this code, but the result from ZXing is always null. The ScanBarcodeAsync function is run directly after taking the picture. The image the app takes is not sharp; can this cause the problem?
I'm happy to get any suggestions to solve my problem.
On my desktop PC I have one Gmail account.
On my laptop I have another, different Gmail account.
My JSON file (client_secrets) contains, on both the laptop and the desktop PC, the same Gmail account: the one on my desktop PC. But when I upload the video file from my laptop, it uploads to the Gmail account on the laptop, and I want it to upload to the Gmail/YouTube account on my desktop PC.
First, why does uploading from the laptop go to the Gmail account on the laptop and not to the Gmail account on the desktop PC?
Second, how can I change that? Even though I'm using the same client_secrets file on both machines, uploading from the laptop still goes to the laptop's Gmail account.
Another thing is that on my laptop I'm signed in to the laptop's Gmail account and on the desktop PC I'm signed in to the desktop's Gmail account.
But I was sure that the Gmail account in the JSON file determines where the file will be uploaded.
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using Google.Apis.Auth.OAuth2;
using Google.Apis.Services;
using Google.Apis.Upload;
using Google.Apis.Util.Store;
using Google.Apis.YouTube.v3;
using Google.Apis.YouTube.v3.Data;
using Google.GData.Client;
using Google.GData.Extensions;
using System.Reflection;
using System.IO;
using System.Threading;
using System.Net;
namespace Youtubeupload
{
public partial class Youtube_Uploader : Form
{
public class ComboboxItem
{
public string Text { get; set; }
public object Value { get; set; }
public override string ToString()
{
return Text;
}
}
YouTubeService service;
string apiKey = "myapikey";
string FileNameToUpload = "";
string[] stringProgressReport = new string[5];
long totalBytes = 0;
DateTime dt;
public static string fileuploadedsuccess = "";
Upload upload;
public Youtube_Uploader(string filetoupload)
{
InitializeComponent();
FileNameToUpload = @"C:\Users\tester\Videos\test.mp4";
service = AuthenticateOauth(apiKey);
var videoCatagories = service.VideoCategories.List("snippet");
videoCatagories.RegionCode = "IL";
var result = videoCatagories.Execute();
MakeRequest();
backgroundWorker1.RunWorkerAsync();
}
public static string uploadstatus = "";
Video objects = null;
private void videosInsertRequest_ResponseReceived(Video obj)
{
System.Timers.Timer aTimer;
aTimer = new System.Timers.Timer();
aTimer.Elapsed += aTimer_Elapsed;
aTimer.Interval = 10000;
aTimer.Enabled = false;
uploadstatus = obj.Status.UploadStatus;
if (uploadstatus == "uploaded")
{
fileuploadedsuccess = "file uploaded successfully";
}
if (uploadstatus == "Completed")
{
fileuploadedsuccess = "completed";
}
objects = obj;
}
void aTimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e)
{
}
double mbSent = 0;
int percentComplete = 0;
VideoProcessingDetailsProcessingProgress vp = new VideoProcessingDetailsProcessingProgress();
private void videosInsertRequest_ProgressChanged(IUploadProgress obj)
{
ulong? ul = vp.TimeLeftMs;
stringProgressReport[1] = obj.Status.ToString();
mbSent = ((double)obj.BytesSent) / (1 << 20);
stringProgressReport[2] = mbSent.ToString();
percentComplete = (int)Math.Round(((double)obj.BytesSent) / totalBytes * 100);
stringProgressReport[3] = percentComplete.ToString();
if (obj.BytesSent != 0)
{
var currentTime = DateTime.Now;
TimeSpan diff = currentTime - dt;
double diffSeconds = (DateTime.Now - dt).TotalSeconds;
double averageSpeed = obj.BytesSent / diffSeconds;
double MBunits = ConvertBytesToMegabytes((long)averageSpeed);
stringProgressReport[4] = string.Format("{0:f2} MB/s", MBunits);
}
}
public static YouTubeService AuthenticateOauth(string apiKey)
{
try
{
YouTubeService service = new YouTubeService(new YouTubeService.Initializer()
{
ApiKey = apiKey,
ApplicationName = "YouTube Uploader",
});
return service;
}
catch (Exception ex)
{
Console.WriteLine(ex.InnerException);
return null;
}
}
private void MakeRequest()
{
var searchListRequest = service.Search.List("snippet");
searchListRequest.Q = "daniel lipman gta"; // Replace with your search term.
searchListRequest.RegionCode = "IL";
searchListRequest.MaxResults = 50;
// Call the search.list method to retrieve results matching the specified query term.
var searchListResponse = searchListRequest.Execute();
List<string> videos = new List<string>();
List<string> channels = new List<string>();
List<string> playlists = new List<string>();
// matching videos, channels, and playlists.
foreach (var searchResult in searchListResponse.Items)
{
switch (searchResult.Id.Kind)
{
case "youtube#video":
videos.Add(String.Format("{0} ({1})", searchResult.Snippet.Title, searchResult.Id.VideoId));
break;
case "youtube#channel":
channels.Add(String.Format("{0} ({1})", searchResult.Snippet.Title, searchResult.Id.ChannelId));
break;
case "youtube#playlist":
playlists.Add(String.Format("{0} ({1})", searchResult.Snippet.Title, searchResult.Id.PlaylistId));
break;
}
}
}
static Video video = new Video();
private void UploadVideo(string FileName, string VideoTitle, string VideoDescription)
{
try
{
UserCredential credential;
using (FileStream stream = new FileStream(@"D:\C-Sharp\Youtube-Manager\Youtube-Manager\Youtube-Manager\bin\Debug\client_secrets.json", FileMode.Open, FileAccess.Read))
{
credential = GoogleWebAuthorizationBroker.AuthorizeAsync(
GoogleClientSecrets.Load(stream).Secrets,
new[] { YouTubeService.Scope.Youtube, YouTubeService.Scope.YoutubeUpload },
"user",
CancellationToken.None,
new FileDataStore("YouTube.Auth.Store")).Result;
}
var youtubeService = new YouTubeService(new BaseClientService.Initializer()
{
HttpClientInitializer = credential,
ApplicationName = Assembly.GetExecutingAssembly().GetName().Name
});
video.Snippet = new VideoSnippet();
video.Snippet.Title = VideoTitle;
video.Snippet.Description = VideoDescription;
video.Snippet.Tags = new string[] { "tag1", "tag2" };
video.Status = new VideoStatus();
video.Status.PrivacyStatus = "public";
using (var fileStream = new FileStream(FileName, FileMode.Open))
{
const int KB = 0x400;
var minimumChunkSize = 256 * KB;
var videosInsertRequest = youtubeService.Videos.Insert(video,
"snippet,status", fileStream, "video/*");
videosInsertRequest.ProgressChanged +=
videosInsertRequest_ProgressChanged;
videosInsertRequest.ResponseReceived +=
videosInsertRequest_ResponseReceived;
// The default chunk size is 10MB, here will use 1MB.
videosInsertRequest.ChunkSize = minimumChunkSize * 3;
dt = DateTime.Now;
videosInsertRequest.Upload();
}
}
catch (Exception errors)
{
string errorss = errors.ToString();
}
}
static double ConvertBytesToMegabytes(long bytes)
{
return (bytes / 1024f) / 1024f;
}
private void Youtube_Uploader_Load(object sender, EventArgs e)
{
}
private void backgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
{
UploadVideo(FileNameToUpload, "Gta v ps4", "Testing gta v ps4");
}
private void backgroundWorker1_ProgressChanged(object sender, ProgressChangedEventArgs e)
{
int eventIndex = 0;
try
{
eventIndex = (int)e.UserState;
}
catch
{
MessageBox.Show(e.UserState == null ? "null" : e.UserState.GetType().FullName);
throw;
}
if (eventIndex == 0) // upload status.
{
label14.Text = stringProgressReport[0];
}
else if (eventIndex == 1) // obj.Status
{
label16.Text = stringProgressReport[1];
}
else if (eventIndex == 2) // mb sent so far
{
label5.Text = stringProgressReport[2];
}
else if (eventIndex == 3) // percent complete
{
progressBar1.Value = Int32.Parse(stringProgressReport[3]);
}
else if (eventIndex == 4) // average upload speed
{
label8.Text = stringProgressReport[4];
}
else
{
throw new Exception("Invalid event index: " + eventIndex);
}
}
private void backgroundWorker1_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
}
private void timer1_Tick(object sender, EventArgs e)
{
}
}
}
What I want is for it to upload the video files to the Gmail account on the desktop PC, even when I'm uploading from the laptop, where I'm signed in to a different Gmail account.