I am getting started with Windows Phone development and the Nokia Imaging SDK. I have two functions.
First, I call the function below to convert the image to grayscale:
private async void PickImageCallback(object sender, PhotoResult e)
{
    if (e.TaskResult != TaskResult.OK || e.ChosenPhoto == null)
    {
        return;
    }
    using (var source = new StreamImageSource(e.ChosenPhoto))
    {
        using (var filters = new FilterEffect(source))
        {
            var sampleFilter = new GrayscaleFilter();
            filters.Filters = new IFilter[] { sampleFilter };
            var target = new WriteableBitmap((int)CartoonImage.ActualWidth, (int)CartoonImage.ActualHeight);
            using (var renderer = new WriteableBitmapRenderer(filters, target))
            {
                await renderer.RenderAsync();
                _thumbnailImageBitmap = target;
                CartoonImage.Source = target;
            }
        }
    }
    SaveButton.IsEnabled = true;
}
Then I call this function to convert the image to binary (black and white):
private async void Binary(WriteableBitmap bm_image)
{
    var target = new WriteableBitmap((int)CartoonImage.ActualWidth, (int)CartoonImage.ActualHeight);
    MemoryStream stream = new MemoryStream();
    bm_image.SaveJpeg(stream, bm_image.PixelWidth, bm_image.PixelHeight, 0, 100);
    using (var source = new StreamImageSource(stream))
    {
        using (var filters = new FilterEffect(source))
        {
            var sampleFilter = new StampFilter(5, 0.7);
            filters.Filters = new IFilter[] { sampleFilter };
            using (var renderer1 = new WriteableBitmapRenderer(filters, target))
            {
                await renderer1.RenderAsync();
                CartoonImage.Source = target;
            }
        }
    }
}
But when it reaches "await renderer1.RenderAsync();" in the second function, it doesn't work. How can I solve this? Could you also explain to me how "await" and "async" work?
Thank you very much!
I'm mostly guessing here since I don't know what error you get, but I'm pretty sure your problem lies in setting up the source. Have you made sure the memory stream position is set to the beginning (0) before creating a StreamImageSource?
Try adding:
stream.Position = 0;
before creating the StreamImageSource.
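In context, the start of Binary would then look like this (a sketch based on the code above):

MemoryStream stream = new MemoryStream();
bm_image.SaveJpeg(stream, bm_image.PixelWidth, bm_image.PixelHeight, 0, 100);
// SaveJpeg leaves the stream position at the end of the written data;
// rewind it so StreamImageSource reads the JPEG from the start.
stream.Position = 0;
using (var source = new StreamImageSource(stream))
{
    // ... same filter and render code as above ...
}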
Instead of trying to create a memory stream from the writeable bitmap I suggest doing:
using Nokia.InteropServices.WindowsRuntime;
...
using (var source = new BitmapImageSource(bm_image.AsBitmap()))
{
...
}
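As for how async and await work: marking a method async allows the use of await inside it. At each await, the method returns control to its caller, so the UI thread is not blocked, and it resumes where it left off once the awaited operation completes. A minimal illustration (a hypothetical method, unrelated to the Imaging SDK):

using System.Diagnostics;
using System.Threading.Tasks;

private async Task LoadAsync()
{
    Debug.WriteLine("before await");  // runs synchronously on the calling thread
    await Task.Delay(1000);           // control returns to the caller here
    Debug.WriteLine("after await");   // resumes about a second later
}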
I am building a screen recording app in C# using the Windows Graphics Capture API. I am using this script. I can select a monitor and record it to an mp4 file. I can also select a window and record it. But how can I record a region with this? Ideally I need to give x, y coordinates along with a width and height to record that specific region.
Here are the functions that return a GraphicsCaptureItem for a window or monitor handle, which can be used for recording.
public static GraphicsCaptureItem CreateItemForWindow(IntPtr hwnd)
{
    var factory = WindowsRuntimeMarshal.GetActivationFactory(typeof(GraphicsCaptureItem));
    var interop = (IGraphicsCaptureItemInterop)factory;
    var itemPointer = interop.CreateForWindow(hwnd, GraphicsCaptureItemGuid);
    var item = Marshal.GetObjectForIUnknown(itemPointer) as GraphicsCaptureItem;
    Marshal.Release(itemPointer);
    return item;
}

public static GraphicsCaptureItem CreateItemForMonitor(IntPtr hmon)
{
    var factory = WindowsRuntimeMarshal.GetActivationFactory(typeof(GraphicsCaptureItem));
    var interop = (IGraphicsCaptureItemInterop)factory;
    var itemPointer = interop.CreateForMonitor(hmon, GraphicsCaptureItemGuid);
    var item = Marshal.GetObjectForIUnknown(itemPointer) as GraphicsCaptureItem;
    Marshal.Release(itemPointer);
    return item;
}
And this is the recording function:
private async void RecordScreen(GraphicsCaptureItem item)
{
    _device = Direct3D11Helpers.CreateDevice();

    // Get our encoder properties
    uint frameRate = 30;
    uint bitrate = 3 * 1000000;
    var width = (uint)item.Size.Width;
    var height = (uint)item.Size.Height;

    // Kick off the encoding
    try
    {
        newFile = GetTempFile();
        using (var stream = new FileStream(newFile, FileMode.CreateNew).AsRandomAccessStream())
        using (_encoder = new Encoder(_device, item))
        {
            await _encoder.EncodeAsync(
                stream,
                width, height, bitrate,
                frameRate);
        }
    }
    catch (Exception ex)
    {
    }
}
I achieved this by passing a custom region to CopySubresourceRegion in the WaitForNewFrame method.
public SurfaceWithInfo WaitForNewFrame()
{
    .....
    using (var multithreadLock = new MultithreadLock(_multithread))
    using (var sourceTexture = Direct3D11Helpers.CreateSharpDXTexture2D(_currentFrame.Surface))
    {
        .....
        using (var copyTexture = new SharpDX.Direct3D11.Texture2D(_d3dDevice, description))
        {
            .....
            var region = new SharpDX.Direct3D11.ResourceRegion(
                _region.Left,
                _region.Top,
                0,
                _region.Left + _region.Width,
                _region.Top + _region.Height,
                1);

            _d3dDevice.ImmediateContext.CopyResource(_blankTexture, copyTexture);
            _d3dDevice.ImmediateContext.CopySubresourceRegion(sourceTexture, 0, region, copyTexture, 0);

            result.Surface = Direct3D11Helpers.CreateDirect3DSurfaceFromSharpDXTexture(copyTexture);
        }
    }
    ....
}
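Here _region is assumed to be a simple rectangle (Left/Top/Width/Height) supplied by the user. A minimal sketch of a helper that builds the ResourceRegion and clamps it to the source texture bounds (the helper name and signature are my own, not part of the original code):

using System;
using SharpDX.Direct3D11;

// Hypothetical helper: builds the region copied above, clamped to the source size.
static ResourceRegion MakeRegion(int left, int top, int width, int height,
                                 int sourceWidth, int sourceHeight)
{
    int right = Math.Min(left + width, sourceWidth);
    int bottom = Math.Min(top + height, sourceHeight);
    // front = 0 and back = 1 select the single slice of a 2D texture.
    return new ResourceRegion(left, top, 0, right, bottom, 1);
}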
I'm trying to find a working sample to record videos on iOS (using Xamarin), but there's always something missing or not working for me.
My best attempt, based on several forum posts and samples, is the following:
using System;
using CoreGraphics;
using Foundation;
using UIKit;
using AVFoundation;
using CoreVideo;
using CoreMedia;
using CoreFoundation;
using System.IO;
using AssetsLibrary;

namespace avcaptureframes {
    public partial class AppDelegate : UIApplicationDelegate {
        public static UIImageView ImageView;
        UIViewController vc;
        AVCaptureSession session;
        OutputRecorder outputRecorder;
        DispatchQueue queue;

        public override bool FinishedLaunching (UIApplication application, NSDictionary launchOptions)
        {
            ImageView = new UIImageView (new CGRect (10f, 10f, 200f, 200f));
            ImageView.ContentMode = UIViewContentMode.Top;
            vc = new UIViewController {
                View = ImageView
            };
            window.RootViewController = vc;
            window.MakeKeyAndVisible ();
            window.BackgroundColor = UIColor.Black;
            if (!SetupCaptureSession ())
                window.AddSubview (new UILabel (new CGRect (20f, 20f, 200f, 60f)) {
                    Text = "No input device"
                });
            return true;
        }

        bool SetupCaptureSession ()
        {
            // configure the capture session for low resolution, change this if your code
            // can cope with more data or volume
            session = new AVCaptureSession {
                SessionPreset = AVCaptureSession.PresetMedium
            };

            // create a device input and attach it to the session
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
            if (captureDevice == null) {
                Console.WriteLine ("No captureDevice - this won't work on the simulator, try a physical device");
                return false;
            }

            // Configure for 15 FPS. Note use of LockForConfiguration()/UnlockForConfiguration()
            NSError error = null;
            captureDevice.LockForConfiguration (out error);
            if (error != null) {
                Console.WriteLine (error);
                captureDevice.UnlockForConfiguration ();
                return false;
            }
            if (UIDevice.CurrentDevice.CheckSystemVersion (7, 0))
                captureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 15);
            captureDevice.UnlockForConfiguration ();

            var input = AVCaptureDeviceInput.FromDevice (captureDevice);
            if (input == null) {
                Console.WriteLine ("No input - this won't work on the simulator, try a physical device");
                return false;
            }
            session.AddInput (input);

            // create a VideoDataOutput and add it to the session
            var settings = new CVPixelBufferAttributes {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            };
            using (var output = new AVCaptureVideoDataOutput { WeakVideoSettings = settings.Dictionary }) {
                queue = new DispatchQueue ("myQueue");
                outputRecorder = new OutputRecorder ();
                output.SetSampleBufferDelegate (outputRecorder, queue);
                session.AddOutput (output);
            }
            session.StartRunning ();
            return true;
        }

        public override void OnActivated (UIApplication application)
        {
        }

        public class OutputRecorder : AVCaptureVideoDataOutputSampleBufferDelegate
        {
            AVAssetWriter writer = null;
            AVAssetWriterInput writerinput = null;
            CMTime lastSampleTime;
            int frame = 0;
            NSUrl url;

            public OutputRecorder ()
            {
                string tempFile = Path.Combine (Path.GetTempPath (), "NewVideo.mp4");
                if (File.Exists (tempFile))
                    File.Delete (tempFile);
                url = NSUrl.FromFilename (tempFile);

                NSError assetWriterError;
                writer = new AVAssetWriter (url, AVFileType.Mpeg4, out assetWriterError);
                var outputSettings = new AVVideoSettingsCompressed () {
                    Height = 300,
                    Width = 300,
                    Codec = AVVideoCodec.H264,
                    CodecSettings = new AVVideoCodecSettings {
                        AverageBitRate = 1000000
                    }
                };
                writerinput = new AVAssetWriterInput (mediaType: AVMediaType.Video, outputSettings: outputSettings);
                writerinput.ExpectsMediaDataInRealTime = false;
                writer.AddInput (writerinput);
            }

            public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                try {
                    lastSampleTime = sampleBuffer.PresentationTimeStamp;
                    var image = ImageFromSampleBuffer (sampleBuffer);
                    if (frame == 0) {
                        writer.StartWriting ();
                        writer.StartSessionAtSourceTime (lastSampleTime);
                        frame = 1;
                    }
                    String infoString = "";
                    if (writerinput.ReadyForMoreMediaData) {
                        if (!writerinput.AppendSampleBuffer (sampleBuffer)) {
                            infoString = "Failed to append sample buffer";
                        } else {
                            infoString = String.Format ("{0} frames captured", frame++);
                        }
                    } else {
                        infoString = "Writer not ready";
                    }
                    Console.WriteLine (infoString);
                    ImageView.BeginInvokeOnMainThread (() => ImageView.Image = image);
                } catch (Exception e) {
                    Console.WriteLine (e);
                } finally {
                    sampleBuffer.Dispose ();
                }
            }

            UIImage ImageFromSampleBuffer (CMSampleBuffer sampleBuffer)
            {
                // Get the CoreVideo image
                using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
                    // Lock the base address
                    pixelBuffer.Lock (CVOptionFlags.None);

                    // Get the number of bytes per row for the pixel buffer
                    var baseAddress = pixelBuffer.BaseAddress;
                    var bytesPerRow = (int)pixelBuffer.BytesPerRow;
                    var width = (int)pixelBuffer.Width;
                    var height = (int)pixelBuffer.Height;
                    var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;

                    // Create a CGImage on the RGB colorspace from the configured parameters above
                    using (var cs = CGColorSpace.CreateDeviceRGB ())
                    using (var context = new CGBitmapContext (baseAddress, width, height, 8, bytesPerRow, cs, (CGImageAlphaInfo)flags))
                    using (CGImage cgImage = context.ToImage ()) {
                        pixelBuffer.Unlock (CVOptionFlags.None);
                        return UIImage.FromImage (cgImage);
                    }
                }
            }

            void TryDispose (IDisposable obj)
            {
                if (obj != null)
                    obj.Dispose ();
            }
        }
    }
}
This works for displaying the live camera image, and I get the "frames captured" messages in the console, but I can't find how to record to a file.
I read somewhere about adding VideoCapture, but I don't know how to link it with my code.
Any help is welcome.
From your code, in the constructor of the class OutputRecorder you have defined the url where you want to save the recording:
string tempFile = Path.Combine(Path.GetTempPath(), "NewVideo.mp4");
if (File.Exists(tempFile)) File.Delete(tempFile);
url = NSUrl.FromFilename(tempFile);
This means you want to save the video in the tmp folder in the app's sandbox. If you want to use the video sometime later, I recommend changing the folder to Documents by using:
string filePath = Path.Combine(NSSearchPath.GetDirectories(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomain.User)[0], "NewVideo.mp4");
I notice that you call session.StartRunning(); in the method bool SetupCaptureSession() to start recording. Please add session.StopRunning(); to end the recording; then the video will be saved to the path we defined above.
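Note that the AVAssetWriter in your OutputRecorder also needs to be finalized before the mp4 is playable. A sketch of what that could look like (the method name is my own; call it once after session.StopRunning()):

public void FinishRecording ()
{
    writerinput.MarkAsFinished ();
    // FinishWriting completes asynchronously; the file is ready in the callback.
    writer.FinishWriting (() => Console.WriteLine ("Video saved to: " + url.Path));
}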
Moreover, you can retrieve the video from that path like this:
NSData videoData = NSData.FromFile(filePath);
I want to send a photo using the Telegram.Bot library, but it's not working!
Here is my code:
private void btnSendImage_Click(object sender, RoutedEventArgs e)
{
    var Bot = new Telegram.Bot.Api(token);
    Task<Telegram.Bot.Types.Update[]> res = Bot.GetUpdates();
    List<string> users = GetIds();
    foreach (var update in res.Result)
    {
        if (!users.Contains("" + update.Message.Chat.Id))
        {
            AddId("" + update.Message.Chat.Id);
        }
    }
    users = GetIds();
    foreach (var item in users)
    {
        if (item.Length > 0)
        {
            var rep = Bot.SendPhoto(Convert.ToInt32(item), txtImagePath.Text, txtMessage.Text);
        }
    }
}
and txtImagePath.Text = @"D:\Projects\Telegram Bot\Telegram Bot\bin\Debug\4.jpg";
I tested it with Bot.SendMessage and it worked fine, but I can't send a photo!
I used this code and it worked!

var FileUrl = @"C:\Users\User\Documents\20160201_204055.jpg";
using (var stream = System.IO.File.Open(FileUrl, FileMode.Open))
{
    FileToSend fts = new FileToSend();
    fts.Content = stream;
    fts.Filename = FileUrl.Split('\\').Last();
    var test = await bot.SendPhoto("@channel Name or chat_id", fts, "My Text");
}
You need to pass a Stream to the function if you want to send a new image:

using (var stream = File.Open(txtImagePath.Text, FileMode.Open))
{
    var rep = await Bot.SendPhoto(Convert.ToInt32(item), stream, txtMessage.Text);
}
I have a ListBox where I am binding an image + text. When I add 15 items there is no problem, but when I add 20 or more items I get a System.IndexOutOfRangeException. I found somewhere that a ListBox can store only 32 KB, so I tried a ListView and I get the same System.IndexOutOfRangeException. I'm using an ObservableCollection; can there be an overflow here, or does anyone have an idea why I get the System.IndexOutOfRangeException? I get this exception while items are being added to the ListBox.
private ObservableCollection<BindingData> _rsMessages = new ObservableCollection<BindingData>();
public ObservableCollection<BindingData> RSMessages
{
    get { return _rsMessages; }
    set { _rsMessages = value; }
}

public void initializeListboxRS()
{
    foreach (var items in UniDB.returnListOfRSItems())
    {
        _rsMessages.Add(new BindingData
        {
            rssMessageText = items.tile,
            rssMessageDateTime = items.dateTime.ToString("dd.MM.yyyy - hh:mm"),
            rssMessageImage = ByteArrayToBitmapImage(items.image),
            rssMessageLink = items.link
        });
        OnPropertyChanged("RSMessages");
    }
}

private BitmapImage ByteArrayToBitmapImage(byte[] byteArray)
{
    BitmapImage bitmapImage = new BitmapImage();
    InMemoryRandomAccessStream ms = new InMemoryRandomAccessStream();
    ms.WriteAsync(byteArray.AsBuffer()); // exception here
    ms.FlushAsync().AsTask().Wait();
    ms.Seek(0);
    bitmapImage.SetSource(ms);
    return bitmapImage;
}
Your method implementation lacks the await keyword. Try the code below.
private async Task<BitmapImage> ByteArrayToBitmapImage(byte[] byteArray)
{
    var bitmapImage = new BitmapImage();
    var stream = new InMemoryRandomAccessStream();
    await stream.WriteAsync(byteArray.AsBuffer());
    stream.Seek(0);
    bitmapImage.SetSource(stream);
    return bitmapImage;
}

private ObservableCollection<BindingData> _rsMessages = new ObservableCollection<BindingData>();
public ObservableCollection<BindingData> RSMessages
{
    get { return _rsMessages; }
    set { _rsMessages = value; }
}

public async Task initializeListboxRS()
{
    foreach (var items in UniDB.returnListOfRSItems())
    {
        _rsMessages.Add(new BindingData
        {
            rssMessageText = items.tile,
            rssMessageDateTime = items.dateTime.ToString("dd.MM.yyyy - hh:mm"),
            rssMessageImage = await ByteArrayToBitmapImage(items.image),
            rssMessageLink = items.link
        });
        OnPropertyChanged("RSMessages");
    }
}
Whenever you call the initializeListboxRS() method, use the await prefix.
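For example (assuming the call site is itself async, e.g. a page's Loaded handler; the handler name here is just an illustration):

private async void Page_Loaded(object sender, RoutedEventArgs e)
{
    await initializeListboxRS();
}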
So, I have these two methods, which I am using to serialize and deserialize Images:
private static Image GetImageFromString(string image)
{
    using (var stream = new MemoryStream(Convert.FromBase64String(image)))
    {
        return Image.FromStream(stream);
    }
}

private static string GetImageAsString(Image image)
{
    using (var stream = new MemoryStream())
    {
        image.Save(stream, System.Drawing.Imaging.ImageFormat.Jpeg);
        return Convert.ToBase64String(stream.GetBuffer());
    }
}
If I do something like this:
public Form1()
{
    InitializeComponent();
    var image = Image.FromFile(@"F:\phpide.png");
    pictureBox1.Image = image;
    var serialized = GetImageAsString(image);
    var secondImage = GetImageFromString(serialized);
    pictureBox2.Image = secondImage;
}
It works as expected.
However, if I do something like this:
// client
public void GetImage(JObject o)
{
    var imageFile = o["file"].ToString();
    if (!File.Exists(imageFile))
    {
        SendMessage("File does not exist");
        return;
    }
    using (var image = Image.FromFile(imageFile))
    {
        var serialized = GetImageAsString(image);
        var ob = new JObject
        {
            { COMMAND, (int) Command.GetImage },
            { "content", serialized }
        };
        Send(ob);
        ob = null;
        serialized = null;
    }
}

// server
public void ReceiveImage(JObject o)
{
    var content = o["content"].ToString();
    var image = GetImageFromString(content);
    var form = new ImagePreviewForm(image);
    form.Show();
}

// server
public ImagePreviewForm(Image image)
{
    InitializeComponent();
    pictureBox1.Image = image;
}
The image is just blank.
I have checked and the image is being received correctly, with no data loss.
What could be going wrong here? Where should I look?
This is at least one problem:
return Convert.ToBase64String(stream.GetBuffer());
You shouldn't use MemoryStream.GetBuffer here - you should use ToArray. The GetBuffer method returns the underlying buffer as-is... complete with junk data at the end of the buffer, beyond the logical current length of the stream.
Additionally, you shouldn't close the stream when you call Image.FromStream. From the docs:
You must keep the stream open for the lifetime of the Image.
So get rid of the using statement in GetImageFromString.
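Putting both fixes together, the helper methods might look like this (a sketch; the deserializing stream is intentionally left undisposed because the Image keeps reading from it):

private static Image GetImageFromString(string image)
{
    // No using block: the stream must stay open for the lifetime of the Image.
    var stream = new MemoryStream(Convert.FromBase64String(image));
    return Image.FromStream(stream);
}

private static string GetImageAsString(Image image)
{
    using (var stream = new MemoryStream())
    {
        image.Save(stream, System.Drawing.Imaging.ImageFormat.Jpeg);
        // ToArray copies only the bytes actually written, unlike GetBuffer.
        return Convert.ToBase64String(stream.ToArray());
    }
}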
With the using statement you are disposing the image before the UI thread can display it properly. Take the code out of the using block and add a Dispose() call when the form closes.
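For instance, on the server side (a sketch; hooking FormClosed is my suggestion for where the disposal fits):

// server
public ImagePreviewForm(Image image)
{
    InitializeComponent();
    pictureBox1.Image = image;
    // Dispose the image once the preview window is closed.
    FormClosed += (s, e) => image.Dispose();
}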