Hello there,
I'm trying to port this Xamarin.iOS example to Xamarin.Mac, so far without success.
"DidOutputSampleBuffer" does get called, and from inside it I can repeatedly load a static NSImage into an NSImageView, but when I try to display the actual webcam frame, nothing happens: no errors, no frames.
Should I modify the "ImageFromSampleBuffer" method?
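For context, the capture session is wired up essentially like in the iOS sample. This is a simplified sketch from memory rather than my exact source, so treat the names as approximate; the CV32BGRA video setting is the part I'm least sure I carried over correctly from iOS:

session = new AVCaptureSession { SessionPreset = AVCaptureSession.PresetMedium };
var device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
session.AddInput(AVCaptureDeviceInput.FromDevice(device));

var output = new AVCaptureVideoDataOutput
{
    // Ask for BGRA frames so the bitmap context in ImageFromSampleBuffer
    // (PremultipliedFirst | ByteOrder32Little) matches what the camera delivers.
    WeakVideoSettings = new CVPixelBufferAttributes
    {
        PixelFormatType = CVPixelFormatType.CV32BGRA
    }.Dictionary
};
outputRecorder = new OutputRecorder();
output.SetSampleBufferDelegateQueue(outputRecorder, new DispatchQueue("cameraQueue"));
session.AddOutput(output);
session.StartRunning();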
Here is the relevant code:
using System;
using AppKit;
using AVFoundation;
using CoreGraphics;
using CoreMedia;
using CoreVideo;

public class OutputRecorder : AVCaptureVideoDataOutputSampleBufferDelegate
{
    // The view the frames go into (assumption: assigned from the window controller).
    public NSImageView imgView;

    public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
    {
        try
        {
            var image = ImageFromSampleBuffer(sampleBuffer);
            imgView.BeginInvokeOnMainThread(() => {
                var img = new NSImage(image, new CGSize(image.Width, image.Height));
                TryDispose(imgView.Image);
                // imgView.Image = NSImage.ImageNamed("testimage"); // This works!!!
                imgView.Image = img; // This doesn't work :-(
            });
        }
        catch (Exception e)
        {
            Console.WriteLine(e);
        }
        finally
        {
            // The delegate must release the sample buffer promptly,
            // otherwise the camera stops delivering frames.
            sampleBuffer.Dispose();
        }
    }
    CGImage ImageFromSampleBuffer(CMSampleBuffer sampleBuffer)
    {
        // Get the CoreVideo image
        using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
        {
            // Lock the base address
            pixelBuffer.Lock(CVPixelBufferLock.None);

            // Get the pixel buffer's geometry
            var baseAddress = pixelBuffer.BaseAddress;
            var bytesPerRow = (int)pixelBuffer.BytesPerRow;
            var width = (int)pixelBuffer.Width;
            var height = (int)pixelBuffer.Height;
            var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;

            // Create a CGImage in the device RGB colorspace from the parameters above
            using (var cs = CGColorSpace.CreateDeviceRGB())
            using (var context = new CGBitmapContext(baseAddress, width, height, 8, bytesPerRow, cs, (CGImageAlphaInfo)flags))
            using (CGImage cgImage = context.ToImage())
            {
                pixelBuffer.Unlock(CVPixelBufferLock.None);
                return cgImage;
            }
        }
    }

    void TryDispose(IDisposable obj)
    {
        if (obj != null)
            obj.Dispose();
    }
}
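While writing this up I noticed one thing that might be the culprit, though it's just a guess: "ImageFromSampleBuffer" returns cgImage from inside a using block, so the CGImage should already be disposed by the time the main-thread lambda wraps it in an NSImage (the static "testimage" never goes through that path, which would explain why it works). Here is the variant I plan to try; only a sketch, with the unlock also moved into a finally so it runs even if the context creation throws:

CGImage ImageFromSampleBuffer(CMSampleBuffer sampleBuffer)
{
    using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
    {
        pixelBuffer.Lock(CVPixelBufferLock.None);
        try
        {
            var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;
            using (var cs = CGColorSpace.CreateDeviceRGB())
            using (var context = new CGBitmapContext(pixelBuffer.BaseAddress,
                       (int)pixelBuffer.Width, (int)pixelBuffer.Height, 8,
                       (int)pixelBuffer.BytesPerRow, cs, (CGImageAlphaInfo)flags))
            {
                // No using around the result: the caller now owns the CGImage.
                return context.ToImage();
            }
        }
        finally
        {
            pixelBuffer.Unlock(CVPixelBufferLock.None);
        }
    }
}

If that's the problem, "DidOutputSampleBuffer" would then have to dispose the returned CGImage itself, right after constructing the NSImage. Does that sound plausible, or should I be looking elsewhere?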