EDIT: Reading through this again, I realize it isn't clear how the pictures were captured, how they're being displayed, and what makes them different.
To that end: we are using the AVCapture API to take photos manually within our app. The app shows a live preview so the user can see what the image they are taking will look like, just as any standard camera app does. The two images show the on-screen preview just before capture and the resulting captured image; each was taken as a screenshot.
All this to say: the captured image appears to be returned with different dimensions or scaling attributes. We display the preview in a native iOS preview view and the resulting capture in a Xamarin.Forms Image.
Below are our attempts at addressing the issue by changing sizing, layering, and stretching attributes, all to no avail. We have also opened a support ticket with Microsoft regarding this issue.
These two images are the camera preview and the resulting capture (in that order, both taken via screenshots). We want the captured photo to match the preview, or vice versa. How can we address this?
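Our working hypothesis: the preview layer is configured with AVLayerVideoGravity.ResizeAspectFill (see Initialize below), which crops the live feed to fill the view, while AVCaptureStillImageOutput returns the full sensor frame; displayed with AspectFit, that full frame shows more of the scene than the preview did. If that is right, one way to reconcile the two is to crop the capture to the region the preview actually showed. The following is a minimal sketch under that assumption; CropToPreview is a hypothetical helper, not part of our code below, and it assumes the image's orientation has already been normalized and its scale is 1.

// Sketch only: crops a captured UIImage to the region that an aspect-fill
// preview of size previewSize would actually have shown.
UIImage CropToPreview(UIImage image, CGSize previewSize)
{
    nfloat imageW = image.Size.Width;
    nfloat imageH = image.Size.Height;

    // Aspect-fill scales the frame up until it covers the preview, so the
    // visible region is previewSize / scale, centered in the frame.
    nfloat scale = (nfloat)Math.Max(previewSize.Width / imageW,
                                    previewSize.Height / imageH);
    var cropRect = new CGRect((imageW - previewSize.Width / scale) / 2,
                              (imageH - previewSize.Height / scale) / 2,
                              previewSize.Width / scale,
                              previewSize.Height / scale);

    using (var cropped = image.CGImage.WithImageInRect(cropRect))
    {
        return UIImage.FromImage(cropped);
    }
}

In CapturePhoto below, this would be called with the preview view's Bounds.Size before handing the rendered stream to the page.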
We tried manipulating the CALayer containing the photo data to size the image the way a Xamarin.Forms Image does with AspectFit, assigning ContentsGravity various options such as kCAGravityResizeAspect. We also fiddled with other contents options such as ContentsRect and ContentsScale, but no dice. A sketch of that attempt follows; below it are the View and its corresponding Renderer. How can we address the sizing issue?
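For reference, the gist of that attempt. Note that in Xamarin.iOS the gravity values are exposed as static string fields such as CALayer.GravityResizeAspect rather than the literal native constant name "kCAGravityResizeAspect"; rotatedPhoto here is assumed to be the captured UIImage from CapturePhoto below.

// Sketch: what we expected aspect-fit layer contents to look like.
var layer = new CALayer
{
    Frame = Bounds,
    ContentsGravity = CALayer.GravityResizeAspect, // letterbox, like AspectFit
    Contents = rotatedPhoto.CGImage
};

One caveat: since CapturePhoto then renders the layer back into a UIImage sized to the view's Bounds, any gravity-based letterboxing gets baked into that bitmap, which may be why these changes appeared to have no effect on the displayed result.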
Native Camera View
using System;
using System.Linq;
using System.Threading.Tasks;
using AVFoundation;
using CoreAnimation;
using CoreGraphics;
using Foundation;
using UIKit;

namespace App.iOS.Views
{
    public class NativeCameraView : UIView
    {
        AVCaptureVideoPreviewLayer previewLayer;
        CameraOptions cameraOptions;

        public AVCaptureSession CaptureSession { get; private set; }
        public AVCaptureStillImageOutput CaptureOutput { get; set; }
        public bool IsPreviewing { get; set; }

        // Raised when the preview is tapped; the renderer subscribes to this
        // to trigger a capture.
        public event EventHandler Tapped;

        // Constructor renamed to match the class (was NativeCameraPreview).
        public NativeCameraView(CameraOptions options)
        {
            cameraOptions = options;
            IsPreviewing = false;
            Initialize();
        }

        public override void LayoutSubviews()
        {
            base.LayoutSubviews();
            UIDevice device = UIDevice.CurrentDevice;
            UIDeviceOrientation orientation = device.Orientation;
            AVCaptureConnection previewLayerConnection = previewLayer.Connection;
            if (previewLayerConnection.SupportsVideoOrientation)
            {
                // Map the device orientation to the preview's video orientation.
                switch (orientation)
                {
                    case UIDeviceOrientation.Portrait:
                        UpdatePreviewLayer(previewLayerConnection, AVCaptureVideoOrientation.Portrait);
                        break;
                    case UIDeviceOrientation.LandscapeRight:
                        UpdatePreviewLayer(previewLayerConnection, AVCaptureVideoOrientation.LandscapeLeft);
                        break;
                    case UIDeviceOrientation.LandscapeLeft:
                        UpdatePreviewLayer(previewLayerConnection, AVCaptureVideoOrientation.LandscapeRight);
                        break;
                    case UIDeviceOrientation.PortraitUpsideDown:
                        UpdatePreviewLayer(previewLayerConnection, AVCaptureVideoOrientation.PortraitUpsideDown);
                        break;
                    default:
                        UpdatePreviewLayer(previewLayerConnection, AVCaptureVideoOrientation.Portrait);
                        break;
                }
            }
        }

        private void UpdatePreviewLayer(AVCaptureConnection connection, AVCaptureVideoOrientation orientation)
        {
            connection.VideoOrientation = orientation;
            previewLayer.Frame = Bounds;
        }

        public async Task CapturePhoto()
        {
            var videoConnection = CaptureOutput.ConnectionFromMediaType(AVMediaType.Video);
            var sampleBuffer = await CaptureOutput.CaptureStillImageTaskAsync(videoConnection);
            var jpegData = AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer);
            var photo = new UIImage(jpegData);
            // The raw capture comes back upside down relative to the preview,
            // so rotate it before display.
            var rotatedPhoto = RotateImage(photo, 180f);
            CALayer layer = new CALayer
            {
                //ContentsGravity = "kCAGravityResizeAspect",
                //ContentsRect = rect,
                //GeometryFlipped = true,
                ContentsScale = 1.0f,
                Frame = Bounds,
                Contents = rotatedPhoto.CGImage //Contents = photo.CGImage,
            };
            // Render the layer once and hand the result to the Forms page.
            var rendered = UIImageFromLayer(layer);
            MainPage.UpdateSource(rendered.AsJPEG().AsStream());
            MainPage.UpdateImage(rendered.AsJPEG().AsStream());
        }

        public UIImage RotateImage(UIImage image, float degree)
        {
            float radians = degree * (float)Math.PI / 180;
            UIView view = new UIView(frame: new CGRect(0, 0, image.Size.Width, image.Size.Height));
            CGAffineTransform t = CGAffineTransform.MakeRotation(radians);
            view.Transform = t;
            CGSize size = view.Frame.Size;
            UIGraphics.BeginImageContext(size);
            CGContext context = UIGraphics.GetCurrentContext();
            context.TranslateCTM(size.Width / 2, size.Height / 2);
            context.RotateCTM(radians);
            context.ScaleCTM(1, -1);
            context.DrawImage(new CGRect(-image.Size.Width / 2, -image.Size.Height / 2, image.Size.Width, image.Size.Height), image.CGImage);
            UIImage imageCopy = UIGraphics.GetImageFromCurrentImageContext();
            UIGraphics.EndImageContext();
            return imageCopy;
        }

        // Renamed from ImageFromLayer so it matches the calls in CapturePhoto.
        UIImage UIImageFromLayer(CALayer layer)
        {
            UIGraphics.BeginImageContextWithOptions(layer.Frame.Size, layer.Opaque, 0);
            layer.RenderInContext(UIGraphics.GetCurrentContext());
            var outputImage = UIGraphics.GetImageFromCurrentImageContext();
            UIGraphics.EndImageContext();
            return outputImage;
        }

        void Initialize()
        {
            CaptureSession = new AVCaptureSession();
            CaptureSession.SessionPreset = AVCaptureSession.PresetPhoto;
            // Aspect-fill crops the live feed to fill the view, which is one
            // reason it can differ from the full frame that gets captured.
            previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
            {
                Frame = Bounds,
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill
            };
            var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
            var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
            var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);
            if (device == null)
            {
                return;
            }
            // NOTE: error is not checked here; a null check on input may be warranted.
            NSError error;
            var input = new AVCaptureDeviceInput(device, out error);
            // NOTE: this settings dictionary is built but never assigned to
            // OutputSettings below; as written, the output relies on the
            // default (JPEG) still-image settings.
            var dictionary = new NSMutableDictionary();
            dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
            CaptureOutput = new AVCaptureStillImageOutput()
            {
                OutputSettings = new NSDictionary()
            };
            CaptureSession.AddOutput(CaptureOutput);
            CaptureSession.AddInput(input);
            Layer.AddSublayer(previewLayer);
            // Forward taps to the Tapped event so the renderer can capture.
            AddGestureRecognizer(new UITapGestureRecognizer(() => Tapped?.Invoke(this, EventArgs.Empty)));
            CaptureSession.StartRunning();
            IsPreviewing = true;
        }
    }
}
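For completeness, the opposite direction may also work: instead of cropping the capture to match the preview, let the preview show the full frame so it matches an AspectFit-displayed capture. A minimal sketch of the one-line change in Initialize above:

// Sketch: letterbox the live preview (show the full frame, like AspectFit)
// instead of cropping it to fill the view.
previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
{
    Frame = Bounds,
    VideoGravity = AVLayerVideoGravity.ResizeAspect
};

The trade-off is black bars around the live feed whenever the view's aspect ratio differs from the sensor's.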
Native Camera Renderer
using System;
using App.iOS.Renderers;
using App.iOS.Views;
using Xamarin.Forms;
using Xamarin.Forms.Platform.iOS;
// (plus a using for the shared-project namespace that defines CameraView)

[assembly: ExportRenderer(typeof(CameraView), typeof(CameraViewRenderer))]
namespace App.iOS.Renderers
{
    public class CameraViewRenderer : ViewRenderer<CameraView, NativeCameraView>
    {
        NativeCameraView uiCameraView;

        protected override void OnElementChanged(ElementChangedEventArgs<CameraView> e)
        {
            base.OnElementChanged(e);
            if (Control == null && e.NewElement != null)
            {
                uiCameraView = new NativeCameraView(e.NewElement.Camera);
                SetNativeControl(uiCameraView);
            }
            if (e.OldElement != null)
            {
                // Unsubscribe
                uiCameraView.Tapped -= OnCameraViewTapped;
            }
            if (e.NewElement != null)
            {
                // Subscribe
                uiCameraView.Tapped += OnCameraViewTapped;
            }
        }

        async void OnCameraViewTapped(object sender, EventArgs e)
        {
            await uiCameraView.CapturePhoto();
        }
    }
}
NOTE: A similar question appears to have been asked quite some time ago.