Xamarin:无法在Android/iOS中提取照片的中心正方形



我正在尝试使用相机获取图像。图像是256x256,我希望它来自使用手机摄像头拍摄的照片的中心。我在以下位置找到此代码:https://forums.xamarin.com/discussion/37647/cross-platform-crop-image-view

以下是我在Android上使用的代码……

public byte[] CropPhoto(byte[] photoToCropBytes, Rectangle rectangleToCrop, double outputWidth, double outputHeight)
{
    // Decode the source photo downsampled (to limit memory use), crop it to
    // rectangleToCrop and scale the result to outputWidth x outputHeight,
    // returning the cropped region re-encoded as JPEG bytes.
    var sampleSize = CalculateInSampleSize((int)rectangleToCrop.Width, (int)rectangleToCrop.Height, (int)outputWidth, (int)outputHeight);
    var decodeOptions = new BitmapFactory.Options { InSampleSize = sampleSize };
    //decodeOptions.InPurgeable = true;   see http://developer.android.com/reference/android/graphics/BitmapFactory.Options.html
    using (var outputStream = new MemoryStream())
    {
        using (var sourceBitmap = BitmapFactory.DecodeByteArray(photoToCropBytes, 0, photoToCropBytes.Length, decodeOptions))
        {
            // The decoded bitmap is 1/sampleSize of the original, so both the
            // crop origin/size and the scale factor must account for that.
            var scaleFactor = outputWidth / rectangleToCrop.Width * sampleSize;
            var transform = new Matrix();
            transform.PostScale((float)scaleFactor, (float)scaleFactor);
            using (var croppedBitmap = Bitmap.CreateBitmap(
                sourceBitmap,
                (int)(rectangleToCrop.X / sampleSize),
                (int)(rectangleToCrop.Y / sampleSize),
                (int)(rectangleToCrop.Width / sampleSize),
                (int)(rectangleToCrop.Height / sampleSize),
                transform,
                true))
            {
                // 100 = maximum JPEG quality.
                croppedBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, outputStream);
            }
        }
        return outputStream.ToArray();
    }
}
public static int CalculateInSampleSize(int inputWidth, int inputHeight, int outputWidth, int outputHeight)
{
    // Returns the largest power-of-two decode sampling factor that still keeps
    // both decoded dimensions at least as large as the requested output size.
    // see http://developer.android.com/training/displaying-bitmaps/load-bitmap.html
    var sampleSize = 1;
    // Input already fits the requested output: no downsampling needed.
    if (inputHeight <= outputHeight && inputWidth <= outputWidth)
    {
        return sampleSize;
    }
    var halfHeight = inputHeight / 2;
    var halfWidth = inputWidth / 2;
    // Double the factor while half the input divided by it still exceeds the
    // target in both dimensions, guaranteeing input/sampleSize >= output.
    while (halfHeight / sampleSize > outputHeight && halfWidth / sampleSize > outputWidth)
    {
        sampleSize *= 2;
    }
    return sampleSize;
}

以下是我在iOS上使用的代码……

public byte[] CropPhoto(byte[] photoToCropBytes, Xamarin.Forms.Rectangle rectangleToCrop, double outputWidth, double outputHeight)
{
    // Crops rectangleToCrop out of the supplied image bytes and scales the
    // result to outputWidth x outputHeight, returning it re-encoded as JPEG.
    // NOTE(review): rectangleToCrop must be expressed in the photo's own pixel
    // coordinates, not in screen units — confirm against callers.
    byte[] photoOutputBytes;
    using (var data = NSData.FromArray(photoToCropBytes))
    // FIX: the UIImage returned by LoadFromData was previously leaked — only
    // its CGImage was disposed.
    using (var photoToCropUIImage = UIImage.LoadFromData(data))
    {
        var photoToCropCGImage = photoToCropUIImage.CGImage;
        // Crop to the requested rectangle.
        using (var photoCroppedCGImage = photoToCropCGImage.WithImageInRect(new CGRect((nfloat)rectangleToCrop.X, (nfloat)rectangleToCrop.Y, (nfloat)rectangleToCrop.Width, (nfloat)rectangleToCrop.Height)))
        using (var photoCroppedUIImage = UIImage.FromImage(photoCroppedCGImage))
        // FIX: CGColorSpace.CreateDeviceRGB() was created inline and never disposed.
        using (var colorSpace = CGColorSpace.CreateDeviceRGB())
        // Create a 32-bit premultiplied-ARGB context of the output size
        // (8 bits per component, 4 bytes per pixel).
        using (var cGBitmapContext = new CGBitmapContext(IntPtr.Zero, (int)outputWidth, (int)outputHeight, 8, (int)(4 * outputWidth), colorSpace, CGImageAlphaInfo.PremultipliedFirst))
        {
            var photoOutputRectangleF = new RectangleF(0f, 0f, (float)outputWidth, (float)outputHeight);
            // Draw the cropped photo scaled into the output-sized context.
            cGBitmapContext.DrawImage(photoOutputRectangleF, photoCroppedUIImage.CGImage);
            // Snapshot the context and re-encode it as JPEG bytes.
            // FIX: the snapshot CGImage and the output UIImage were previously leaked.
            using (var photoOutputCGImage = cGBitmapContext.ToImage())
            using (var photoOutputUIImage = UIImage.FromImage(photoOutputCGImage))
            using (var photoOutputNsData = photoOutputUIImage.AsJPEG())
            {
                photoOutputBytes = new byte[photoOutputNsData.Length];
                System.Runtime.InteropServices.Marshal.Copy(photoOutputNsData.Bytes, photoOutputBytes, 0, Convert.ToInt32(photoOutputNsData.Length));
            }
        }
    }
    return photoOutputBytes;
}

我正在努力弄清楚调用函数的确切参数是什么。

目前,我正在做以下工作:

// NOTE(review): DeviceDisplay.MainDisplayInfo reports the *screen* size in
// device pixels, not the dimensions of the captured photo. A camera photo is
// normally much larger than the screen, so this rectangle lands in the
// top-left region of the image rather than its center — which matches the
// reported symptom on both platforms. Compute cropSize/left/top from the
// decoded photo's own width/height instead (as ResizeImageAndCropToSquare
// below does).
double cropSize = Math.Min(DeviceDisplay.MainDisplayInfo.Width, DeviceDisplay.MainDisplayInfo.Height);
double left = (DeviceDisplay.MainDisplayInfo.Width - cropSize) / 2.0;
double top = (DeviceDisplay.MainDisplayInfo.Height - cropSize) / 2.0;
// Get a square resized and cropped from the top image as a byte[]
_imageData = mediaService.CropPhoto(_imageData, new Rectangle(left, top, cropSize, cropSize), 256, 256);

我希望这能将图像裁剪到中心正方形(在人像模式下,边长是照片的宽度),然后将其缩小到256x256的图像。但它从不选择图像的中心。

有人使用过这个代码吗?可以告诉我需要为"rectangleToCrop"参数传递什么吗?

注意:Android和iOS都给出了相同的图像,只是不是我所期望的中心部分。

以下是我使用的两个例程:

安卓:

public byte[] ResizeImageAndCropToSquare(byte[] rawPhoto, int outputSize)
{
    // Decode the photo, cut the largest centered square out of it, scale that
    // square to outputSize x outputSize and return it as PNG bytes.
    var decodeOptions = new BitmapFactory.Options
    {
        // InPurgeable lets Android reclaim the pixel memory when required.
        InPurgeable = true
    };
    using (var originalImage = BitmapFactory.DecodeByteArray(rawPhoto, 0, rawPhoto.Length, decodeOptions))
    {
        // The square's edge is the photo's shorter side, centered on both axes.
        var edge = Math.Min(originalImage.Width, originalImage.Height);
        var offsetX = (originalImage.Width - edge) / 2;
        var offsetY = (originalImage.Height - edge) / 2;
        using (var squareImage = Bitmap.CreateBitmap(originalImage, offsetX, offsetY, edge, edge))
        using (var resizedImage = Bitmap.CreateScaledBitmap(squareImage, outputSize, outputSize, true))
        using (var outputStream = new MemoryStream())
        {
            // PNG encoding is lossless; the quality argument (100) is ignored.
            resizedImage.Compress(Bitmap.CompressFormat.Png, 100, outputStream);
            return outputStream.ToArray();
        }
    }
}

iOS(该类中另定义了常量 private const int BitsPerComponent = 8;):

public byte[] ResizeImageAndCropToSquare(byte[] rawPhoto, int outputSize)
{
    // Decode the photo, crop its largest centered square, scale it to
    // outputSize x outputSize and return the result as PNG bytes.
    using (var data = NSData.FromArray(rawPhoto))
    // FIX: the UIImage returned by LoadFromData was previously leaked — only
    // its CGImage was used.
    using (var photoUIImage = UIImage.LoadFromData(data))
    {
        var photoToCrop = photoUIImage.CGImage;
        nint photoWidth = photoToCrop.Width;
        nint photoHeight = photoToCrop.Height;
        // The square's edge is the shorter side, centered on both axes.
        nint cropSize = photoWidth < photoHeight ? photoWidth : photoHeight;
        nint left = (photoWidth - cropSize) / 2;
        nint top = (photoHeight - cropSize) / 2;
        // Crop image
        using (var photoCropped = photoToCrop.WithImageInRect(new CGRect(left, top, cropSize, cropSize)))
        using (var photoCroppedUIImage = UIImage.FromImage(photoCropped))
        // FIX: CGColorSpace.CreateDeviceRGB() was created inline and never disposed.
        using (var colorSpace = CGColorSpace.CreateDeviceRGB())
        // 32-bit premultiplied-ARGB context; outputSize << 2 = 4 bytes per pixel row stride.
        using (var cGBitmapContext = new CGBitmapContext(IntPtr.Zero, outputSize, outputSize, BitsPerComponent, outputSize << 2, colorSpace, CGImageAlphaInfo.PremultipliedFirst))
        {
            var photoOutputRectangleF = new RectangleF(0f, 0f, outputSize, outputSize);
            // Draw the cropped photo resized into the output context.
            cGBitmapContext.DrawImage(photoOutputRectangleF, photoCroppedUIImage.CGImage);
            // Snapshot the context and re-encode as PNG bytes.
            // FIX: the snapshot CGImage and the output UIImage were previously leaked.
            using (var photoOutputCGImage = cGBitmapContext.ToImage())
            using (var photoOutputUIImage = UIImage.FromImage(photoOutputCGImage))
            using (var photoOutputNsData = photoOutputUIImage.AsPNG())
            {
                var rawOutput = new byte[photoOutputNsData.Length];
                Marshal.Copy(photoOutputNsData.Bytes, rawOutput, 0, Convert.ToInt32(photoOutputNsData.Length));
                return rawOutput;
            }
        }
    }
}

最新更新