I am creating an iPhone app that detects a rectangle and captures an image using the camera. I draw an overlay over the biggest rectangle detected, and once the photo is captured I have the four CGPoint corners from a CIRectangleFeature together with the image. All four points in the CIRectangleFeature are in landscape, while my app runs in portrait.
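For context, the corners come from a CIDetector rectangle feature, roughly like this (a simplified sketch, not my exact code; ciImage stands for the captured frame and the detector options are just an example):

import CoreImage

// Simplified sketch of how the CIRectangleFeature corners are obtained.
// `ciImage` stands for the captured frame as a CIImage.
let detector = CIDetector(ofType: CIDetectorTypeRectangle,
                          context: nil,
                          options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])

if let rectFeature = detector?.features(in: ciImage).first as? CIRectangleFeature {
    // Four corners, in Core Image coordinates (origin at the bottom-left of the image)
    print(rectFeature.topLeft, rectFeature.topRight,
          rectFeature.bottomLeft, rectFeature.bottomRight)
}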
When I display the image in a UIImageView on the next view controller, the coordinates no longer line up. The image view is in AspectFit mode. I searched and found a few solutions; one of them is:
import UIKit
import CoreImage

extension CGPoint {
    func scalePointByCeficient(ƒ_x: CGFloat, ƒ_y: CGFloat, viewWidth: CGSize, imageWidth: CGSize) -> CGPoint {
        let scale: CGFloat = min(ƒ_x, ƒ_y)
        var p: CGPoint = CGPoint(x: self.x, y: self.y)
        p.x *= scale
        p.y *= scale
        p.x += (viewWidth.width - imageWidth.width * scale) / 2.0
        p.y += (viewWidth.height - imageWidth.height * scale) / 2.0
        return p
    }

    func reversePointCoordinates() -> CGPoint {
        return CGPoint(x: self.y, y: self.x)
    }

    func sumPointCoordinates(add: CGPoint) -> CGPoint {
        return CGPoint(x: self.x + add.x, y: self.y + add.y)
    }

    func substractPointCoordinates(sub: CGPoint) -> CGPoint {
        return CGPoint(x: self.x - sub.x, y: self.y - sub.y)
    }
}
class ObyRectangleFeature: NSObject {

    public var topLeft: CGPoint
    public var topRight: CGPoint
    public var bottomLeft: CGPoint
    public var bottomRight: CGPoint

    var myRect: CIRectangleFeature?

    public var viewWidth: CGSize
    public var imageWidth: CGSize

    var centerPoint_OLD: CGPoint {
        get {
            myRect?.topLeft = self.topLeft
            myRect?.topRight = self.topRight
            myRect?.bottomLeft = self.bottomLeft
            myRect?.bottomRight = self.bottomRight
            let superCenter: CGPoint = CGPoint(x: (myRect?.bounds().midX)!, y: (myRect?.bounds().midY)!)
            return superCenter
        }
    }

    var centerPoint: CGPoint {
        get {
            myRect?.topLeft = self.topLeft
            myRect?.topRight = self.topRight
            myRect?.bottomLeft = self.bottomLeft
            myRect?.bottomRight = self.bottomRight
            let superCenter: CGPoint = CGPoint(x: (myRect?.bounds().midX)!, y: (myRect?.bounds().midY)!)
            return superCenter
        }
    }

    convenience init(rectObj rectangleFeature: CIRectangleFeature) {
        self.init()
        myRect = rectangleFeature
        topLeft = rectangleFeature.topLeft
        topRight = rectangleFeature.topRight
        bottomLeft = rectangleFeature.bottomLeft
        bottomRight = rectangleFeature.bottomRight
    }

    override init() {
        self.topLeft = CGPoint.zero
        self.topRight = CGPoint.zero
        self.bottomLeft = CGPoint.zero
        self.bottomRight = CGPoint.zero
        self.viewWidth = CGSize.zero
        self.imageWidth = CGSize.zero
        super.init()
    }

    public func rotate90Degree() -> Void {
        let centerPoint = self.centerPoint
        // rotate: cos(90) = 0, sin(90) = 1
        topLeft = CGPoint(x: centerPoint.x + (topLeft.y - centerPoint.y), y: centerPoint.y + (topLeft.x - centerPoint.x))
        topRight = CGPoint(x: centerPoint.x + (topRight.y - centerPoint.y), y: centerPoint.y + (topRight.x - centerPoint.x))
        bottomLeft = CGPoint(x: centerPoint.x + (bottomLeft.y - centerPoint.y), y: centerPoint.y + (bottomLeft.x - centerPoint.x))
        bottomRight = CGPoint(x: centerPoint.x + (bottomRight.y - centerPoint.y), y: centerPoint.y + (bottomRight.x - centerPoint.x))
        print(self.centerPoint)
    }

    public func scaleRectWithCoeficient(ƒ_x: CGFloat, ƒ_y: CGFloat) -> Void {
        topLeft = topLeft.scalePointByCeficient(ƒ_x: ƒ_x, ƒ_y: ƒ_y, viewWidth: self.viewWidth, imageWidth: self.imageWidth)
        topRight = topRight.scalePointByCeficient(ƒ_x: ƒ_x, ƒ_y: ƒ_y, viewWidth: self.viewWidth, imageWidth: self.imageWidth)
        bottomLeft = bottomLeft.scalePointByCeficient(ƒ_x: ƒ_x, ƒ_y: ƒ_y, viewWidth: self.viewWidth, imageWidth: self.imageWidth)
        bottomRight = bottomRight.scalePointByCeficient(ƒ_x: ƒ_x, ƒ_y: ƒ_y, viewWidth: self.viewWidth, imageWidth: self.imageWidth)
    }

    public func correctOriginPoints() -> Void {
        let deltaCenter = self.centerPoint.reversePointCoordinates().substractPointCoordinates(sub: self.centerPoint)
        let TL = topLeft
        let TR = topRight
        let BL = bottomLeft
        let BR = bottomRight
        topLeft = BL.sumPointCoordinates(add: deltaCenter)
        topRight = TL.sumPointCoordinates(add: deltaCenter)
        bottomLeft = BR.sumPointCoordinates(add: deltaCenter)
        bottomRight = TR.sumPointCoordinates(add: deltaCenter)
        print(self.centerPoint)
    }
}
It is called like this (from Objective-C):
ObyRectangleFeature *scaledRect = [[ObyRectangleFeature alloc] initWithRectObj:(id)rect_rect];
float f_x = _sourceImageView.frame.size.width / _sourceImageView.image.size.width;
float f_y = _sourceImageView.frame.size.height / _sourceImageView.image.size.height;
[scaledRect setViewWidth:_sourceImageView.bounds.size];
[scaledRect setImageWidth:_sourceImageView.image.size];
[scaledRect scaleRectWithCoeficientWithƒ_x:f_y ƒ_y:f_x];
[scaledRect rotate90Degree];
[scaledRect correctOriginPoints];
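The same call sequence in Swift would look roughly like this (a sketch; sourceImageView is the UIImageView showing the captured image and rect_rect is the detected CIRectangleFeature):

let scaledRect = ObyRectangleFeature(rectObj: rect_rect)

let f_x = sourceImageView.frame.size.width / sourceImageView.image!.size.width
let f_y = sourceImageView.frame.size.height / sourceImageView.image!.size.height

scaledRect.viewWidth = sourceImageView.bounds.size
scaledRect.imageWidth = sourceImageView.image!.size

// Note: ƒ_x and ƒ_y are passed swapped, exactly as in the Objective-C snippet above
scaledRect.scaleRectWithCoeficient(ƒ_x: f_y, ƒ_y: f_x)
scaledRect.rotate90Degree()
scaledRect.correctOriginPoints()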
Basically it computes the scale factor, converts the points to UIImageView coordinates, and then, to account for the landscape capture, rotates them by 90 degrees (or more, as required). But the result I get is problematic: the rect that is drawn ends up displaced below the card. Any ideas on how to solve this problem?
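One detail I am unsure about: CIRectangleFeature points are in Core Image coordinates, which put the origin at the bottom-left, while UIKit uses a top-left origin, so a vertical flip along these lines is usually needed somewhere (a minimal sketch; imageSize would be the size of the image the detector ran on):

// Flip a Core Image point (bottom-left origin) into UIKit coordinates (top-left origin).
func flipToUIKit(_ p: CGPoint, imageSize: CGSize) -> CGPoint {
    return CGPoint(x: p.x, y: imageSize.height - p.y)
}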