diff --git a/NAMESPACE b/NAMESPACE index 0f1122b5..acdf0966 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -69,6 +69,7 @@ export("frame<-") export(CLAHE) export(Image) export(LUT) +export(ORBkeypoints) export(Queue) export(Stream) export(Video) @@ -171,6 +172,7 @@ export(isVideoStack) export(isVideoWriter) export(laplacian) export(log) +export(matchKeypoints) export(matchShapes) export(matchTemplate) export(medianBlur) diff --git a/R/feature.R b/R/feature.R index b0746553..701f3238 100644 --- a/R/feature.R +++ b/R/feature.R @@ -31,6 +31,8 @@ #' object. If \code{target} is an \code{\link{Image}} object, the function #' returns nothing and modifies that \code{\link{Image}} object in place. #' +#' @author Simon Garnier, \email{garnier@@njit.edu} +#' #' @references Canny J. A computational approach to edge detection. IEEE Trans #' Pattern Anal Mach Intell. 1986;8: 679–698. doi:10.1109/TPAMI.1986.4767851 #' @@ -105,6 +107,10 @@ canny <- function(image, threshold1, threshold2, aperture_size = 3, #' the x and y coordinates of their centers, the estimates of their radius, and #' the estimated relative reliability of the detected circles ("votes"). #' +#' @author Simon Garnier, \email{garnier@@njit.edu} +#' +#' @seealso \code{\link{houghLinesP}} +#' #' @examples #' dots <- image(system.file("sample_img/dots.jpg", package = "Rvision")) #' dots_gray <- changeColorSpace(dots, "GRAY") @@ -150,6 +156,10 @@ houghCircles <- function(image, method, dp, min_dist, param1 = 100, param2 = 100 #' @return A matrix with 4 columns corresponding to the x and y coordinates of #' the extremities of each detected line. #' +#' @author Simon Garnier, \email{garnier@@njit.edu} +#' +#' @seealso \code{\link{houghCircles}} +#' #' @examples #' balloon <- image(system.file("sample_img/balloon1.png", package = "Rvision")) #' balloon_canny <- canny(balloon, 50, 50) @@ -167,7 +177,6 @@ houghLinesP <- function(image, rho, theta, threshold, min_line_length = 0, max_l } - #' @title Good Features to Track #' #' @description \code{goodFeaturesToTrack} finds the most prominent corners in @@ -211,10 +220,14 @@ houghLinesP <- function(image, rho, theta, threshold, min_line_length = 0, max_l #' @return A matrix with 2 columns corresponding to the x and y coordinates of #' the detected points. #' +#' @author Simon Garnier, \email{garnier@@njit.edu} +#' #' @references Shi, J., & Tomasi. (1994). Good features to track. 1994 #' Proceedings of IEEE Conference on Computer Vision and Pattern Recognition, #' 593–600. https://doi.org/10.1109/CVPR.1994.323794 #' +#' @seealso \code{\link{ORBkeypoints}} +#' #' @examples #' balloon <- image(system.file("sample_img/balloon1.png", package = "Rvision")) #' balloon_gray <- changeColorSpace(balloon, "GRAY") @@ -236,6 +249,9 @@ goodFeaturesToTrack <- function(image, max_corners, quality_level, min_distance, if (is.null(mask)) { mask <- ones(nrow(image), ncol(image), 1) } else { + if (!isImage(mask)) + stop("'mask' is not an Image object.") + if (!all(mask$dim()[1:2] == image$dim()[1:2])) stop("mask does not have the same dimensions as image.") @@ -245,4 +261,176 @@ goodFeaturesToTrack <- function(image, max_corners, quality_level, min_distance, `_goodFeaturesToTrack`(image, max_corners, quality_level, min_distance, mask, block_size, gradient_size, use_harris, k) +} + + +#' @title Keypoint Detection with ORB +#' +#' @description \code{ORBkeypoints} finds and describes keypoints in an image +#' using the ORB method. Keypoints are prominent features that can be used to +#' quickly match images. 
+#' +#' @param image An \code{\link{Image}} object. +#' +#' @param mask A binary \code{\link{Image}} object with the same dimensions as +#' \code{image}. This can be used to mask out pixels that should not be +#' considered when searching for keypoints (pixels set to 0 in the mask will be +#' ignored during the search). +#' +#' @param n_features The maximum number of features to retain. +#' +#' @param scale_factor The pyramid decimation ratio, always greater than 1 +#' (default: 1.2). \code{scale_factor = 2} uses a "classical" pyramid, where +#' each level has 4 times fewer pixels than the previous one. Such a large scale +#' factor will degrade feature matching scores dramatically. On the other hand, +#' a scale factor too close to 1 will require longer computation times. +#' +#' @param n_levels The number of pyramid decimation levels (default: 8). +#' +#' @param edge_threshold The size of the border where the features are not +#' detected. It should roughly match the \code{patch_size} parameter below +#' (default: 31). +#' +#' @param first_level The level of the pyramid to put the source image into +#' (default: 0). Previous levels are filled with upscaled versions of the +#' source image. +#' +#' @param WTA_K The number of points that produce each element of the oriented +#' BRIEF descriptor for a keypoint. \code{WTA_K = 2} (the default) takes a +#' random pair of points and compares their brightness, yielding a binary +#' response. \code{WTA_K = 3} takes 3 random points, finds the point of maximum +#' brightness, and outputs the index of the winner (0, 1, or 2). \code{WTA_K = 4} +#' performs the same operation with 4 random points, and outputs the index of the +#' winner (0, 1, 2, or 3). With \code{WTA_K = 3} and \code{WTA_K = 4}, the +#' output will require 2 bits for storage and, therefore, will need a special +#' variant of the Hamming distance for keypoint matching ("BruteForce-Hamming(2)" +#' in \code{\link{matchKeypoints}}). +#' +#' @param score_type A character string indicating the scoring method to +#' use. \code{"HARRIS"} (the default) uses the Harris algorithm to rank the +#' detected features. \code{"FAST"} is an alternative method that produces +#' slightly less stable keypoints but is a little faster to compute. +#' +#' @param patch_size The size of the patch used to compute the oriented +#' BRIEF descriptor (default: 31). +#' +#' @param fast_threshold A threshold for selecting "good enough" keypoints +#' (default: 20). +#' +#' @return A list with two elements: +#' \itemize{ +#' \item{keypoints: }{a matrix containing the following information about +#' each keypoint: } +#' \itemize{ +#' \item{angle: }{the keypoint orientation in degrees, between 0 and 360, +#' measured relative to the image coordinate system, i.e., clockwise.} +#' \item{octave: }{the pyramid layer from which the keypoint was +#' extracted.} +#' \item{x: }{the x coordinate of the keypoint.} +#' \item{y: }{the y coordinate of the keypoint.} +#' \item{response: }{the response by which the keypoint has been +#' selected.
This can be used for further sorting or subsampling.} +#' \item{size: }{the diameter of the keypoint neighborhood.} +#' } +#' \item{descriptors: }{a single-channel \code{\link{Image}} with each row +#' corresponding to the BRIEF descriptor of a single keypoint.} +#' } +#' +#' @author Simon Garnier, \email{garnier@@njit.edu} +#' +#' @seealso \code{\link{matchKeypoints}}, \code{\link{goodFeaturesToTrack}}, +#' \code{\link{findTransformORB}} +#' +#' @examples +#' dots <- image(system.file("sample_img/dots.jpg", package = "Rvision")) +#' kp <- ORBkeypoints(dots, n_features = 40000) +#' plot(dots) +#' points(kp$keypoints[, c("x", "y")], pch = 19, col = "red") +#' +#' @export +ORBkeypoints <- function(image, mask = NULL, n_features = 500, scale_factor = 1.2, + n_levels = 8, edge_threshold = 31, first_level = 0, WTA_K = 2, + score_type = "HARRIS", patch_size = 31, fast_threshold = 20) { + if (!isImage(image)) + stop("'image' is not an Image object.") + + if (is.null(mask)) { + mask <- ones(nrow(image), ncol(image), 1) + } else { + if (!isImage(mask)) + stop("'mask' is not an Image object.") + + if (!all(mask$dim()[1:2] == image$dim()[1:2])) + stop("mask does not have the same dimensions as image.") + } + + st <- switch(score_type, + "HARRIS" = 0, + "FAST" = 1, + stop("Invalid score type.") + ) + + `_ORBkeypoints`(image, mask, n_features, scale_factor, n_levels, edge_threshold, + first_level, WTA_K, st, patch_size, fast_threshold) +} + + +#' @title Match Keypoints +#' +#' @description \code{matchKeypoints} matches keypoints detected in two separate +#' images. This is useful to find common features for image registration, for +#' instance. +#' +#' @param source,target Single-channel \code{\link{Image}} objects +#' containing the BRIEF descriptors of the source and target images, as produced +#' by \code{\link{ORBkeypoints}}. +#' +#' @param descriptor_matcher A character string indicating the type of the +#' descriptor matcher to use. It can be one of the following: "BruteForce", +#' "BruteForce-L1", "BruteForce-Hamming" (the default), "BruteForce-Hamming(2)", +#' or "FlannBased". +#' +#' @param match_frac The fraction of top matches to keep (default: 0.15). +#' +#' @return A three-column matrix with the identities of the keypoints matched +#' between the source and target images, and the distance between them (a lower
#' distance indicates a better match).
+#' +#' @author Simon Garnier, \email{garnier@@njit.edu} +#' +#' @seealso \code{\link{ORBkeypoints}} +#' +#' @examples +#' balloon1 <- image(system.file("sample_img/balloon1.png", package = "Rvision")) +#' balloon2 <- image(system.file("sample_img/balloon2.png", package = "Rvision")) +#' kp1 <- ORBkeypoints(balloon1, n_features = 40000) +#' kp2 <- ORBkeypoints(balloon2, n_features = 40000) +#' matchKeypoints(kp1$descriptors, kp2$descriptors, match_frac = 1) +#' +#' @export +matchKeypoints <- function(source, target, descriptor_matcher = "BruteForce-Hamming", + match_frac = 0.15) { + if (!isImage(source)) + stop("'source' is not an Image object.") + + if (!isImage(target)) + stop("'target' is not an Image object.") + + if (source$nchan() != 1) + stop("'source' has more than one channel.") + + if (target$nchan() != 1) + stop("'target' has more than one channel.") + + if (source$ncol() != target$ncol()) + stop("'target' does not have the same number of columns as 'source'.") + + if (match_frac <= 0 | match_frac > 1) + stop("'match_frac' is out of bounds.") + + if (!(descriptor_matcher %in% c("BruteForce", "BruteForce-L1", "BruteForce-Hamming", + "BruteForce-Hamming(2)", "FlannBased"))) + stop("Invalid descriptor matcher.") + + `_matchKeypoints`(source, target, descriptor_matcher, match_frac) } \ No newline at end of file diff --git a/R/transform.R b/R/transform.R index 0ded75b8..28eccef2 100644 --- a/R/transform.R +++ b/R/transform.R @@ -175,8 +175,8 @@ findTransformECC <- function(template, image, warp_matrix = NULL, warp_mode = "a #' #' @param descriptor_matcher A character string indicating the type of the #' descriptor matcher to use. It can be one of the followings: "BruteForce", -#' "BruteForce-L1", "BruteForce-Hamming" (the default), or -#' "BruteForce-Hamming(2)". +#' "BruteForce-L1", "BruteForce-Hamming" (the default), "BruteForce-Hamming(2)", +#' or "FlannBased". #' #' @param match_frac The fraction of top matches to keep (default: 0.15). #' @@ -218,6 +218,11 @@ findTransformORB <- function(template, image, warp_mode = "affine", max_features if (warp_mode == "affine" & !(homography_method %in% c("RANSAC", "LSMEDS"))) stop("When warp_mode='affine', homography_method can only be one of 'RANSAC' or 'LSMEDS'.") + if (!(descriptor_matcher %in% c("BruteForce", "BruteForce-L1", "BruteForce-Hamming", + "BruteForce-Hamming(2)", "FlannBased"))) + stop("Invalid descriptor matcher.") + + `_findTransformORB`(template, image, switch(warp_mode, "affine" = 2, diff --git a/docs/pkgdown.yml b/docs/pkgdown.yml index 65b76e74..f6e72bfb 100644 --- a/docs/pkgdown.yml +++ b/docs/pkgdown.yml @@ -9,7 +9,7 @@ articles: z5_gpu: z5_gpu.html z6_queue: z6_queue.html z7_stack: z7_stack.html -last_built: 2023-07-25T15:44Z +last_built: 2023-08-04T10:50Z urls: reference: https://swarm-lab.github.io/Rvision/reference article: https://swarm-lab.github.io/Rvision/articles diff --git a/docs/reference/canny.html b/docs/reference/canny.html index c562f866..d118044c 100644 --- a/docs/reference/canny.html +++ b/docs/reference/canny.html @@ -125,6 +125,10 @@

[rendered pkgdown page diff for canny.html: a new Author section ("Simon Garnier, garnier@njit.edu") is added between the References and Examples sections]

diff --git a/docs/reference/convexHull.html b/docs/reference/convexHull.html index 56a13695..33e333cd 100644 --- a/docs/reference/convexHull.html +++ b/docs/reference/convexHull.html @@ -98,7 +98,7 @@

[rendered pkgdown page diff for convexHull.html: regenerated Examples output]

convexHull(rnorm(100), rnorm(100))
-#>  [1] 68 71 43  4 64 30 31 96  2  8 91
+#> [1]  7 55  8  4 87 80 48 64
 
 
diff --git a/docs/reference/findTransformORB.html b/docs/reference/findTransformORB.html index f946ed57..2915f5bd 100644 --- a/docs/reference/findTransformORB.html +++ b/docs/reference/findTransformORB.html @@ -104,8 +104,8 @@

[rendered pkgdown page diff for findTransformORB.html: the descriptor_matcher entry in Arguments is updated]
+"BruteForce-L1", "BruteForce-Hamming" (the default), "BruteForce-Hamming(2)",
+or "FlannBased".
diff --git a/docs/reference/fitEllipse.html b/docs/reference/fitEllipse.html index 09d43f20..6b957637 100644 --- a/docs/reference/fitEllipse.html +++ b/docs/reference/fitEllipse.html @@ -119,16 +119,16 @@

[rendered pkgdown page diff for fitEllipse.html: regenerated Examples output]

fitEllipse(rnorm(100), rnorm(100))
 #> $angle
-#> [1] 97.38929
+#> [1] 175.1583
 #> 
 #> $height
-#> [1] 4.164331
+#> [1] 3.805757
 #> 
 #> $width
-#> [1] 3.369515
+#> [1] 2.914315
 #> 
 #> $center
-#> [1] -0.14290006  0.07829523
+#> [1] -0.02007224 -0.06776221
 #> 
 
 
diff --git a/docs/reference/houghCircles.html b/docs/reference/houghCircles.html index e67c62b5..19d240c7 100644 --- a/docs/reference/houghCircles.html +++ b/docs/reference/houghCircles.html @@ -144,6 +144,14 @@

[rendered pkgdown page diff for houghCircles.html: new See also and Author ("Simon Garnier, garnier@njit.edu") sections are added between the Value and Examples sections]

diff --git a/docs/reference/index.html b/docs/reference/index.html index 8db91807..c39d84f5 100644 --- a/docs/reference/index.html +++ b/docs/reference/index.html @@ -83,6 +83,11 @@

[rendered pkgdown page diff for the reference index: new entries ORBkeypoints() "Keypoint Detection with ORB" (before Queue) and matchKeypoints() "Match Keypoints" (before matchShapes()) are added]
diff --git a/docs/reference/minAreaRect.html b/docs/reference/minAreaRect.html index acf940e5..090fce67 100644 --- a/docs/reference/minAreaRect.html +++ b/docs/reference/minAreaRect.html @@ -100,16 +100,16 @@

[rendered pkgdown page diff for minAreaRect.html: regenerated Examples output]

minAreaRect(rnorm(100), rnorm(100))
 #> $angle
-#> [1] 78.76929
+#> [1] 1.715836
 #> 
 #> $height
-#> [1] 4.217333
+#> [1] 5.917526
 #> 
 #> $width
-#> [1] 5.279967
+#> [1] 4.508763
 #> 
 #> $center
-#> [1] 0.3618168 0.7449882
+#> [1] -0.2853765  0.1885252
 #> 
 
 
 diff --git a/docs/sitemap.xml b/docs/sitemap.xml index f3e264b3..7c82b15e 100644 --- a/docs/sitemap.xml +++ b/docs/sitemap.xml @@ -48,6 +48,9 @@ https://swarm-lab.github.io/Rvision/reference/LUT.html + + https://swarm-lab.github.io/Rvision/reference/ORBkeypoints.html + https://swarm-lab.github.io/Rvision/reference/Queue-class.html @@ -375,6 +378,9 @@ https://swarm-lab.github.io/Rvision/reference/log.html + + https://swarm-lab.github.io/Rvision/reference/matchKeypoints.html + https://swarm-lab.github.io/Rvision/reference/matchShapes.html diff --git a/man/ORBkeypoints.Rd b/man/ORBkeypoints.Rd new file mode 100644 index 00000000..e0322a5f --- /dev/null +++ b/man/ORBkeypoints.Rd @@ -0,0 +1,107 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/feature.R +\name{ORBkeypoints} +\alias{ORBkeypoints} +\title{Keypoint Detection with ORB} +\usage{ +ORBkeypoints( + image, + mask = NULL, + n_features = 500, + scale_factor = 1.2, + n_levels = 8, + edge_threshold = 31, + first_level = 0, + WTA_K = 2, + score_type = "HARRIS", + patch_size = 31, + fast_threshold = 20 ) +} +\arguments{ +\item{image}{An \code{\link{Image}} object.} + +\item{mask}{A binary \code{\link{Image}} object with the same dimensions as +\code{image}. This can be used to mask out pixels that should not be +considered when searching for keypoints (pixels set to 0 in the mask will be +ignored during the search).} + +\item{n_features}{The maximum number of features to retain.} + +\item{scale_factor}{The pyramid decimation ratio, always greater than 1 +(default: 1.2). \code{scale_factor = 2} uses a "classical" pyramid, where +each level has 4 times fewer pixels than the previous one. Such a large scale +factor will degrade feature matching scores dramatically. On the other hand, +a scale factor too close to 1 will require longer computation times.} + +\item{n_levels}{The number of pyramid decimation levels (default: 8).} + +\item{edge_threshold}{The size of the border where the features are not +detected. It should roughly match the \code{patch_size} parameter below +(default: 31).} + +\item{first_level}{The level of the pyramid to put the source image into +(default: 0). Previous levels are filled with upscaled versions of the +source image.} + +\item{WTA_K}{The number of points that produce each element of the oriented +BRIEF descriptor for a keypoint. \code{WTA_K = 2} (the default) takes a +random pair of points and compares their brightness, yielding a binary +response. \code{WTA_K = 3} takes 3 random points, finds the point of maximum +brightness, and outputs the index of the winner (0, 1, or 2). \code{WTA_K = 4} +performs the same operation with 4 random points, and outputs the index of the +winner (0, 1, 2, or 3). With \code{WTA_K = 3} and \code{WTA_K = 4}, the +output will require 2 bits for storage and, therefore, will need a special +variant of the Hamming distance for keypoint matching ("BruteForce-Hamming(2)" +in \code{\link{matchKeypoints}}).} + +\item{score_type}{A character string indicating the scoring method to +use. \code{"HARRIS"} (the default) uses the Harris algorithm to rank the +detected features.
\code{"FAST"} is an alternative method that produces +slightly less stable keypoints but is a little faster to compute.} + +\item{patch_size}{The size of the patch used to compute the the oriented +BRIEF descriptor (default: 31).} + +\item{fast_threshold}{A threshold for selecting "good enough" keypoints +(default: 20)} +} +\value{ +A list with two elements: + \itemize{ + \item{keypoints: }{a matrix containing the following information about + each keypoint: } + \itemize{ + \item{angle: }{the keypoint orientation in degrees, between 0 and 360, + measured relative to the image coordinate system, i.e., clockwise.} + \item{octave: }{the pyramid layer from which the keypoint was + extracted.} + \item{x: }{the x coordinate of the keypoint.} + \item{y: }{the y coordinate of the keypoint.} + \item{response: }{the response by which the keypoint have been + selected. This can be used for the further sorting or subsampling.} + \item{size: }{the diameter of the keypoint neighborhood.} + } + \item{descriptors: }{a single-channel \code{\link{Image}} with each row + corresponding to the BRIEF descriptor of a single keypoint.} + } +} +\description{ +\code{ORBkeypoints} finds and describes keypoints in an image + using the ORB method. Keypoints are prominent features that can be used to + quickly match images. +} +\examples{ +dots <- image(system.file("sample_img/dots.jpg", package = "Rvision")) +kp <- ORBkeypoints(dots, n_features = 40000) +plot(dots) +points(kp$keypoints[, c("x", "y")], pch = 19, col = "red") + +} +\seealso{ +\code{\link{matchKeypoints}}, \code{\link{goodFeaturesToTrack}}, + \code{\link{findTransformORB}} +} +\author{ +Simon Garnier, \email{garnier@njit.edu} +} diff --git a/man/canny.Rd b/man/canny.Rd index 9b2fdc86..43dcfd76 100644 --- a/man/canny.Rd +++ b/man/canny.Rd @@ -56,3 +56,6 @@ balloon_canny <- canny(balloon, 50, 50) Canny J. A computational approach to edge detection. IEEE Trans Pattern Anal Mach Intell. 1986;8: 679–698. doi:10.1109/TPAMI.1986.4767851 } +\author{ +Simon Garnier, \email{garnier@njit.edu} +} diff --git a/man/findTransformORB.Rd b/man/findTransformORB.Rd index 630a5f05..4bb13ae5 100644 --- a/man/findTransformORB.Rd +++ b/man/findTransformORB.Rd @@ -33,8 +33,8 @@ to transform \code{image} into \code{template}. It can be any of the following: \item{descriptor_matcher}{A character string indicating the type of the descriptor matcher to use. It can be one of the followings: "BruteForce", -"BruteForce-L1", "BruteForce-Hamming" (the default), or -"BruteForce-Hamming(2)".} +"BruteForce-L1", "BruteForce-Hamming" (the default), "BruteForce-Hamming(2)", +or "FlannBased".} \item{match_frac}{The fraction of top matches to keep (default: 0.15).} diff --git a/man/goodFeaturesToTrack.Rd b/man/goodFeaturesToTrack.Rd index cee09f09..145941a9 100644 --- a/man/goodFeaturesToTrack.Rd +++ b/man/goodFeaturesToTrack.Rd @@ -71,3 +71,9 @@ Shi, J., & Tomasi. (1994). Good features to track. 1994 Proceedings of IEEE Conference on Computer Vision and Pattern Recognition, 593–600. 
https://doi.org/10.1109/CVPR.1994.323794 } +\seealso{ +\code{\link{ORBkeypoints}} +} +\author{ +Simon Garnier, \email{garnier@njit.edu} +} diff --git a/man/houghCircles.Rd b/man/houghCircles.Rd index 4d1c7360..9b2436a4 100644 --- a/man/houghCircles.Rd +++ b/man/houghCircles.Rd @@ -71,3 +71,9 @@ dots_gray <- changeColorSpace(dots, "GRAY") circ <- houghCircles(dots_gray, "ALT", 1.5, 25, 300, 0.9) } +\seealso{ +\code{\link{houghLinesP}} +} +\author{ +Simon Garnier, \email{garnier@njit.edu} +} diff --git a/man/houghLinesP.Rd b/man/houghLinesP.Rd index e250a82f..6790a0a7 100644 --- a/man/houghLinesP.Rd +++ b/man/houghLinesP.Rd @@ -44,3 +44,9 @@ balloon_canny <- canny(balloon, 50, 50) lines <- houghLinesP(balloon_canny, 1, pi / 180, 80, 0, 50) } +\seealso{ +\code{\link{houghCircles}} +} +\author{ +Simon Garnier, \email{garnier@njit.edu} +} diff --git a/man/matchKeypoints.Rd b/man/matchKeypoints.Rd new file mode 100644 index 00000000..98c233ef --- /dev/null +++ b/man/matchKeypoints.Rd @@ -0,0 +1,49 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/feature.R +\name{matchKeypoints} +\alias{matchKeypoints} +\title{Match Keypoints} +\usage{ +matchKeypoints( + source, + target, + descriptor_matcher = "BruteForce-Hamming", + match_frac = 0.15 ) +} +\arguments{ +\item{source, target}{Single-channel \code{\link{Image}} objects +containing the BRIEF descriptors of the source and target images, as produced +by \code{\link{ORBkeypoints}}.} +\item{descriptor_matcher}{A character string indicating the type of the +descriptor matcher to use. It can be one of the following: "BruteForce", +"BruteForce-L1", "BruteForce-Hamming" (the default), "BruteForce-Hamming(2)", +or "FlannBased".} +\item{match_frac}{The fraction of top matches to keep (default: 0.15).} +} +\value{ +A three-column matrix with the identities of the keypoints matched + between the source and target images, and the distance between them (a lower + distance indicates a better match). +} +\description{ +\code{matchKeypoints} matches keypoints detected in two separate + images. This is useful to find common features for image registration, for + instance.
+} +\examples{ +balloon1 <- image(system.file("sample_img/balloon1.png", package = "Rvision")) +balloon2 <- image(system.file("sample_img/balloon2.png", package = "Rvision")) +kp1 <- ORBkeypoints(balloon1, n_features = 40000) +kp2 <- ORBkeypoints(balloon2, n_features = 40000) +matchKeypoints(kp1$descriptors, kp2$descriptors, match_frac = 1) + +} +\seealso{ +\code{\link{ORBkeypoints}} +} +\author{ +Simon Garnier, \email{garnier@njit.edu} +} diff --git a/src/feature.h b/src/feature.h index a186b36d..0c6ca28f 100644 --- a/src/feature.h +++ b/src/feature.h @@ -14,8 +14,8 @@ void _canny(Image& image, double threshold1, double threshold2, int apertureSize } Rcpp::NumericMatrix _houghCircles(Image& image, int method, double dp, double minDist, - double param1, double param2, int minRadius = 0, - int maxRadius = 0) { + double param1, double param2, int minRadius, + int maxRadius) { std::vector circles; cv::HoughCircles(image.image, circles, method, dp, minDist, param1, param2, minRadius, maxRadius); @@ -87,3 +87,72 @@ Rcpp::NumericMatrix _goodFeaturesToTrack(Image& image, int maxCorners, double qu return out; } + +Rcpp::List _ORBkeypoints(Image& image, Image& mask, int nfeatures, float scaleFactor, + int nlevels, int edgeThreshold, int firstLevel, int WTA_K, + int scoreType, int patchSize, int fastThreshold) { + std::vector kpts; + cv::Mat descriptors; + cv::Ptr orb; + + if (scoreType == 0) { + orb = cv::ORB::create(nfeatures, scaleFactor, nlevels, edgeThreshold, + firstLevel, WTA_K, cv::ORB::HARRIS_SCORE, patchSize, + fastThreshold); + } else { + orb = cv::ORB::create(nfeatures, scaleFactor, nlevels, edgeThreshold, + firstLevel, WTA_K, cv::ORB::FAST_SCORE, patchSize, + fastThreshold); + } + + if (image.GPU) { + if (mask.GPU) { + orb->detectAndCompute(image.uimage, mask.uimage, kpts, descriptors); + } else { + orb->detectAndCompute(image.uimage, mask.image, kpts, descriptors); + } + } else { + if (mask.GPU) { + orb->detectAndCompute(image.image, mask.uimage, kpts, descriptors); + } else { + orb->detectAndCompute(image.image, mask.image, kpts, descriptors); + } + } + + Rcpp::NumericMatrix keypoints(kpts.size(), 6); + colnames(keypoints) = Rcpp::CharacterVector::create("angle", "octave", + "x", "y", "response", "size"); + + for (uint i = 0; i < kpts.size(); i++) { + keypoints(i, 0) = 360 - kpts[i].angle; + keypoints(i, 1) = kpts[i].octave; + keypoints(i, 2) = kpts[i].pt.x + 1; + keypoints(i, 3) = -kpts[i].pt.y + image.nrow(); + keypoints(i, 4) = kpts[i].response; + keypoints(i, 5) = kpts[i].size; + } + + return Rcpp::List::create(Rcpp::Named("keypoints") = keypoints, + Rcpp::Named("descriptors") = Image(descriptors, "GRAY")); +} + +Rcpp::NumericMatrix _matchKeypoints(Image source, Image target, + String descriptorMatcher, double matchFrac) { + std::vector matches; + cv::Ptr matcher = cv::DescriptorMatcher::create(descriptorMatcher); + matcher->match(source.image, target.image, matches, cv::noArray() ); + std::sort(matches.begin(), matches.end()); + const int numGoodMatches = matches.size() * matchFrac; + matches.erase(matches.begin() + numGoodMatches, matches.end()); + + Rcpp::NumericMatrix out(matches.size(), 3); + colnames(out) = Rcpp::CharacterVector::create("source", "target", "distance"); + + for(size_t i = 0; i < matches.size(); i++) { + out(i, 0) = matches[i].queryIdx + 1; + out(i, 1) = matches[i].trainIdx + 1; + out(i, 2) = matches[i].distance + 1; + } + + return out; +} \ No newline at end of file diff --git a/src/visionModule.cpp b/src/visionModule.cpp index bdf8bcd8..c4268722 100644 --- 
a/src/visionModule.cpp +++ b/src/visionModule.cpp @@ -405,6 +405,11 @@ RCPP_MODULE(methods_Feature) { function("_goodFeaturesToTrack", &_goodFeaturesToTrack, List::create(_["image"], _["maxCorners"], _["qualityLevel"], _["minDistance"], _["mask"], _["blockSize"], _["gradientSize"], _["useHarrisDetector"], _["k"]), ""); + function("_ORBkeypoints", &_ORBkeypoints, List::create(_["image"], _["mask"], + _["nfeatures"], _["scaleFactor"], _["nlevels"], _["edgeThreshold"], + _["firstLevel"], _["WTA_K"], _["scoreType"], _["patchSize"], _["fastThreshold"]), ""); + function("_matchKeypoints", &_matchKeypoints, List::create(_["descriptor1"], _["descriptor2"], + _["descriptorMatcher"], _["matchFrac"]), ""); } #include "autothresh.h"
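
A minimal usage sketch chaining the two new exports on the sample images from the roxygen examples above. The n_features value and the plotting step are arbitrary illustrative choices; the keypoint column names ("x", "y") and the match column names ("source", "target", "distance", returned as 1-based row indices plus a descriptor distance) follow the return values documented in R/feature.R and the colnames set in _matchKeypoints().

library(Rvision)

balloon1 <- image(system.file("sample_img/balloon1.png", package = "Rvision"))
balloon2 <- image(system.file("sample_img/balloon2.png", package = "Rvision"))

# Detect and describe keypoints with ORB (default WTA_K = 2, so the default
# "BruteForce-Hamming" matcher is appropriate)
kp1 <- ORBkeypoints(balloon1, n_features = 1000)
kp2 <- ORBkeypoints(balloon2, n_features = 1000)

# Keep the top 15% of descriptor matches (the default)
m <- matchKeypoints(kp1$descriptors, kp2$descriptors,
                    descriptor_matcher = "BruteForce-Hamming",
                    match_frac = 0.15)

# 'source' and 'target' are 1-based row indices into the keypoint matrices;
# 'distance' is the descriptor distance (lower is better)
xy1 <- kp1$keypoints[m[, "source"], c("x", "y")]
xy2 <- kp2$keypoints[m[, "target"], c("x", "y")]

# Quick visual check of the matched keypoints in each image
plot(balloon1)
points(xy1, pch = 19, col = "red")
plot(balloon2)
points(xy2, pch = 19, col = "red")

For full image registration, findTransformORB() in R/transform.R wraps the same ORB detection and descriptor matching pipeline and returns a warp matrix directly.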