@@ -120,6 +120,66 @@ extension SwiftAutoGUI {
        return CGPoint(x: rect.midX, y: rect.midY)
    }

    /// Locate all instances of an image on the screen and return their positions
    ///
    /// - Parameters:
    ///   - imagePath: Path to the image file to search for
    ///   - grayscale: Convert to grayscale for faster matching (currently ignored, reserved for a future implementation)
    ///   - confidence: Matching confidence threshold (0.0-1.0). If nil, a default threshold of 0.95 is used
    ///   - region: Limit the search to a specific screen region. If nil, the entire screen is searched
    /// - Returns: Array of CGRect values (x, y, width, height) for all found instances, or an empty array if none are found
    ///
    /// This method uses OpenCV's template matching algorithm to find all instances of an image on the screen.
    /// It applies non-maximum suppression to avoid duplicate detections of the same object.
    ///
    /// Example:
    /// ```swift
    /// // Find all buttons on screen
    /// let buttons = SwiftAutoGUI.locateAllOnScreen("button.png")
    /// for (index, button) in buttons.enumerated() {
    ///     print("Button \(index) found at: \(button)")
    ///     SwiftAutoGUI.move(to: CGPoint(x: button.midX, y: button.midY))
    ///     SwiftAutoGUI.leftClick()
    ///     Thread.sleep(forTimeInterval: 0.5)
    /// }
    ///
    /// // Find all matches with a confidence threshold
    /// let closeButtons = SwiftAutoGUI.locateAllOnScreen("close_button.png", confidence: 0.85)
    /// print("Found \(closeButtons.count) close buttons")
    ///
    /// // Search in a specific region for better performance
    /// let searchRegion = CGRect(x: 0, y: 0, width: 800, height: 600)
    /// let icons = SwiftAutoGUI.locateAllOnScreen("icon.png", region: searchRegion)
    /// ```
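    ///
    /// - Note: Returned rectangles are expressed in screen points (logical coordinates),
    ///   so they can be used directly with the mouse movement APIs in this library.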
    public static func locateAllOnScreen(
        _ imagePath: String,
        grayscale: Bool = false,
        confidence: Double? = nil,
        region: CGRect? = nil
    ) -> [CGRect] {
        // Load the needle image
        guard let needleImage = NSImage(contentsOfFile: imagePath) else {
            print("SwiftAutoGUI: Could not load image from path: \(imagePath)")
            return []
        }

        // Take a screenshot of the region or the entire screen
        let screenshot: NSImage?
        if let region = region {
            screenshot = self.screenshot(region: region)
        } else {
            screenshot = self.screenshot()
        }

        guard let haystackImage = screenshot else {
            print("SwiftAutoGUI: Could not capture screenshot")
            return []
        }

        // Perform image matching to find all instances
        return findAllImagesInImage(needle: needleImage, haystack: haystackImage, confidence: confidence, searchRegion: region)
    }

    // MARK: Private Helper Methods

    /// Find needle image within haystack image using OpenCV template matching
@@ -204,6 +264,124 @@ extension SwiftAutoGUI {

        return nil
    }

    /// Find all instances of needle image within haystack image using OpenCV template matching
    private static func findAllImagesInImage(
        needle: NSImage,
        haystack: NSImage,
        confidence: Double?,
        searchRegion: CGRect?
    ) -> [CGRect] {
        // Convert NSImages to OpenCV Mat format
        guard let needleMat = needle.toMat(),
              let haystackMat = haystack.toMat() else {
            print("SwiftAutoGUI: Could not convert images to OpenCV Mat")
            return []
        }

        // The haystack screenshot is already limited to the requested search region by the
        // caller, so it must not be cropped again here; the region origin is only needed to
        // translate match coordinates back into full-screen coordinates.
        let searchMat: Mat = haystackMat
        let regionOffset: CGPoint = searchRegion?.origin ?? .zero

        // Perform template matching using OpenCV
        let result = Mat()
        Imgproc.matchTemplate(
            image: searchMat,
            templ: needleMat,
            result: result,
            method: TemplateMatchModes.TM_CCOEFF_NORMED // Normalized correlation coefficient
        )
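        // The result matrix has (haystackRows - needleRows + 1) x (haystackCols - needleCols + 1)
        // entries; each one is the normalized correlation score (roughly -1...1) obtained by
        // placing the template's top-left corner at that position.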

        let threshold = confidence ?? 0.95
        var matches: [CGRect] = []

        // Get screen scale factor
        let screen = NSScreen.main ?? NSScreen.screens[0]
        let scaleFactor = screen.backingScaleFactor

        // Find all matches above threshold
        var resultData = [Float](repeating: 0, count: Int(result.rows()) * Int(result.cols()))
        _ = try? result.get(row: 0, col: 0, data: &resultData)
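        // resultData now holds the score matrix in row-major order, so a flat index i maps
        // back to matrix coordinates as row = i / cols, col = i % cols.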

        let templateWidth = needleMat.cols()
        let templateHeight = needleMat.rows()

        // Track which result cells have already been accepted or suppressed
        var processedMask = Array(repeating: false, count: resultData.count)

        while true {
            // Find the maximum value and its location
            var maxVal: Float = -1
            var maxIdx = -1

            for i in 0..<resultData.count {
                if !processedMask[i] && resultData[i] > maxVal {
                    maxVal = resultData[i]
                    maxIdx = i
                }
            }

            // If no more matches above threshold, break
            if maxVal < Float(threshold) || maxIdx == -1 {
                break
            }

            // Convert the flat index back to (x, y) coordinates in the result matrix
            let y = maxIdx / Int(result.cols())
            let x = maxIdx % Int(result.cols())

            // Convert from pixels to points (logical coordinates), then translate by the
            // search region's origin so the rect is expressed in full-screen coordinates
            let pointRect = CGRect(
                x: CGFloat(x) / scaleFactor + regionOffset.x,
                y: CGFloat(y) / scaleFactor + regionOffset.y,
                width: CGFloat(templateWidth) / scaleFactor,
                height: CGFloat(templateHeight) / scaleFactor
            )

            matches.append(pointRect)

            // Apply non-maximum suppression: mark nearby pixels as processed
            // to avoid detecting the same object multiple times
            let suppressionRadius = min(templateWidth, templateHeight) / 2
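            // Half the template size: any remaining local maximum whose top-left corner falls
            // inside this window would correspond to a detection overlapping the match just
            // accepted, so it is suppressed as a duplicate rather than reported separately.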

            for dy in -suppressionRadius...suppressionRadius {
                for dx in -suppressionRadius...suppressionRadius {
                    let ny = y + Int(dy)
                    let nx = x + Int(dx)

                    if ny >= 0 && ny < Int(result.rows()) && nx >= 0 && nx < Int(result.cols()) {
                        let idx = ny * Int(result.cols()) + nx
                        processedMask[idx] = true
                    }
                }
            }
        }

        print("SwiftAutoGUI: Found \(matches.count) matches with confidence >= \(threshold)")
        return matches
    }
}

// MARK: - NSImage to OpenCV Mat conversion