SwiftUI: Detect finger position on Mac trackpad

Always split your task into smaller ones and tackle them one by one. Ask questions the same way, and avoid broad questions that touch on many topics at once.

Goal

  • Trackpad view (gray rectangle)
  • Circles on top of it showing the fingers' physical positions

Sample Image

Step 1 - AppKit

  • SwiftUI doesn't provide all the required information.
  • AppKit's NSTouch does, via normalizedPosition.

The first step is to create a simple AppKitTouchesView that forwards the required touches via a delegate.

import SwiftUI
import AppKit

protocol AppKitTouchesViewDelegate: AnyObject {
    // Provides `.touching` touches only.
    func touchesView(_ view: AppKitTouchesView, didUpdateTouchingTouches touches: Set<NSTouch>)
}

final class AppKitTouchesView: NSView {
    weak var delegate: AppKitTouchesViewDelegate?

    override init(frame frameRect: NSRect) {
        super.init(frame: frameRect)
        // We're interested in `.indirect` touches only.
        allowedTouchTypes = [.indirect]
        // We'd like to receive resting touches as well.
        wantsRestingTouches = true
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    private func handleTouches(with event: NSEvent) {
        // Get all `.touching` touches only (includes `.began`, `.moved` & `.stationary`).
        let touches = event.touches(matching: .touching, in: self)
        // Forward them via the delegate.
        delegate?.touchesView(self, didUpdateTouchingTouches: touches)
    }

    override func touchesBegan(with event: NSEvent) {
        handleTouches(with: event)
    }

    override func touchesEnded(with event: NSEvent) {
        handleTouches(with: event)
    }

    override func touchesMoved(with event: NSEvent) {
        handleTouches(with: event)
    }

    override func touchesCancelled(with event: NSEvent) {
        handleTouches(with: event)
    }
}

Step 2 - Simplified touch structure

The second step is to create a simple custom Touch structure which holds only the required information and is SwiftUI compatible (its y coordinate is not flipped).

struct Touch: Identifiable {
    // `Identifiable` -> `id` is required for `ForEach` (see below).
    let id: Int
    // Normalized touch X position on a device (0.0 - 1.0).
    let normalizedX: CGFloat
    // Normalized touch Y position on a device (0.0 - 1.0).
    let normalizedY: CGFloat

    init(_ nsTouch: NSTouch) {
        self.normalizedX = nsTouch.normalizedPosition.x
        // `NSTouch.normalizedPosition.y` is flipped -> 0.0 means bottom. But the
        // `Touch` structure is meant to be used with SwiftUI -> flip it.
        self.normalizedY = 1.0 - nsTouch.normalizedPosition.y
        self.id = nsTouch.hash
    }
}

Step 3 - Wrap it for SwiftUI

  • NSViewRepresentable documentation
  • Binding documentation

The third step is to create a SwiftUI view wrapping our AppKitTouchesView.

struct TouchesView: NSViewRepresentable {
    // Up-to-date list of touching touches.
    @Binding var touches: [Touch]

    func updateNSView(_ nsView: AppKitTouchesView, context: Context) {
    }

    func makeNSView(context: Context) -> AppKitTouchesView {
        let view = AppKitTouchesView()
        view.delegate = context.coordinator
        return view
    }

    func makeCoordinator() -> Coordinator {
        Coordinator(self)
    }

    class Coordinator: NSObject, AppKitTouchesViewDelegate {
        let parent: TouchesView

        init(_ view: TouchesView) {
            self.parent = view
        }

        func touchesView(_ view: AppKitTouchesView, didUpdateTouchingTouches touches: Set<NSTouch>) {
            parent.touches = touches.map(Touch.init)
        }
    }
}

Step 4 - Make a TrackPadView

The fourth step is to create a TrackPadView which internally uses our TouchesView and draws circles on top of it representing the physical locations of the fingers.

struct TrackPadView: View {
    private let touchViewSize: CGFloat = 20

    @State var touches: [Touch] = []

    var body: some View {
        ZStack {
            GeometryReader { proxy in
                TouchesView(touches: self.$touches)

                ForEach(self.touches) { touch in
                    Circle()
                        .foregroundColor(Color.green)
                        .frame(width: self.touchViewSize, height: self.touchViewSize)
                        .offset(
                            x: proxy.size.width * touch.normalizedX - self.touchViewSize / 2.0,
                            y: proxy.size.height * touch.normalizedY - self.touchViewSize / 2.0
                        )
                }
            }
        }
    }
}

Step 5 - Use it in the main ContentView

The fifth step is to use it in our main view with an aspect ratio close to that of a real trackpad.

struct ContentView: View {
    var body: some View {
        TrackPadView()
            .background(Color.gray)
            .aspectRatio(1.6, contentMode: .fit)
            .padding()
            .frame(maxWidth: .infinity, maxHeight: .infinity)
    }
}

Complete project

  • Open Xcode
  • Create a new project (macOS App & Swift & SwiftUI)
  • Copy & paste ContentView.swift from this gist

Selecting nearest button according to finger position in SwiftUI

Here is a possible approach. Tested with Xcode 11.4 / iOS 13.4.


struct SelectTheKey: View {

    private var sArray = ["e", "s", "p", "b", "k"]
    @State var isShowPopup: Bool = false
    @State private var dragPosition = CGPoint.zero

    @State private var rects = [Int: CGRect]()
    @State private var selected = -1

    var body: some View {
        VStack() {
            Spacer()
            Text("global: \(self.dragPosition.x) : \(self.dragPosition.y)")

            if isShowPopup {
                HStack(spacing: 5) {
                    ForEach(0..<sArray.count) { id in
                        Text("\(self.sArray[id])").fontWeight(.bold).font(.title)
                            .foregroundColor(.white)
                            .padding()
                            .background(id == self.selected ? Color.red : Color.blue)
                            .cornerRadius(5)
                            .background(self.rectReader(for: id))
                    }
                }.offset(x: 40, y: 0)
            }

            Text("A").frame(width: 60, height: 90)
                .foregroundColor(.white)
                .background(Color.purple)
                .shadow(radius: 2)
                .padding(10)
                .gesture(DragGesture(minimumDistance: 2, coordinateSpace: .global)
                    .onChanged { dragGesture in
                        self.dragPosition = dragGesture.location
                        if let (id, _) = self.rects.first(where: { (_, value) -> Bool in
                            value.minX < dragGesture.location.x && value.maxX > dragGesture.location.x
                        }) { self.selected = id }

                        if !self.isShowPopup { self.isShowPopup.toggle() }
                    }
                    .onEnded { finalValue in
                        if self.isShowPopup { self.isShowPopup.toggle() }
                    })
        }
    }

    func rectReader(for key: Int) -> some View {
        return GeometryReader { gp -> AnyView in
            let rect = gp.frame(in: .global)
            DispatchQueue.main.async {
                self.rects[key] = rect
            }
            return AnyView(Rectangle().fill(Color.clear))
        }
    }
}

Determine finger up on trackpad in MBP

The methods you mentioned give you an NSEvent. Using this you can get the actual touches with -touchesMatchingPhase:inView:. After that you can e.g. loop through them or just get the count (which you may compare to a previous count you have saved in an instance variable).

NSSet *touches = [event touchesMatchingPhase:NSTouchPhaseAny inView:self];
NSInteger touchCount = [touches count];
for (NSTouch *touch in touches) {
    // do something, e.g. check if they are still moving or not
}
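
In Swift, a minimal sketch of that count-comparison idea might look like the following (the FingerLiftDetectingView class name and the previousTouchCount property are assumptions made for illustration, not part of the original answer):

import AppKit

final class FingerLiftDetectingView: NSView {
    // Hypothetical property remembering the touch count from the previous event.
    private var previousTouchCount = 0

    override init(frame frameRect: NSRect) {
        super.init(frame: frameRect)
        // Trackpad touches are `.indirect`; the view must opt in to receive them.
        allowedTouchTypes = [.indirect]
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func touchesBegan(with event: NSEvent) {
        previousTouchCount = event.touches(matching: .touching, in: self).count
    }

    override func touchesEnded(with event: NSEvent) {
        // All touches still in contact with the trackpad.
        let touching = event.touches(matching: .touching, in: self)
        if touching.count < previousTouchCount {
            // At least one finger was lifted since the last event.
            print("Finger up: \(previousTouchCount) -> \(touching.count)")
        }
        previousTouchCount = touching.count
    }
}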

How do I detect CMD + Click of a SwiftUI view?

Use the following:

 .gesture(TapGesture().modifiers(.command).onEnded{print("Here we are!") })
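
For instance, a minimal self-contained view (the view and state names are illustrative, not from the original answer) could look like this:

import SwiftUI

struct CommandClickExample: View {
    @State private var message = "⌘-click me"

    var body: some View {
        Text(message)
            .padding()
            // Fires only when the Command key is held down during the click.
            .gesture(
                TapGesture()
                    .modifiers(.command)
                    .onEnded { message = "⌘-clicked" }
            )
    }
}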

Detecting absolute finger positions on trackpad

You can do this with the mtdev library. Start off with evtest to figure out what input device and information from the input device you need, then use the mtdev library to access it from C.

Another great library for what I was originally trying to do (i.e., create a zone on the trackpad which would bring up a right-click menu) is the mtrack library, which saves some work. It also has examples of accessing information from mtdev if you want to go hunting through the code.

Get Coordinates of Moving Pointer in SwiftUI (iPadOS)

The WWDC video covers this very topic:

Handle trackpad and mouse input

Add SupportsIndirectInputEvents to your Info.plist

From the video:

It is required in order to get the new touch type indirect pointer and EventType.transform. Existing projects do not have this key set and will need to add it. Starting with iOS 14 and macOS Big Sur SDKs, new UIKit and SwiftUI projects will have this value set to "true".

In addition, you will use UIPointerInteraction. This tutorial shows you step by step, including custom cursors:

https://pspdfkit.com/blog/2020/supporting-pointer-interactions/
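
As a rough sketch of the UIPointerInteraction part (the PointerTrackingViewController name is an assumption for illustration), the delegate's region callback reports the pointer's coordinates as it moves over the view:

import UIKit

class PointerTrackingViewController: UIViewController, UIPointerInteractionDelegate {

    override func viewDidLoad() {
        super.viewDidLoad()
        // Attach a pointer interaction; the delegate is consulted as the pointer moves.
        view.addInteraction(UIPointerInteraction(delegate: self))
    }

    func pointerInteraction(_ interaction: UIPointerInteraction,
                            regionFor request: UIPointerRegionRequest,
                            defaultRegion: UIPointerRegion) -> UIPointerRegion? {
        // `request.location` is the pointer position in the interaction view's coordinates.
        print("Pointer at \(request.location)")
        return defaultRegion
    }
}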

Recognize UISwipeGestureRecognizer with two-finger scroll on trackpad

While I didn't find a way to do this with UISwipeGestureRecognizer, I solved it by adding a UIPanGestureRecognizer instead. There are a few things to be aware of.

You need to allow it to be triggered with continuous (trackpad) and discrete (mouse scroll wheel) scrolling:

panGestureRecognizer.allowedScrollTypesMask = [.continuous, .discrete]
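
For context, the recognizer wiring might look roughly like this (the ScrollCloseViewController and handlePan(_:) names are placeholders, not from the original answer); the delegate methods below then do the filtering:

import UIKit

class ScrollCloseViewController: UIViewController, UIGestureRecognizerDelegate {
    private var panGestureRecognizer: UIPanGestureRecognizer!

    override func viewDidLoad() {
        super.viewDidLoad()
        panGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(handlePan(_:)))
        // Let trackpad (continuous) and scroll-wheel (discrete) scrolling drive the pan.
        panGestureRecognizer.allowedScrollTypesMask = [.continuous, .discrete]
        panGestureRecognizer.delegate = self
        view.addGestureRecognizer(panGestureRecognizer)
    }

    @objc private func handlePan(_ recognizer: UIPanGestureRecognizer) {
        // React to the scroll here, e.g. dismiss the screen once scrolled down far enough.
    }
}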

To ensure it's only triggered when scrolling down for example, you can implement this delegate method to check which direction they scrolled:

func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
    if gestureRecognizer == panGestureRecognizer {
        // Only support scrolling down to close.
        if let view = panGestureRecognizer.view {
            let velocity = panGestureRecognizer.velocity(in: view)
            return velocity.y > velocity.x
        }
    }
    return true
}

One other gotcha: the pan will also be triggered when swiping with a finger on the display. You can prevent the pan gesture from being recognized for direct touches, so it's only triggered via the trackpad/mouse, by implementing this other delegate method:

func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldReceive touch: UITouch) -> Bool {
    if gestureRecognizer == panGestureRecognizer {
        return false // Disallow direct touches; only allow trackpad/mouse scroll.
    }
    return true
}

