first commit
This commit is contained in:
parent
b286e49b3e
commit
1e23856b0a
115
.gitignore
vendored
Normal file
115
.gitignore
vendored
Normal file
@ -0,0 +1,115 @@
|
||||
# Xcode
|
||||
#
|
||||
# gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
|
||||
|
||||
## User settings
|
||||
xcuserdata/
|
||||
|
||||
## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9)
|
||||
*.xcscmblueprint
|
||||
*.xccheckout
|
||||
|
||||
## compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4)
|
||||
build/
|
||||
DerivedData/
|
||||
*.moved-aside
|
||||
*.pbxuser
|
||||
!default.pbxuser
|
||||
*.mode1v3
|
||||
!default.mode1v3
|
||||
*.mode2v3
|
||||
!default.mode2v3
|
||||
*.perspectivev3
|
||||
!default.perspectivev3
|
||||
|
||||
## Obj-C/Swift specific
|
||||
*.hmap
|
||||
|
||||
## App packaging
|
||||
*.ipa
|
||||
*.dSYM.zip
|
||||
*.dSYM
|
||||
|
||||
## Playgrounds
|
||||
timeline.xctimeline
|
||||
playground.xcworkspace
|
||||
|
||||
# Swift Package Manager
|
||||
#
|
||||
# Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
|
||||
# Packages/
|
||||
# Package.pins
|
||||
# Package.resolved
|
||||
# *.xcodeproj
|
||||
#
|
||||
# Xcode automatically generates this directory with a .xcworkspacedata file and xcuserdata dir, do not check in
|
||||
.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
|
||||
.swiftpm/xcode
|
||||
|
||||
# CocoaPods
|
||||
#
|
||||
# We recommend against adding the Pods directory to your .gitignore. However
|
||||
# you should judge for yourself, the pros and cons are mentioned at:
|
||||
# https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
|
||||
#
|
||||
# Pods/
|
||||
#
|
||||
# Add this line if you want to avoid checking in source code from the Xcode workspace
|
||||
# *.xcworkspace
|
||||
|
||||
# Carthage
|
||||
#
|
||||
# Add this line if you want to avoid checking in source code from Carthage dependencies.
|
||||
# Carthage/Checkouts
|
||||
|
||||
Carthage/Build/
|
||||
|
||||
# Accio dependency management
|
||||
Dependencies/
|
||||
.accio/
|
||||
|
||||
# fastlane
|
||||
#
|
||||
# It is recommended to not store the screenshots in the git repo.
|
||||
# Instead, use fastlane to re-generate the screenshots whenever they are needed.
|
||||
# For more information about the recommended setup visit:
|
||||
# https://docs.fastlane.tools/best-practices/source-control/
|
||||
|
||||
fastlane/report.xml
|
||||
fastlane/Preview.html
|
||||
fastlane/screenshots/**/*.png
|
||||
fastlane/test_output
|
||||
|
||||
# Code Injection
|
||||
#
|
||||
# After new code Injection tools there's a generated folder /iOSInjectionProject
|
||||
# https://github.com/johnno1962/injectionforxcode
|
||||
|
||||
iOSInjectionProject/
|
||||
|
||||
# macOS specific
|
||||
.DS_Store
|
||||
.AppleDouble
|
||||
.LSOverride
|
||||
|
||||
# Icon must end with two \r
|
||||
Icon
|
||||
|
||||
# Thumbnails
|
||||
._*
|
||||
|
||||
# Files that might appear in the root of a volume
|
||||
.DocumentRevisions-V100
|
||||
.fseventsd
|
||||
.Spotlight-V100
|
||||
.TemporaryItems
|
||||
.Trashes
|
||||
.VolumeIcon.icns
|
||||
.com.apple.timemachine.donotpresent
|
||||
|
||||
# Directories potentially created on remote AFP share
|
||||
.AppleDB
|
||||
.AppleDesktop
|
||||
Network Trash Folder
|
||||
Temporary Items
|
||||
.apdisk
|
||||
@ -35,16 +35,6 @@
|
||||
path = "optc-tracker";
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
0C81B91E2E56A6CD004CD96D /* optc-trackerTests */ = {
|
||||
isa = PBXFileSystemSynchronizedRootGroup;
|
||||
path = "optc-trackerTests";
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
0C81B9282E56A6CD004CD96D /* optc-trackerUITests */ = {
|
||||
isa = PBXFileSystemSynchronizedRootGroup;
|
||||
path = "optc-trackerUITests";
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
/* End PBXFileSystemSynchronizedRootGroup section */
|
||||
|
||||
/* Begin PBXFrameworksBuildPhase section */
|
||||
@ -76,8 +66,6 @@
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
0C81B9102E56A6CC004CD96D /* optc-tracker */,
|
||||
0C81B91E2E56A6CD004CD96D /* optc-trackerTests */,
|
||||
0C81B9282E56A6CD004CD96D /* optc-trackerUITests */,
|
||||
0C81B90F2E56A6CC004CD96D /* Products */,
|
||||
);
|
||||
sourceTree = "<group>";
|
||||
@ -130,9 +118,6 @@
|
||||
dependencies = (
|
||||
0C81B91D2E56A6CD004CD96D /* PBXTargetDependency */,
|
||||
);
|
||||
fileSystemSynchronizedGroups = (
|
||||
0C81B91E2E56A6CD004CD96D /* optc-trackerTests */,
|
||||
);
|
||||
name = "optc-trackerTests";
|
||||
packageProductDependencies = (
|
||||
);
|
||||
@ -153,9 +138,6 @@
|
||||
dependencies = (
|
||||
0C81B9272E56A6CD004CD96D /* PBXTargetDependency */,
|
||||
);
|
||||
fileSystemSynchronizedGroups = (
|
||||
0C81B9282E56A6CD004CD96D /* optc-trackerUITests */,
|
||||
);
|
||||
name = "optc-trackerUITests";
|
||||
packageProductDependencies = (
|
||||
);
|
||||
@ -175,6 +157,7 @@
|
||||
TargetAttributes = {
|
||||
0C81B90D2E56A6CC004CD96D = {
|
||||
CreatedOnToolsVersion = 16.4;
|
||||
LastSwiftMigration = 1640;
|
||||
};
|
||||
0C81B91A2E56A6CD004CD96D = {
|
||||
CreatedOnToolsVersion = 16.4;
|
||||
@ -393,10 +376,13 @@
|
||||
buildSettings = {
|
||||
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
|
||||
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
|
||||
CLANG_ENABLE_MODULES = YES;
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 1;
|
||||
DEVELOPMENT_TEAM = 4AF7VXQ923;
|
||||
ENABLE_PREVIEWS = YES;
|
||||
GENERATE_INFOPLIST_FILE = YES;
|
||||
INFOPLIST_KEY_NSCameraUsageDescription = "需要获取摄像头权限以进行实验";
|
||||
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
|
||||
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
|
||||
INFOPLIST_KEY_UILaunchScreen_Generation = YES;
|
||||
@ -410,6 +396,8 @@
|
||||
PRODUCT_BUNDLE_IDENTIFIER = "feietech.optc-tracker";
|
||||
PRODUCT_NAME = "$(TARGET_NAME)";
|
||||
SWIFT_EMIT_LOC_STRINGS = YES;
|
||||
SWIFT_OBJC_BRIDGING_HEADER = "";
|
||||
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
|
||||
SWIFT_VERSION = 5.0;
|
||||
TARGETED_DEVICE_FAMILY = "1,2";
|
||||
};
|
||||
@ -420,10 +408,13 @@
|
||||
buildSettings = {
|
||||
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
|
||||
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
|
||||
CLANG_ENABLE_MODULES = YES;
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 1;
|
||||
DEVELOPMENT_TEAM = 4AF7VXQ923;
|
||||
ENABLE_PREVIEWS = YES;
|
||||
GENERATE_INFOPLIST_FILE = YES;
|
||||
INFOPLIST_KEY_NSCameraUsageDescription = "需要获取摄像头权限以进行实验";
|
||||
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
|
||||
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
|
||||
INFOPLIST_KEY_UILaunchScreen_Generation = YES;
|
||||
@ -437,6 +428,7 @@
|
||||
PRODUCT_BUNDLE_IDENTIFIER = "feietech.optc-tracker";
|
||||
PRODUCT_NAME = "$(TARGET_NAME)";
|
||||
SWIFT_EMIT_LOC_STRINGS = YES;
|
||||
SWIFT_OBJC_BRIDGING_HEADER = "";
|
||||
SWIFT_VERSION = 5.0;
|
||||
TARGETED_DEVICE_FAMILY = "1,2";
|
||||
};
|
||||
|
||||
470
optc-tracker/CameraManager.swift
Normal file
470
optc-tracker/CameraManager.swift
Normal file
@ -0,0 +1,470 @@
|
||||
//
|
||||
// optc-tracker
|
||||
//
|
||||
// Created by feie9454 on 2025/8/21.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import AVFoundation
|
||||
import UIKit
|
||||
import Combine
|
||||
import CoreImage
|
||||
|
||||
// GreenDetection 统一定义在 CrossDetection.swift 中
|
||||
|
||||
// MARK: - Debug structures
/// One named intermediate image produced by the detection pipeline,
/// collected into `CameraManager.debugStages` for display in debug views.
struct DebugStage: Identifiable {
    let id = UUID()
    /// Stage label, e.g. "0 Scaled", "1 Edges".
    let name: String
    let image: UIImage
    /// Optional extra text (pixel counts, thresholds, …).
    let info: String?
}
|
||||
|
||||
// MARK: - Connected-component statistics
/// Accumulated statistics for one connected component of a binary mask:
/// bounding box, centroid sums (divide by `count` for the centroid),
/// pixel count, and whether the component touches the image border.
private struct ComponentStats {
    var minX: Int, minY: Int, maxX: Int, maxY: Int
    var sumX: Int, sumY: Int, count: Int
    var touchesBorder: Bool
}
|
||||
|
||||
/// Owns the AVFoundation capture session: back-camera selection, focus
/// locking, zoom, and frame delivery into the green-region detection
/// pipeline. All `@Published` properties are mutated on the main queue so
/// SwiftUI views can observe them directly.
final class CameraManager: NSObject, ObservableObject {
    let session = AVCaptureSession()

    // Observable state for the UI.
    @Published var currentLensPosition: Float = 0.80
    @Published var currentZoomFactor: CGFloat = 1.0
    @Published var greenRegion: GreenDetection? = nil
    @Published var debugImage: UIImage? = nil
    @Published var debugStages: [DebugStage] = []
    // When true, intermediate pipeline images are collected into debugStages.
    var debugEnabled: Bool = true

    // Lens position used for the default focus lock. The name suggests
    // ~0.5 m; the actual physical distance depends on the device —
    // NOTE(review): unverified from this file.
    let targetLensPositionForHalfMeter: Float = 0.80

    private var device: AVCaptureDevice?
    // All session (re)configuration happens on this serial queue.
    private let sessionQueue = DispatchQueue(label: "camera.session.queue")
    private let videoOutput = AVCaptureVideoDataOutput()
    // Serial queue on which sample-buffer callbacks arrive.
    private let videoOutputQueue = DispatchQueue(label: "camera.video.output")

    // Hardware-accelerated Core Image context shared by the pipeline.
    private let ciContext = CIContext(options: [.useSoftwareRenderer: false])
    // Simple rate limiter: at most one detection per detectionInterval.
    private var lastDetectionTime: CFTimeInterval = 0
    private let detectionInterval: CFTimeInterval = 0.10

    override init() {
        super.init()
        checkPermissionAndConfigure()
    }

    /// Configures the session immediately if camera access is already
    /// granted, requests permission first when undetermined, and only
    /// logs when access is denied/restricted.
    private func checkPermissionAndConfigure() {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized:
            configureSession()
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .video) { [weak self] granted in
                guard let self = self else { return }
                if granted { self.configureSession() }
            }
        default:
            print("Camera permission denied or restricted.")
        }
    }

    /// Builds the capture graph (back camera input → 32BGRA video output),
    /// then starts the session and locks focus to the default lens position.
    private func configureSession() {
        sessionQueue.async { [weak self] in
            guard let self = self else { return }
            self.session.beginConfiguration()
            self.session.sessionPreset = .high

            // Prefer the wide-angle back camera; fall back to ultra-wide.
            var selected: AVCaptureDevice?
            if let wide = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) {
                selected = wide
            } else if let ultra = AVCaptureDevice.default(.builtInUltraWideCamera, for: .video, position: .back) {
                selected = ultra
            }
            guard let device = selected else {
                print("No back camera available.")
                self.session.commitConfiguration()
                return
            }
            self.device = device

            do {
                let input = try AVCaptureDeviceInput(device: device)
                if self.session.canAddInput(input) { self.session.addInput(input) }
            } catch {
                print("Failed to create device input: \(error)")
            }

            // 32BGRA frames simplify CPU-side pixel access in the detector.
            self.videoOutput.alwaysDiscardsLateVideoFrames = true
            self.videoOutput.setSampleBufferDelegate(self, queue: self.videoOutputQueue)
            self.videoOutput.videoSettings = [
                kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
            ]
            if self.session.canAddOutput(self.videoOutput) { self.session.addOutput(self.videoOutput) }
            if let connection = self.videoOutput.connection(with: .video) {
                connection.videoOrientation = .portrait
            }

            self.session.commitConfiguration()
            self.start()
            self.lockFocus(to: self.targetLensPositionForHalfMeter)
        }
    }

    /// Starts the session on the session queue (no-op if already running).
    func start() {
        sessionQueue.async { [weak self] in
            guard let self = self, !self.session.isRunning else { return }
            self.session.startRunning()
        }
    }

    /// Stops the session on the session queue (no-op if not running).
    func stop() {
        sessionQueue.async { [weak self] in
            guard let self = self, self.session.isRunning else { return }
            self.session.stopRunning()
        }
    }

    /// Locks the lens at `lensPos` (clamped to 0...1) when the device
    /// supports custom lens positions; otherwise falls back to continuous
    /// autofocus. Publishes the applied position on the main queue.
    func lockFocus(to lensPos: Float) {
        sessionQueue.async { [weak self] in
            guard let self = self, let device = self.device else { return }
            do {
                try device.lockForConfiguration()
                let clamped = max(0.0, min(lensPos, 1.0))

                if device.isFocusModeSupported(.locked),
                   device.isLockingFocusWithCustomLensPositionSupported {

                    if device.isAutoFocusRangeRestrictionSupported {
                        device.autoFocusRangeRestriction = .near
                    }

                    // unlockForConfiguration is deliberately deferred to the
                    // completion handler so the lock is held until the lens
                    // has settled at the requested position.
                    device.setFocusModeLocked(lensPosition: clamped) { [weak self] _ in
                        device.unlockForConfiguration()
                        DispatchQueue.main.async {
                            self?.currentLensPosition = clamped
                        }
                    }
                } else {
                    if device.isFocusModeSupported(.continuousAutoFocus) {
                        device.focusMode = .continuousAutoFocus
                    }
                    device.unlockForConfiguration()
                }
            } catch {
                print("lockForConfiguration failed: \(error)")
            }
        }
    }

    /// Sets the video zoom factor, clamped to the device's supported range,
    /// and publishes the applied value on the main queue.
    func setZoomTo(_ factor: CGFloat) {
        sessionQueue.async { [weak self] in
            guard let self = self, let device = self.device else { return }
            do {
                try device.lockForConfiguration()
                let f = min(max(factor, device.minAvailableVideoZoomFactor), device.maxAvailableVideoZoomFactor)
                device.videoZoomFactor = f
                device.unlockForConfiguration()
                DispatchQueue.main.async { self.currentZoomFactor = f }
            } catch {
                print("Failed to set zoom: \(error)")
            }
        }
    }

    // Current device zoom bounds (1.0 before the device is configured).
    var maxZoomFactor: CGFloat { device?.maxAvailableVideoZoomFactor ?? 1.0 }
    var minZoomFactor: CGFloat { device?.minAvailableVideoZoomFactor ?? 1.0 }
}
|
||||
|
||||
// MARK: - Frame callback
extension CameraManager: AVCaptureVideoDataOutputSampleBufferDelegate {
    /// Called on `videoOutputQueue` for every delivered frame. Throttles
    /// processing to one detection per `detectionInterval`, then forwards
    /// the frame to the green-edge detector.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        let now = CACurrentMediaTime()
        guard now - lastDetectionTime >= detectionInterval else { return }
        lastDetectionTime = now

        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        detectEdgesKeepingOnlyGreen(in: ciImage)
    }
}
|
||||
|
||||
// MARK: - Core: keep only strongly-green edges
private extension CameraManager {
    /// Full per-frame detection pipeline:
    /// downscale → CIEdges → percentile threshold → green gate →
    /// largest non-border connected component → normalized GreenDetection.
    /// Results (and optional debug stages) are published on the main queue.
    func detectEdgesKeepingOnlyGreen(in image: CIImage) {
        // Downscale so the longer side is <= 640 px: balances speed and stability.
        let targetMax: CGFloat = 640
        let scale = min(1.0, targetMax / max(image.extent.width, image.extent.height))
        let scaled = image.transformed(by: CGAffineTransform(scaleX: scale, y: scale))

        var stages: [DebugStage] = []
        if debugEnabled, let cg0 = ciContext.createCGImage(scaled, from: scaled.extent) {
            stages.append(DebugStage(name: "0 Scaled", image: UIImage(cgImage: cg0), info: nil))
        }

        // (1) Edge map (grayscale).
        let edgesCI = scaled.applyingFilter("CIEdges", parameters: [kCIInputIntensityKey: 1.2])
        guard let cgEdges = ciContext.createCGImage(edgesCI, from: edgesCI.extent) else { return }
        if debugEnabled { stages.append(DebugStage(name: "1 Edges", image: UIImage(cgImage: cgEdges), info: nil)) }

        // (2) Original color image at the same scale.
        guard let cgColor = ciContext.createCGImage(scaled, from: scaled.extent) else { return }

        // Pull both bitmaps to the CPU as tightly packed 8-bit RGBA.
        let w = cgEdges.width, h = cgEdges.height
        let rowBytes = w * 4

        var rawEdges = [UInt8](repeating: 0, count: rowBytes * h)
        if let ctx = CGContext(data: &rawEdges, width: w, height: h, bitsPerComponent: 8, bytesPerRow: rowBytes,
                               space: CGColorSpaceCreateDeviceRGB(),
                               bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue) {
            ctx.draw(cgEdges, in: CGRect(x: 0, y: 0, width: w, height: h))
        } else { return }

        var rawRGB = [UInt8](repeating: 0, count: rowBytes * h)
        if let ctx2 = CGContext(data: &rawRGB, width: w, height: h, bitsPerComponent: 8, bytesPerRow: rowBytes,
                                space: CGColorSpaceCreateDeviceRGB(),
                                bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue) {
            ctx2.draw(cgColor, in: CGRect(x: 0, y: 0, width: w, height: h))
        } else { return }

        // (3) Edge threshold: 90th percentile of edge intensities.
        let thresh = percentileThresholdFromGray(rawEdges: rawEdges, w: w, h: h, rowBytes: rowBytes, percentile: 0.90)

        // (4) Build the "strong green edge" mask (0/1 per pixel).
        var finalMask = [UInt8](repeating: 0, count: w * h) // 0/1
        var strongCount = 0

        var idxE = 0
        var idxC = 0
        for y in 0..<h {
            var o = y * w
            for _ in 0..<w {
                let gEdge = rawEdges[idxE + 1] // CIEdges emits R=G=B; read G
                if gEdge >= thresh {
                    // NOTE(review): bytes are read as B,G,R although the
                    // context above is premultipliedLast (R,G,B,A) — this
                    // swaps R and B. The green gate happens to be symmetric
                    // in R/B (guard and hue window mirror around 120°), so
                    // behavior is unaffected, but confirm the intent.
                    let b = rawRGB[idxC + 0]
                    let g = rawRGB[idxC + 1]
                    let r = rawRGB[idxC + 2]
                    if isStrongGreen(r: r, g: g, b: b) {
                        finalMask[o] = 1
                        strongCount += 1
                    }
                }
                o += 1
                idxE += 4
                idxC += 4
            }
        }

        // (5) Optional: dilate the mask once if green edges are too sparse.
        // Intentionally left empty; add a 3x3 dilation here if needed.

        // (6) Connected components: ignore border-touching, keep largest.
        let minAreaRatio: CGFloat = 0.0004
        let minAreaPixels = max(20, Int(CGFloat(w*h) * minAreaRatio))
        guard let comp = largestComponent(in: &finalMask, w: w, h: h,
                                          ignoreBorderTouching: true,
                                          borderMargin: 0,
                                          minArea: minAreaPixels) else {
            // No acceptable component: clear the published detection.
            DispatchQueue.main.async { [weak self] in
                self?.greenRegion = nil
                if self?.debugEnabled == true { self?.debugStages = stages }
            }
            return
        }

        // (7) Map back to full-image normalized (0...1) coordinates.
        let inv = 1 / scale
        let fullW = image.extent.width
        let fullH = image.extent.height

        // Component centroid in scaled pixels.
        let cx = CGFloat(comp.sumX) / CGFloat(comp.count)
        let cy = CGFloat(comp.sumY) / CGFloat(comp.count)

        let center = CGPoint(x: (cx * inv) / fullW, y: (cy * inv) / fullH)
        let box = CGRect(x: (CGFloat(comp.minX) * inv) / fullW,
                         y: (CGFloat(comp.minY) * inv) / fullH,
                         width: (CGFloat(comp.maxX - comp.minX + 1) * inv) / fullW,
                         height: (CGFloat(comp.maxY - comp.minY + 1) * inv) / fullH)
        let areaRatio = CGFloat(comp.count) * inv * inv / (fullW * fullH)
        let detection = GreenDetection(boundingBox: box, center: center,
                                       areaRatio: areaRatio,
                                       offsetX: center.x - 0.5, offsetY: center.y - 0.5)

        // (8) Publish; in debug mode also emit the mask and component images.
        DispatchQueue.main.async { [weak self] in
            self?.greenRegion = detection
            if let self = self, self.debugEnabled {
                if let greenEdgeImg = binaryMaskToImage(mask: finalMask, w: w, h: h, box: nil) {
                    stages.append(DebugStage(name: "2 Green-only Edges", image: greenEdgeImg,
                                             info: "strong=\(strongCount) th=\(thresh)"))
                }
                if let vis = binaryMaskToImage(mask: finalMask, w: w, h: h,
                                               box: (comp.minX, comp.minY, comp.maxX, comp.maxY)) {
                    stages.append(DebugStage(name: "3 Largest CC", image: vis,
                                             info: "pix=\(comp.count)"))
                    self.debugImage = vis
                } else { self.debugImage = nil }
                self.debugStages = stages
            } else { self?.debugImage = nil }
        }
    }
}
|
||||
|
||||
// MARK: - 工具:绿色判定 / 连通域 / 阈值 / 可视化
|
||||
|
||||
/// Decides whether an RGB pixel is "strongly green": a fast integer
/// pre-filter (G must clearly dominate R and B) followed by an HSV gate
/// restricting hue to roughly [70°, 170°] with moderate saturation and
/// brightness.
private func isStrongGreen(r: UInt8, g: UInt8, b: UInt8) -> Bool {
    let red = Int(r), green = Int(g), blue = Int(b)
    // Quick reject: green must lead both channels by >= 20 and not be too dim.
    if green < 90 || green < red + 20 || green < blue + 20 { return false }

    // RGB -> HSV (wide green band).
    let rf = Float(r) / 255.0
    let gf = Float(g) / 255.0
    let bf = Float(b) / 255.0
    let hi = max(rf, gf, bf)
    let lo = min(rf, gf, bf)
    let chroma = hi - lo
    if hi == 0 { return false }

    let saturation = chroma / hi
    let value = hi
    var hue: Float = 0
    if chroma > 0 {
        // Branch order matters for ties: green wins, then red, then blue.
        if hi == gf {
            hue = 60 * ((bf - rf) / chroma) + 120
        } else if hi == rf {
            hue = 60 * ((gf - bf) / chroma).truncatingRemainder(dividingBy: 6)
        } else {
            hue = 60 * ((rf - gf) / chroma) + 240
        }
        if hue < 0 { hue += 360 }
    }
    // Green band ~[70°, 170°] with moderate saturation and brightness.
    return hue >= 70 && hue <= 170 && saturation >= 0.35 && value >= 0.25
}
|
||||
|
||||
/// Returns the gray level at which the cumulative histogram of the edge
/// image's green channel first reaches the given percentile (CIEdges
/// emits equal RGB, so G stands in for gray). Falls back to 200 if the
/// target is never reached.
private func percentileThresholdFromGray(rawEdges: [UInt8], w: Int, h: Int, rowBytes: Int, percentile: Double) -> UInt8 {
    // Build a 256-bin histogram of the G channel.
    var histogram = [Int](repeating: 0, count: 256)
    for row in 0..<h {
        var offset = row * rowBytes
        for _ in 0..<w {
            histogram[Int(rawEdges[offset + 1])] += 1
            offset += 4
        }
    }
    // Walk the cumulative distribution up to the percentile target.
    let target = Int(Double(w * h) * percentile)
    var cumulative = 0
    for level in 0..<256 {
        cumulative += histogram[level]
        if cumulative >= target { return UInt8(level) }
    }
    return 200
}
|
||||
|
||||
/// Finds the largest 8-connected component of nonzero pixels in `mask`
/// using an explicit-stack flood fill.
/// - Parameters:
///   - mask: row-major `w*h` buffer of 0/1 values. It is read, not
///     modified; `inout` only avoids a copy at the call site.
///   - ignoreBorderTouching: when true, components that touch the image
///     border (within `borderMargin`) are discarded.
///   - borderMargin: how close to the edge counts as "touching".
///   - minArea: components with fewer pixels than this are discarded.
/// - Returns: stats for the largest qualifying component, or nil if none.
private func largestComponent(in mask: inout [UInt8],
                              w: Int, h: Int,
                              ignoreBorderTouching: Bool,
                              borderMargin: Int,
                              minArea: Int) -> ComponentStats? {
    let total = w * h
    if mask.isEmpty || total == 0 { return nil }

    var best: ComponentStats? = nil
    var visited = [UInt8](repeating: 0, count: total)
    var stack = [Int]()
    stack.reserveCapacity(4096)

    @inline(__always)
    func push(_ i: Int) { stack.append(i) }
    // Push a neighbor only if it is foreground and not yet visited; marking
    // visited at push time prevents duplicate stack entries.
    @inline(__always)
    func tryPush(_ i: Int) { if visited[i] == 0 && mask[i] != 0 { visited[i] = 1; push(i) } }

    for y0 in 0..<h {
        for x0 in 0..<w {
            let i0 = y0 * w + x0
            if visited[i0] != 0 || mask[i0] == 0 { continue }

            // Start a new component seeded at (x0, y0); stats accumulate as
            // pixels are popped, so sums/count start at zero.
            var comp = ComponentStats(minX: x0, minY: y0, maxX: x0, maxY: y0,
                                      sumX: 0, sumY: 0, count: 0, touchesBorder: false)
            stack.removeAll(keepingCapacity: true)
            visited[i0] = 1
            push(i0)

            while !stack.isEmpty {
                let i = stack.removeLast()
                // Recover (x, y) from the linear index.
                let y = i / w
                let x = i - y * w

                comp.count += 1
                comp.sumX += x
                comp.sumY += y
                if x < comp.minX { comp.minX = x }
                if x > comp.maxX { comp.maxX = x }
                if y < comp.minY { comp.minY = y }
                if y > comp.maxY { comp.maxY = y }
                // Border test honors the margin on all four sides.
                if x <= borderMargin || y <= borderMargin || x >= w - 1 - borderMargin || y >= h - 1 - borderMargin {
                    comp.touchesBorder = true
                }

                // Expand to the 8-neighborhood (bounds-checked per direction).
                if x > 0 { tryPush(i - 1) }
                if x + 1 < w { tryPush(i + 1) }
                if y > 0 { tryPush(i - w) }
                if y + 1 < h { tryPush(i + w) }
                if x > 0 && y > 0 { tryPush(i - w - 1) }
                if x + 1 < w && y > 0 { tryPush(i - w + 1) }
                if x > 0 && y + 1 < h { tryPush(i + w - 1) }
                if x + 1 < w && y + 1 < h { tryPush(i + w + 1) }
            }

            // Filter by area and border policy, then keep the largest.
            if comp.count < minArea { continue }
            if ignoreBorderTouching && comp.touchesBorder { continue }
            if let b = best {
                if comp.count > b.count { best = comp }
            } else { best = comp }
        }
    }
    return best
}
|
||||
|
||||
/// Renders a 0/1 mask as an opaque black-and-white UIImage; when `box`
/// (minX, minY, maxX, maxY) is given, its bounding rectangle is drawn in
/// red over the mask.
private func binaryMaskToImage(mask: [UInt8], w: Int, h: Int, box: (Int, Int, Int, Int)?) -> UIImage? {
    var pixels = [UInt8](repeating: 0, count: w * h * 4)
    // Foreground pixels become white, background stays black; alpha is
    // fully opaque either way.
    for i in 0..<(w * h) {
        let o = i * 4
        if mask[i] != 0 {
            pixels[o] = 255
            pixels[o + 1] = 255
            pixels[o + 2] = 255
        }
        pixels[o + 3] = 255
    }
    if let (minX, minY, maxX, maxY) = box {
        // Paint one pixel solid red.
        func paintRed(_ offset: Int) {
            pixels[offset] = 255
            pixels[offset + 1] = 0
            pixels[offset + 2] = 0
            pixels[offset + 3] = 255
        }
        // Top and bottom edges of the box.
        for x in minX...maxX {
            paintRed((minY * w + x) * 4)
            paintRed((maxY * w + x) * 4)
        }
        // Left and right edges of the box.
        for y in minY...maxY {
            paintRed((y * w + minX) * 4)
            paintRed((y * w + maxX) * 4)
        }
    }
    return rgbaToUIImage(&pixels, w, h)
}
|
||||
|
||||
/// Wraps a tightly packed RGBA8 buffer in a CGContext and snapshots it as
/// a UIImage. Returns nil when the context cannot be created.
private func rgbaToUIImage(_ buf: inout [UInt8], _ w: Int, _ h: Int) -> UIImage? {
    buf.withUnsafeMutableBytes { raw -> UIImage? in
        let context = CGContext(data: raw.baseAddress,
                                width: w,
                                height: h,
                                bitsPerComponent: 8,
                                bytesPerRow: w * 4,
                                space: CGColorSpaceCreateDeviceRGB(),
                                bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)
        // makeImage() copies the pixels, so the UIImage stays valid after
        // the pointer's scope ends.
        return context?.makeImage().map(UIImage.init(cgImage:))
    }
}
|
||||
@ -8,17 +8,151 @@
|
||||
import SwiftUI
|
||||
|
||||
struct ContentView: View {
|
||||
@EnvironmentObject var camera: CameraManager
|
||||
@EnvironmentObject var motion: MotionManager
|
||||
|
||||
@State private var angleRecords: [AngleRecord] = []
|
||||
|
||||
var body: some View {
|
||||
VStack {
|
||||
Image(systemName: "globe")
|
||||
.imageScale(.large)
|
||||
.foregroundStyle(.tint)
|
||||
Text("Hello, world!")
|
||||
NavigationStack {
|
||||
ZStack(alignment: .bottom) {
|
||||
CameraPreview()
|
||||
.ignoresSafeArea()
|
||||
|
||||
// 检测结果叠加层
|
||||
VStack(alignment: .leading, spacing: 10) {
|
||||
// 顶部角度 & 记录区域
|
||||
VStack(alignment: .leading, spacing: 6) {
|
||||
HStack(alignment: .firstTextBaseline, spacing: 8) {
|
||||
Text(String(format: "Yaw %.1f° Pitch %.1f° Roll %.1f°", motion.yaw, motion.pitch, motion.roll))
|
||||
.font(.system(size: 13, weight: .semibold, design: .monospaced))
|
||||
.fixedSize(horizontal: false, vertical: true)
|
||||
Spacer()
|
||||
Button("记录") {
|
||||
func r2(_ v: Double) -> Double { (v * 100).rounded() / 100 }
|
||||
let rec = AngleRecord(timestamp: Date(), yaw: r2(motion.yaw), pitch: r2(motion.pitch), roll: r2(motion.roll))
|
||||
withAnimation(.spring(response: 0.3, dampingFraction: 0.8)) {
|
||||
angleRecords.insert(rec, at: 0)
|
||||
// 限制最多 30 条
|
||||
if angleRecords.count > 30 { angleRecords.removeLast(angleRecords.count - 30) }
|
||||
}
|
||||
}
|
||||
.buttonStyle(.borderedProminent)
|
||||
.controlSize(.mini)
|
||||
}
|
||||
if !angleRecords.isEmpty {
|
||||
ScrollView(.horizontal, showsIndicators: false) {
|
||||
HStack(spacing: 8) {
|
||||
ForEach(angleRecords) { rec in
|
||||
VStack(alignment: .leading, spacing: 2) {
|
||||
Text(AngleRecord.dateFormatter.string(from: rec.timestamp))
|
||||
.font(.caption2)
|
||||
.foregroundColor(.secondary)
|
||||
Text(String(format: "Y%.2f P%.2f R%.2f", rec.yaw, rec.pitch, rec.roll))
|
||||
.font(.caption.monospaced())
|
||||
}
|
||||
.padding(6)
|
||||
.background(.ultraThinMaterial, in: RoundedRectangle(cornerRadius: 6, style: .continuous))
|
||||
}
|
||||
}
|
||||
.padding(.vertical, 2)
|
||||
}
|
||||
.transition(.opacity.combined(with: .move(edge: .top)))
|
||||
.frame(maxHeight: 70)
|
||||
}
|
||||
}
|
||||
.padding(10)
|
||||
.background(.thinMaterial, in: RoundedRectangle(cornerRadius: 12, style: .continuous))
|
||||
.padding(.top, 50)
|
||||
.padding(.horizontal)
|
||||
|
||||
// 原 guidance 信息
|
||||
let threshold: CGFloat = 0.03
|
||||
let guidance: String = {
|
||||
guard let c = camera.greenRegion else { return "未检测到光标" }
|
||||
var msgs: [String] = []
|
||||
if c.offsetX > threshold { msgs.append("向左移动") }
|
||||
else if c.offsetX < -threshold { msgs.append("向右移动") }
|
||||
if c.offsetY > threshold { msgs.append("向上移动") }
|
||||
else if c.offsetY < -threshold { msgs.append("向下移动") }
|
||||
return msgs.isEmpty ? "已居中" : msgs.joined(separator: " · ")
|
||||
}()
|
||||
Text(guidance)
|
||||
.font(.headline)
|
||||
.padding(8)
|
||||
.background(.ultraThinMaterial, in: Capsule())
|
||||
.foregroundColor(.white)
|
||||
.padding(.horizontal)
|
||||
|
||||
Spacer()
|
||||
}
|
||||
|
||||
VStack(spacing: 12) {
|
||||
HStack {
|
||||
Text(String(format: "lensPosition: %.3f", camera.currentLensPosition))
|
||||
.font(.system(size: 14, weight: .medium, design: .monospaced))
|
||||
.padding(8)
|
||||
.background(.thinMaterial)
|
||||
.cornerRadius(8)
|
||||
|
||||
Spacer()
|
||||
|
||||
Text(String(format: "缩放: %.1fx", camera.currentZoomFactor))
|
||||
.font(.system(size: 14, weight: .medium, design: .monospaced))
|
||||
.padding(8)
|
||||
.background(.thinMaterial)
|
||||
.cornerRadius(8)
|
||||
NavigationLink(destination: DebugView()) {
|
||||
Image(systemName: "waveform.path.ecg")
|
||||
.padding(8)
|
||||
.background(.thinMaterial)
|
||||
.cornerRadius(8)
|
||||
}
|
||||
NavigationLink(destination: PipelineDebugView()) {
|
||||
Image(systemName: "list.bullet.rectangle.portrait")
|
||||
.padding(8)
|
||||
.background(.thinMaterial)
|
||||
.cornerRadius(8)
|
||||
}
|
||||
}
|
||||
.padding(.horizontal)
|
||||
|
||||
VStack(spacing: 10) {
|
||||
// 对焦控制区域
|
||||
Text("拖动微调:对准目标后,调节使得目标最锐利")
|
||||
.font(.footnote)
|
||||
.foregroundColor(.secondary)
|
||||
|
||||
Slider(value: Binding(
|
||||
get: { Double(camera.currentLensPosition) },
|
||||
set: { newVal in
|
||||
camera.lockFocus(to: Float(newVal))
|
||||
}
|
||||
), in: 0.0...1.0)
|
||||
|
||||
Divider()
|
||||
.padding(.vertical, 5)
|
||||
|
||||
// 缩放控制区域
|
||||
Text("缩放控制:拖动滑块调整画面缩放")
|
||||
.font(.footnote)
|
||||
.foregroundColor(.secondary)
|
||||
|
||||
Slider(value: Binding(
|
||||
get: { Double(camera.currentZoomFactor) },
|
||||
set: { newVal in
|
||||
camera.setZoomTo(CGFloat(newVal))
|
||||
}
|
||||
), in: Double(camera.minZoomFactor)...min(Double(camera.maxZoomFactor), 10.0))
|
||||
}
|
||||
.padding()
|
||||
.background(.thinMaterial)
|
||||
.cornerRadius(12)
|
||||
.padding(.bottom, 24)
|
||||
.padding(.horizontal)
|
||||
}
|
||||
}
|
||||
.navigationBarHidden(true)
|
||||
}
|
||||
.padding()
|
||||
}
|
||||
}
|
||||
|
||||
#Preview {
|
||||
ContentView()
|
||||
}
|
||||
|
||||
23
optc-tracker/CrossDetection.swift
Normal file
23
optc-tracker/CrossDetection.swift
Normal file
@ -0,0 +1,23 @@
|
||||
// CrossDetection.swift
|
||||
// optc-tracker
|
||||
//
|
||||
// 绿色十字检测结果模型(归一化坐标)
|
||||
//
|
||||
import CoreGraphics
|
||||
import Foundation // for UUID
|
||||
import QuartzCore // for CACurrentMediaTime
|
||||
|
||||
// Generic green-region detection result.
struct GreenDetection: Identifiable {
    let id = UUID()
    /// Normalized bounding box (origin top-left; x/y/width/height in 0...1).
    let boundingBox: CGRect
    /// Normalized center point.
    let center: CGPoint
    /// Pixel area as a fraction of the full image.
    let areaRatio: CGFloat
    /// Offset of the center from (0.5, 0.5); right/down are positive.
    let offsetX: CGFloat
    let offsetY: CGFloat
    // Media-clock timestamp captured when this value was created.
    let timestamp: CFTimeInterval = CACurrentMediaTime()
}
|
||||
67
optc-tracker/DebugView.swift
Normal file
67
optc-tracker/DebugView.swift
Normal file
@ -0,0 +1,67 @@
|
||||
import SwiftUI
|
||||
|
||||
/// Shows the latest processed debug frame (the largest green connected
/// component image published by `CameraManager`) together with the
/// detection's numeric summary, plus buttons to go back or save the image.
struct DebugView: View {
    @EnvironmentObject var camera: CameraManager
    /// SwiftUI dismiss action: pops this view when it was pushed in a
    /// NavigationStack, or dismisses it when presented modally.
    @Environment(\.dismiss) private var dismiss
    @State private var showRaw = true // NOTE(review): currently unread in this view

    var body: some View {
        VStack(spacing: 12) {
            Text("调试视图 (绿色区域处理结果)")
                .font(.headline)
            if let img = camera.debugImage {
                Image(uiImage: img)
                    .resizable()
                    .interpolation(.none)   // keep hard pixel edges of the mask
                    .scaledToFit()
                    .border(Color.gray.opacity(0.4))
                    .overlay(alignment: .topTrailing) {
                        // Numeric summary of the current detection, if any.
                        if let region = camera.greenRegion {
                            VStack(alignment: .trailing, spacing: 4) {
                                Text(String(format: "中心: (%.3f, %.3f)", region.center.x, region.center.y))
                                Text(String(format: "偏移: x=%.3f y=%.3f", region.offsetX, region.offsetY))
                                Text(String(format: "面积比例: %.5f", region.areaRatio))
                            }
                            .font(.system(.caption, design: .monospaced))
                            .padding(6)
                            .background(.ultraThinMaterial, in: RoundedRectangle(cornerRadius: 8))
                            .padding(6)
                        }
                    }
            } else {
                Text("等待第一帧...")
                    .foregroundColor(.secondary)
            }
            HStack {
                Button("返回") { dismissSelf() }
                Spacer()
                Button("保存图像") { saveDebugImage() }
                    .disabled(camera.debugImage == nil)
            }
            .padding(.horizontal)
            Spacer()
        }
        .padding()
        .navigationBarBackButtonHidden(true)
    }

    /// Leaves this screen.
    private func dismissSelf() {
        // FIX: the previous implementation grabbed the window's root view
        // controller and called dismiss(animated:), which is a no-op when
        // this view is *pushed* inside a NavigationStack (as ContentView
        // does via NavigationLink). The SwiftUI dismiss action handles both
        // push and modal presentation correctly.
        dismiss()
    }

    /// Writes the current debug image to the user's photo library.
    private func saveDebugImage() {
        guard let img = camera.debugImage else { return }
        UIImageWriteToSavedPhotosAlbum(img, nil, nil, nil)
    }
}
|
||||
|
||||
/// Xcode canvas host: supplies a throwaway CameraManager so that
/// DebugView's @EnvironmentObject dependency resolves in previews.
struct DebugView_Previews: PreviewProvider {
    static var previews: some View {
        let previewCamera = CameraManager()
        return DebugView().environmentObject(previewCamera)
    }
}
|
||||
63
optc-tracker/MotionManager.swift
Normal file
63
optc-tracker/MotionManager.swift
Normal file
@ -0,0 +1,63 @@
|
||||
//
|
||||
// MotionManager.swift
|
||||
// optc-tracker
|
||||
//
|
||||
// Created by GitHub Copilot on 2025/8/21.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import CoreMotion
|
||||
import Combine
|
||||
|
||||
/// Publishes device attitude (from Core Motion device-motion updates)
/// as yaw/pitch/roll angles in degrees, updated on the main thread.
final class MotionManager: ObservableObject {
    private let motionManager = CMMotionManager()
    // Serial delivery queue: with the default (unbounded) concurrency,
    // 30 Hz callbacks could be processed out of order and publish stale
    // attitude values over fresh ones.
    private let queue: OperationQueue = {
        let q = OperationQueue()
        q.maxConcurrentOperationCount = 1
        q.name = "MotionManager.deviceMotion"
        return q
    }()

    @Published var yaw: Double = 0    // heading, degrees
    @Published var pitch: Double = 0  // pitch, degrees
    @Published var roll: Double = 0   // roll, degrees

    init() {
        start()
    }

    /// Begins device-motion updates at 30 Hz; no-op when the hardware
    /// (gyroscope) is unavailable, e.g. in the simulator.
    private func start() {
        guard motionManager.isDeviceMotionAvailable else { return }
        motionManager.deviceMotionUpdateInterval = 1.0 / 30.0
        motionManager.startDeviceMotionUpdates(using: .xArbitraryZVertical, to: queue) { [weak self] motion, _ in
            guard let self, let attitude = motion?.attitude else { return }
            // Convert radians to degrees.
            let deg = 180.0 / .pi
            let newYaw = attitude.yaw * deg
            let newPitch = attitude.pitch * deg
            let newRoll = attitude.roll * deg
            // Published properties must be mutated on the main thread.
            DispatchQueue.main.async {
                self.yaw = newYaw
                self.pitch = newPitch
                self.roll = newRoll
            }
        }
    }

    deinit {
        motionManager.stopDeviceMotionUpdates()
    }
}
|
||||
|
||||
/// One attitude sample in degrees, captured with a wall-clock timestamp.
/// Note: `id` is a fresh UUID per record and participates in the
/// synthesized ==/hash, so two records with identical angles still differ.
struct AngleRecord: Identifiable, Hashable {
    let id = UUID()        // unique per record
    let timestamp: Date    // when the sample was taken
    let yaw: Double        // heading, degrees
    let pitch: Double      // pitch, degrees
    let roll: Double       // roll, degrees
}
|
||||
|
||||
extension AngleRecord {
    /// Shared time-of-day formatter ("HH:mm:ss"), cached in a static
    /// because DateFormatter construction is expensive.
    static let dateFormatter: DateFormatter = {
        let formatter = DateFormatter()
        formatter.dateFormat = "HH:mm:ss"
        return formatter
    }()
}
|
||||
95
optc-tracker/PipelineDebugView.swift
Normal file
95
optc-tracker/PipelineDebugView.swift
Normal file
@ -0,0 +1,95 @@
|
||||
import SwiftUI
|
||||
|
||||
/// Lists every intermediate image of the processing pipeline (when
/// `camera.debugEnabled`), with tap-to-zoom per stage and a toolbar
/// toggle for the pipeline debug mode itself.
struct PipelineDebugView: View {
    @EnvironmentObject var camera: CameraManager
    // Which stage is shown full-screen in the sheet (nil = none).
    @State private var selectedStageID: UUID?
    @State private var autoRefresh: Bool = true

    var body: some View {
        List {
            if camera.debugStages.isEmpty {
                Text("暂无调试阶段图像,确保已开启 debugEnabled")
                    .foregroundColor(.secondary)
            } else {
                ForEach(camera.debugStages) { stage in
                    Button {
                        selectedStageID = stage.id
                    } label: {
                        HStack(alignment: .top, spacing: 12) {
                            // Fixed-size thumbnail; no interpolation so mask pixels stay crisp.
                            Image(uiImage: stage.image)
                                .resizable()
                                .interpolation(.none)
                                .aspectRatio(contentMode: .fit)
                                .frame(width: 120, height: 90)
                                .clipped()
                                .border(Color.gray.opacity(0.3))
                            VStack(alignment: .leading, spacing: 4) {
                                Text(stage.name)
                                    .font(.headline)
                                if let info = stage.info { Text(info).font(.caption.monospaced()).foregroundColor(.secondary) }
                            }
                            Spacer()
                        }
                    }
                    .buttonStyle(.plain)
                }
            }
        }
        .navigationTitle("处理管线调试")
        .toolbar {
            ToolbarItemGroup(placement: .topBarTrailing) {
                Toggle(isOn: $autoRefresh) { Text("自动刷新").font(.caption) }
                    .toggleStyle(.switch)
                Button(camera.debugEnabled ? "停用" : "启用") { camera.debugEnabled.toggle() }
            }
        }
        .onReceive(camera.$debugStages) { _ in
            guard autoRefresh else { return }
            // Intentionally empty: the List is already bound to
            // camera.debugStages; merely receiving triggers a refresh.
            // NOTE(review): with an empty body the autoRefresh toggle has
            // no observable effect — confirm whether throttling was intended.
        }
        // Custom Binding maps the selected UUID back to its stage for the
        // sheet; setting (on dismiss) clears the selection.
        .sheet(item: Binding(
            get: { camera.debugStages.first { $0.id == selectedStageID } },
            set: { _ in selectedStageID = nil }
        )) { stage in
            ZoomableImageStage(stage: stage)
        }
    }
}
|
||||
|
||||
/// Full-screen presentation of one pipeline stage image with pinch-to-zoom
/// and drag-to-pan; a toolbar "重置" button restores the default transform.
private struct ZoomableImageStage: View {
    let stage: DebugStage
    @State private var zoom: CGFloat = 1        // current magnification
    @State private var offset: CGSize = .zero   // current pan translation
    var body: some View {
        NavigationStack {
            GeometryReader { geo in
                Image(uiImage: stage.image)
                    .resizable()
                    .interpolation(.none)
                    .aspectRatio(contentMode: .fit)
                    .scaleEffect(zoom)
                    .offset(offset)
                    // Pinch: track magnification live; snap back to 1x when
                    // released while zoomed out below 1x.
                    .gesture(MagnificationGesture()
                        .onChanged { v in zoom = v }
                        .onEnded { _ in if zoom < 1 { withAnimation { zoom = 1; offset = .zero } } }
                    )
                    // Drag: pan freely; recenter on release unless zoomed in.
                    .gesture(DragGesture()
                        .onChanged { g in offset = g.translation }
                        .onEnded { _ in if zoom <= 1 { withAnimation { offset = .zero } } }
                    )
                    .frame(width: geo.size.width, height: geo.size.height)
                    .background(Color.black.opacity(0.9))
            }
            .navigationTitle(stage.name)
            .navigationBarTitleDisplayMode(.inline)
            .toolbar {
                ToolbarItem(placement: .topBarTrailing) { Button("重置") { withAnimation { zoom = 1; offset = .zero } } }
                ToolbarItem(placement: .topBarLeading) { Text(stage.info ?? "").font(.caption.monospaced()) }
            }
        }
    }
}
|
||||
|
||||
// Canvas preview with a fresh CameraManager (no live capture in previews).
#Preview {
    PipelineDebugView().environmentObject(CameraManager())
}
|
||||
120
optc-tracker/PreviewView.swift
Normal file
120
optc-tracker/PreviewView.swift
Normal file
@ -0,0 +1,120 @@
|
||||
//
|
||||
// PreviewView.swift
|
||||
// optc-tracker
|
||||
//
|
||||
// Created by feie9454 on 2025/8/21.
|
||||
//
|
||||
|
||||
import UIKit
|
||||
import AVFoundation
|
||||
import SwiftUI
|
||||
|
||||
/// UIKit view whose backing layer is an AVCaptureVideoPreviewLayer, with
/// CAShapeLayer overlays drawing the detected green region's bounding box
/// and center point on top of the live camera feed.
final class PreviewCanvasView: UIView {
    override class var layerClass: AnyClass { AVCaptureVideoPreviewLayer.self }
    var videoPreviewLayer: AVCaptureVideoPreviewLayer { layer as! AVCaptureVideoPreviewLayer }
    // Convenience passthrough to the preview layer's capture session.
    var session: AVCaptureSession? {
        get { videoPreviewLayer.session }
        set { videoPreviewLayer.session = newValue }
    }

    private let overlayLayer = CAShapeLayer()  // bounding-box outline + translucent fill
    private let centerDot = CAShapeLayer()     // dot marking the region center

    // Latest detection result; any change triggers an overlay redraw.
    var green: GreenDetection? { didSet { updateOverlay() } }

    override init(frame: CGRect) {
        super.init(frame: frame)
        isOpaque = true
        videoPreviewLayer.videoGravity = .resizeAspectFill
        overlayLayer.strokeColor = UIColor.systemGreen.cgColor
        overlayLayer.fillColor = UIColor.systemGreen.withAlphaComponent(0.15).cgColor
        overlayLayer.lineWidth = 3
        overlayLayer.lineJoin = .round
        overlayLayer.lineCap = .round
        videoPreviewLayer.addSublayer(overlayLayer)

        centerDot.fillColor = UIColor.systemGreen.cgColor
        overlayLayer.addSublayer(centerDot)
    }
    required init?(coder: NSCoder) { fatalError("init(coder:) has not been implemented") }

    override func layoutSubviews() {
        super.layoutSubviews()
        // Disable implicit animations so overlay tracking doesn't lag layout.
        CATransaction.begin()
        CATransaction.setDisableActions(true)
        overlayLayer.frame = bounds
        updateOverlay()
        CATransaction.commit()
    }

    /// Converts the detection's normalized rect into layer coordinates and
    /// redraws the box and center dot (clearing both when no detection).
    private func updateOverlay() {
        CATransaction.begin()
        CATransaction.setDisableActions(true)
        guard let det = green else {
            overlayLayer.path = nil
            centerDot.path = nil
            CATransaction.commit()
            return
        }
        // The detector reports boundingBox in the CIImage coordinate space
        // (origin bottom-left); layerRectConverted(fromMetadataOutputRect:)
        // expects a normalized rect with a top-left origin, hence a flip.
        // Stated transform: y' = 1 - y - h.
        // NOTE(review): the code below also swaps x/y axes (x <- y, width <-
        // height), presumably compensating for a 90° sensor/display rotation —
        // confirm against the detector's actual coordinate space.
        let box = det.boundingBox
        var metaRect = CGRect(
            x: box.origin.y ,
            y: 1 - box.origin.x - box.width,
            width: box.height,
            height: box.width)
        // Defensive clamp to the normalized [0, 1] range.
        metaRect.origin.x = max(0, min(1, metaRect.origin.x))
        metaRect.origin.y = max(0, min(1, metaRect.origin.y))
        metaRect.size.width = max(0, min(1 - metaRect.origin.x, metaRect.width))
        metaRect.size.height = max(0, min(1 - metaRect.origin.y, metaRect.height))
        let layerRect = videoPreviewLayer.layerRectConverted(fromMetadataOutputRect: metaRect)

        overlayLayer.path = UIBezierPath(rect: layerRect).cgPath

        // Tiny normalized rect around the center (same axis swap as above),
        // then inflated in layer space to a visible dot.
        let centerMeta = CGRect(
            x: det.center.y - 0.001,
            y: 1 - det.center.x - 0.001,
            width: 0.002,
            height: 0.002)
        let centerRect = videoPreviewLayer.layerRectConverted(fromMetadataOutputRect: centerMeta)
        centerDot.path = UIBezierPath(ovalIn: centerRect.insetBy(dx: -4, dy: -4)).cgPath
        centerDot.fillColor = UIColor.systemGreen.cgColor
        CATransaction.commit()
    }
}
|
||||
|
||||
/// SwiftUI wrapper around PreviewCanvasView: wires up the capture session,
/// forwards the latest green-region detection, and adds pinch-to-zoom.
struct CameraPreview: UIViewRepresentable {
    @EnvironmentObject var camera: CameraManager

    func makeUIView(context: Context) -> PreviewCanvasView {
        let canvas = PreviewCanvasView()
        canvas.videoPreviewLayer.videoGravity = .resizeAspectFill
        canvas.session = camera.session
        // Pinch gesture drives the camera's zoom factor via the coordinator.
        canvas.addGestureRecognizer(
            UIPinchGestureRecognizer(target: context.coordinator,
                                     action: #selector(Coordinator.handlePinch(_:))))
        return canvas
    }

    func updateUIView(_ uiView: PreviewCanvasView, context: Context) {
        context.coordinator.camera = camera
        uiView.green = camera.greenRegion
    }

    func makeCoordinator() -> Coordinator { Coordinator(camera: camera) }

    /// Gesture target; maps pinch scale onto the camera zoom factor.
    class Coordinator: NSObject {
        var camera: CameraManager
        // Zoom factor at the moment the pinch began; the live scale is
        // applied relative to it so zooming is continuous across gestures.
        private var pinchStartZoom: CGFloat = 1.0
        init(camera: CameraManager) { self.camera = camera }
        @objc func handlePinch(_ gesture: UIPinchGestureRecognizer) {
            if gesture.state == .began {
                pinchStartZoom = camera.currentZoomFactor
            } else if gesture.state == .changed {
                camera.setZoomTo(pinchStartZoom * gesture.scale)
            }
        }
    }
}
|
||||
21
optc-tracker/TeleFocusDemoApp.swift
Normal file
21
optc-tracker/TeleFocusDemoApp.swift
Normal file
@ -0,0 +1,21 @@
|
||||
//
|
||||
// TeleFocusDemoApp.swift
|
||||
// optc-tracker
|
||||
//
|
||||
// Created by feie9454 on 2025/8/21.
|
||||
//
|
||||
|
||||
import SwiftUI
|
||||
|
||||
/// App entry point: owns the shared camera and motion managers and injects
/// them into the environment for the entire view hierarchy.
@main
struct TeleFocusDemoApp: App {
    @StateObject private var camera = CameraManager()  // capture pipeline + detection state
    @StateObject private var motion = MotionManager()  // device attitude (yaw/pitch/roll)
    var body: some Scene {
        WindowGroup {
            ContentView()
                .environmentObject(camera)
                .environmentObject(motion)
        }
    }
}
|
||||
@ -1,17 +0,0 @@
|
||||
//
|
||||
// optc_trackerApp.swift
|
||||
// optc-tracker
|
||||
//
|
||||
// Created by feie9454 on 2025/8/21.
|
||||
//
|
||||
|
||||
import SwiftUI
|
||||
|
||||
// Xcode-generated default entry point; this commit replaces it with
// TeleFocusDemoApp (which injects the camera/motion environment objects).
@main
struct optc_trackerApp: App {
    var body: some Scene {
        WindowGroup {
            ContentView()
        }
    }
}
|
||||
@ -1,17 +0,0 @@
|
||||
//
|
||||
// optc_trackerTests.swift
|
||||
// optc-trackerTests
|
||||
//
|
||||
// Created by feie9454 on 2025/8/21.
|
||||
//
|
||||
|
||||
import Testing
|
||||
@testable import optc_tracker
|
||||
|
||||
// Swift Testing scaffold generated by Xcode; contains no real assertions.
struct optc_trackerTests {

    @Test func example() async throws {
        // Write your test here and use APIs like `#expect(...)` to check expected conditions.
    }

}
|
||||
@ -1,41 +0,0 @@
|
||||
//
|
||||
// optc_trackerUITests.swift
|
||||
// optc-trackerUITests
|
||||
//
|
||||
// Created by feie9454 on 2025/8/21.
|
||||
//
|
||||
|
||||
import XCTest
|
||||
|
||||
// Xcode-generated UI test scaffold; launches the app but asserts nothing yet.
final class optc_trackerUITests: XCTestCase {

    override func setUpWithError() throws {
        // Put setup code here. This method is called before the invocation of each test method in the class.

        // In UI tests it is usually best to stop immediately when a failure occurs.
        continueAfterFailure = false

        // In UI tests it’s important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this.
    }

    override func tearDownWithError() throws {
        // Put teardown code here. This method is called after the invocation of each test method in the class.
    }

    @MainActor
    func testExample() throws {
        // UI tests must launch the application that they test.
        let app = XCUIApplication()
        app.launch()

        // Use XCTAssert and related functions to verify your tests produce the correct results.
    }

    @MainActor
    func testLaunchPerformance() throws {
        // This measures how long it takes to launch your application.
        measure(metrics: [XCTApplicationLaunchMetric()]) {
            XCUIApplication().launch()
        }
    }
}
|
||||
@ -1,33 +0,0 @@
|
||||
//
|
||||
// optc_trackerUITestsLaunchTests.swift
|
||||
// optc-trackerUITests
|
||||
//
|
||||
// Created by feie9454 on 2025/8/21.
|
||||
//
|
||||
|
||||
import XCTest
|
||||
|
||||
// Xcode-generated launch test: screenshots the launch screen for the report.
final class optc_trackerUITestsLaunchTests: XCTestCase {

    // Re-run once per target-application UI configuration (e.g. light/dark).
    override class var runsForEachTargetApplicationUIConfiguration: Bool {
        true
    }

    override func setUpWithError() throws {
        continueAfterFailure = false
    }

    @MainActor
    func testLaunch() throws {
        let app = XCUIApplication()
        app.launch()

        // Insert steps here to perform after app launch but before taking a screenshot,
        // such as logging into a test account or navigating somewhere in the app

        // Attach the launch-screen screenshot to the test report permanently.
        let attachment = XCTAttachment(screenshot: app.screenshot())
        attachment.name = "Launch Screen"
        attachment.lifetime = .keepAlways
        add(attachment)
    }
}
|
||||
Loading…
x
Reference in New Issue
Block a user