it's what's on the tin; culls raw photos

feat: add eye detection

+332 -32
+6
cull/CullApp.swift
···
         .keyboardShortcut("e")
         .disabled(session.groups.isEmpty)

+        Button("Reanalyze Photos") {
+            NotificationCenter.default.post(name: .reimport, object: nil)
+        }
+        .disabled(session.sourceFolder == nil || session.isImporting)
+
         Divider()

         Button("Close Folder") {
···
 extension Notification.Name {
     static let openFolder = Notification.Name("openFolder")
     static let showExport = Notification.Name("showExport")
+    static let reimport = Notification.Name("reimport")
 }
+1 -6
cull/Models/CullSession.swift
···
 func moveToNextGroup() {
     guard !groups.isEmpty else { return }
-    resetZoom()
     saveCursorPosition()
     let start = selectedGroupIndex
     for offset in 1...groups.count {
···
 func moveToPreviousGroup() {
     guard !groups.isEmpty else { return }
-    resetZoom()
     saveCursorPosition()
     let start = selectedGroupIndex
     for offset in 1...groups.count {
···
 func moveToNextPhoto() {
     guard let group = selectedGroup else { return }
-    resetZoom()
     // Try to find next visible photo in current group
     for i in (selectedPhotoIndex + 1)..<group.photos.count {
         if !isPhotoFiltered(group.photos[i]) {
···
 func moveToPreviousPhoto() {
     guard let group = selectedGroup else { return }
-    resetZoom()
     // Try to find previous visible photo in current group
     for i in stride(from: selectedPhotoIndex - 1, through: 0, by: -1) {
         if !isPhotoFiltered(group.photos[i]) {
···
 func selectGroup(at index: Int) {
     guard groups.indices.contains(index) else { return }
-    resetZoom()
     saveCursorPosition()
     selectedGroupIndex = index
     restoreCursorPosition()
···
 func selectPhoto(at index: Int) {
     guard let group = selectedGroup, group.photos.indices.contains(index) else { return }
-    resetZoom()
     selectedPhotoIndex = index
 }
···
     photo.pairedPixelHeight = saved.pairedPixelHeight
     photo.pairedFileSize = saved.pairedFileSize
     photo.captureDate = saved.captureDate
+    photo.eyeAspectRatios = saved.eyeAspectRatios
     photosByPath[saved.path] = photo
+2
cull/Models/Photo.swift
···
     var faceSharpness: Double?
     /// Normalized face bounding boxes (Vision coordinates: origin bottom-left, 0-1 range)
     var faceRegions: [CGRect] = []
+    /// Per-face Eye Aspect Ratio, parallel to faceRegions (0 = closed, ~0.3 = wide open)
+    var eyeAspectRatios: [Double] = []

     // Image metadata (populated during import)
     var pixelWidth: Int = 0
+70 -10
cull/Services/QualityAnalyzer.swift
···
     /// Sharpness of the best face region (Laplacian variance on face crop)
     let sharpness: Double?
     let regions: [CGRect]
+    /// Per-face Eye Aspect Ratio, parallel to regions (0 = closed, ~0.3 = wide open)
+    let eyeAspectRatios: [Double]
 }

 static func analyzeFaces(imageURL: URL) async -> FaceResult {
     guard let (source, imageIndex) = sourceForAnalysis(imageURL) else {
-        return FaceResult(sharpness: nil, regions: [])
+        return FaceResult(sharpness: nil, regions: [], eyeAspectRatios: [])
     }
     let options: [CFString: Any] = [
         kCGImageSourceCreateThumbnailFromImageIfAbsent: true,
···
         kCGImageSourceCreateThumbnailWithTransform: true
     ]
     guard let cgImage = CGImageSourceCreateThumbnailAtIndex(source, imageIndex, options as CFDictionary) else {
-        return FaceResult(sharpness: nil, regions: [])
+        return FaceResult(sharpness: nil, regions: [], eyeAspectRatios: [])
     }

-    let request = VNDetectFaceCaptureQualityRequest()
+    let qualityRequest = VNDetectFaceCaptureQualityRequest()
+    let landmarksRequest = VNDetectFaceLandmarksRequest()
     let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])
-    try? handler.perform([request])
+    try? handler.perform([qualityRequest, landmarksRequest])

-    guard let results = request.results, !results.isEmpty else {
-        return FaceResult(sharpness: nil, regions: [])
+    guard let results = qualityRequest.results, !results.isEmpty else {
+        return FaceResult(sharpness: nil, regions: [], eyeAspectRatios: [])
     }

     // Filter out small background faces and low-confidence detections
···
     }

     guard !meaningful.isEmpty else {
-        return FaceResult(sharpness: nil, regions: [])
+        return FaceResult(sharpness: nil, regions: [], eyeAspectRatios: [])
     }

     // Sort faces by size (largest first) for better cycling order
···
         .map(\.boundingBox)
         .sorted { $0.width * $0.height > $1.width * $1.height }

+    // Build a per-face EAR lookup keyed by bounding box.
+    // (CGRect is not Hashable, so this is an array of pairs rather than a dictionary.)
+    var earByBox: [(box: CGRect, ear: Double)] = []
+    if let landmarkResults = landmarksRequest.results {
+        for face in landmarkResults {
+            if let landmarks = face.landmarks {
+                let leftEAR = eyeAspectRatio(landmarks.leftEye)
+                let rightEAR = eyeAspectRatio(landmarks.rightEye)
+                if let l = leftEAR, let r = rightEAR {
+                    earByBox.append((face.boundingBox, (l + r) / 2.0))
+                }
+            }
+        }
+    }
+
+    // Map to sorted regions order
+    let eyeAspectRatios = regions.map { box in
+        earByBox.first { closeEnough($0.box, box) }?.ear ?? 0.3
+    }
+
     // Measure sharpness directly on the largest face crop
-    // This is what actually matters — is the face in focus?
     let bestFaceRect = regions[0]
     let imageW = CGFloat(cgImage.width)
     let imageH = CGFloat(cgImage.height)
-    // Vision rect (bottom-left origin) → pixel rect (top-left origin), padded 20%
     let padX = bestFaceRect.width * 0.2
     let padY = bestFaceRect.height * 0.2
     let pixelRect = CGRect(
···
         faceSharpness = laplacianVariance(faceCrop)
     }

-    return FaceResult(sharpness: faceSharpness, regions: regions)
+    return FaceResult(sharpness: faceSharpness, regions: regions, eyeAspectRatios: eyeAspectRatios)
+}
+
+/// Bounding boxes from different Vision requests may differ slightly
+private static func closeEnough(_ a: CGRect, _ b: CGRect) -> Bool {
+    abs(a.origin.x - b.origin.x) < 0.01 &&
+    abs(a.origin.y - b.origin.y) < 0.01 &&
+    abs(a.width - b.width) < 0.01 &&
+    abs(a.height - b.height) < 0.01
+}
+
+/// Eye Aspect Ratio (EAR) from Vision landmark points.
+/// Uses vertical vs horizontal distances to detect closed eyes.
+/// Returns nil if landmarks are unavailable.
+private static func eyeAspectRatio(_ eye: VNFaceLandmarkRegion2D?) -> Double? {
+    guard let eye, eye.pointCount >= 6 else { return nil }
+    let pts = eye.normalizedPoints
+    // Vision eye landmarks: roughly ordered as outer corner, top points, inner corner, bottom points.
+    // For 6-point eyes: 0=outer, 1=top-outer, 2=top-inner, 3=inner, 4=bottom-inner, 5=bottom-outer
+    // For 8-point eyes: 0=outer, 1=top-outer, 2=top, 3=top-inner, 4=inner, 5=bottom-inner, 6=bottom, 7=bottom-outer
+    let count = eye.pointCount
+    if count == 6 {
+        let vertical1 = distance(pts[1], pts[5])
+        let vertical2 = distance(pts[2], pts[4])
+        let horizontal = distance(pts[0], pts[3])
+        guard horizontal > 0 else { return nil }
+        return Double((vertical1 + vertical2) / (2.0 * horizontal))
+    } else if count >= 8 {
+        let vertical1 = distance(pts[1], pts[7])
+        let vertical2 = distance(pts[2], pts[6])
+        let vertical3 = distance(pts[3], pts[5])
+        let horizontal = distance(pts[0], pts[4])
+        guard horizontal > 0 else { return nil }
+        return Double((vertical1 + vertical2 + vertical3) / (3.0 * horizontal))
+    }
+    return nil
+}
+
+private static func distance(_ a: CGPoint, _ b: CGPoint) -> CGFloat {
+    hypot(a.x - b.x, a.y - b.y)
 }

 static func analyze(photo: Photo) async {
···
         photo.blurScore = blurResult
         photo.faceSharpness = faceResult.sharpness
         photo.faceRegions = faceResult.regions
+        photo.eyeAspectRatios = faceResult.eyeAspectRatios
     }
 }
}
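Worth noting: the 0.20 threshold used downstream sits well between what these formulas produce for open and shut eyes. As a quick sanity check of the EAR arithmetic (hypothetical landmark values, not from this commit), the 6-point branch behaves as the doc comment claims:

    import CoreGraphics

    // Same arithmetic as the 6-point branch of eyeAspectRatio() above.
    func ear6(_ p: [CGPoint]) -> Double {
        let v1 = hypot(p[1].x - p[5].x, p[1].y - p[5].y)
        let v2 = hypot(p[2].x - p[4].x, p[2].y - p[4].y)
        let h = hypot(p[0].x - p[3].x, p[0].y - p[3].y)
        return Double((v1 + v2) / (2 * h))
    }

    // Hypothetical normalized points: outer, top-outer, top-inner, inner, bottom-inner, bottom-outer
    let open: [CGPoint] = [CGPoint(x: 0, y: 0), CGPoint(x: 0.3, y: 0.15), CGPoint(x: 0.7, y: 0.15),
                           CGPoint(x: 1, y: 0), CGPoint(x: 0.7, y: -0.15), CGPoint(x: 0.3, y: -0.15)]
    let shut = open.map { CGPoint(x: $0.x, y: $0.y * 0.13) } // lids nearly touching

    print(ear6(open)) // 0.3, reads as wide open
    print(ear6(shut)) // ~0.04, well under the 0.20 closed threshold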
+26 -4
cull/Services/WorkspaceDB.swift
···
     exec("PRAGMA synchronous=NORMAL")

     createTables()
+    migrate()
 }

 deinit {
···
         paired_pixel_height INTEGER DEFAULT 0,
         paired_file_size INTEGER DEFAULT 0,
         capture_date REAL,
-        group_id TEXT
+        group_id TEXT,
+        eye_aspect_ratios TEXT
     )
     """)
···
     """)
 }

+private func migrate() {
+    // Add eye_aspect_ratios column if missing (added in v2)
+    exec("ALTER TABLE photos ADD COLUMN eye_aspect_ratios TEXT")
+}
+
 // MARK: - Save

 func savePhotos(_ photos: [Photo], sourceFolder: URL) {
···
     INSERT OR REPLACE INTO photos
     (path, paired_path, rating, flag, blur_score, face_sharpness, face_regions,
      pixel_width, pixel_height, file_size,
-     paired_pixel_width, paired_pixel_height, paired_file_size, capture_date, group_id)
-    VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
+     paired_pixel_width, paired_pixel_height, paired_file_size, capture_date, group_id,
+     eye_aspect_ratios)
+    VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
     """)
     defer { sqlite3_finalize(stmt) }
···
     bind(stmt, 13, photo.pairedFileSize)
     bind(stmt, 14, photo.captureDate?.timeIntervalSinceReferenceDate)
     bind(stmt, 15, nil as String?) // group_id set separately
+    bind(stmt, 16, encodeDoubles(photo.eyeAspectRatios))
     sqlite3_step(stmt)
 }
 exec("COMMIT")
···
     let pairedFileSize: Int64
     let captureDate: Date?
     let groupID: String?
+    let eyeAspectRatios: [Double]
 }

 func loadPhotos() -> [SavedPhoto] {
···
     let pairedFileSize = sqlite3_column_int64(stmt, 12)
     let captureDateInterval = getOptionalDouble(stmt, 13)
     let groupID = getString(stmt, 14)
+    let earJSON = getString(stmt, 15)

     results.append(SavedPhoto(
         path: path,
···
         pairedPixelHeight: pairedPixelHeight,
         pairedFileSize: pairedFileSize,
         captureDate: captureDateInterval.map { Date(timeIntervalSinceReferenceDate: $0) },
-        groupID: groupID
+        groupID: groupID,
+        eyeAspectRatios: decodeDoubles(earJSON)
     ))
 }
 return results
···
     let arrays = regions.map { [Double($0.origin.x), Double($0.origin.y), Double($0.width), Double($0.height)] }
     guard let data = try? JSONSerialization.data(withJSONObject: arrays) else { return nil }
     return String(data: data, encoding: .utf8)
+}
+
+private func encodeDoubles(_ values: [Double]) -> String? {
+    guard !values.isEmpty else { return nil }
+    return values.map { String(format: "%.3f", $0) }.joined(separator: ",")
+}
+
+private func decodeDoubles(_ str: String?) -> [Double] {
+    guard let str, !str.isEmpty else { return [] }
+    return str.split(separator: ",").compactMap { Double($0) }
 }

 private func decodeRegions(_ json: String?) -> [CGRect] {
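Two details worth flagging here. The migration has no schema-version check, so it presumably relies on exec() swallowing the duplicate-column error when the ALTER TABLE runs against an already-migrated database. And the EARs round-trip through a comma-separated string at three decimals, which is lossy but far more precision than a 0.20 threshold comparison needs. A sketch of the helpers' round trip, with hypothetical values:

    // Mirrors encodeDoubles/decodeDoubles above.
    let stored = [0.2876, 0.1012].map { String(format: "%.3f", $0) }.joined(separator: ",")
    // stored == "0.288,0.101"
    let restored = stored.split(separator: ",").compactMap { Double($0) }
    // restored == [0.288, 0.101], identical to two digits, which is all the threshold sees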
+140 -3
cull/Views/ContentView.swift
···
 .onReceive(NotificationCenter.default.publisher(for: .showExport)) { _ in
     showExportSheet = true
 }
+.onReceive(NotificationCenter.default.publisher(for: .reimport)) { _ in
+    guard let folder = session.sourceFolder else { return }
+    startReanalyze(folder)
+}
 .onAppear {
     session.undoManager = windowUndoManager
 }
···
     }
 }

+@MainActor
+private func startReanalyze(_ url: URL) {
+    session.isImporting = true
+    session.importProgress = 0.02
+    cache.clearCache()
+
+    // Snapshot existing ratings/flags keyed by relative path
+    let existingState: [String: (rating: Int, flag: PhotoFlag)] = {
+        var map: [String: (Int, PhotoFlag)] = [:]
+        for photo in session.allPhotos {
+            let rel = photo.url.relativePath(from: url)
+            map[rel] = (photo.rating, photo.flag)
+        }
+        return map
+    }()
+
+    let s = session
+    let c = cache
+
+    Task {
+        do {
+            // Phase 1: Full re-scan and metadata read
+            await MainActor.run { s.importStatus = "Scanning photos..." }
+            let result = try await PhotoImporter.importFolder(url, recursive: s.importRecursive)
+
+            // Restore ratings/flags from previous state
+            for photo in result.photos {
+                let rel = photo.url.relativePath(from: url)
+                if let saved = existingState[rel] {
+                    photo.rating = saved.rating
+                    photo.flag = saved.flag
+                }
+            }
+
+            // Phase 2: Re-group (0-20%)
+            await MainActor.run { s.importStatus = "Grouping similar shots..." }
+            var lastReported = 0.0
+            let groups = await ShotGrouper.group(photos: result.photos) { p in
+                let mapped = p * 0.20
+                guard mapped - lastReported > 0.01 else { return }
+                lastReported = mapped
+                await MainActor.run {
+                    withAnimation(.linear(duration: 0.3)) {
+                        s.importProgress = mapped
+                    }
+                }
+            }
+
+            // Phase 3: Analysis + Thumbnails + Previews in parallel (20-100%)
+            let allPhotos = groups.flatMap(\.photos)
+            await MainActor.run { s.importStatus = "Analyzing & loading..." }
+
+            let totalPhotos = Double(allPhotos.count)
+            nonisolated(unsafe) var analysisProgress = 0.0
+            nonisolated(unsafe) var thumbProgress = 0.0
+            nonisolated(unsafe) var previewProgress = 0.0
+
+            @Sendable func reportProgress() async {
+                let combined = 0.20 + (analysisProgress * 0.40 + thumbProgress * 0.35 + previewProgress * 0.25) * 0.80
+                await MainActor.run {
+                    withAnimation(.linear(duration: 0.2)) {
+                        s.importProgress = combined
+                    }
+                }
+            }
+
+            await withTaskGroup(of: Void.self) { parallelGroup in
+                parallelGroup.addTask {
+                    var completed = 0.0
+                    for batchStart in stride(from: 0, to: allPhotos.count, by: 8) {
+                        let batch = Array(allPhotos[batchStart..<min(batchStart + 8, allPhotos.count)])
+                        await withTaskGroup(of: Void.self) { group in
+                            for photo in batch {
+                                group.addTask(priority: .background) {
+                                    await QualityAnalyzer.analyze(photo: photo)
+                                }
+                            }
+                        }
+                        completed += Double(batch.count)
+                        analysisProgress = completed / totalPhotos
+                        await reportProgress()
+                    }
+                }
+
+                parallelGroup.addTask {
+                    await c.preloadAllThumbnails(photos: allPhotos) { p in
+                        thumbProgress = p
+                        await reportProgress()
+                    }
+                }
+
+                parallelGroup.addTask {
+                    let ahead = Array(allPhotos.prefix(30))
+                    let behind = Array(allPhotos.suffix(30))
+                    let initialPreviews = ahead + behind.reversed()
+                    await c.preloadAllPreviews(photos: initialPreviews) { p in
+                        previewProgress = p
+                        await reportProgress()
+                    }
+                }
+            }
+
+            // Rank photos within each group
+            for group in groups {
+                let scored = group.photos.map { (photo: $0, score: Self.qualityScore($0, in: group)) }
+                group.photos = scored.sorted { $0.score > $1.score }.map(\.photo)
+            }
+
+            await MainActor.run {
+                s.importProgress = 1.0
+                s.groups = groups
+                s.selectedGroupIndex = 0
+                s.selectedPhotoIndex = 0
+                s.isImporting = false
+                s.saveWorkspace()
+            }
+        } catch {
+            await MainActor.run {
+                s.isImporting = false
+            }
+        }
+    }
+}
+
 private var cullingView: some View {
     HStack(spacing: 0) {
         // Left: Groups column
···
 /// Without faces: global blur score relative to group peers.
 static func qualityScore(_ photo: Photo, in group: PhotoGroup) -> Double {
     let peers = group.photos
+    var score: Double

     if let faceSharp = photo.faceSharpness, !photo.faceRegions.isEmpty {
         // Face detected — use face-region sharpness (Laplacian on face crop).
         // Normalize relative to peers who also have faces.
         let peerFaceScores = peers.compactMap(\.faceSharpness)
         if let maxF = peerFaceScores.max(), let minF = peerFaceScores.min(), maxF > minF {
-            return (faceSharp - minF) / (maxF - minF)
+            score = (faceSharp - minF) / (maxF - minF)
+        } else {
+            score = 0.5
         }
-        return 0.5
     } else {
         // No faces — use global blur score
-        return normalizedBlur(photo, peers: peers)
+        score = normalizedBlur(photo, peers: peers)
+    }
+
+    // Penalize photos with closed eyes
+    if photo.eyeAspectRatios.contains(where: { $0 < 0.20 }) {
+        score *= 0.3
     }
+
+    return score
 }

 /// Normalize blur score relative to group peers (0-1 range)
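The closed-eye penalty is multiplicative, so it composes with the existing sharpness ranking rather than overriding it. A sketch with hypothetical numbers showing the intended effect on within-group ranking:

    // Hypothetical two-photo group: A is sharper, but someone blinked in it.
    let a = (sharpness: 1.0, ears: [0.27, 0.12])
    let b = (sharpness: 0.7, ears: [0.29, 0.26])

    // Same penalty rule as qualityScore() above.
    func ranked(_ p: (sharpness: Double, ears: [Double])) -> Double {
        p.ears.contains { $0 < 0.20 } ? p.sharpness * 0.3 : p.sharpness
    }

    // ranked(a) == 0.3, ranked(b) == 0.7: B wins despite softer focus,
    // while A would still outrank a very blurry open-eyed peer below 0.3.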
+7 -1
cull/Views/GroupDetailView.swift
···
             .font(.caption)
     }
     Spacer()
+    // Eyes closed badge
+    if photo.eyeAspectRatios.contains(where: { $0 < 0.20 }) {
+        Image(systemName: "eye.slash")
+            .foregroundStyle(.yellow)
+            .font(.caption)
+    }
     // Blur badge — hybrid: trust face quality for bokeh shots
-    if isPhotoBlurry() {
+    else if isPhotoBlurry() {
         Image(systemName: "eye.slash.fill")
             .foregroundStyle(.orange)
             .font(.caption)
+80 -8
cull/Views/PhotoViewer.swift
···
     }
     .font(.caption)

+    // Eyes closed badge
+    if photo.eyeAspectRatios.contains(where: { $0 < 0.20 }) {
+        let closedCount = photo.eyeAspectRatios.filter { $0 < 0.20 }.count
+        HStack(spacing: 3) {
+            Image(systemName: "eye.slash")
+            Text("\(closedCount)")
+        }
+        .foregroundStyle(.yellow)
+        .font(.caption)
+    }
+
     // Blur badge
     if isPhotoBlurry(photo) {
         Label("Blurry", systemImage: "eye.slash.fill")
···
         return ZoomInfo(scale: 2.5, offset: .zero)
     }

-    guard photo.faceRegions.indices.contains(zoomIndex) else {
-        return ZoomInfo(scale: 1, offset: .zero)
+    // Zoomed to a face — adapt to current photo's faces
+    guard !photo.faceRegions.isEmpty else {
+        // No faces on this photo — fall back to center zoom
+        return ZoomInfo(scale: 2.5, offset: .zero)
     }

-    let faceRect = photo.faceRegions[zoomIndex]
+    // Clamp to available faces
+    let clampedIndex = min(zoomIndex, photo.faceRegions.count - 1)
+    let faceRect = photo.faceRegions[clampedIndex]
     // Vision coordinates: origin bottom-left, normalized 0-1
     // Calculate scale so the face takes up ~35% of the view width
     let faceW = faceRect.width
     let faceH = faceRect.height
-    let scale = min(0.35 / max(faceW, faceH), 5.0)
+    let scale = min(max(0.35 / max(faceW, faceH), 1.5), 5.0)

     // Face center in normalized image coords (flip Y)
     let faceCenterX = faceRect.midX
···
     return ZoomInfo(scale: scale, offset: CGSize(width: offsetX, height: offsetY))
 }

+// MARK: - Eye indicator
+
+/// Draws a small eye shape whose height reflects how open or closed the eyes are.
+/// EAR ~0.30 = wide open, ~0.20 = threshold, ~0.05 = shut.
+private struct EyeIndicator: View {
+    let ear: Double
+    let faceWidth: CGFloat
+
+    var body: some View {
+        let eyeW = min(faceWidth * 0.22, 20)
+        // Map EAR to openness: 0.05 = flat, 0.30 = full open
+        let openness = CGFloat(max(0, min(1, (ear - 0.05) / 0.25)))
+        let eyeH = eyeW * 0.8 * openness
+        let color: Color = ear < 0.20 ? .yellow : .white.opacity(0.7)
+
+        EyeShape(openness: openness)
+            .fill(color.opacity(0.9))
+            .frame(width: eyeW, height: max(eyeH, 1.5))
+            .shadow(color: .black.opacity(0.8), radius: 1.5)
+    }
+}
+
+/// Almond-shaped eye that flattens as openness approaches 0.
+private struct EyeShape: Shape {
+    let openness: CGFloat
+
+    func path(in rect: CGRect) -> Path {
+        var path = Path()
+        let midY = rect.midY
+        let bulge = rect.height / 2
+        let cpInset = rect.width * 0.2
+
+        // Top lid arc (cubic for rounder shape)
+        path.move(to: CGPoint(x: rect.minX, y: midY))
+        path.addCurve(
+            to: CGPoint(x: rect.maxX, y: midY),
+            control1: CGPoint(x: rect.minX + cpInset, y: midY - bulge),
+            control2: CGPoint(x: rect.maxX - cpInset, y: midY - bulge)
+        )
+        // Bottom lid arc
+        path.addCurve(
+            to: CGPoint(x: rect.minX, y: midY),
+            control1: CGPoint(x: rect.maxX - cpInset, y: midY + bulge),
+            control2: CGPoint(x: rect.minX + cpInset, y: midY + bulge)
+        )
+        return path
+    }
+}
+
 private func fittedSize(image: CGSize, in container: CGSize) -> CGSize {
     let scaleW = container.width / image.width
     let scaleH = container.height / image.height
···
 private func faceOverlays(photo: Photo, fittedSize: CGSize) -> some View {
     ForEach(0..<photo.faceRegions.count, id: \.self) { i in
         let faceRect = photo.faceRegions[i]
+        let ear = i < photo.eyeAspectRatios.count ? photo.eyeAspectRatios[i] : 0.3
+        let isClosed = ear < 0.20
         // Convert Vision rect (bottom-left origin) to SwiftUI overlay coords (top-left origin)
         let x = faceRect.origin.x * fittedSize.width
         let y = (1 - faceRect.origin.y - faceRect.height) * fittedSize.height
         let w = faceRect.width * fittedSize.width
         let h = faceRect.height * fittedSize.height

-        RoundedRectangle(cornerRadius: 3)
-            .strokeBorder(Color.white.opacity(0.5), lineWidth: 1.5)
-            .frame(width: w, height: h)
-            .position(x: x + w / 2, y: y + h / 2)
+        ZStack {
+            RoundedRectangle(cornerRadius: 3)
+                .strokeBorder(isClosed ? Color.yellow.opacity(0.8) : Color.white.opacity(0.5), lineWidth: 1.5)
+                .frame(width: w, height: h)
+
+            // Eye openness indicator just under the chin
+            EyeIndicator(ear: ear, faceWidth: w)
+                .offset(y: h * 0.55)
+        }
+        .position(x: x + w / 2, y: y + h / 2)
     }
 }
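The indicator maps EAR linearly onto lid openness between an assumed floor of 0.05 and ceiling of 0.30, so the glyph is already 60% open right at the warning threshold. Worked values (hypothetical EARs):

    // openness = clamp((ear - 0.05) / 0.25, 0, 1), as in EyeIndicator above
    let openness = [0.05, 0.20, 0.30].map { max(0, min(1, ($0 - 0.05) / 0.25)) }
    // [0.0, 0.6, 1.0]: flat line at 0.05, 60% open at the 0.20 threshold, full almond at 0.30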