It's what's on the tin; culls raw photos
0
fork

Configure Feed

Select the types of activity you want to include in your feed.

feat: add scoring

+667 -119
+9
cull/CullApp.swift
··· 70 70 } 71 71 } 72 72 73 + // View menu 74 + CommandMenu("View") { 75 + Button("Zoom to Face / Center") { 76 + session.cycleZoom() 77 + } 78 + .keyboardShortcut(.space, modifiers: []) 79 + .disabled(session.selectedPhoto == nil) 80 + } 81 + 73 82 // Navigate menu 74 83 CommandMenu("Navigate") { 75 84 Button("Next Photo") {
+40
cull/Models/CullSession.swift
··· 8 8 var selectedGroupIndex: Int = 0 9 9 var selectedPhotoIndex: Int = 0 10 10 11 + /// Zoom state: nil = fit, -1 = center zoom, 0+ = face index 12 + var zoomFaceIndex: Int? = nil 13 + 11 14 var isImporting: Bool = false 12 15 var importProgress: Double = 0 13 16 var importStatus: String = "" ··· 67 70 68 71 func moveToNextGroup() { 69 72 guard !groups.isEmpty else { return } 73 + resetZoom() 70 74 saveCursorPosition() 71 75 let start = selectedGroupIndex 72 76 for offset in 1...groups.count { ··· 81 85 82 86 func moveToPreviousGroup() { 83 87 guard !groups.isEmpty else { return } 88 + resetZoom() 84 89 saveCursorPosition() 85 90 let start = selectedGroupIndex 86 91 for offset in 1...groups.count { ··· 95 100 96 101 func moveToNextPhoto() { 97 102 guard let group = selectedGroup else { return } 103 + resetZoom() 98 104 // Try to find next visible photo in current group 99 105 for i in (selectedPhotoIndex + 1)..<group.photos.count { 100 106 if !isPhotoFiltered(group.photos[i]) { ··· 108 114 109 115 func moveToPreviousPhoto() { 110 116 guard let group = selectedGroup else { return } 117 + resetZoom() 111 118 // Try to find previous visible photo in current group 112 119 for i in stride(from: selectedPhotoIndex - 1, through: 0, by: -1) { 113 120 if !isPhotoFiltered(group.photos[i]) { ··· 130 137 131 138 func selectGroup(at index: Int) { 132 139 guard groups.indices.contains(index) else { return } 140 + resetZoom() 133 141 saveCursorPosition() 134 142 selectedGroupIndex = index 135 143 restoreCursorPosition() ··· 137 145 138 146 func selectPhoto(at index: Int) { 139 147 guard let group = selectedGroup, group.photos.indices.contains(index) else { return } 148 + resetZoom() 140 149 selectedPhotoIndex = index 141 150 } 142 151 ··· 273 282 func clearRatingAndFlag() { 274 283 guard let photo = selectedPhoto else { return } 275 284 applyPhotoState(photo, rating: 0, flag: .none, actionName: "Clear Rating & Flag") 285 + } 286 + 287 + // MARK: - Zoom 288 + 289 + func 
cycleZoom() { 290 + guard let photo = selectedPhoto else { return } 291 + let faces = photo.faceRegions 292 + 293 + switch zoomFaceIndex { 294 + case nil: 295 + // Currently fit → zoom to first face or center 296 + if faces.isEmpty { 297 + zoomFaceIndex = -1 // center zoom 298 + } else { 299 + zoomFaceIndex = 0 // first face 300 + } 301 + case -1: 302 + // Center zoom → back to fit 303 + zoomFaceIndex = nil 304 + case let idx?: 305 + // On a face → next face, or back to fit 306 + if idx + 1 < faces.count { 307 + zoomFaceIndex = idx + 1 308 + } else { 309 + zoomFaceIndex = nil 310 + } 311 + } 312 + } 313 + 314 + private func resetZoom() { 315 + zoomFaceIndex = nil 276 316 } 277 317 }
+2
cull/Models/Photo.swift
··· 22 22 // Populated asynchronously by QualityAnalyzer 23 23 var blurScore: Double? 24 24 var faceQualityScore: Double? 25 + /// Normalized face bounding boxes (Vision coordinates: origin bottom-left, 0-1 range) 26 + var faceRegions: [CGRect] = [] 25 27 26 28 // Populated by ShotGrouper 27 29 var captureDate: Date?
+104 -74
cull/Services/QualityAnalyzer.swift
··· 1 + import Accelerate 1 2 import CoreImage 2 - import Metal 3 - import MetalPerformanceShaders 3 + import ImageIO 4 4 import Vision 5 5 6 6 struct QualityAnalyzer { 7 + 8 + /// Laplacian variance sharpness detection using Accelerate (vDSP). 9 + /// Uses Apple's recommended 8-connected Laplacian kernel for better edge sensitivity. 7 10 static func analyzeBlur(imageURL: URL) async -> Double? { 8 - guard let device = MTLCreateSystemDefaultDevice() else { return nil } 11 + guard let source = CGImageSourceCreateWithURL(imageURL as CFURL, nil) else { return nil } 12 + let options: [CFString: Any] = [ 13 + kCGImageSourceCreateThumbnailFromImageIfAbsent: true, 14 + kCGImageSourceThumbnailMaxPixelSize: 512, 15 + kCGImageSourceShouldCache: false, 16 + kCGImageSourceCreateThumbnailWithTransform: true 17 + ] 18 + guard let cgImage = CGImageSourceCreateThumbnailAtIndex(source, 0, options as CFDictionary) else { return nil } 19 + 20 + // Read ISO for noise compensation 21 + let iso = readISO(from: source) 22 + 23 + guard let variance = laplacianVariance(cgImage) else { return nil } 9 24 10 - let ciImage: CIImage? 
11 - if let source = CGImageSourceCreateWithURL(imageURL as CFURL, nil) { 12 - let options: [CFString: Any] = [ 13 - kCGImageSourceCreateThumbnailFromImageIfAbsent: true, 14 - kCGImageSourceThumbnailMaxPixelSize: 512, 15 - kCGImageSourceShouldCache: false, 16 - kCGImageSourceCreateThumbnailWithTransform: true 17 - ] 18 - if let cgImage = CGImageSourceCreateThumbnailAtIndex(source, 0, options as CFDictionary) { 19 - ciImage = CIImage(cgImage: cgImage) 20 - } else { 21 - ciImage = nil 22 - } 23 - } else { 24 - ciImage = nil 25 + // Compensate for high-ISO noise inflating the score 26 + if let iso, iso > 100 { 27 + let isoStops = log2(Double(iso) / 100.0) 28 + let noisePenalty = pow(1.3, isoStops) 29 + return variance / noisePenalty 25 30 } 31 + return variance 32 + } 26 33 27 - guard let ci = ciImage, 28 - let cgImage = CIContext().createCGImage(ci, from: ci.extent) 29 - else { return nil } 30 - 34 + /// 8-connected Laplacian variance via vDSP (Apple's recommended approach). 35 + private static func laplacianVariance(_ cgImage: CGImage) -> Double? 
{ 31 36 let width = cgImage.width 32 37 let height = cgImage.height 33 - 34 - let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor( 35 - pixelFormat: .r32Float, width: width, height: height, mipmapped: false 36 - ) 37 - textureDescriptor.usage = [.shaderRead, .shaderWrite] 38 - 39 - guard let sourceTexture = device.makeTexture(descriptor: textureDescriptor), 40 - let laplacianTexture = device.makeTexture(descriptor: textureDescriptor) 41 - else { return nil } 38 + guard width > 4, height > 4 else { return nil } 42 39 43 - // Convert to grayscale float texture 40 + // Render to 8-bit grayscale 44 41 let colorSpace = CGColorSpaceCreateDeviceGray() 45 42 guard let context = CGContext( 46 43 data: nil, width: width, height: height, 47 - bitsPerComponent: 32, bytesPerRow: width * 4, 48 - space: colorSpace, bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.none.rawValue | CGBitmapInfo.floatComponents.rawValue | CGBitmapInfo.byteOrder32Little.rawValue).rawValue 44 + bitsPerComponent: 8, bytesPerRow: width, 45 + space: colorSpace, 46 + bitmapInfo: CGImageAlphaInfo.none.rawValue 49 47 ) else { return nil } 50 48 context.draw(cgImage, in: CGRect(x: 0, y: 0, width: width, height: height)) 51 - 52 - guard let data = context.data else { return nil } 53 - sourceTexture.replace( 54 - region: MTLRegionMake2D(0, 0, width, height), 55 - mipmapLevel: 0, 56 - withBytes: data, 57 - bytesPerRow: width * 4 58 - ) 49 + guard let pixelData = context.data else { return nil } 59 50 60 - // Laplacian + variance 61 - guard let commandQueue = device.makeCommandQueue(), 62 - let commandBuffer = commandQueue.makeCommandBuffer() 63 - else { return nil } 51 + // Convert UInt8 → Float 52 + let pixelCount = width * height 53 + let uint8Ptr = pixelData.assumingMemoryBound(to: UInt8.self) 54 + var floatPixels = [Float](repeating: 0, count: pixelCount) 55 + vDSP.convertElements(of: UnsafeBufferPointer(start: uint8Ptr, count: pixelCount), to: &floatPixels) 64 56 65 - let laplacian = 
MPSImageLaplacian(device: device) 66 - laplacian.encode(commandBuffer: commandBuffer, sourceTexture: sourceTexture, destinationTexture: laplacianTexture) 57 + // 8-connected Laplacian: [-1,-1,-1; -1,8,-1; -1,-1,-1] 58 + // More sensitive than 4-connected, detects diagonal edges too 59 + let kernel: [Float] = [ 60 + -1, -1, -1, 61 + -1, 8, -1, 62 + -1, -1, -1 63 + ] 67 64 68 - let varianceDesc = MTLTextureDescriptor.texture2DDescriptor( 69 - pixelFormat: .r32Float, width: 2, height: 1, mipmapped: false 70 - ) 71 - varianceDesc.usage = [.shaderRead, .shaderWrite] 72 - guard let varianceTexture = device.makeTexture(descriptor: varianceDesc) else { return nil } 65 + // vDSP convolution 66 + var result = [Float](repeating: 0, count: pixelCount) 67 + vDSP.convolve(floatPixels, 68 + rowCount: height, 69 + columnCount: width, 70 + with3x3Kernel: kernel, 71 + result: &result) 73 72 74 - let stats = MPSImageStatisticsMeanAndVariance(device: device) 75 - stats.encode(commandBuffer: commandBuffer, sourceTexture: laplacianTexture, destinationTexture: varianceTexture) 73 + // Variance via vDSP_normalize (stddev² = variance) 74 + var mean: Float = 0 75 + var stddev: Float = 0 76 + vDSP_normalize(result, 1, nil, 1, &mean, &stddev, vDSP_Length(pixelCount)) 76 77 77 - commandBuffer.commit() 78 - await commandBuffer.completed() 78 + return Double(stddev * stddev) 79 + } 79 80 80 - var result = [Float](repeating: 0, count: 2) 81 - varianceTexture.getBytes( 82 - &result, 83 - bytesPerRow: 8, 84 - from: MTLRegionMake2D(0, 0, 2, 1), 85 - mipmapLevel: 0 86 - ) 81 + /// Read ISO speed from EXIF for noise compensation 82 + private static func readISO(from source: CGImageSource) -> Int? { 83 + guard let properties = CGImageSourceCopyPropertiesAtIndex(source, 0, nil) as? [String: Any], 84 + let exif = properties[kCGImagePropertyExifDictionary as String] as? [String: Any], 85 + let isoArray = exif[kCGImagePropertyExifISOSpeedRatings as String] as? 
[Int], 86 + let iso = isoArray.first 87 + else { return nil } 88 + return iso 89 + } 87 90 88 - return Double(result[1]) // variance = sharpness score 91 + struct FaceResult { 92 + let quality: Double? 93 + let regions: [CGRect] 89 94 } 90 95 91 - static func analyzeFaceQuality(imageURL: URL) async -> Double? { 92 - guard let source = CGImageSourceCreateWithURL(imageURL as CFURL, nil) else { return nil } 96 + static func analyzeFaces(imageURL: URL) async -> FaceResult { 97 + guard let source = CGImageSourceCreateWithURL(imageURL as CFURL, nil) else { 98 + return FaceResult(quality: nil, regions: []) 99 + } 93 100 let options: [CFString: Any] = [ 94 101 kCGImageSourceCreateThumbnailFromImageIfAbsent: true, 95 102 kCGImageSourceThumbnailMaxPixelSize: 1024, 96 103 kCGImageSourceShouldCache: false, 97 104 kCGImageSourceCreateThumbnailWithTransform: true 98 105 ] 99 - guard let cgImage = CGImageSourceCreateThumbnailAtIndex(source, 0, options as CFDictionary) else { return nil } 106 + guard let cgImage = CGImageSourceCreateThumbnailAtIndex(source, 0, options as CFDictionary) else { 107 + return FaceResult(quality: nil, regions: []) 108 + } 100 109 101 110 let request = VNDetectFaceCaptureQualityRequest() 102 111 let handler = VNImageRequestHandler(cgImage: cgImage, options: [:]) 103 112 try? handler.perform([request]) 104 113 105 - guard let results = request.results, !results.isEmpty else { return nil } 106 - return results.map { Double($0.faceCaptureQuality ?? 
0) }.max() 114 + guard let results = request.results, !results.isEmpty else { 115 + return FaceResult(quality: nil, regions: []) 116 + } 117 + 118 + // Filter out small background faces and low-confidence detections 119 + let meaningful = results.filter { face in 120 + let area = face.boundingBox.width * face.boundingBox.height 121 + // Must be at least 1.5% of image area and have decent confidence 122 + guard area >= 0.015, face.confidence >= 0.5 else { return false } 123 + // Skip very low quality faces (blurry background people) 124 + if let q = face.faceCaptureQuality, q < 0.15 { return false } 125 + return true 126 + } 127 + 128 + let quality = meaningful.map { Double($0.faceCaptureQuality ?? 0) }.max() 129 + // Sort faces by size (largest first) for better cycling order 130 + let regions = meaningful 131 + .map(\.boundingBox) 132 + .sorted { $0.width * $0.height > $1.width * $1.height } 133 + 134 + return FaceResult(quality: quality, regions: regions) 107 135 } 108 136 109 137 static func analyze(photo: Photo) async { 110 - async let blur = analyzeBlur(imageURL: photo.pairedURL ?? photo.url) 111 - async let face = analyzeFaceQuality(imageURL: photo.pairedURL ?? photo.url) 138 + let url = photo.pairedURL ?? photo.url 139 + async let blur = analyzeBlur(imageURL: url) 140 + async let faces = analyzeFaces(imageURL: url) 112 141 113 - let (blurResult, faceResult) = await (blur, face) 142 + let (blurResult, faceResult) = await (blur, faces) 114 143 await MainActor.run { 115 144 photo.blurScore = blurResult 116 - photo.faceQualityScore = faceResult 145 + photo.faceQualityScore = faceResult.quality 146 + photo.faceRegions = faceResult.regions 117 147 } 118 148 } 119 149 }
+75 -32
cull/Views/ContentView.swift
··· 57 57 await MainActor.run { s.importStatus = "Scanning photos..." } 58 58 let result = try await PhotoImporter.importFolder(url) 59 59 60 + // Phase 1: Feature print grouping (0-20%) 60 61 await MainActor.run { s.importStatus = "Grouping similar shots..." } 61 - // Phase 1: Feature print grouping (0-30%) 62 62 var lastReported = 0.0 63 63 let groups = await ShotGrouper.group(photos: result.photos) { p in 64 - let mapped = p * 0.30 64 + let mapped = p * 0.20 65 65 guard mapped - lastReported > 0.01 else { return } 66 66 lastReported = mapped 67 67 await MainActor.run { ··· 71 71 } 72 72 } 73 73 74 - await MainActor.run { s.importStatus = "Generating thumbnails..." } 75 - // Phase 2: Thumbnails (30-60%) 74 + // Phase 2: Quality analysis — blur + faces (20-50%) 75 + await MainActor.run { s.importStatus = "Analyzing sharpness & faces..." } 76 76 let allPhotos = groups.flatMap(\.photos) 77 + let totalPhotos = Double(allPhotos.count) 78 + var analysisCompleted = 0.0 79 + for batchStart in stride(from: 0, to: allPhotos.count, by: 4) { 80 + let batch = Array(allPhotos[batchStart..<min(batchStart + 4, allPhotos.count)]) 81 + await withTaskGroup(of: Void.self) { group in 82 + for photo in batch { 83 + group.addTask { 84 + let url = photo.pairedURL ?? 
photo.url 85 + let blur = await QualityAnalyzer.analyzeBlur(imageURL: url) 86 + let faces = await QualityAnalyzer.analyzeFaces(imageURL: url) 87 + await MainActor.run { 88 + photo.blurScore = blur 89 + photo.faceQualityScore = faces.quality 90 + photo.faceRegions = faces.regions 91 + 92 + } 93 + } 94 + } 95 + } 96 + analysisCompleted += Double(batch.count) 97 + let mapped = 0.20 + (analysisCompleted / totalPhotos) * 0.30 98 + await MainActor.run { 99 + withAnimation(.linear(duration: 0.2)) { 100 + s.importProgress = mapped 101 + } 102 + } 103 + } 104 + 105 + // Rank photos within each group — best first 106 + for group in groups { 107 + let scored = group.photos.map { (photo: $0, score: Self.qualityScore($0, in: group)) } 108 + group.photos = scored.sorted { $0.score > $1.score }.map(\.photo) 109 + } 110 + 111 + // Phase 3: Thumbnails (50-75%) 112 + await MainActor.run { s.importStatus = "Generating thumbnails..." } 77 113 await c.preloadAllThumbnails(photos: allPhotos) { p in 78 - let mapped = 0.30 + p * 0.30 114 + let mapped = 0.50 + p * 0.25 79 115 await MainActor.run { 80 116 withAnimation(.linear(duration: 0.2)) { 81 117 s.importProgress = mapped ··· 83 119 } 84 120 } 85 121 122 + // Phase 4: Initial full-res previews (75-100%) 86 123 await MainActor.run { s.importStatus = "Loading previews..." } 87 - // Phase 3: Initial full-res previews (60-100%) 88 124 let ahead = Array(allPhotos.prefix(30)) 89 125 let behind = Array(allPhotos.suffix(30)) 90 126 let initialPreviews = ahead + behind.reversed() 91 127 await c.preloadAllPreviews(photos: initialPreviews) { p in 92 - let mapped = 0.60 + p * 0.40 128 + let mapped = 0.75 + p * 0.25 93 129 await MainActor.run { 94 130 withAnimation(.linear(duration: 0.2)) { 95 131 s.importProgress = mapped ··· 104 140 s.selectedPhotoIndex = 0 105 141 s.isImporting = false 106 142 } 107 - 108 - let analysisWork: [(UUID, URL)] = allPhotos.map { ($0.id, $0.pairedURL ?? 
$0.url) } 109 - let photosByID: [UUID: Photo] = Dictionary(uniqueKeysWithValues: allPhotos.map { ($0.id, $0) }) 110 - Task.detached(priority: .background) { 111 - for batchStart in stride(from: 0, to: analysisWork.count, by: 4) { 112 - let batch = Array(analysisWork[batchStart..<min(batchStart + 4, analysisWork.count)]) 113 - await withTaskGroup(of: (UUID, Double?, Double?).self) { group in 114 - for (id, url) in batch { 115 - group.addTask { 116 - let blur = await QualityAnalyzer.analyzeBlur(imageURL: url) 117 - let face = await QualityAnalyzer.analyzeFaceQuality(imageURL: url) 118 - return (id, blur, face) 119 - } 120 - } 121 - for await (id, blur, face) in group { 122 - await MainActor.run { 123 - if let photo = photosByID[id] { 124 - photo.blurScore = blur 125 - photo.faceQualityScore = face 126 - } 127 - } 128 - } 129 - } 130 - } 131 - } 132 143 } catch { 133 144 await MainActor.run { 134 145 s.sourceFolder = nil ··· 173 184 } 174 185 return .ignored 175 186 } 187 + .onKeyPress(.space) { session.cycleZoom(); return .handled } 176 188 .onKeyPress(keys: ["e"]) { _ in showExportSheet = true; return .handled } 177 189 .onAppear { isViewerFocused = true } 178 190 .onChange(of: session.selectedGroupIndex) { isViewerFocused = true } ··· 245 257 } 246 258 } 247 259 } 260 + } 261 + 262 + extension ContentView { 263 + /// Quality score for ranking within a group. Higher = better. 
264 + static func qualityScore(_ photo: Photo, in group: PhotoGroup) -> Double { 265 + var score = 0.0 266 + let peers = group.photos 267 + 268 + if let blur = photo.blurScore { 269 + let peerBlurs = peers.compactMap(\.blurScore) 270 + if let maxB = peerBlurs.max(), let minB = peerBlurs.min(), maxB > minB { 271 + score += ((blur - minB) / (maxB - minB)) * 0.5 272 + } else { 273 + score += 0.25 274 + } 275 + } 276 + 277 + if let fq = photo.faceQualityScore { 278 + score += fq * 0.5 279 + } else if let blur = photo.blurScore { 280 + let peerBlurs = peers.compactMap(\.blurScore) 281 + if let maxB = peerBlurs.max(), let minB = peerBlurs.min(), maxB > minB { 282 + score += ((blur - minB) / (maxB - minB)) * 0.5 283 + } else { 284 + score += 0.25 285 + } 286 + } 287 + 288 + return score 289 + } 290 + 248 291 } 249 292 250 293 struct ToolbarFilterButton: View {
+86 -7
cull/Views/GroupDetailView.swift
··· 13 13 if !session.isPhotoFiltered(photo) { 14 14 PhotoThumbnail( 15 15 photo: photo, 16 + group: group, 16 17 isSelected: index == session.selectedPhotoIndex 17 18 ) 18 19 .id(photo.id) ··· 38 39 39 40 private struct PhotoThumbnail: View { 40 41 let photo: Photo 42 + let group: PhotoGroup 41 43 let isSelected: Bool 42 44 @Environment(ThumbnailCache.self) private var cache 43 45 @State private var thumbnail: NSImage? 44 46 45 47 var body: some View { 46 48 VStack(spacing: 2) { 47 - ZStack(alignment: .topLeading) { 49 + ZStack { 48 50 if let thumbnail { 49 51 Image(nsImage: thumbnail) 50 52 .resizable() ··· 57 59 .frame(width: 148, height: 100) 58 60 } 59 61 60 - // Flag badge 61 - if photo.flag != .none { 62 - Image(systemName: photo.flag == .pick ? "checkmark.circle.fill" : "xmark.circle.fill") 63 - .foregroundStyle(photo.flag == .pick ? .green : .red) 64 - .font(.caption) 65 - .padding(4) 62 + // Badges 63 + VStack { 64 + HStack { 65 + // Flag badge 66 + if photo.flag != .none { 67 + Image(systemName: photo.flag == .pick ? "checkmark.circle.fill" : "xmark.circle.fill") 68 + .foregroundStyle(photo.flag == .pick ? 
.green : .red) 69 + .font(.caption) 70 + } 71 + Spacer() 72 + // Blur badge — hybrid: trust face quality for bokeh shots 73 + if isPhotoBlurry() { 74 + Image(systemName: "eye.slash.fill") 75 + .foregroundStyle(.orange) 76 + .font(.caption) 77 + } 78 + } 79 + Spacer() 80 + HStack { 81 + // Best-in-group badge 82 + if isBestInGroup() { 83 + Image(systemName: "star.circle.fill") 84 + .foregroundStyle(.green) 85 + .font(.caption) 86 + } 87 + Spacer() 88 + // Face count badge 89 + if !photo.faceRegions.isEmpty { 90 + HStack(spacing: 2) { 91 + Image(systemName: "face.smiling") 92 + Text("\(photo.faceRegions.count)") 93 + } 94 + .font(.caption2) 95 + .foregroundStyle(.white) 96 + .padding(.horizontal, 4) 97 + .padding(.vertical, 1) 98 + .background(.black.opacity(0.5), in: Capsule()) 99 + } 100 + } 66 101 } 102 + .padding(4) 67 103 } 68 104 69 105 // Rating stars ··· 92 128 guard thumbnail == nil else { return } 93 129 thumbnail = await cache.thumbnail(for: photo) 94 130 } 131 + } 132 + 133 + private func isBestInGroup() -> Bool { 134 + let scored = group.photos.filter { $0.blurScore != nil || $0.faceQualityScore != nil } 135 + guard scored.count >= 2 else { return false } 136 + let best = scored.max { qualityScore($0) < qualityScore($1) } 137 + return best?.id == photo.id 138 + } 139 + 140 + private func qualityScore(_ p: Photo) -> Double { 141 + var score = 0.0 142 + let peers = group.photos 143 + if let blur = p.blurScore { 144 + let peerBlurs = peers.compactMap(\.blurScore) 145 + if let maxB = peerBlurs.max(), let minB = peerBlurs.min(), maxB > minB { 146 + score += ((blur - minB) / (maxB - minB)) * 0.5 147 + } else { 148 + score += 0.25 149 + } 150 + } 151 + if let fq = p.faceQualityScore { 152 + score += fq * 0.5 153 + } else if let blur = p.blurScore { 154 + let peerBlurs = peers.compactMap(\.blurScore) 155 + if let maxB = peerBlurs.max(), let minB = peerBlurs.min(), maxB > minB { 156 + score += ((blur - minB) / (maxB - minB)) * 0.5 157 + } else { 158 + score += 
0.25 159 + } 160 + } 161 + return score 162 + } 163 + 164 + private func isPhotoBlurry() -> Bool { 165 + if !photo.faceRegions.isEmpty { 166 + guard let fq = photo.faceQualityScore else { return false } 167 + return fq < 0.35 168 + } 169 + guard let blur = photo.blurScore else { return false } 170 + let peerScores = group.photos.compactMap(\.blurScore) 171 + guard peerScores.count >= 2 else { return false } 172 + let median = peerScores.sorted()[peerScores.count / 2] 173 + return blur < median * 0.4 95 174 } 96 175 }
+212 -4
cull/Views/PhotoViewer.swift
··· 9 9 private let lookaheadCount = 30 10 10 private let lookbehindCount = 30 11 11 12 + /// Face quality threshold — below this, faces are considered blurry 13 + private let faceBlurThreshold: Double = 0.35 14 + 12 15 var body: some View { 13 16 ZStack { 14 17 Color.black 15 18 16 19 if let displayImage { 17 - Image(nsImage: displayImage) 18 - .resizable() 19 - .aspectRatio(contentMode: .fit) 20 - .frame(maxWidth: .infinity, maxHeight: .infinity) 20 + GeometryReader { geo in 21 + let imageSize = displayImage.size 22 + let fitted = fittedSize(image: imageSize, in: geo.size) 23 + let zoomInfo = currentZoomInfo(fittedSize: fitted, containerSize: geo.size) 24 + 25 + ZStack { 26 + Image(nsImage: displayImage) 27 + .resizable() 28 + .aspectRatio(contentMode: .fit) 29 + .overlay { 30 + // Face region overlays (only when not zoomed) 31 + if session.zoomFaceIndex == nil, let photo = session.selectedPhoto, !photo.faceRegions.isEmpty { 32 + faceOverlays(photo: photo, fittedSize: fitted) 33 + } 34 + } 35 + } 36 + .frame(width: fitted.width, height: fitted.height) 37 + .scaleEffect(zoomInfo.scale) 38 + .offset(zoomInfo.offset) 39 + .frame(width: geo.size.width, height: geo.size.height) 40 + .clipped() 41 + .animation(.easeInOut(duration: 0.3), value: session.zoomFaceIndex) 42 + } 21 43 } 22 44 45 + // Bottom bar overlay 23 46 if let photo = session.selectedPhoto { 24 47 VStack { 25 48 Spacer() ··· 34 57 35 58 Spacer() 36 59 60 + // Quality scores 61 + HStack(spacing: 10) { 62 + if let blur = photo.blurScore { 63 + HStack(spacing: 3) { 64 + Image(systemName: "scope") 65 + Text(String(format: "%.0f", blur)) 66 + } 67 + .foregroundStyle(isPhotoBlurry(photo) ? 
.orange : .white.opacity(0.6)) 68 + } 69 + 70 + if let fq = photo.faceQualityScore { 71 + HStack(spacing: 3) { 72 + Image(systemName: "face.smiling") 73 + Text(String(format: "%.0f%%", fq * 100)) 74 + } 75 + .foregroundStyle(.white.opacity(0.7)) 76 + } 77 + 78 + if !photo.faceRegions.isEmpty { 79 + HStack(spacing: 3) { 80 + Image(systemName: "person.crop.rectangle") 81 + Text("\(photo.faceRegions.count)") 82 + } 83 + .foregroundStyle(.white.opacity(0.6)) 84 + } 85 + 86 + // Group rank 87 + if let group = session.selectedGroup { 88 + let rank = groupRank(photo: photo, in: group) 89 + if let rank { 90 + HStack(spacing: 3) { 91 + Image(systemName: "number") 92 + Text("\(rank)/\(group.photos.count)") 93 + } 94 + .foregroundStyle(rank == 1 ? .green : .white.opacity(0.6)) 95 + } 96 + } 97 + } 98 + .font(.caption) 99 + 100 + // Blur badge 101 + if isPhotoBlurry(photo) { 102 + Label("Blurry", systemImage: "eye.slash.fill") 103 + .foregroundStyle(.orange) 104 + .font(.caption) 105 + } 106 + 37 107 HStack(spacing: 2) { 38 108 ForEach(1...5, id: \.self) { star in 39 109 Image(systemName: star <= photo.rating ? "star.fill" : "star") ··· 116 186 let behind = session.photosBehind(lookbehindCount) 117 187 cache.preloadPreviews(photos: behind + [photo] + ahead) 118 188 } 189 + } 190 + } 191 + 192 + // MARK: - Group ranking 193 + 194 + /// Ranks photo within its group by quality. Returns 1-based rank, or nil if no scores yet. 195 + private func groupRank(photo: Photo, in group: PhotoGroup) -> Int? { 196 + let scored = group.photos.filter { $0.blurScore != nil || $0.faceQualityScore != nil } 197 + guard scored.count >= 2 else { return nil } 198 + 199 + let ranked = scored.sorted { qualityScore($0, in: group) > qualityScore($1, in: group) } 200 + guard let idx = ranked.firstIndex(where: { $0.id == photo.id }) else { return nil } 201 + return idx + 1 202 + } 203 + 204 + /// Composite quality score for ranking within a group. 205 + /// Higher = better. 
Uses relative ranking within the group's score range. 206 + private func qualityScore(_ photo: Photo, in group: PhotoGroup) -> Double { 207 + var score = 0.0 208 + let peers = group.photos 209 + 210 + if let blur = photo.blurScore { 211 + let peerBlurs = peers.compactMap(\.blurScore) 212 + if let maxBlur = peerBlurs.max(), let minBlur = peerBlurs.min(), maxBlur > minBlur { 213 + score += ((blur - minBlur) / (maxBlur - minBlur)) * 0.5 214 + } else { 215 + score += 0.25 216 + } 217 + } 218 + 219 + if let fq = photo.faceQualityScore { 220 + score += fq * 0.5 221 + } else if photo.blurScore != nil { 222 + // No faces — blur gets full weight 223 + let peerBlurs = peers.compactMap(\.blurScore) 224 + if let maxBlur = peerBlurs.max(), let minBlur = peerBlurs.min(), maxBlur > minBlur { 225 + score += ((photo.blurScore! - minBlur) / (maxBlur - minBlur)) * 0.5 226 + } else { 227 + score += 0.25 228 + } 229 + } 230 + 231 + return score 232 + } 233 + 234 + // MARK: - Blur detection (relative within group) 235 + 236 + /// Uses relative ranking: a photo is blurry only if it's significantly softer 237 + /// than its group peers. For faces, uses face quality score directly. 
238 + private func isPhotoBlurry(_ photo: Photo) -> Bool { 239 + if !photo.faceRegions.isEmpty { 240 + guard let fq = photo.faceQualityScore else { return false } 241 + return fq < faceBlurThreshold 242 + } 243 + 244 + guard let blur = photo.blurScore, 245 + let group = session.selectedGroup else { return false } 246 + 247 + // Gather blur scores from group peers that have been analyzed 248 + let peerScores = group.photos.compactMap(\.blurScore) 249 + guard peerScores.count >= 2 else { return false } 250 + 251 + let median = peerScores.sorted()[peerScores.count / 2] 252 + // Only flag if this photo is less than 40% of the group median 253 + return blur < median * 0.4 254 + } 255 + 256 + // MARK: - Zoom calculations 257 + 258 + private struct ZoomInfo { 259 + let scale: CGFloat 260 + let offset: CGSize 261 + } 262 + 263 + private func currentZoomInfo(fittedSize: CGSize, containerSize: CGSize) -> ZoomInfo { 264 + guard let zoomIndex = session.zoomFaceIndex, 265 + let photo = session.selectedPhoto else { 266 + return ZoomInfo(scale: 1, offset: .zero) 267 + } 268 + 269 + if zoomIndex == -1 { 270 + // Center zoom — 2.5x 271 + return ZoomInfo(scale: 2.5, offset: .zero) 272 + } 273 + 274 + guard photo.faceRegions.indices.contains(zoomIndex) else { 275 + return ZoomInfo(scale: 1, offset: .zero) 276 + } 277 + 278 + let faceRect = photo.faceRegions[zoomIndex] 279 + // Vision coordinates: origin bottom-left, normalized 0-1 280 + // Calculate scale so the face takes up ~35% of the view width 281 + let faceW = faceRect.width 282 + let faceH = faceRect.height 283 + let scale = min(0.35 / max(faceW, faceH), 5.0) 284 + 285 + // Face center in normalized image coords (flip Y) 286 + let faceCenterX = faceRect.midX 287 + let faceCenterY = 1 - faceRect.midY 288 + 289 + // Face center in fitted image pixel coords 290 + let facePixelX = faceCenterX * fittedSize.width 291 + let facePixelY = faceCenterY * fittedSize.height 292 + 293 + // Image center in fitted coords 294 + let 
imageCenterX = fittedSize.width / 2 295 + let imageCenterY = fittedSize.height / 2 296 + 297 + // Offset to move face center to view center, then multiply by scale 298 + let offsetX = (imageCenterX - facePixelX) * scale 299 + let offsetY = (imageCenterY - facePixelY) * scale 300 + 301 + return ZoomInfo(scale: scale, offset: CGSize(width: offsetX, height: offsetY)) 302 + } 303 + 304 + private func fittedSize(image: CGSize, in container: CGSize) -> CGSize { 305 + let scaleW = container.width / image.width 306 + let scaleH = container.height / image.height 307 + let s = min(scaleW, scaleH) 308 + return CGSize(width: image.width * s, height: image.height * s) 309 + } 310 + 311 + // MARK: - Face overlays 312 + 313 + @ViewBuilder 314 + private func faceOverlays(photo: Photo, fittedSize: CGSize) -> some View { 315 + ForEach(0..<photo.faceRegions.count, id: \.self) { i in 316 + let faceRect = photo.faceRegions[i] 317 + // Convert Vision rect (bottom-left origin) to SwiftUI overlay coords (top-left origin) 318 + let x = faceRect.origin.x * fittedSize.width 319 + let y = (1 - faceRect.origin.y - faceRect.height) * fittedSize.height 320 + let w = faceRect.width * fittedSize.width 321 + let h = faceRect.height * fittedSize.height 322 + 323 + RoundedRectangle(cornerRadius: 3) 324 + .strokeBorder(Color.white.opacity(0.5), lineWidth: 1.5) 325 + .frame(width: w, height: h) 326 + .position(x: x + w / 2, y: y + h / 2) 119 327 } 120 328 } 121 329 }
+6 -2
cull/cull.xcodeproj/project.pbxproj
··· 23 23 0B0EC2962F722491004523FA /* PhotoImporter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0B0EC27E2F722491004523FA /* PhotoImporter.swift */; }; 24 24 0B0EC2972F722491004523FA /* PhotoExporter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0B0EC27D2F722491004523FA /* PhotoExporter.swift */; }; 25 25 0B0EC2982F722491004523FA /* ThumbnailCache.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0B0EC2812F722491004523FA /* ThumbnailCache.swift */; }; 26 + 0B0EC2A12F72570F004523FA /* icon.icon in Resources */ = {isa = PBXBuildFile; fileRef = 0B0EC2A02F72570F004523FA /* icon.icon */; }; 26 27 /* End PBXBuildFile section */ 27 28 28 29 /* Begin PBXFileReference section */ ··· 43 44 0B0EC2872F722491004523FA /* ImportView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImportView.swift; sourceTree = "<group>"; }; 44 45 0B0EC2882F722491004523FA /* PhotoViewer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoViewer.swift; sourceTree = "<group>"; }; 45 46 0B0EC2992F724FE5004523FA /* cull.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = cull.app; sourceTree = BUILT_PRODUCTS_DIR; }; 47 + 0B0EC2A02F72570F004523FA /* icon.icon */ = {isa = PBXFileReference; lastKnownFileType = folder.iconcomposer.icon; path = icon.icon; sourceTree = "<group>"; }; 46 48 /* End PBXFileReference section */ 47 49 48 50 /* Begin PBXFrameworksBuildPhase section */ ··· 59 61 0B0EC2612F722109004523FA = { 60 62 isa = PBXGroup; 61 63 children = ( 64 + 0B0EC2A02F72570F004523FA /* icon.icon */, 62 65 0B0EC2712F72210B004523FA /* Assets.xcassets */, 63 66 0B0EC2782F722491004523FA /* CullApp.swift */, 64 67 0B0EC27C2F722491004523FA /* Models */, ··· 165 168 buildActionMask = 2147483647; 166 169 files = ( 167 170 0B0EC2722F72210B004523FA /* Assets.xcassets in Resources */, 171 + 0B0EC2A12F72570F004523FA /* icon.icon in Resources */, 168 172 ); 169 173 
runOnlyForDeploymentPostprocessing = 0; 170 174 }; ··· 319 323 0B0EC2762F72210B004523FA /* Debug */ = { 320 324 isa = XCBuildConfiguration; 321 325 buildSettings = { 322 - ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 326 + ASSETCATALOG_COMPILER_APPICON_NAME = icon; 323 327 ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 324 328 CODE_SIGN_STYLE = Automatic; 325 329 COMBINE_HIDPI_IMAGES = YES; ··· 361 365 0B0EC2772F72210B004523FA /* Release */ = { 362 366 isa = XCBuildConfiguration; 363 367 buildSettings = { 364 - ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 368 + ASSETCATALOG_COMPILER_APPICON_NAME = icon; 365 369 ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 366 370 CODE_SIGN_STYLE = Automatic; 367 371 COMBINE_HIDPI_IMAGES = YES;
+16
cull/icon.icon/Assets/Subtract.svg
··· 1 + <svg width="835" height="533" viewBox="0 0 835 533" fill="none" xmlns="http://www.w3.org/2000/svg"> 2 + <g filter="url(#filter0_d_122_24)"> 3 + <path d="M22.8826 43.1101C24.4795 17.7553 46.3285 -1.50434 71.6834 0.0925453L790.971 45.3943C816.326 46.9912 835.586 68.8402 833.989 94.1951L811.199 456.047C809.602 481.401 787.753 500.661 762.398 499.064L43.1101 453.762C17.7554 452.165 -1.50432 430.316 0.0925424 404.962L22.8826 43.1101ZM720.307 193.733C685.382 193.733 657.068 222.047 657.068 256.972C657.068 291.898 685.382 320.212 720.307 320.212C755.233 320.211 783.546 291.898 783.546 256.972C783.546 222.047 755.233 193.733 720.307 193.733Z" fill="#D6A071"/> 4 + </g> 5 + <defs> 6 + <filter id="filter0_d_122_24" x="0" y="0" width="834.082" height="532.157" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB"> 7 + <feFlood flood-opacity="0" result="BackgroundImageFix"/> 8 + <feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/> 9 + <feOffset dy="33"/> 10 + <feComposite in2="hardAlpha" operator="out"/> 11 + <feColorMatrix type="matrix" values="0 0 0 0 0.665584 0 0 0 0 0.47187 0 0 0 0 0.303268 0 0 0 1 0"/> 12 + <feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_122_24"/> 13 + <feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_122_24" result="shape"/> 14 + </filter> 15 + </defs> 16 + </svg>
+27
cull/icon.icon/Assets/emoji_u1f39e 1 (1).svg
··· 1 + <svg width="566" height="566" viewBox="0 0 566 566" fill="none" xmlns="http://www.w3.org/2000/svg"> 2 + <g clip-path="url(#clip0_122_25)"> 3 + <g opacity="0.85"> 4 + <path d="M486.57 129.172L38.0752 211.201L80.4361 442.811L528.931 360.783L486.57 129.172Z" fill="#96A6E7"/> 5 + </g> 6 + <path d="M487.342 133.396L475.913 70.9055L27.418 152.934L38.8541 215.462L83.6288 207.272L124.445 430.436L79.6701 438.625L91.3523 502.498L539.847 420.47L528.158 356.559L483.383 364.748L442.567 141.585L487.342 133.396ZM441.427 107.263C440.805 103.862 443.065 100.59 446.466 99.9682L454.614 98.478C458.015 97.856 461.286 100.116 461.908 103.517L464.649 118.504C465.271 121.905 463.012 125.177 459.611 125.799L451.463 127.289C448.062 127.911 444.79 125.651 444.168 122.25L441.427 107.263ZM75.2812 189.718C75.9032 193.12 73.6434 196.391 70.2423 197.013L62.0946 198.503C58.6935 199.125 55.422 196.866 54.7999 193.464L52.0588 178.477C51.4367 175.076 53.6966 171.805 57.0977 171.183L65.2454 169.692C68.6465 169.07 71.918 171.33 72.5401 174.731L75.2812 189.718ZM125.585 464.758C126.207 468.159 123.947 471.431 120.546 472.053L112.399 473.543C108.997 474.165 105.726 471.905 105.104 468.504L102.356 453.479C101.734 450.078 103.994 446.807 107.395 446.185L115.542 444.694C118.944 444.072 122.215 446.332 122.837 449.733L125.585 464.758ZM491.724 382.265C491.102 378.864 493.362 375.592 496.763 374.97L504.911 373.48C508.312 372.858 511.583 375.118 512.205 378.519L514.947 393.506C515.569 396.907 513.309 400.179 509.908 400.801L501.76 402.291C498.359 402.913 495.087 400.653 494.465 397.252L491.724 382.265ZM398.147 115.179C397.525 111.778 399.785 108.506 403.186 107.884L411.334 106.394C414.735 105.772 418.007 108.032 418.629 111.433L421.37 126.42C421.992 129.821 419.732 133.093 416.331 133.715L408.183 135.205C404.782 135.827 401.51 133.567 400.888 130.166L398.147 115.179ZM354.905 123.088C354.283 119.686 356.543 116.415 359.944 115.793L368.091 114.303C371.493 113.681 374.764 115.941 375.386 119.342L378.127 
134.329C378.749 137.73 376.489 141.001 373.088 141.623L364.941 143.114C361.54 143.736 358.268 141.476 357.646 138.075L354.905 123.088ZM311.625 131.003C311.003 127.602 313.263 124.331 316.664 123.709L324.812 122.218C328.213 121.596 331.484 123.856 332.106 127.257L334.848 142.245C335.47 145.646 333.21 148.917 329.809 149.539L321.661 151.029C318.26 151.651 314.988 149.392 314.366 145.99L311.625 131.003ZM268.383 138.912C267.761 135.511 270.021 132.24 273.422 131.618L281.569 130.127C284.97 129.505 288.242 131.765 288.864 135.166L291.605 150.153C292.227 153.555 289.967 156.826 286.566 157.448L278.419 158.938C275.018 159.56 271.746 157.3 271.124 153.899L268.383 138.912ZM225.103 146.828C224.481 143.427 226.741 140.155 230.142 139.533L238.29 138.043C241.691 137.421 244.962 139.681 245.584 143.082L248.325 158.069C248.947 161.47 246.688 164.742 243.287 165.364L235.139 166.854C231.738 167.476 228.466 165.216 227.844 161.815L225.103 146.828ZM181.861 154.737C181.239 151.336 183.499 148.064 186.9 147.442L195.047 145.952C198.448 145.33 201.72 147.59 202.342 150.991L205.083 165.978C205.705 169.379 203.445 172.651 200.044 173.273L191.896 174.763C188.495 175.385 185.224 173.125 184.602 169.724L181.861 154.737ZM138.581 162.653C137.959 159.252 140.219 155.98 143.62 155.358L151.768 153.868C155.169 153.246 158.44 155.506 159.062 158.907L161.803 173.894C162.425 177.295 160.165 180.566 156.764 181.189L148.617 182.679C145.216 183.301 141.944 181.041 141.322 177.64L138.581 162.653ZM95.3385 170.562C94.7165 167.16 96.9764 163.889 100.377 163.267L108.525 161.777C111.926 161.155 115.198 163.414 115.82 166.816L118.561 181.803C119.183 185.204 116.923 188.475 113.522 189.097L105.374 190.588C101.973 191.21 98.7017 188.95 98.0797 185.549L95.3385 170.562ZM168.865 456.842C169.487 460.243 167.227 463.515 163.826 464.137L155.678 465.627C152.277 466.249 149.006 463.989 148.384 460.588L145.636 445.564C145.014 442.163 147.273 438.891 150.675 438.269L158.822 436.779C162.223 436.157 165.495 438.417 166.117 
441.818L168.865 456.842ZM212.107 448.933C212.729 452.334 210.469 455.606 207.068 456.228L198.921 457.718C195.52 458.34 192.248 456.08 191.626 452.679L188.878 437.655C188.256 434.254 190.516 430.982 193.917 430.36L202.065 428.87C205.466 428.248 208.737 430.508 209.359 433.909L212.107 448.933ZM255.387 441.018C256.009 444.419 253.749 447.69 250.348 448.312L242.2 449.802C238.799 450.424 235.528 448.165 234.906 444.764L232.158 429.739C231.536 426.338 233.796 423.066 237.197 422.444L245.344 420.954C248.745 420.332 252.017 422.592 252.639 425.993L255.387 441.018ZM298.629 433.109C299.251 436.51 296.992 439.781 293.59 440.403L285.443 441.894C282.042 442.516 278.77 440.256 278.148 436.855L275.4 421.83C274.778 418.429 277.038 415.157 280.439 414.535L288.587 413.045C291.988 412.423 295.259 414.683 295.881 418.084L298.629 433.109ZM341.909 425.193C342.531 428.594 340.271 431.866 336.87 432.488L328.723 433.978C325.321 434.6 322.05 432.34 321.428 428.939L318.68 413.914C318.058 410.513 320.318 407.242 323.719 406.62L331.866 405.129C335.268 404.507 338.539 406.767 339.161 410.168L341.909 425.193ZM385.151 417.284C385.774 420.685 383.514 423.957 380.113 424.579L371.965 426.069C368.564 426.691 365.292 424.431 364.67 421.03L361.922 406.005C361.3 402.604 363.56 399.333 366.961 398.711L375.109 397.221C378.51 396.598 381.781 398.858 382.404 402.259L385.151 417.284ZM428.431 409.368C429.053 412.769 426.793 416.041 423.392 416.663L415.245 418.153C411.844 418.775 408.572 416.515 407.95 413.114L405.202 398.09C404.58 394.689 406.84 391.417 410.241 390.795L418.389 389.305C421.79 388.683 425.061 390.943 425.683 394.344L428.431 409.368ZM471.674 401.459C472.296 404.86 470.036 408.132 466.635 408.754L458.487 410.244C455.086 410.866 451.814 408.606 451.192 405.205L448.444 390.181C447.822 386.78 450.082 383.508 453.483 382.886L461.631 381.396C465.032 380.774 468.304 383.034 468.926 386.435L471.674 401.459ZM468.508 367.469L139.313 427.678L98.4971 204.514L427.692 144.306L468.508 367.469Z" 
fill="url(#paint0_linear_122_25)"/> 7 + <path d="M29.8848 152.483L475.913 70.9054" stroke="#757575" stroke-width="0.9587" stroke-miterlimit="10"/> 8 + <path d="M429.056 144.79L99.8604 204.999L140.676 428.163L469.872 367.954L429.056 144.79Z" fill="url(#paint1_linear_122_25)"/> 9 + </g> 10 + <defs> 11 + <linearGradient id="paint0_linear_122_25" x1="121.991" y1="370.17" x2="365.536" y2="244.378" gradientUnits="userSpaceOnUse"> 12 + <stop stop-color="#424242"/> 13 + <stop offset="0.3935" stop-color="#44545B"/> 14 + <stop offset="0.5171" stop-color="#455A64"/> 15 + <stop offset="0.7406" stop-color="#445157"/> 16 + <stop offset="1" stop-color="#424242"/> 17 + </linearGradient> 18 + <linearGradient id="paint1_linear_122_25" x1="156.247" y1="371.236" x2="366.492" y2="232.656" gradientUnits="userSpaceOnUse"> 19 + <stop stop-color="#FFCC80" stop-opacity="0"/> 20 + <stop offset="0.4864" stop-color="#FFCC80" stop-opacity="0.18"/> 21 + <stop offset="1" stop-color="#FFCC80" stop-opacity="0"/> 22 + </linearGradient> 23 + <clipPath id="clip0_122_25"> 24 + <rect width="486.33" height="486.33" fill="white" transform="translate(0 87.497) rotate(-10.3647)"/> 25 + </clipPath> 26 + </defs> 27 + </svg>
+16
cull/icon.icon/Assets/emoji_u1f52a 1.svg
··· 1 + <svg width="820" height="820" viewBox="0 0 820 820" fill="none" xmlns="http://www.w3.org/2000/svg"> 2 + <g clip-path="url(#clip0_122_12)"> 3 + <path d="M320.499 315.776L294.225 380.552C294.225 380.552 300.291 382.13 311.317 387.834C322.327 393.489 331.293 406.589 331.293 406.589C331.293 406.589 404.813 375.911 404.346 374.457C403.879 373.004 363.644 333.447 363.644 333.447L324.148 314.657L320.499 315.776Z" fill="#B0B0B0"/> 4 + <path d="M331.574 468.177L313.754 477.591C313.754 477.591 302.605 495.123 307.106 501.48C310.941 506.929 372.23 538.652 413.494 556.125C463.664 577.365 557.392 607.911 638.819 615.313C715.792 622.328 760.863 613.725 776.857 607.944C791.317 602.71 794.382 591.624 787.899 589.912C781.417 588.2 567.252 510.516 567.252 510.516L331.574 468.177Z" fill="#B0B0B0"/> 5 + <path d="M363.58 333.414L327.252 400.833C327.252 400.833 336.375 415.753 330.631 434.969C324.871 454.136 313.558 477.814 313.558 477.814C313.558 477.814 390.042 524.057 472.368 552.388C588.666 592.464 666.028 603.684 716.363 602.315C762.884 601.05 789.74 590.817 789.74 590.817C789.74 590.817 723.313 537.281 656.88 494.704C614.626 467.661 486.277 394.738 445.389 373.777C404.549 352.8 363.58 333.414 363.58 333.414Z" fill="#E0E0E0"/> 6 + <path d="M70.8147 250.306L54.1888 243.142C54.1888 243.142 39.0151 280.086 48.0988 292.72C56.0096 303.7 124.604 329.549 186.359 353.907C236.289 373.567 276.274 390.222 285.52 389.389C296.393 388.461 300.063 377.928 300.063 377.928L70.8147 250.306Z" fill="#474C4F"/> 7 + <path d="M91.3188 195.187C76.6937 196.413 56.5963 230.342 51.5334 252.012C46.4705 273.681 60.913 281.868 80.9333 290.614C90.1909 294.641 299.848 378.424 299.848 378.424C299.848 378.424 311.171 358.271 317.468 345.932C324.935 331.08 333.705 316.29 332.032 311.75C329.483 304.979 232.646 261.428 195.766 241.798C152.262 218.684 103.978 194.166 91.3188 195.187Z" fill="#5E6367"/> 8 + <path d="M112.807 236.279C107.459 233.775 101.064 235.83 96.7315 242.139C92.447 248.433 94.4512 257.997 
102.048 260.848C109.645 263.698 117.754 259.328 119.947 253.012C122.124 246.647 119.035 239.195 112.807 236.279Z" fill="#B0B0B0"/> 9 + <path d="M287.268 317.314C279.96 314.532 273.274 319.673 271.524 325.206C269.759 330.69 271.589 338.547 278.014 341.079C284.44 343.611 291.33 338.938 293.495 333.7C296.04 327.483 293.294 319.601 287.268 317.314Z" fill="#B0B0B0"/> 10 + </g> 11 + <defs> 12 + <clipPath id="clip0_122_12"> 13 + <rect width="651.329" height="651.329" fill="white" transform="translate(0 199.249) rotate(-17.813)"/> 14 + </clipPath> 15 + </defs> 16 + </svg>
+74
cull/icon.icon/icon.json
··· 1 + { 2 + "fill-specializations" : [ 3 + { 4 + "value" : { 5 + "solid" : "display-p3:0.12880,0.13019,0.13434,1.00000" 6 + } 7 + }, 8 + { 9 + "appearance" : "dark", 10 + "value" : { 11 + "solid" : "display-p3:0.12880,0.13019,0.13434,1.00000" 12 + } 13 + } 14 + ], 15 + "groups" : [ 16 + { 17 + "layers" : [ 18 + { 19 + "blend-mode" : "normal", 20 + "fill" : "none", 21 + "glass" : true, 22 + "hidden" : false, 23 + "image-name" : "emoji_u1f52a 1.svg", 24 + "name" : "emoji_u1f52a 1", 25 + "opacity" : 1, 26 + "position" : { 27 + "scale" : 1, 28 + "translation-in-points" : [ 29 + -97.0625, 30 + -105.04424700203003 31 + ] 32 + } 33 + }, 34 + { 35 + "image-name" : "emoji_u1f39e 1 (1).svg", 36 + "name" : "emoji_u1f39e 1 (1)", 37 + "position" : { 38 + "scale" : 1.23, 39 + "translation-in-points" : [ 40 + -0.7779750000000831, 41 + 23.765625 42 + ] 43 + } 44 + }, 45 + { 46 + "glass" : true, 47 + "image-name" : "Subtract.svg", 48 + "name" : "Subtract", 49 + "position" : { 50 + "scale" : 1.07, 51 + "translation-in-points" : [ 52 + 0.4913672408982279, 53 + 106.2578125 54 + ] 55 + } 56 + } 57 + ], 58 + "shadow" : { 59 + "kind" : "neutral", 60 + "opacity" : 0.5 61 + }, 62 + "translucency" : { 63 + "enabled" : true, 64 + "value" : 0.5 65 + } 66 + } 67 + ], 68 + "supported-platforms" : { 69 + "circles" : [ 70 + "watchOS" 71 + ], 72 + "squares" : "shared" 73 + } 74 + }