// forked from jollywhoppers.com/witchsky.app
// Bluesky app fork with some witchin' additions 💫
1import {
2 cacheDirectory,
3 copyAsync,
4 deleteAsync,
5 makeDirectoryAsync,
6 moveAsync,
7} from 'expo-file-system/legacy'
8import {
9 type Action,
10 type ActionCrop,
11 manipulateAsync,
12 SaveFormat,
13} from 'expo-image-manipulator'
14import {type BlobRef} from '@atproto/api'
15import {transformExif} from '@uwx/exif-be-gone-web'
16import {fromByteArray, toByteArray} from 'base64-js'
17import {nanoid} from 'nanoid/non-secure'
18
19import {POST_IMG_MAX} from '#/lib/constants'
20import {getImageDim} from '#/lib/media/manip'
21import {openCropper} from '#/lib/media/picker'
22import {type PickerImage} from '#/lib/media/picker.shared'
23import {getDataUriSize} from '#/lib/media/util'
24import {isCancelledError} from '#/lib/strings/errors'
25import {IS_NATIVE, IS_WEB} from '#/env'
26
/** Transformations the user can request on a composer image. */
export type ImageTransformation = {
  crop?: ActionCrop['crop']
}

/** Basic metadata for an image file on disk or as a data URI. */
export type ImageMeta = {
  path: string
  width: number
  height: number
  mime: string
}

/** Image metadata plus a unique id (generated via nanoid). */
export type ImageSource = ImageMeta & {
  id: string
}

type ComposerImageBase = {
  alt: string
  source: ImageSource
  blobRef?: BlobRef
  /** Original localRef path from draft, if editing an existing draft. Used to reuse the same storage key. */
  localRefPath?: string
}
/** Composer image with no transformation applied. */
type ComposerImageWithoutTransformation = ComposerImageBase & {
  transformed?: undefined
  manips?: undefined
}
/** Composer image with a transformed variant and the manipulations that produced it. */
type ComposerImageWithTransformation = ComposerImageBase & {
  transformed: ImageMeta
  manips?: ImageTransformation
}

/**
 * A composer image, discriminated by the presence of `transformed`:
 * either the original alone, or the original plus a transformed variant.
 */
export type ComposerImage =
  | ComposerImageWithoutTransformation
  | ComposerImageWithTransformation
62let _imageCacheDirectory: string
63
64function getImageCacheDirectory(): string | null {
65 if (IS_NATIVE) {
66 return (_imageCacheDirectory ??= joinPath(cacheDirectory!, 'bsky-composer'))
67 }
68
69 return null
70}
71
72export async function createComposerImage(
73 raw: ImageMeta,
74): Promise<ComposerImageWithoutTransformation> {
75 return {
76 alt: '',
77 source: {
78 id: nanoid(),
79 // Copy to cache to ensure file survives OS temporary file cleanup
80 path: await copyToCache(raw.path),
81 width: raw.width,
82 height: raw.height,
83 mime: raw.mime,
84 },
85 }
86}
87
/** Description of an image the composer is initialized with. */
export type InitialImage = {
  uri: string
  width: number
  height: number
  altText?: string
  blobRef?: BlobRef
}
95
96export function createInitialImages(
97 uris: InitialImage[] = [],
98): ComposerImageWithoutTransformation[] {
99 return uris.map(({uri, width, height, altText = '', blobRef}) => {
100 return {
101 alt: altText,
102 source: {
103 id: nanoid(),
104 path: uri,
105 width: width,
106 height: height,
107 mime: 'image/jpeg',
108 },
109 blobRef,
110 }
111 })
112}
113
114export async function pasteImage(
115 uri: string,
116): Promise<ComposerImageWithoutTransformation> {
117 const {width, height} = await getImageDim(uri)
118 const match = /^data:(.+?);/.exec(uri)
119
120 return {
121 alt: '',
122 source: {
123 id: nanoid(),
124 path: uri,
125 width: width,
126 height: height,
127 mime: match ? match[1] : 'image/jpeg',
128 },
129 }
130}
131
132export async function cropImage(img: ComposerImage): Promise<ComposerImage> {
133 if (!IS_NATIVE) {
134 return img
135 }
136
137 const source = img.source
138
139 // @todo: we're always passing the original image here, does image-cropper
140 // allows for setting initial crop dimensions? -mary
141 try {
142 const cropped = await openCropper({
143 imageUri: source.path,
144 })
145
146 return {
147 alt: img.alt,
148 source: source,
149 transformed: {
150 path: await moveIfNecessary(cropped.path),
151 width: cropped.width,
152 height: cropped.height,
153 mime: cropped.mime,
154 },
155 }
156 } catch (e) {
157 if (!isCancelledError(e)) {
158 return img
159 }
160
161 throw e
162 }
163}
164
165export async function manipulateImage(
166 img: ComposerImage,
167 trans: ImageTransformation,
168): Promise<ComposerImage> {
169 const rawActions: (Action | undefined)[] = [trans.crop && {crop: trans.crop}]
170
171 const actions = rawActions.filter((a): a is Action => a !== undefined)
172
173 if (actions.length === 0) {
174 if (img.transformed === undefined) {
175 return img
176 }
177
178 return {alt: img.alt, source: img.source}
179 }
180
181 const source = img.source
182 const result = await manipulateAsync(source.path, actions, {
183 format: SaveFormat.PNG,
184 })
185
186 return {
187 alt: img.alt,
188 source: img.source,
189 transformed: {
190 path: await moveIfNecessary(result.uri),
191 width: result.width,
192 height: result.height,
193 mime: 'image/png',
194 },
195 manips: trans,
196 }
197}
198
199export function resetImageManipulation(
200 img: ComposerImage,
201): ComposerImageWithoutTransformation {
202 if (img.transformed !== undefined) {
203 return {alt: img.alt, source: img.source}
204 }
205
206 return img
207}
208
209async function bypassCompression(
210 img: ComposerImage,
211): Promise<PickerImage | undefined> {
212 // TODO: use expo-file-system instead of working directly in memory
213
214 function dataUriToUint8Array(uri: string) {
215 const base64 = uri.split(',')[1]
216 return toByteArray(base64)
217 }
218
219 const source = img.transformed || img.source
220 if (
221 source.width > POST_IMG_MAX.width ||
222 source.height > POST_IMG_MAX.height
223 ) {
224 return undefined
225 }
226
227 if (
228 ![
229 'image/jpeg',
230 'image/png',
231 'image/webp',
232 'image/avif',
233 'image/gif',
234 ].includes(source.mime)
235 ) {
236 return undefined
237 }
238
239 let data: Uint8Array
240
241 const path = source.path
242 // convert path to data URI if it is not already
243 if (!path.startsWith('data:')) {
244 try {
245 await fetch(path)
246 const response = await fetch(path)
247 data = new Uint8Array(await response.arrayBuffer())
248 if (data.byteLength > POST_IMG_MAX.size) {
249 return undefined
250 }
251 } catch (e) {
252 // Fetch failed, likely due to CORS. Return undefined to trigger normal compression flow and error handling.
253 return undefined
254 }
255 } else {
256 if (getDataUriSize(path) > POST_IMG_MAX.size) {
257 return undefined
258 }
259 data = new Uint8Array(dataUriToUint8Array(path).buffer)
260 }
261
262 try {
263 data = await transformExif(data)
264 } catch (err) {
265 console.error(
266 'Failed to transform EXIF data, proceeding with original image',
267 err,
268 )
269 return undefined
270 }
271
272 const dataUri = arrayBufferToDataUri(data, source.mime)
273 return {
274 path: dataUri,
275 width: source.width,
276 height: source.height,
277 mime: source.mime,
278 size: getDataUriSize(dataUri),
279 }
280}
281
/**
 * Compresses a composer image to WEBP so it fits within the upload limits.
 *
 * Strategy: binary-search the quality percentage for the highest quality
 * whose output still fits under the byte limit. If quality would have to
 * drop below ~25%, reset the search and shrink the image dimensions by 20%
 * instead, retrying up to 4 times.
 *
 * @param img Image to compress; the transformed variant is used if present.
 * @param options.highResolution Allow up to 4000px on the long edge instead of the default max.
 * @param options.increasedBlobSizeLimit Allow up to 2,000,000 bytes of output instead of the default max.
 * @returns The compressed picker image (WEBP data, moved into our cache dir).
 * @throws Error when no quality/dimension combination fits within the limits.
 */
export async function compressImage(
  img: ComposerImage,
  options?: {
    highResolution?: boolean
    increasedBlobSizeLimit?: boolean
  },
): Promise<PickerImage> {
  // Fast path: image may already satisfy all limits without re-encoding.
  const res = await bypassCompression(img)
  if (res) {
    return res
  }

  const source = img.transformed || img.source
  const highResolution = options?.highResolution ?? false
  let attempts = 0
  let maxDimension = highResolution ? 4000 : POST_IMG_MAX.width
  let maxBytes = options?.increasedBlobSizeLimit ? 2000000 : POST_IMG_MAX.size

  // Binary-search bounds over the quality percentage; max bound is exclusive.
  let minQualityPercentage = 0
  let maxQualityPercentage = 101 // exclusive
  let newDataUri

  while (maxQualityPercentage - minQualityPercentage > 1) {
    if (attempts >= 4) break

    const [w, h] = containImageRes(source.width, source.height, maxDimension)
    const qualityPercentage = Math.round(
      (maxQualityPercentage + minQualityPercentage) / 2,
    )

    /*
     * In the event the image doesn't compress well, we want to avoid
     * unecessary iterations. In this case, binary search will check 51, 26,
     * 13(rounded). We don't want to go below 25, so if we've halved to 13,
     * reset the loop and reduce the image dimensions instead.
     */
    if (qualityPercentage <= 13) {
      minQualityPercentage = 0
      maxQualityPercentage = 101
      attempts++
      // 4000px → 3200px → 2560px → 2048px → ~1638px
      maxDimension = Math.floor(maxDimension * 0.8)
      continue
    }

    const res = await manipulateAsync(
      source.path,
      [{resize: {width: w, height: h}}],
      {
        compress: qualityPercentage / 100,
        format: SaveFormat.WEBP,
        base64: true,
      },
    )

    const base64 = res.base64
    const size = base64 ? getDataUriSize(base64) : 0
    if (base64 && size <= maxBytes) {
      // Fits: record this candidate and search for a higher quality.
      minQualityPercentage = qualityPercentage
      newDataUri = {
        path: await moveIfNecessary(res.uri),
        width: res.width,
        height: res.height,
        mime: 'image/webp',
        size,
      }
    } else {
      // Too big (or no base64 output): search for a lower quality.
      maxQualityPercentage = qualityPercentage
    }
  }

  if (newDataUri) {
    return newDataUri
  }

  throw new Error(`Unable to compress image`)
}
359
360async function moveIfNecessary(from: string) {
361 const cacheDir = IS_NATIVE && getImageCacheDirectory()
362
363 if (cacheDir && !from.startsWith(cacheDir)) {
364 const to = joinPath(cacheDir, nanoid(36))
365
366 await makeDirectoryAsync(cacheDir, {intermediates: true})
367 await moveAsync({from, to})
368
369 return to
370 }
371
372 return from
373}
374
375/**
376 * Copy a file from a potentially temporary location to our cache directory.
377 * This ensures picker files are available for draft saving even if the original
378 * temporary files are cleaned up by the OS.
379 *
380 * On web, converts blob URLs to data URIs immediately to prevent revocation issues.
381 */
382async function copyToCache(from: string): Promise<string> {
383 // Data URIs don't need any conversion
384 if (from.startsWith('data:')) {
385 return from
386 }
387
388 if (IS_WEB) {
389 // Web: convert blob URLs to data URIs before they can be revoked
390 if (from.startsWith('blob:')) {
391 try {
392 const response = await fetch(from)
393 const blob = await response.blob()
394 return await blobToDataUri(blob)
395 } catch (e) {
396 // Blob URL was likely revoked, return as-is for downstream error handling
397 return from
398 }
399 }
400 // Other URLs on web don't need conversion
401 return from
402 }
403
404 // Native: copy to cache directory to survive OS temp file cleanup
405 const cacheDir = getImageCacheDirectory()
406 if (!cacheDir || from.startsWith(cacheDir)) {
407 return from
408 }
409
410 const to = joinPath(cacheDir, nanoid(36))
411 await makeDirectoryAsync(cacheDir, {intermediates: true})
412
413 let normalizedFrom = from
414 if (!from.startsWith('file://') && from.startsWith('/')) {
415 normalizedFrom = `file://${from}`
416 }
417
418 await copyAsync({from: normalizedFrom, to})
419 return to
420}
421
422/**
423 * Convert a Blob to a data URI
424 */
425function blobToDataUri(blob: Blob): Promise<string> {
426 return new Promise((resolve, reject) => {
427 const reader = new FileReader()
428 reader.onloadend = () => {
429 if (typeof reader.result === 'string') {
430 resolve(reader.result)
431 } else {
432 reject(new Error('Failed to convert blob to data URI'))
433 }
434 }
435 reader.onerror = () => reject(reader.error)
436 reader.readAsDataURL(blob)
437 })
438}
439
440function arrayBufferToDataUri(
441 buffer: Uint8Array | ArrayBufferLike,
442 mime: string,
443): string {
444 const base64 = fromByteArray(
445 buffer instanceof Uint8Array ? buffer : new Uint8Array(buffer),
446 )
447 return `data:${mime};base64,${base64}`
448}
449
450/** Purge files that were created to accomodate image manipulation */
451export async function purgeTemporaryImageFiles() {
452 const cacheDir = IS_NATIVE && getImageCacheDirectory()
453
454 if (cacheDir) {
455 await deleteAsync(cacheDir, {idempotent: true})
456 await makeDirectoryAsync(cacheDir)
457 }
458}
459
460function joinPath(a: string, b: string) {
461 if (a.endsWith('/')) {
462 if (b.startsWith('/')) {
463 return a.slice(0, -1) + b
464 }
465 return a + b
466 } else if (b.startsWith('/')) {
467 return a + b
468 }
469 return a + '/' + b
470}
471
472function containImageRes(
473 w: number,
474 h: number,
475 max: number,
476): [width: number, height: number] {
477 let scale = 1
478
479 if (w > max || h > max) {
480 scale = w > h ? max / w : max / h
481 w = Math.floor(w * scale)
482 h = Math.floor(h * scale)
483 }
484
485 return [w, h]
486}