A fork of kaneo (a minimalist kanban board) used to experiment with adding a tangled integration.
github.com/usekaneo/kaneo
import { Readable } from "node:stream";
import {
  GetObjectCommand,
  PutObjectCommand,
  S3Client,
  type S3ClientConfig,
} from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
import { createId } from "@paralleldrive/cuid2";
import { config } from "dotenv-mono";

config();

const DEFAULT_MAX_IMAGE_UPLOAD_BYTES = 10 * 1024 * 1024;
const DEFAULT_PRESIGN_TTL_SECONDS = 300;

const allowedImageMimeTypes = new Set([
  "image/apng",
  "image/avif",
  "image/gif",
  "image/heic",
  "image/heif",
  "image/jpeg",
  "image/jpg",
  "image/png",
  "image/svg+xml",
  "image/webp",
]);

export function isImageContentType(contentType: string) {
  return allowedImageMimeTypes.has(contentType.toLowerCase());
}

type UploadSurface = "description" | "comment";

type StorageConfig = {
  endpoint: string;
  region: string;
  bucket: string;
  accessKeyId: string;
  secretAccessKey: string;
  publicBaseUrl?: string;
  forcePathStyle: boolean;
  maxImageUploadBytes: number;
  presignTtlSeconds: number;
};

type TaskImageUploadContext = {
  workspaceId: string;
  projectId: string;
  taskId: string;
  surface: UploadSurface;
  filename: string;
  contentType: string;
};

type TaskImageUploadUrl = {
  key: string;
  uploadUrl: string;
  headers: Record<string, string>;
};

type AssetObject = {
  body: unknown;
  contentType: string | undefined;
  contentLength: number | undefined;
  etag: string | undefined;
  lastModified: Date | undefined;
};

let clientCache:
  | {
      cacheKey: string;
      client: S3Client;
    }
  | undefined;

function env(name: string) {
  return process.env[name]?.trim() || "";
}

export function parseBoolean(value: string | undefined, fallback: boolean) {
  if (value === undefined || value.trim() === "") return fallback;
  return value.trim().toLowerCase() === "true";
}

export function parsePositiveInt(value: string | undefined, fallback: number) {
  const parsed = Number.parseInt(value?.trim() || "", 10);
  if (!Number.isFinite(parsed) || parsed <= 0) return fallback;
  return parsed;
}
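
// A quick illustration of the env parsing behavior above (the values shown
// here are hypothetical, not required settings):
//
//   parseBoolean("TRUE", false)   // => true  (case-insensitive match on "true")
//   parseBoolean("1", false)      // => false (only "true" enables the flag)
//   parsePositiveInt("25", 10)    // => 25
//   parsePositiveInt("-5", 10)    // => 10    (non-positive falls back)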

function getStorageConfig(): StorageConfig {
  const endpoint = env("S3_ENDPOINT");
  const bucket = env("S3_BUCKET");
  const accessKeyId = env("S3_ACCESS_KEY_ID");
  const secretAccessKey = env("S3_SECRET_ACCESS_KEY");

  if (!endpoint || !bucket || !accessKeyId || !secretAccessKey) {
    throw new Error(
      "S3 uploads are not configured. Set S3_ENDPOINT, S3_BUCKET, S3_ACCESS_KEY_ID, and S3_SECRET_ACCESS_KEY.",
    );
  }

  return {
    endpoint,
    region: env("S3_REGION") || "us-east-1",
    bucket,
    accessKeyId,
    secretAccessKey,
    publicBaseUrl: env("S3_PUBLIC_BASE_URL") || undefined,
    forcePathStyle: parseBoolean(process.env.S3_FORCE_PATH_STYLE, true),
    maxImageUploadBytes: parsePositiveInt(
      process.env.S3_MAX_IMAGE_UPLOAD_BYTES,
      DEFAULT_MAX_IMAGE_UPLOAD_BYTES,
    ),
    presignTtlSeconds: parsePositiveInt(
      process.env.S3_PRESIGN_TTL_SECONDS,
      DEFAULT_PRESIGN_TTL_SECONDS,
    ),
  };
}
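
// Example environment for local development (a sketch; the endpoint, bucket,
// and credentials below are placeholders, e.g. a local MinIO instance):
//
//   S3_ENDPOINT=http://localhost:9000
//   S3_BUCKET=kaneo-assets
//   S3_ACCESS_KEY_ID=minioadmin
//   S3_SECRET_ACCESS_KEY=minioadmin
//   S3_REGION=us-east-1                 (optional, defaults to us-east-1)
//   S3_PUBLIC_BASE_URL=                 (optional)
//   S3_FORCE_PATH_STYLE=true            (optional, defaults to true)
//   S3_MAX_IMAGE_UPLOAD_BYTES=10485760  (optional, defaults to 10 MiB)
//   S3_PRESIGN_TTL_SECONDS=300          (optional, defaults to 300)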

function getMaxImageUploadBytes() {
  return parsePositiveInt(
    process.env.S3_MAX_IMAGE_UPLOAD_BYTES,
    DEFAULT_MAX_IMAGE_UPLOAD_BYTES,
  );
}

function getClient(config: StorageConfig) {
  const cacheKey = JSON.stringify({
    endpoint: config.endpoint,
    region: config.region,
    accessKeyId: config.accessKeyId,
    bucket: config.bucket,
    forcePathStyle: config.forcePathStyle,
  });

  if (clientCache?.cacheKey === cacheKey) {
    return clientCache.client;
  }

  const clientConfig: S3ClientConfig = {
    endpoint: config.endpoint,
    region: config.region,
    forcePathStyle: config.forcePathStyle,
    // Avoid auto-injecting checksum params for presigned PUT URLs. Some
    // S3-compatible providers (e.g. Garage/R2) reject mismatched hoisted CRCs.
    requestChecksumCalculation: "WHEN_REQUIRED",
    credentials: {
      accessKeyId: config.accessKeyId,
      secretAccessKey: config.secretAccessKey,
    },
  };

  const client = new S3Client(clientConfig);
  clientCache = { cacheKey, client };
  return client;
}

export function sanitizePathSegment(value: string) {
  return (
    value
      .toLowerCase()
      .replace(/[^a-z0-9._-]+/g, "-")
      .replace(/-{2,}/g, "-")
      .replace(/^-+|-+$/g, "") || "file"
  );
}

export function getFileExtension(filename: string) {
  const normalized = filename.trim();
  const extension = normalized.includes(".")
    ? normalized.split(".").pop() || ""
    : "";

  // Without this guard, sanitizePathSegment("") falls back to "file" and
  // extensionless uploads would end up keyed as "<base>.file".
  if (!extension) return "";

  return sanitizePathSegment(extension).slice(0, 12);
}
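
// Sample transformations (the filenames are illustrative):
//
//   sanitizePathSegment("Screen Shot 2024.PNG") // => "screen-shot-2024.png"
//   sanitizePathSegment("///")                  // => "file" (fallback)
//   getFileExtension("photo.JPEG")              // => "jpeg"
//   getFileExtension("README")                  // => "" (no extension)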

export function buildObjectKeyPrefix(
  context: Omit<TaskImageUploadContext, "filename" | "contentType">,
) {
  const surfaceFolder =
    context.surface === "comment" ? "comments" : "descriptions";

  return [
    "workspace",
    sanitizePathSegment(context.workspaceId),
    "project",
    sanitizePathSegment(context.projectId),
    "task",
    sanitizePathSegment(context.taskId),
    surfaceFolder,
  ].join("/");
}

export function buildObjectKey(context: TaskImageUploadContext) {
  const extension = getFileExtension(context.filename);
  const objectKeyPrefix = buildObjectKeyPrefix(context);
  const timestamp = Date.now();
  const randomId = createId();

  const baseName = sanitizePathSegment(
    context.filename.replace(/\.[^/.]+$/, "") || "image",
  ).slice(0, 64);

  const fileName = extension
    ? `${baseName}-${timestamp}-${randomId}.${extension}`
    : `${baseName}-${timestamp}-${randomId}`;

  return `${objectKeyPrefix}/${fileName}`;
}
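
// Keys produced above look like the following (identifiers invented for the
// example; the trailing id is a cuid2, which makes collisions between
// same-named uploads effectively impossible):
//
//   workspace/ws-123/project/proj-456/task/task-789/comments/screenshot-1717171717171-tz4a98xxat96iws9zmbrgj3a.png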

// Note: this validates presence and size only; MIME allow-listing is a
// separate step via isImageContentType.
export function validateTaskAssetUploadInput(
  contentType: string,
  size: number,
) {
  const maxImageUploadBytes = getMaxImageUploadBytes();

  if (!contentType.trim()) {
    throw new Error("A valid content type is required.");
  }

  if (size <= 0) {
    throw new Error("Upload size must be greater than zero.");
  }

  if (size > maxImageUploadBytes) {
    throw new Error(
      `Upload exceeds the maximum upload size of ${Math.floor(maxImageUploadBytes / (1024 * 1024))}MB.`,
    );
  }
}
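
// With the default 10 MiB cap, for example:
//
//   validateTaskAssetUploadInput("image/png", 5 * 1024 * 1024);  // passes
//   validateTaskAssetUploadInput("image/png", 11 * 1024 * 1024); // throws
//   validateTaskAssetUploadInput("", 1024);                      // throws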

export async function createTaskImageUploadUrl(
  context: TaskImageUploadContext,
): Promise<TaskImageUploadUrl> {
  const config = getStorageConfig();
  const client = getClient(config);
  const key = buildObjectKey(context);

  const command = new PutObjectCommand({
    Bucket: config.bucket,
    Key: key,
    ContentType: context.contentType,
  });

  const uploadUrl = await getSignedUrl(client, command, {
    expiresIn: config.presignTtlSeconds,
  });

  return {
    key,
    uploadUrl,
    headers: {
      "Content-Type": context.contentType,
    },
  };
}
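
// A minimal sketch of the two-step upload flow, assuming the presign result
// has been handed to a browser client holding a File (`file`, and the id
// variables, are hypothetical):
//
//   const { key, uploadUrl, headers } = await createTaskImageUploadUrl({
//     workspaceId, projectId, taskId,
//     surface: "comment",
//     filename: file.name,
//     contentType: file.type,
//   });
//   const res = await fetch(uploadUrl, { method: "PUT", headers, body: file });
//   if (!res.ok) throw new Error(`Upload failed with status ${res.status}`);
//   // Persist `key` so getPrivateObject can serve the image later.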

export function assertStorageConfigured() {
  return getStorageConfig();
}

// Despite the name, this returns a boolean rather than throwing, so callers
// must check the result and reject mismatched keys themselves.
export function assertTaskImageKeyMatchesContext(
  key: string,
  context: Omit<TaskImageUploadContext, "filename" | "contentType">,
) {
  const prefix = `${buildObjectKeyPrefix(context)}/`;
  return key.startsWith(prefix);
}

export async function getPrivateObject(key: string): Promise<AssetObject> {
  const config = getStorageConfig();
  const client = getClient(config);
  const response = await client.send(
    new GetObjectCommand({
      Bucket: config.bucket,
      Key: key,
    }),
  );

  if (!response.Body) {
    throw new Error("Storage object body is missing.");
  }

  const body =
    "transformToWebStream" in response.Body
      ? response.Body.transformToWebStream()
      : Readable.toWeb(response.Body as Readable);

  return {
    body,
    contentType: response.ContentType,
    contentLength: response.ContentLength,
    etag: response.ETag,
    lastModified: response.LastModified,
  };
}
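
// Serving sketch: the web stream returned above plugs straight into a
// fetch-style Response. This assumes the caller has already authorized the
// request and verified `key` with assertTaskImageKeyMatchesContext:
//
//   const asset = await getPrivateObject(key);
//   return new Response(asset.body as ReadableStream, {
//     headers: {
//       "Content-Type": asset.contentType ?? "application/octet-stream",
//       ...(asset.etag ? { ETag: asset.etag } : {}),
//     },
//   });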