
Commit c26e522

feat: Create uploads module
1 parent b5c0823 commit c26e522

17 files changed, +1156 −0 lines changed
modules/uploads/config.ts

Lines changed: 15 additions & 0 deletions
```ts
export interface Config {
  maxUploadSize: UploadSize;
  maxMultipartUploadSize: UploadSize;
  maxFilesPerUpload?: number;
  defaultMultipartChunkSize?: UploadSize;
}

export const DEFAULT_MAX_FILES_PER_UPLOAD = 10;
export const DEFAULT_MULTIPART_CHUNK_SIZE: UploadSize = { mb: 100 };

type Units = "b" | "kb" | "mb" | "gb" | "tb" | "kib" | "mib" | "gib" | "tib";

export type UploadSize = {
  [unit in Units]: Record<unit, number>;
}[Units];
```
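
`UploadSize` resolves to a union of single-key records such as `{ mb: 100 }` or `{ gib: 2 }`, so a config value always names exactly one unit. The module presumably converts these to byte counts elsewhere in its utils (not part of this diff); a minimal sketch of such a conversion, under a hypothetical `uploadSizeToBytes` helper name, could look like this:

```ts
// Hypothetical helper (not part of this commit): convert an UploadSize value
// into a byte count. Assumes it lives next to the definitions above so that
// `Units` and `UploadSize` are in scope. Decimal units use powers of 1000,
// binary units (kib/mib/gib/tib) use powers of 1024.
const UNIT_FACTORS: Record<Units, number> = {
  b: 1,
  kb: 1000,
  mb: 1000 ** 2,
  gb: 1000 ** 3,
  tb: 1000 ** 4,
  kib: 1024,
  mib: 1024 ** 2,
  gib: 1024 ** 3,
  tib: 1024 ** 4,
};

export function uploadSizeToBytes(size: UploadSize): number {
  // The UploadSize union guarantees exactly one unit key is present.
  const [unit, value] = Object.entries(size)[0] as [Units, number];
  return value * UNIT_FACTORS[unit];
}

// Example: uploadSizeToBytes({ mb: 100 }) === 100_000_000
```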
Lines changed: 27 additions & 0 deletions
```sql
-- CreateTable
CREATE TABLE "Upload" (
    "id" UUID NOT NULL,
    "userId" UUID,
    "bucket" TEXT NOT NULL,
    "contentLength" BIGINT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    "completedAt" TIMESTAMP(3),
    "deletedAt" TIMESTAMP(3),

    CONSTRAINT "Upload_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "Files" (
    "path" TEXT NOT NULL,
    "mime" TEXT,
    "contentLength" BIGINT NOT NULL,
    "nsfwScoreThreshold" DOUBLE PRECISION,
    "uploadId" UUID NOT NULL,

    CONSTRAINT "Files_pkey" PRIMARY KEY ("uploadId","path")
);

-- AddForeignKey
ALTER TABLE "Files" ADD CONSTRAINT "Files_uploadId_fkey" FOREIGN KEY ("uploadId") REFERENCES "Upload"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
```
Lines changed: 8 additions & 0 deletions
```sql
/*
  Warnings:

  - You are about to drop the column `nsfwScoreThreshold` on the `Files` table. All the data in the column will be lost.

*/
-- AlterTable
ALTER TABLE "Files" DROP COLUMN "nsfwScoreThreshold";
```
modules/uploads/db/migrations/migration_lock.toml

Lines changed: 3 additions & 0 deletions

```toml
# Please do not edit this file manually
# It should be added in your version-control system (i.e. Git)
provider = "postgresql"
```

modules/uploads/db/schema.prisma

Lines changed: 31 additions & 0 deletions
```prisma
// Do not modify this `datasource` block
datasource db {
  provider = "postgresql"
  url      = env("DATABASE_URL")
}

model Upload {
  id     String  @id @default(uuid()) @db.Uuid
  userId String? @db.Uuid

  bucket        String
  contentLength BigInt

  createdAt   DateTime  @default(now())
  updatedAt   DateTime  @updatedAt
  completedAt DateTime?
  deletedAt   DateTime?

  files Files[] @relation("Files")
}

model Files {
  uploadId String @db.Uuid
  upload   Upload @relation("Files", fields: [uploadId], references: [id])

  path          String
  mime          String?
  contentLength BigInt

  @@id([uploadId, path])
}
```
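
These two models back the tables created by the migration above, with `Files` rows keyed by the composite `@@id([uploadId, path])`. As a hedged illustration of how an upload and its files could be written together with Prisma's nested create (the field values below are made up; the real insert presumably happens in the module's `prepare` script, which is not shown in this diff view):

```ts
// Sketch only: create an Upload together with its Files in one call using
// Prisma's nested create. Values are hypothetical; `ctx.db` is the Prisma
// client the module's scripts use.
const upload = await ctx.db.upload.create({
  data: {
    userId: null,
    bucket: "uploads-bucket", // hypothetical bucket name
    contentLength: BigInt(3_145_728), // combined size of all files, in bytes
    files: {
      create: [
        { path: "avatar.png", mime: "image/png", contentLength: BigInt(3_145_728) },
      ],
    },
  },
  include: { files: true },
});
```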

modules/uploads/module.yaml

Lines changed: 43 additions & 0 deletions
```yaml
scripts:
  prepare:
    name: Prepare Upload
    description: Prepare an upload batch for data transfer
  complete:
    name: Complete Upload
    description: Alert the module that the upload has been completed
  get:
    name: Get Upload Metadata
    description: Get the metadata (including contained files) for specified upload IDs
  get_public_file_urls:
    name: Get File Link
    description: Get presigned download links for each of the specified files
  list_for_user:
    name: List Uploads for Users
    description: Get a list of upload IDs associated with the specified user IDs
  delete:
    name: Delete Upload
    description: Removes the upload and deletes the files from the bucket
errors:
  no_files:
    name: No Files Provided
    description: An upload must have at least 1 file
  too_many_files:
    name: Too Many Files Provided
    description: There is a limit to how many files can be put into a single upload (see config)
  duplicate_paths:
    name: Duplicate Paths Provided
    description: An upload cannot contain 2 files with the same path (see `cause` for offending paths)
  size_limit_exceeded:
    name: Combined Size Limit Exceeded
    description: There is a maximum total size per upload (see config)
  upload_not_found:
    name: Upload Not Found
    description: The provided upload ID didn't match any known existing uploads
  upload_already_completed:
    name: Upload Already Completed
    description: "`complete` was already called on this upload"
  s3_not_configured:
    name: S3 Not Configured
    description: The S3 bucket is not configured (missing env variables)
dependencies:
  users: {}
```
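
The scripts listed here define the module's public surface. As a heavily hedged sketch of how a dependent module might call into it, assuming the backend exposes dependency scripts on `ctx.modules` (that call surface is not shown in this commit), using the `complete` and `get` request shapes defined in the scripts below:

```ts
// Assumption: dependent modules can invoke this module's scripts through
// `ctx.modules.uploads.<script>`; the exact surface is not part of this diff.
// `uploadId` would come from an earlier `prepare` call.
const { upload } = await ctx.modules.uploads.complete({ uploadId });

// Fetch metadata back in request order; missing, incomplete, or deleted
// uploads come back as null entries.
const { uploads } = await ctx.modules.uploads.get({
  uploadIds: [upload.id],
  filesIncluded: true,
});
```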
modules/uploads/scripts/complete.ts

Lines changed: 110 additions & 0 deletions

```ts
import { RuntimeError, ScriptContext } from "../_gen/scripts/complete.ts";
import { keyExists } from "../utils/bucket.ts";
import { getS3EnvConfig } from "../utils/env.ts";
import { getKey } from "../utils/types.ts";
import { prismaToOutput } from "../utils/types.ts";
import { Upload } from "../utils/types.ts";

export interface Request {
  uploadId: string;
}

export interface Response {
  upload: Upload;
}

export async function run(
  ctx: ScriptContext,
  req: Request,
): Promise<Response> {
  const s3 = getS3EnvConfig();
  if (!s3) throw new RuntimeError("s3_not_configured");

  const newUpload = await ctx.db.$transaction(async (db) => {
    // Find the upload by ID
    const upload = await db.upload.findFirst({
      where: {
        id: req.uploadId,
      },
      select: {
        id: true,
        userId: true,
        bucket: true,
        contentLength: true,
        files: true,
        createdAt: true,
        updatedAt: true,
        completedAt: true,
      },
    });

    // Error if the upload wasn't prepared
    if (!upload) {
      throw new RuntimeError(
        "upload_not_found",
        {
          meta: {
            reason: `Upload with ID ${req.uploadId} not found`,
          },
        },
      );
    }

    // Check with S3 to see if the files were uploaded
    const fileExistencePromises = upload.files.map(
      (file) => keyExists(s3, getKey(upload.id, file.path)),
    );
    const fileExistence = await Promise.all(fileExistencePromises);
    const filesAllExist = fileExistence.every(Boolean);
    if (!filesAllExist) {
      const missingFiles = upload.files.filter((_, i) => !fileExistence[i]);
      throw new RuntimeError(
        "files_not_uploaded",
        {
          meta: {
            reason: `Not all files for upload with ID ${req.uploadId} have been uploaded`,
            missingFiles: missingFiles.map((file) => file.path),
          },
        },
      );
    }

    // Error if `complete` was already called with this ID
    if (upload.completedAt !== null) {
      throw new RuntimeError(
        "upload_already_completed",
        {
          meta: {
            reason: `Upload with ID ${req.uploadId} has already been completed`,
          },
        },
      );
    }

    // Update the upload to mark it as completed
    const completedUpload = await db.upload.update({
      where: {
        id: req.uploadId,
      },
      data: {
        completedAt: new Date().toISOString(),
      },
      select: {
        id: true,
        userId: true,
        bucket: true,
        contentLength: true,
        files: true,
        createdAt: true,
        updatedAt: true,
        completedAt: true,
      },
    });

    return completedUpload;
  });

  return {
    upload: prismaToOutput(newUpload, true),
  };
}
```
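
`getS3EnvConfig`, `getKey`, and `keyExists` come from the module's `utils/` directory, which is not included in this diff view. A minimal sketch of what `keyExists` might look like on top of the AWS SDK v3 S3 client; the config shape and client wiring here are assumptions:

```ts
import { HeadObjectCommand, S3Client } from "@aws-sdk/client-s3";

// Assumed shape of the value returned by getS3EnvConfig(); the real type is
// defined in utils/env.ts, which is not part of this commit view.
interface S3Config {
  bucket: string;
  region: string;
  endpoint?: string;
  accessKeyId: string;
  secretAccessKey: string;
}

// Sketch: HEAD the object and treat a 404 as "does not exist".
export async function keyExists(s3: S3Config, key: string): Promise<boolean> {
  const client = new S3Client({
    region: s3.region,
    endpoint: s3.endpoint,
    credentials: {
      accessKeyId: s3.accessKeyId,
      secretAccessKey: s3.secretAccessKey,
    },
  });
  try {
    await client.send(new HeadObjectCommand({ Bucket: s3.bucket, Key: key }));
    return true;
  } catch (err) {
    const status = (err as { $metadata?: { httpStatusCode?: number } })
      .$metadata?.httpStatusCode;
    if (status === 404) return false;
    throw err;
  }
}
```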

modules/uploads/scripts/delete.ts

Lines changed: 86 additions & 0 deletions
```ts
import { RuntimeError, ScriptContext } from "../_gen/scripts/delete.ts";
import { getKey } from "../utils/types.ts";
import { deleteKeys } from "../utils/bucket.ts";
import { getS3EnvConfig } from "../utils/env.ts";

export interface Request {
  uploadId: string;
}

export interface Response {
  bytesDeleted: string;
}

export async function run(
  ctx: ScriptContext,
  req: Request,
): Promise<Response> {
  const s3 = getS3EnvConfig();
  if (!s3) throw new RuntimeError("s3_not_configured");

  const bytesDeleted = await ctx.db.$transaction(async (db) => {
    const upload = await db.upload.findFirst({
      where: {
        id: req.uploadId,
        completedAt: { not: null },
        deletedAt: null,
      },
      select: {
        id: true,
        userId: true,
        bucket: true,
        contentLength: true,
        files: true,
        createdAt: true,
        updatedAt: true,
        completedAt: true,
      },
    });
    if (!upload) {
      throw new RuntimeError(
        "upload_not_found",
        {
          meta: {
            modified: false,
            reason: `Upload with ID ${req.uploadId} not found`,
          },
        },
      );
    }

    const filesToDelete = upload.files.map((file) =>
      getKey(file.uploadId, file.path)
    );
    const deleteResults = await deleteKeys(s3, filesToDelete);

    const failures = upload.files
      .map((file, i) => [file, deleteResults[i]] as const)
      .filter(([, successfullyDeleted]) => !successfullyDeleted)
      .map(([file]) => file);

    if (failures.length) {
      const failedPaths = JSON.stringify(failures.map((file) => file.path));
      throw new RuntimeError(
        "failed_to_delete",
        {
          meta: {
            modified: failures.length !== filesToDelete.length,
            reason: `Failed to delete files with paths ${failedPaths}`,
          },
        },
      );
    }

    await db.upload.update({
      where: {
        id: req.uploadId,
      },
      data: {
        deletedAt: new Date().toISOString(),
      },
    });

    return upload.contentLength.toString();
  });
  return { bytesDeleted };
}
```
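
`deleteKeys` likewise lives in `utils/bucket.ts`, outside this diff. Judging from how it is consumed above (one success flag per input key), a sketch over the AWS SDK v3 `DeleteObjects` API might look like this; the `S3Config` shape is the same assumption as in the `keyExists` sketch:

```ts
import { DeleteObjectsCommand, S3Client } from "@aws-sdk/client-s3";

// Assumed config shape, mirroring the keyExists sketch above.
type S3Config = {
  bucket: string;
  region: string;
  endpoint?: string;
  accessKeyId: string;
  secretAccessKey: string;
};

// Sketch: batch-delete the given keys (DeleteObjects accepts up to 1000 keys
// per request) and return one boolean per key, aligned with the input order.
export async function deleteKeys(s3: S3Config, keys: string[]): Promise<boolean[]> {
  if (keys.length === 0) return [];

  const client = new S3Client({
    region: s3.region,
    endpoint: s3.endpoint,
    credentials: {
      accessKeyId: s3.accessKeyId,
      secretAccessKey: s3.secretAccessKey,
    },
  });

  const result = await client.send(
    new DeleteObjectsCommand({
      Bucket: s3.bucket,
      Delete: { Objects: keys.map((Key) => ({ Key })) },
    }),
  );

  // Keys echoed back under `Deleted` were removed; anything else failed.
  const deleted = new Set((result.Deleted ?? []).map((obj) => obj.Key));
  return keys.map((key) => deleted.has(key));
}
```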

modules/uploads/scripts/get.ts

Lines changed: 52 additions & 0 deletions
```ts
import { ScriptContext } from "../_gen/scripts/get.ts";
import { PrismaUploadWithOptionalFiles, prismaToOutput } from "../utils/types.ts";
import { Upload } from "../utils/types.ts";

export interface Request {
  uploadIds: string[];
  filesIncluded?: boolean;
}

type UploadWithFileOpt = Omit<Upload, "files"> & Partial<Pick<Upload, "files">>;

export interface Response {
  uploads: (UploadWithFileOpt | null)[];
}

export async function run(
  ctx: ScriptContext,
  req: Request,
): Promise<Response> {
  // Find uploads that match the IDs in the request
  const dbUploads = await ctx.db.upload.findMany({
    where: {
      id: {
        in: req.uploadIds,
      },
      completedAt: { not: null },
      deletedAt: null,
    },
    select: {
      id: true,
      userId: true,
      bucket: true,
      contentLength: true,
      files: !!req.filesIncluded,
      createdAt: true,
      updatedAt: true,
      completedAt: true,
    },
  }) as PrismaUploadWithOptionalFiles[];

  // Create a map of uploads by ID
  const uploadMap = new Map(dbUploads.map((upload) => [upload.id, upload]));

  // Reorder uploads to match the order of the request
  const uploads = req.uploadIds.map((uploadId) => {
    const upload = uploadMap.get(uploadId);
    // If the upload wasn't found, return null
    return upload ? prismaToOutput(upload, false) : null;
  });

  return { uploads };
}
```
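
The map-then-reorder step guarantees that `uploads[i]` lines up with `req.uploadIds[i]`, with `null` standing in for IDs that were missing, incomplete, or deleted. A standalone illustration of the technique (the data is made up):

```ts
// Hypothetical data illustrating the reorder-with-nulls pattern used above.
const requestedIds = ["a", "b", "c"];
const found = [{ id: "c" }, { id: "a" }]; // rows come back in arbitrary order
const byId = new Map(found.map((row) => [row.id, row]));
const ordered = requestedIds.map((id) => byId.get(id) ?? null);
// ordered -> [{ id: "a" }, null, { id: "c" }]
```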
