feat(docker): add Dockerfile and .dockerignore for app containerization
.dockerignore  (new file, +7 lines)
@@ -0,0 +1,7 @@
+# Prevent local build artifacts and secrets from bloating the image
+.git
+.next
+node_modules
+.env*
+Dockerfile
+README.md
Dockerfile  (new file, +63 lines)
@@ -0,0 +1,63 @@
+# syntax=docker/dockerfile:1
+
+# ---- Base image with pnpm enabled ----
+FROM node:20-slim AS base
+WORKDIR /app
+ENV NEXT_TELEMETRY_DISABLED=1 \
+    PNPM_HOME=/usr/local/share/pnpm
+ENV PATH=${PNPM_HOME}:${PATH}
+RUN apt-get update \
+    && apt-get install -y --no-install-recommends ca-certificates openssl \
+    && rm -rf /var/lib/apt/lists/* \
+    && corepack enable
+
+# ---- Install dependencies (cached layer) ----
+FROM base AS deps
+COPY package.json pnpm-lock.yaml ./
+RUN pnpm install --frozen-lockfile
+
+# ---- Build the Next.js application ----
+FROM base AS builder
+ARG DATABASE_URL="postgresql://postgres:postgres@localhost:5432/postgres"
+ARG NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY=""
+ENV DATABASE_URL=${DATABASE_URL} \
+    NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY=${NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY}
+COPY --from=deps /app/node_modules ./node_modules
+COPY . .
+RUN mkdir -p public
+RUN pnpm run prisma:generate
+RUN pnpm run build
+
+# ---- Prune devDependencies for the runtime image ----
+FROM deps AS prod-deps
+RUN pnpm prune --prod
+
+# ---- Production runtime ----
+FROM node:20-slim AS runner
+ARG NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY=""
+WORKDIR /app
+ENV NODE_ENV=production \
+    NEXT_TELEMETRY_DISABLED=1 \
+    PORT=3000 \
+    NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY=${NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY}
+RUN apt-get update \
+    && apt-get install -y --no-install-recommends ca-certificates openssl \
+    && rm -rf /var/lib/apt/lists/* \
+    && mkdir -p /app \
+    && chown node:node /app
+
+# Copy only what is required to run `next start`
+COPY --from=prod-deps --chown=node:node /app/node_modules ./node_modules
+COPY --from=builder --chown=node:node /app/package.json ./package.json
+COPY --from=builder --chown=node:node /app/.next ./.next
+# Copy public assets only if they exist in the project
+COPY --from=builder --chown=node:node /app/public ./public
+COPY --from=builder --chown=node:node /app/next.config.js ./next.config.js
+
+# Keep the filesystem owned by the non-root user
+USER node
+
+EXPOSE 3000
+
+# `next start` will read any variables from the environment or a mounted .env.local
+CMD ["node", "./node_modules/next/dist/bin/next", "start"]
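
Note on the repeated NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY argument: Next.js inlines NEXT_PUBLIC_* values into the browser bundle during `next build`, so the key must reach the builder stage as a build ARG, while server-only settings such as DATABASE_URL are read when the server starts and can instead be supplied at `docker run` time. A minimal sketch of that distinction, using a helper that is hypothetical and not part of this commit:

    // Hypothetical helper, for illustration only.
    export function describeRuntimeConfig(): string {
      // In client-side code, NEXT_PUBLIC_* reads are replaced with literals by `next build`,
      // which is why the Dockerfile forwards NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY into the builder stage.
      const clerkKey = process.env.NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY;

      // Server-only variables are not inlined; they are read at runtime, so the real DATABASE_URL
      // can be injected with `docker run -e DATABASE_URL=...` or a mounted .env.local.
      const hasDatabase = Boolean(process.env.DATABASE_URL);

      return `Clerk key ${clerkKey ? "present" : "missing"}, database ${hasDatabase ? "configured" : "not configured"}`;
    }
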
@@ -7,38 +7,47 @@ import { getNumericParam } from "@/lib/route-params";
 
 type AssignmentRouteParams = { assignmentId: string | string[] | undefined };
 
+type SubmissionRecord = Awaited<ReturnType<typeof prisma.submission.findMany>>[number];
+
+type ExportableColumn = {
+  header: string;
+  getter: (submission: SubmissionRecord) => unknown;
+};
+
 const EXPORTABLE_COLUMNS = {
-  studentId: { header: "学号", getter: (submission: any) => submission.studentId },
-  studentName: { header: "姓名", getter: (submission: any) => submission.studentName },
+  studentId: { header: "学号", getter: (submission) => submission.studentId },
+  studentName: { header: "姓名", getter: (submission) => submission.studentName },
   originalFilename: {
     header: "文件名",
-    getter: (submission: any) => submission.originalFilename,
+    getter: (submission) => submission.originalFilename,
   },
-  fileUrl: { header: "文件地址", getter: (submission: any) => submission.fileUrl },
+  fileUrl: { header: "文件地址", getter: (submission) => submission.fileUrl },
   submittedAt: {
     header: "提交时间",
-    getter: (submission: any) =>
+    getter: (submission) =>
       submission.submittedAt ? new Date(submission.submittedAt).toISOString() : "",
   },
   evaluationScore: {
     header: "得分",
-    getter: (submission: any) =>
+    getter: (submission) =>
       typeof submission.evaluationScore === "number"
         ? submission.evaluationScore
         : "",
   },
   evaluationComment: {
     header: "评价评语",
-    getter: (submission: any) => submission.evaluationComment ?? "",
+    getter: (submission) => submission.evaluationComment ?? "",
   },
   evaluatedAt: {
     header: "评价时间",
-    getter: (submission: any) =>
+    getter: (submission) =>
       submission.evaluatedAt ? new Date(submission.evaluatedAt).toISOString() : "",
   },
-} as const;
+} satisfies Record<string, ExportableColumn>;
 
-const DEFAULT_COLUMNS = ["studentId", "evaluationScore"] as Array<keyof typeof EXPORTABLE_COLUMNS>;
+type ExportableColumnKey = keyof typeof EXPORTABLE_COLUMNS;
+
+const DEFAULT_COLUMNS: ExportableColumnKey[] = ["studentId", "evaluationScore"];
 
 export async function GET(
   request: Request,
@@ -84,7 +93,7 @@ export async function GET(
     return NextResponse.json({ error: "请选择有效的导出列" }, { status: 400 });
   }
 
-  const submissions = await prisma.submission.findMany({
+  const submissions: SubmissionRecord[] = await prisma.submission.findMany({
     where: { assignmentId },
     orderBy: { submittedAt: "asc" },
   });
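
The `satisfies Record<string, ExportableColumn>` clause checks every getter against SubmissionRecord while keeping the literal object keys, so ExportableColumnKey still resolves to the concrete column names rather than plain string. A sketch of how the typed map could feed the export, using hypothetical toCsvRow/toCsv helpers that are not part of this diff (and that skip CSV escaping):

    // Hypothetical consumers of EXPORTABLE_COLUMNS, for illustration only (no CSV escaping).
    const toCsvRow = (submission: SubmissionRecord, columns: ExportableColumnKey[]): string =>
      columns.map((key) => String(EXPORTABLE_COLUMNS[key].getter(submission) ?? "")).join(",");

    const toCsv = (submissions: SubmissionRecord[], columns: ExportableColumnKey[] = DEFAULT_COLUMNS): string =>
      [
        columns.map((key) => EXPORTABLE_COLUMNS[key].header).join(","),
        ...submissions.map((submission) => toCsvRow(submission, columns)),
      ].join("\n");
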
@@ -2,35 +2,62 @@ import { S3Client, PutObjectCommand, GetObjectCommand } from "@aws-sdk/client-s3
 import { Readable } from "node:stream";
 import { randomUUID } from "crypto";
 
-const {
-  AUTOTEACHER_S3_BUCKET,
-  AUTOTEACHER_S3_REGION,
-  AUTOTEACHER_S3_ENDPOINT_URL,
-  AUTOTEACHER_S3_ACCESS_KEY_ID,
-  AUTOTEACHER_S3_SECRET_ACCESS_KEY,
-  AUTOTEACHER_S3_PUBLIC_BASE_URL,
-  AUTOTEACHER_S3_USE_SSL,
-} = process.env;
+type S3EnvConfig = {
+  bucket: string;
+  region?: string;
+  endpoint?: string;
+  accessKeyId?: string;
+  secretAccessKey?: string;
+  publicBaseUrl?: string;
+  useSsl: boolean;
+};
 
-if (!AUTOTEACHER_S3_BUCKET) {
+let cachedConfig: S3EnvConfig | null = null;
+let cachedClient: S3Client | null = null;
+
+const resolveEnvConfig = (): S3EnvConfig => {
+  const bucket = process.env.AUTOTEACHER_S3_BUCKET;
+  if (!bucket) {
     throw new Error("Missing AUTOTEACHER_S3_BUCKET environment variable");
   }
 
-const useSsl = (AUTOTEACHER_S3_USE_SSL ?? "true").toLowerCase() !== "false";
+  return {
+    bucket,
+    region: process.env.AUTOTEACHER_S3_REGION || "us-east-1",
+    endpoint: process.env.AUTOTEACHER_S3_ENDPOINT_URL,
+    accessKeyId: process.env.AUTOTEACHER_S3_ACCESS_KEY_ID,
+    secretAccessKey: process.env.AUTOTEACHER_S3_SECRET_ACCESS_KEY,
+    publicBaseUrl: process.env.AUTOTEACHER_S3_PUBLIC_BASE_URL,
+    useSsl: (process.env.AUTOTEACHER_S3_USE_SSL ?? "true").toLowerCase() !== "false",
+  };
+};
 
-const s3Client = new S3Client({
-  region: AUTOTEACHER_S3_REGION || "us-east-1",
-  endpoint: AUTOTEACHER_S3_ENDPOINT_URL,
-  forcePathStyle: Boolean(AUTOTEACHER_S3_ENDPOINT_URL),
+const getConfig = (): S3EnvConfig => {
+  if (!cachedConfig) {
+    cachedConfig = resolveEnvConfig();
+  }
+  return cachedConfig;
+};
+
+const getClient = (): S3Client => {
+  if (!cachedClient) {
+    const config = getConfig();
+    cachedClient = new S3Client({
+      region: config.region,
+      endpoint: config.endpoint,
+      forcePathStyle: Boolean(config.endpoint),
       credentials:
-        AUTOTEACHER_S3_ACCESS_KEY_ID && AUTOTEACHER_S3_SECRET_ACCESS_KEY
+        config.accessKeyId && config.secretAccessKey
           ? {
-              accessKeyId: AUTOTEACHER_S3_ACCESS_KEY_ID,
-              secretAccessKey: AUTOTEACHER_S3_SECRET_ACCESS_KEY,
+              accessKeyId: config.accessKeyId,
+              secretAccessKey: config.secretAccessKey,
             }
           : undefined,
-  tls: useSsl,
+      tls: config.useSsl,
     });
+  }
+  return cachedClient;
+};
 
 export type UploadResult = {
   key: string;
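
Moving the environment reads and the S3Client construction behind getConfig()/getClient() means importing this module no longer throws when AUTOTEACHER_S3_BUCKET is unset; the check now runs on first use. That matters for the Docker builder stage above, where `next build` evaluates route modules without any S3 configuration. A self-contained sketch of the pattern (illustrative only; the real module also reads the other AUTOTEACHER_S3_* variables and constructs the client):

    // Lazy, cached config: nothing is validated at import time.
    type Config = { bucket: string };

    let cached: Config | null = null;

    const getConfig = (): Config => {
      if (!cached) {
        const bucket = process.env.AUTOTEACHER_S3_BUCKET;
        if (!bucket) {
          // Surfaces on the first upload/download instead of failing the build.
          throw new Error("Missing AUTOTEACHER_S3_BUCKET environment variable");
        }
        cached = { bucket };
      }
      return cached;
    };

    // Importing this file is always safe; only calling getConfig() can throw.
    export const describeBucket = (): string => `uploads go to ${getConfig().bucket}`;
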
@@ -38,19 +65,20 @@ export type UploadResult = {
 };
 
 const buildFileUrl = (key: string): string => {
-  if (AUTOTEACHER_S3_PUBLIC_BASE_URL) {
-    return `${AUTOTEACHER_S3_PUBLIC_BASE_URL.replace(/\/$/, "")}/${key}`;
+  const config = getConfig();
+  if (config.publicBaseUrl) {
+    return `${config.publicBaseUrl.replace(/\/$/, "")}/${key}`;
   }
 
-  if (AUTOTEACHER_S3_ENDPOINT_URL) {
-    return `${AUTOTEACHER_S3_ENDPOINT_URL.replace(/\/$/, "")}/${AUTOTEACHER_S3_BUCKET}/${key}`;
+  if (config.endpoint) {
+    return `${config.endpoint.replace(/\/$/, "")}/${config.bucket}/${key}`;
   }
 
-  if (AUTOTEACHER_S3_REGION) {
-    return `https://${AUTOTEACHER_S3_BUCKET}.s3.${AUTOTEACHER_S3_REGION}.amazonaws.com/${key}`;
+  if (config.region) {
+    return `https://${config.bucket}.s3.${config.region}.amazonaws.com/${key}`;
   }
 
-  return `https://${AUTOTEACHER_S3_BUCKET}.s3.amazonaws.com/${key}`;
+  return `https://${config.bucket}.s3.amazonaws.com/${key}`;
 };
 
 export async function uploadAssignmentFile(
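
buildFileUrl resolves the public URL in this order: an explicit public base URL (for example a CDN), then a custom endpoint using path-style addressing, then a region-specific virtual-hosted-style URL, then the global S3 endpoint as a last resort. Because getConfig() now defaults region to "us-east-1", the region branch effectively always applies when neither a public base URL nor an endpoint is set. A sketch of the precedence with hypothetical values (resolveUrl and every host below are illustrative, not part of the commit):

    // Standalone restatement of the precedence, taking the config explicitly so each branch
    // can be exercised directly. Example buckets, hosts and keys are made up.
    const resolveUrl = (config: S3EnvConfig, key: string): string => {
      if (config.publicBaseUrl) return `${config.publicBaseUrl.replace(/\/$/, "")}/${key}`;
      if (config.endpoint) return `${config.endpoint.replace(/\/$/, "")}/${config.bucket}/${key}`; // path-style (MinIO, etc.)
      if (config.region) return `https://${config.bucket}.s3.${config.region}.amazonaws.com/${key}`;
      return `https://${config.bucket}.s3.amazonaws.com/${key}`;
    };

    resolveUrl({ bucket: "autoteacher", useSsl: true, publicBaseUrl: "https://cdn.example.com" }, "assignments/abc.docx");
    // -> "https://cdn.example.com/assignments/abc.docx"
    resolveUrl({ bucket: "autoteacher", useSsl: true, endpoint: "http://minio:9000" }, "assignments/abc.docx");
    // -> "http://minio:9000/autoteacher/assignments/abc.docx"
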
@@ -70,24 +98,30 @@ export async function uploadAssignmentFile(
     contentType = "application/msword";
   }
 
+  const config = getConfig();
+  const client = getClient();
+
   const command = new PutObjectCommand({
-    Bucket: AUTOTEACHER_S3_BUCKET,
+    Bucket: config.bucket,
     Key: key,
     Body: fileBuffer,
     ContentType: contentType,
   });
 
-  await s3Client.send(command);
+  await client.send(command);
 
   return { key, url: buildFileUrl(key) };
 }
 
 export async function downloadAssignmentFile(key: string): Promise<Buffer> {
+  const config = getConfig();
+  const client = getClient();
+
   const command = new GetObjectCommand({
-    Bucket: AUTOTEACHER_S3_BUCKET,
+    Bucket: config.bucket,
     Key: key,
   });
-  const response = await s3Client.send(command);
+  const response = await client.send(command);
   const body = response.Body;
   if (!body) {
     throw new Error("无法读取存储的作业文件");
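
The remainder of downloadAssignmentFile is outside this hunk, but the `Readable` import at the top of the file suggests the Body stream is drained into a Buffer roughly as sketched below (hypothetical; the actual conversion code is not shown in the diff):

    // Typical way to collect an S3 GetObject Body (a Node Readable) into a Buffer.
    import { Readable } from "node:stream";

    const streamToBuffer = async (body: Readable): Promise<Buffer> => {
      const chunks: Buffer[] = [];
      for await (const chunk of body) {
        chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
      }
      return Buffer.concat(chunks);
    };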