feat: add backup & restore task scheduler

This commit is contained in:
Khairul Hidayat 2024-05-11 02:09:18 +07:00
parent 68fbe42d6c
commit 093b0056fb
21 changed files with 951 additions and 70 deletions

View File

@ -9,10 +9,11 @@
"start": "bun dist/main.js",
"generate": "drizzle-kit generate",
"migrate": "bun src/db/migrate.ts",
"reset": "rm -f storage/database.db && bun run migrate"
"reset": "rm -rf storage && bun run migrate"
},
"devDependencies": {
"@types/bun": "latest",
"@types/node-schedule": "^2.1.7",
"drizzle-kit": "^0.21.0"
},
"peerDependencies": {
@ -23,6 +24,7 @@
"drizzle-orm": "^0.30.10",
"hono": "^4.3.4",
"nanoid": "^5.0.7",
"node-schedule": "^2.1.1",
"zod": "^3.23.8"
}
}

View File

@ -0,0 +1,44 @@
CREATE TABLE `backups` (
`id` text PRIMARY KEY NOT NULL,
`server_id` text NOT NULL,
`database_id` text NOT NULL,
`type` text DEFAULT 'backup',
`status` text DEFAULT 'pending',
`output` text,
`key` text,
`hash` text,
`size` integer,
`created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL,
FOREIGN KEY (`server_id`) REFERENCES `servers`(`id`) ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY (`database_id`) REFERENCES `databases`(`id`) ON UPDATE cascade ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `databases` (
`id` text PRIMARY KEY NOT NULL,
`server_id` text NOT NULL,
`name` text NOT NULL,
`is_active` integer DEFAULT true NOT NULL,
`last_backup_at` text,
`created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL,
FOREIGN KEY (`server_id`) REFERENCES `servers`(`id`) ON UPDATE cascade ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `servers` (
`id` text PRIMARY KEY NOT NULL,
`name` text NOT NULL,
`type` text NOT NULL,
`connection` text,
`ssh` text,
`is_active` integer DEFAULT true NOT NULL,
`created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL
);
--> statement-breakpoint
CREATE TABLE `users` (
`id` text PRIMARY KEY NOT NULL,
`username` text NOT NULL,
`password` text NOT NULL,
`is_active` integer DEFAULT true NOT NULL,
`created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL
);
--> statement-breakpoint
CREATE UNIQUE INDEX `users_username_unique` ON `users` (`username`);

View File

@ -0,0 +1,304 @@
{
"version": "6",
"dialect": "sqlite",
"id": "96dd8a39-5c64-4bb1-86de-7a81b83ed1db",
"prevId": "00000000-0000-0000-0000-000000000000",
"tables": {
"backups": {
"name": "backups",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"server_id": {
"name": "server_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"database_id": {
"name": "database_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"type": {
"name": "type",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "'backup'"
},
"status": {
"name": "status",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "'pending'"
},
"output": {
"name": "output",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"key": {
"name": "key",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"hash": {
"name": "hash",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"size": {
"name": "size",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP"
}
},
"indexes": {},
"foreignKeys": {
"backups_server_id_servers_id_fk": {
"name": "backups_server_id_servers_id_fk",
"tableFrom": "backups",
"tableTo": "servers",
"columnsFrom": [
"server_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "cascade"
},
"backups_database_id_databases_id_fk": {
"name": "backups_database_id_databases_id_fk",
"tableFrom": "backups",
"tableTo": "databases",
"columnsFrom": [
"database_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "cascade"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {}
},
"databases": {
"name": "databases",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"server_id": {
"name": "server_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"is_active": {
"name": "is_active",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": true
},
"last_backup_at": {
"name": "last_backup_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP"
}
},
"indexes": {},
"foreignKeys": {
"databases_server_id_servers_id_fk": {
"name": "databases_server_id_servers_id_fk",
"tableFrom": "databases",
"tableTo": "servers",
"columnsFrom": [
"server_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "cascade"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {}
},
"servers": {
"name": "servers",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"type": {
"name": "type",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"connection": {
"name": "connection",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"ssh": {
"name": "ssh",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"is_active": {
"name": "is_active",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": true
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP"
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {}
},
"users": {
"name": "users",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"username": {
"name": "username",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"password": {
"name": "password",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"is_active": {
"name": "is_active",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": true
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP"
}
},
"indexes": {
"users_username_unique": {
"name": "users_username_unique",
"columns": [
"username"
],
"isUnique": true
}
},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {}
}
},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
}
}

View File

@ -0,0 +1,13 @@
{
"version": "6",
"dialect": "sqlite",
"entries": [
{
"idx": 0,
"version": "6",
"when": 1715367813285,
"tag": "0000_square_agent_brand",
"breakpoints": true
}
]
}

View File

@ -1,6 +1,5 @@
import type { DatabaseConfig } from "@/types/database.types";
import { sql } from "drizzle-orm";
import { integer, sqliteTable, text } from "drizzle-orm/sqlite-core";
import { relations, sql, type InferSelectModel } from "drizzle-orm";
import { blob, integer, sqliteTable, text } from "drizzle-orm/sqlite-core";
import { nanoid } from "nanoid";
export const userModel = sqliteTable("users", {
@ -14,6 +13,7 @@ export const userModel = sqliteTable("users", {
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
export type UserModel = InferSelectModel<typeof userModel>;
export const serverModel = sqliteTable("servers", {
id: text("id")
@ -28,6 +28,11 @@ export const serverModel = sqliteTable("servers", {
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
export type ServerModel = InferSelectModel<typeof serverModel>;
export const serverRelations = relations(serverModel, ({ many }) => ({
databases: many(databaseModel),
}));
export const databaseModel = sqliteTable("databases", {
id: text("id")
@ -46,3 +51,58 @@ export const databaseModel = sqliteTable("databases", {
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
export type DatabaseModel = InferSelectModel<typeof databaseModel>;

// Each database row belongs to exactly one server.
export const databaseRelations = relations(databaseModel, ({ one }) => ({
  server: one(serverModel, {
    fields: [databaseModel.serverId],
    references: [serverModel.id],
  }),
}));

// A row in `backups` is a queued task: either a dump ("backup") or a
// restore from an earlier dump ("restore").
export const backupTypeEnum = ["backup", "restore"] as const;
// Task lifecycle states (see the scheduler's process-backup job).
export const backupStatusEnum = [
  "pending",
  "running",
  "success",
  "failed",
] as const;

export const backupModel = sqliteTable("backups", {
  id: text("id")
    .primaryKey()
    .$defaultFn(() => nanoid()),
  // Deleting/renaming a server or database cascades to its backups.
  serverId: text("server_id")
    .references(() => serverModel.id, {
      onUpdate: "cascade",
      onDelete: "cascade",
    })
    .notNull(),
  databaseId: text("database_id")
    .references(() => databaseModel.id, {
      onUpdate: "cascade",
      onDelete: "cascade",
    })
    .notNull(),
  type: text("type", { enum: backupTypeEnum }).default("backup"),
  status: text("status", { enum: backupStatusEnum }).default("pending"),
  // Output of the dump/restore command, or the failure message.
  output: text("output"),
  // Relative path of the dump file under the backup storage directory.
  key: text("key"),
  // sha256 checksum of the dump file, verified before a restore runs.
  hash: text("hash"),
  // Dump file size in bytes.
  size: integer("size"),
  createdAt: text("created_at")
    .notNull()
    .default(sql`CURRENT_TIMESTAMP`),
});

// A backup belongs to one server and one database.
export const backupRelations = relations(backupModel, ({ one }) => ({
  server: one(serverModel, {
    fields: [backupModel.serverId],
    references: [serverModel.id],
  }),
  database: one(databaseModel, {
    fields: [backupModel.databaseId],
    references: [databaseModel.id],
  }),
}));

View File

@ -1,9 +1,22 @@
import { databaseModel, serverModel, userModel } from "./models";
import {
backupModel,
backupRelations,
databaseModel,
databaseRelations,
serverModel,
serverRelations,
userModel,
} from "./models";
// Aggregated Drizzle schema: table models plus relation definitions.
// NOTE: the keys double as the db.query.* accessor names — `database`
// and `backup` are singular while `users`/`servers` are plural.
const schema = {
  users: userModel,
  servers: serverModel,
  database: databaseModel,
  backup: backupModel,
  serverRelations,
  databaseRelations,
  backupRelations,
};

export default schema;

View File

@ -1,5 +1,8 @@
import routers from "./routers";
import { initScheduler } from "./schedulers";
console.log("Starting app..");

// Start recurring jobs (backup queue processing) before exporting the app.
initScheduler();

// App entry point: the Hono router is the default export (presumably
// served by Bun — see the package.json "start" script).
export default routers;

View File

@ -0,0 +1,19 @@
import { type Context } from "hono";
import { HTTPException } from "hono/http-exception";
/**
 * Global Hono error handler: converts any thrown error into a JSON
 * response. HTTPException instances keep their declared status;
 * everything else is reported as a 400.
 */
export const handleError = (err: Error, c: Context) => {
  let statusCode: number = 400;
  if (err instanceof HTTPException) {
    statusCode = err.status;
  }

  return c.json(
    {
      success: false,
      // NOTE(review): plain Error instances have no enumerable own
      // properties, so this field serializes as {} for anything that is
      // not an HTTPException — consider exposing { name, message }.
      error: err,
      message: err.message || "An error occurred.", // fixed typo: "occured"
    },
    // Cast: Hono's json() expects its StatusCode union, not number.
    statusCode as never
  );
};

View File

@ -0,0 +1,29 @@
import {
createBackupSchema,
getAllBackupQuery,
restoreBackupSchema,
} from "@/schemas/backup.schema";
import BackupService from "@/services/backup.service";
import { zValidator } from "@hono/zod-validator";
import { Hono } from "hono";
/** HTTP routes for managing database backups and restores. */
const backupService = new BackupService();

const router = new Hono()
  // List backups, optionally filtered/paginated via query params.
  .get("/", zValidator("query", getAllBackupQuery), async (c) => {
    const result = await backupService.getAll(c.req.valid("query"));
    return c.json(result);
  })
  // Queue a new backup for a database.
  .post("/", zValidator("json", createBackupSchema), async (c) => {
    const result = await backupService.create(c.req.valid("json"));
    return c.json(result);
  })
  // Queue a restore from an existing backup.
  .post("/restore", zValidator("json", restoreBackupSchema), async (c) => {
    const result = await backupService.restore(c.req.valid("json"));
    return c.json(result);
  });

export default router;

View File

@ -1,17 +1,17 @@
import { Hono, type Context } from "hono";
import { Hono } from "hono";
import { handleError } from "@/middlewares/error-handler";
import server from "./server.router";
const handleError = (err: Error, c: Context) => {
return c.json({
success: false,
error: err,
message: err.message,
});
};
import backup from "./backup.router";
const routers = new Hono()
// Middlewares
.onError(handleError)
// App health check
.get("/health-check", (c) => c.text("OK"))
.route("/servers", server);
// Routes
.route("/servers", server)
.route("/backups", backup);
export default routers;

View File

@ -1,38 +1,56 @@
import { Hono } from "hono";
import { zValidator } from "@hono/zod-validator";
import { createServerSchema } from "@/schemas/server.schema";
import db from "@/db";
import { asc, eq } from "drizzle-orm";
import { checkServerSchema, createServerSchema } from "@/schemas/server.schema";
import { HTTPException } from "hono/http-exception";
import { serverModel } from "@/db/models";
import DatabaseUtil from "@/lib/database-util";
import ServerService from "@/services/server.service";
const serverService = new ServerService();
const router = new Hono()
.get("/", async (c) => {
const servers = await db.query.servers.findMany({
columns: { connection: false, ssh: false },
orderBy: asc(serverModel.createdAt),
});
return c.json(servers);
return c.json(await serverService.getAll());
})
.post("/", zValidator("json", createServerSchema), async (c) => {
const data = c.req.valid("json");
const isExist = await db.query.servers.findFirst({
where: eq(serverModel.name, data.name),
});
if (isExist) {
throw new HTTPException(400, { message: "Server name already exists" });
}
const dataValue = {
...data,
connection: data.connection ? JSON.stringify(data.connection) : null,
ssh: data.ssh ? JSON.stringify(data.ssh) : null,
};
const [result] = await db.insert(serverModel).values(dataValue).returning();
const result = await serverService.create(data);
return c.json(result);
})
.post("/check", zValidator("json", checkServerSchema), async (c) => {
const data = c.req.valid("json");
const db = new DatabaseUtil(data.connection);
try {
const databases = await db.getDatabases();
return c.json({ success: true, databases });
} catch (err) {
throw new HTTPException(400, {
message: "Cannot connect to the database.",
});
}
})
.get("/check/:id", async (c) => {
const { id } = c.req.param();
const server = await serverService.getOrFail(id);
const db = new DatabaseUtil(server.connection);
try {
const databases = await db.getDatabases();
return c.json({ success: true, databases });
} catch (err) {
throw new HTTPException(400, {
message: "Cannot connect to the database.",
});
}
})
.get("/:id", async (c) => {
const { id } = c.req.param();
const server = await serverService.getOrFail(id);
return c.json(server);
});
export default router;

View File

@ -0,0 +1,6 @@
import scheduler from "node-schedule";
import { processBackup } from "./process-backup";
/**
 * Register recurring jobs. Currently: run the backup queue processor on a
 * 6-field cron expression (seconds granularity) — every 10 seconds.
 */
export const initScheduler = () => {
  const EVERY_TEN_SECONDS = "*/10 * * * * *";
  scheduler.scheduleJob(EVERY_TEN_SECONDS, processBackup);
};

View File

@ -0,0 +1,118 @@
import db from "@/db";
import fs from "fs";
import path from "path";
import { backupModel, databaseModel } from "@/db/models";
import DatabaseUtil from "@/lib/database-util";
import ServerService from "@/services/server.service";
import { and, asc, eq, sql } from "drizzle-orm";
import { BACKUP_DIR } from "@/consts";
import { mkdir } from "@/utility/utils";
import { hashFile } from "@/utility/hash";
// Guards against overlapping scheduler runs (single-process flag).
let isRunning = false;

const serverService = new ServerService();

/**
 * Execute one queued task (backup or restore), moving its row through
 * pending -> running -> success/failed.
 */
const runBackup = async (task: PendingTasks[number]) => {
  try {
    // Mark the task as picked up before doing any work.
    await db
      .update(backupModel)
      .set({ status: "running" })
      .where(eq(backupModel.id, task.id));

    // `task.server` only carries the connection/ssh columns; parse()
    // decodes their JSON-string values. NOTE(review): the `as never`
    // cast papers over the narrowed column type — consider a safer cast.
    const server = serverService.parse(task.server as never);
    const dbName = task.database.name;
    const dbUtil = new DatabaseUtil(server.connection);

    if (task.type === "backup") {
      // Storage key: <host>/<database>/<timestamp>.tar under BACKUP_DIR.
      const key = path.join(
        server.connection.host,
        dbName,
        `${Date.now()}.tar`
      );
      const outFile = path.join(BACKUP_DIR, key);
      mkdir(path.dirname(outFile));

      // Run database dump command
      const output = await dbUtil.dump(dbName, outFile);

      // Get file stats and file checksum
      const fileStats = fs.statSync(outFile);
      const sha256Hash = await hashFile(outFile, "sha256");

      // Record success and bump the database's last-backup timestamp
      // atomically in one transaction.
      await db.transaction(async (tx) => {
        await tx
          .update(backupModel)
          .set({
            status: "success",
            output,
            key,
            hash: sha256Hash,
            size: fileStats.size,
          })
          .where(eq(backupModel.id, task.id));
        await tx
          .update(databaseModel)
          .set({ lastBackupAt: sql`CURRENT_TIMESTAMP` })
          .where(eq(databaseModel.id, task.databaseId));
      });
    }

    if (task.type === "restore") {
      // A restore must point at an existing, unmodified backup file.
      if (!task.key) {
        throw new Error("Missing backup file key!");
      }
      const filePath = path.join(BACKUP_DIR, task.key);
      if (!fs.existsSync(filePath)) {
        throw new Error("Backup file not found!");
      }
      // Verify integrity against the checksum captured at backup time.
      const sha256Hash = await hashFile(filePath, "sha256");
      if (sha256Hash !== task.hash) {
        throw new Error("Backup file hash mismatch!");
      }
      const output = await dbUtil.restore(filePath);
      await db
        .update(backupModel)
        .set({ status: "success", output })
        .where(eq(backupModel.id, task.id));
    }
  } catch (err) {
    // Any failure (including the guard throws above) marks the task
    // failed and stores the error message as its output.
    const output = (err as Error)?.message || "An error occured.";
    await db
      .update(backupModel)
      .set({ status: "failed", output })
      .where(eq(backupModel.id, task.id));
  }
};
/**
 * Load every task still waiting to run, oldest first, joined with its
 * server's credentials and its target database's name.
 */
const getPendingTasks = async () => {
  return db.query.backup.findMany({
    where: (t) => and(eq(t.status, "pending")),
    orderBy: (t) => asc(t.createdAt),
    with: {
      server: { columns: { connection: true, ssh: true } },
      database: { columns: { name: true } },
    },
  });
};

// Element type of the pending-task result set (consumed by runBackup).
type PendingTasks = Awaited<ReturnType<typeof getPendingTasks>>;
/**
 * Drain the pending task queue. Invoked on a fixed schedule, so the
 * module-level `isRunning` flag prevents overlapping runs.
 */
export const processBackup = async () => {
  if (isRunning) return;
  isRunning = true;

  try {
    const queue = await getPendingTasks();
    await Promise.all(queue.map(runBackup));
  } finally {
    // BUG FIX: reset the flag even when fetching or processing throws;
    // previously an exception left isRunning stuck at true, permanently
    // disabling the scheduler until restart.
    isRunning = false;
  }
};

View File

@ -0,0 +1,25 @@
import { z } from "zod";
// Reusable validator for nanoid-based identifiers.
const nanoidString = z.string().nanoid();

/** Optional query filters for listing backups (pagination + scoping). */
export const getAllBackupQuery = z
  .object({
    page: z.coerce.number().int(),
    limit: z.coerce.number().int(),
    serverId: nanoidString,
    databaseId: nanoidString,
  })
  .partial()
  .optional();
export type GetAllBackupQuery = z.infer<typeof getAllBackupQuery>;

/** Payload for queueing a new backup of a database. */
export const createBackupSchema = z.object({
  databaseId: nanoidString,
});
export type CreateBackupSchema = z.infer<typeof createBackupSchema>;

/** Payload for queueing a restore from an existing backup. */
export const restoreBackupSchema = z.object({
  backupId: nanoidString,
});
export type RestoreBackupSchema = z.infer<typeof restoreBackupSchema>;

View File

@ -1,36 +1,39 @@
import { z } from "zod";
export const serverTypeEnum = ["postgres"] as const;
const sshSchema = z
.object({
host: z.string(),
port: z.number().optional(),
user: z.string(),
pass: z.string().optional(),
privateKey: z.string().optional(),
})
.optional()
.nullable();
export const serverSchema = z.object({
name: z.string().min(1),
ssh: z
.object({
host: z.string(),
port: z.number().optional(),
user: z.string(),
pass: z.string().optional(),
privateKey: z.string().optional(),
})
.optional()
.nullable(),
isActive: z.boolean().optional(),
const postgresSchema = z.object({
type: z.literal("postgres"),
host: z.string(),
port: z.number().optional(),
user: z.string(),
pass: z.string(),
});
const postgresSchema = serverSchema.merge(
z.object({
type: z.literal("postgres"),
connection: z.object({
host: z.string(),
port: z.number().optional(),
user: z.string(),
pass: z.string().optional(),
}),
})
);
export const connectionSchema = z.discriminatedUnion("type", [postgresSchema]);
export const createServerSchema = z.discriminatedUnion("type", [
postgresSchema,
]);
export const createServerSchema = z.object({
name: z.string().min(1),
ssh: sshSchema,
connection: connectionSchema,
isActive: z.boolean().optional(),
databases: z.string().array().min(1),
});
export type CreateServerSchema = z.infer<typeof createServerSchema>;
export const checkServerSchema = z.object({
ssh: sshSchema,
connection: connectionSchema,
});
export type CheckServerSchema = z.infer<typeof checkServerSchema>;

View File

@ -0,0 +1,104 @@
import db from "@/db";
import { backupModel, serverModel } from "@/db/models";
import type {
CreateBackupSchema,
GetAllBackupQuery,
RestoreBackupSchema,
} from "@/schemas/backup.schema";
import { and, desc, eq, inArray } from "drizzle-orm";
import DatabaseService from "./database.service";
import { HTTPException } from "hono/http-exception";
export default class BackupService {
  private databaseService = new DatabaseService();

  /**
   * Get all backups, optionally filtered by server/database and paginated
   * (defaults: page 1, 10 per page), newest first.
   */
  async getAll(query: GetAllBackupQuery = {}) {
    const { serverId, databaseId } = query;
    const page = query.page || 1;
    const limit = query.limit || 10;

    const backups = await db.query.backup.findMany({
      where: (i) =>
        and(
          serverId ? eq(i.serverId, serverId) : undefined,
          databaseId ? eq(i.databaseId, databaseId) : undefined
        ),
      // BUG FIX: previously ordered by serverModel.createdAt — a column
      // of an unrelated, un-joined table; order by the backups' own
      // creation time instead.
      orderBy: desc(backupModel.createdAt),
      limit,
      offset: (page - 1) * limit,
    });

    return backups;
  }

  /**
   * Fetch a backup by id.
   * @throws HTTPException 404 when no backup matches.
   */
  async getOrFail(id: string) {
    const backup = await db.query.backup.findFirst({
      where: eq(backupModel.id, id),
    });

    if (!backup) {
      throw new HTTPException(404, { message: "Backup not found." });
    }

    return backup;
  }

  /**
   * Queue a new backup task for a database (picked up by the scheduler).
   * @throws HTTPException 400 when a backup/restore is already in flight.
   */
  async create(data: CreateBackupSchema) {
    const database = await this.databaseService.getOrFail(data.databaseId);
    await this.checkPendingBackup(database.id);

    const [result] = await db
      .insert(backupModel)
      .values({
        type: "backup",
        serverId: database.serverId,
        databaseId: database.id,
      })
      .returning();

    return result;
  }

  /**
   * Queue a restore task that replays an existing backup's file.
   * @throws HTTPException 400 when the backup has no file key or when a
   *         task for that database is already in flight.
   */
  async restore(data: RestoreBackupSchema) {
    const backup = await this.getOrFail(data.backupId);
    await this.checkPendingBackup(backup.databaseId);

    if (!backup.key) {
      throw new HTTPException(400, {
        message: "Cannot restore backup without file key.",
      });
    }

    const [result] = await db
      .insert(backupModel)
      .values({
        type: "restore",
        serverId: backup.serverId,
        databaseId: backup.databaseId,
        // Carry over the source file's key/hash/size so the scheduler can
        // locate and integrity-check it before restoring.
        key: backup.key,
        hash: backup.hash,
        size: backup.size,
      })
      .returning();

    return result;
  }

  /**
   * Reject if the database already has a pending or running task,
   * serializing backup/restore work per database.
   */
  async checkPendingBackup(databaseId: string) {
    const hasOngoingBackup = await db.query.backup.findFirst({
      where: and(
        eq(backupModel.databaseId, databaseId),
        inArray(backupModel.status, ["pending", "running"])
      ),
    });

    if (hasOngoingBackup) {
      throw new HTTPException(400, {
        message: "There is already an ongoing backup for this database",
      });
    }
  }
}

View File

@ -0,0 +1,23 @@
import db from "@/db";
import { databaseModel } from "@/db/models";
import { desc, eq } from "drizzle-orm";
import { HTTPException } from "hono/http-exception";
/** Data-access helpers for the `databases` table. */
export default class DatabaseService {
  /** List every database, newest first. */
  async getAll() {
    return db.query.database.findMany({
      orderBy: desc(databaseModel.createdAt),
    });
  }

  /**
   * Look up a database by id.
   * @throws HTTPException 404 when no row matches.
   */
  async getOrFail(id: string) {
    const database = await db.query.database.findFirst({
      where: eq(databaseModel.id, id),
    });

    if (!database) {
      throw new HTTPException(404, { message: "Database not found." });
    }

    return database;
  }
}

View File

@ -0,0 +1,85 @@
import db from "@/db";
import { databaseModel, serverModel, type ServerModel } from "@/db/models";
import type { CreateServerSchema } from "@/schemas/server.schema";
import { asc, desc, eq } from "drizzle-orm";
import { HTTPException } from "hono/http-exception";
export default class ServerService {
  /**
   * List all servers (credentials omitted) with their databases;
   * servers oldest-first, databases newest-first.
   */
  async getAll() {
    const servers = await db.query.servers.findMany({
      columns: { connection: false, ssh: false },
      orderBy: asc(serverModel.createdAt),
      with: {
        databases: {
          columns: { id: true, name: true, lastBackupAt: true },
          orderBy: desc(databaseModel.createdAt),
        },
      },
    });
    return servers;
  }

  /**
   * Fetch a server by id with its connection/ssh JSON columns decoded.
   * @throws HTTPException 404 when the server does not exist.
   */
  async getOrFail(id: string) {
    const server = await db.query.servers.findFirst({
      where: eq(serverModel.id, id),
    });
    if (!server) {
      throw new HTTPException(404, { message: "Server not found." });
    }
    return this.parse(server);
  }

  /** Fetch a raw server row (credentials still JSON strings) with its databases. */
  async getById(id: string) {
    const server = await db.query.servers.findFirst({
      where: eq(serverModel.id, id),
      with: {
        databases: true,
      },
    });
    return server;
  }

  /**
   * Create a server plus its database rows in one transaction.
   * @throws HTTPException 400 when the server name is already taken.
   */
  async create(data: CreateServerSchema) {
    return db.transaction(async (tx) => {
      const isExist = await tx.query.servers.findFirst({
        where: eq(serverModel.name, data.name),
      });
      if (isExist) {
        throw new HTTPException(400, { message: "Server name already exists" });
      }

      const dataValue = {
        ...data,
        type: data.connection.type,
        // connection/ssh are persisted as JSON strings in text columns.
        connection: data.connection ? JSON.stringify(data.connection) : null,
        ssh: data.ssh ? JSON.stringify(data.ssh) : null,
      };

      // Create server
      const [result] = await tx
        .insert(serverModel)
        .values(dataValue)
        .returning();

      // Create databases
      await tx.insert(databaseModel).values(
        data.databases.map((i) => ({
          serverId: result.id,
          name: i,
        }))
      );

      // BUG FIX: return the inserted row (which carries the generated id)
      // instead of echoing the request payload back to the caller.
      return result;
    });
  }

  /** Decode the JSON-string `connection` and `ssh` columns of a server row. */
  parse(data: ServerModel) {
    const result = {
      ...data,
      connection: data.connection ? JSON.parse(data.connection) : null,
      ssh: data.ssh ? JSON.parse(data.ssh) : null,
    };
    return result;
  }
}

View File

@ -0,0 +1,16 @@
import crypto from "crypto";
import fs from "fs";
/**
 * Compute the hex-encoded digest of a file by streaming its contents,
 * so large dumps are hashed without loading them fully into memory.
 *
 * @param filePath  Path of the file to hash.
 * @param algorithm Digest algorithm: "md5" or "sha256".
 * @returns Promise resolving to the lowercase hex digest; rejects on
 *          any read error (e.g. missing file).
 */
export const hashFile = (
  filePath: string,
  algorithm: "md5" | "sha256"
): Promise<string> =>
  new Promise((resolve, reject) => {
    const digest = crypto.createHash(algorithm);
    fs.createReadStream(filePath)
      .on("data", (chunk) => digest.update(chunk))
      .on("end", () => resolve(digest.digest("hex")))
      .on("error", (error) => reject(error));
  });

BIN
bun.lockb

Binary file not shown.

View File

@ -8,10 +8,6 @@
},
"private": false,
"license": "MIT",
"workspaces": [
"backend",
"frontend"
],
"scripts": {
"dev": "concurrently \"cd backend && pnpm dev\" \"cd frontend && pnpm dev\""
},