diff --git a/backend/package.json b/backend/package.json
index 701a105..66fb1b0 100644
--- a/backend/package.json
+++ b/backend/package.json
@@ -9,10 +9,11 @@
     "start": "bun dist/main.js",
     "generate": "drizzle-kit generate",
     "migrate": "bun src/db/migrate.ts",
-    "reset": "rm -f storage/database.db && bun run migrate"
+    "reset": "rm -rf storage && bun run migrate"
   },
   "devDependencies": {
     "@types/bun": "latest",
+    "@types/node-schedule": "^2.1.7",
     "drizzle-kit": "^0.21.0"
   },
   "peerDependencies": {
@@ -23,6 +24,7 @@
     "drizzle-orm": "^0.30.10",
     "hono": "^4.3.4",
     "nanoid": "^5.0.7",
+    "node-schedule": "^2.1.1",
     "zod": "^3.23.8"
   }
 }
diff --git a/backend/src/db/migrations/0000_square_agent_brand.sql b/backend/src/db/migrations/0000_square_agent_brand.sql
new file mode 100644
index 0000000..c935800
--- /dev/null
+++ b/backend/src/db/migrations/0000_square_agent_brand.sql
@@ -0,0 +1,44 @@
+CREATE TABLE `backups` (
+  `id` text PRIMARY KEY NOT NULL,
+  `server_id` text NOT NULL,
+  `database_id` text NOT NULL,
+  `type` text DEFAULT 'backup',
+  `status` text DEFAULT 'pending',
+  `output` text,
+  `key` text,
+  `hash` text,
+  `size` integer,
+  `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL,
+  FOREIGN KEY (`server_id`) REFERENCES `servers`(`id`) ON UPDATE cascade ON DELETE cascade,
+  FOREIGN KEY (`database_id`) REFERENCES `databases`(`id`) ON UPDATE cascade ON DELETE cascade
+);
+--> statement-breakpoint
+CREATE TABLE `databases` (
+  `id` text PRIMARY KEY NOT NULL,
+  `server_id` text NOT NULL,
+  `name` text NOT NULL,
+  `is_active` integer DEFAULT true NOT NULL,
+  `last_backup_at` text,
+  `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL,
+  FOREIGN KEY (`server_id`) REFERENCES `servers`(`id`) ON UPDATE cascade ON DELETE cascade
+);
+--> statement-breakpoint
+CREATE TABLE `servers` (
+  `id` text PRIMARY KEY NOT NULL,
+  `name` text NOT NULL,
+  `type` text NOT NULL,
+  `connection` text,
+  `ssh` text,
+  `is_active` integer DEFAULT true NOT NULL,
+  `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL
+);
+--> statement-breakpoint
+CREATE TABLE `users` (
+  `id` text PRIMARY KEY NOT NULL,
+  `username` text NOT NULL,
+  `password` text NOT NULL,
+  `is_active` integer DEFAULT true NOT NULL,
+  `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL
+);
+--> statement-breakpoint
+CREATE UNIQUE INDEX `users_username_unique` ON `users` (`username`);
\ No newline at end of file
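Note: the "migrate" script above points at src/db/migrate.ts, which is not part of this diff. A minimal sketch of what such a runner typically looks like with Drizzle on Bun — the file name and storage path are assumptions taken from the scripts above, not code from this PR:

// Hypothetical src/db/migrate.ts
import { Database } from "bun:sqlite";
import { drizzle } from "drizzle-orm/bun-sqlite";
import { migrate } from "drizzle-orm/bun-sqlite/migrator";

const sqlite = new Database("storage/database.db");
const db = drizzle(sqlite);

// Applies any migrations in src/db/migrations that have not run yet.
migrate(db, { migrationsFolder: "src/db/migrations" });
console.log("Migrations applied.");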
"output", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "key": { + "name": "key", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "hash": { + "name": "hash", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "size": { + "name": "size", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "CURRENT_TIMESTAMP" + } + }, + "indexes": {}, + "foreignKeys": { + "backups_server_id_servers_id_fk": { + "name": "backups_server_id_servers_id_fk", + "tableFrom": "backups", + "tableTo": "servers", + "columnsFrom": [ + "server_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "cascade" + }, + "backups_database_id_databases_id_fk": { + "name": "backups_database_id_databases_id_fk", + "tableFrom": "backups", + "tableTo": "databases", + "columnsFrom": [ + "database_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "cascade" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + }, + "databases": { + "name": "databases", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "server_id": { + "name": "server_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "is_active": { + "name": "is_active", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "last_backup_at": { + "name": "last_backup_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "CURRENT_TIMESTAMP" + } + }, + "indexes": {}, + "foreignKeys": { + "databases_server_id_servers_id_fk": { + "name": "databases_server_id_servers_id_fk", + "tableFrom": "databases", + "tableTo": "servers", + "columnsFrom": [ + "server_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "cascade" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + }, + "servers": { + "name": "servers", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "connection": { + "name": "connection", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "ssh": { + "name": "ssh", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "is_active": { + "name": "is_active", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "CURRENT_TIMESTAMP" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + 
"uniqueConstraints": {} + }, + "users": { + "name": "users", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "username": { + "name": "username", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "password": { + "name": "password", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "is_active": { + "name": "is_active", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "CURRENT_TIMESTAMP" + } + }, + "indexes": { + "users_username_unique": { + "name": "users_username_unique", + "columns": [ + "username" + ], + "isUnique": true + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + } + }, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + } +} \ No newline at end of file diff --git a/backend/src/db/migrations/meta/_journal.json b/backend/src/db/migrations/meta/_journal.json new file mode 100644 index 0000000..7d2711a --- /dev/null +++ b/backend/src/db/migrations/meta/_journal.json @@ -0,0 +1,13 @@ +{ + "version": "6", + "dialect": "sqlite", + "entries": [ + { + "idx": 0, + "version": "6", + "when": 1715367813285, + "tag": "0000_square_agent_brand", + "breakpoints": true + } + ] +} \ No newline at end of file diff --git a/backend/src/db/models.ts b/backend/src/db/models.ts index db246d7..fd95e4b 100644 --- a/backend/src/db/models.ts +++ b/backend/src/db/models.ts @@ -1,6 +1,5 @@ -import type { DatabaseConfig } from "@/types/database.types"; -import { sql } from "drizzle-orm"; -import { integer, sqliteTable, text } from "drizzle-orm/sqlite-core"; +import { relations, sql, type InferSelectModel } from "drizzle-orm"; +import { blob, integer, sqliteTable, text } from "drizzle-orm/sqlite-core"; import { nanoid } from "nanoid"; export const userModel = sqliteTable("users", { @@ -14,6 +13,7 @@ export const userModel = sqliteTable("users", { .notNull() .default(sql`CURRENT_TIMESTAMP`), }); +export type UserModel = InferSelectModel; export const serverModel = sqliteTable("servers", { id: text("id") @@ -28,6 +28,11 @@ export const serverModel = sqliteTable("servers", { .notNull() .default(sql`CURRENT_TIMESTAMP`), }); +export type ServerModel = InferSelectModel; + +export const serverRelations = relations(serverModel, ({ many }) => ({ + databases: many(databaseModel), +})); export const databaseModel = sqliteTable("databases", { id: text("id") @@ -46,3 +51,58 @@ export const databaseModel = sqliteTable("databases", { .notNull() .default(sql`CURRENT_TIMESTAMP`), }); +export type DatabaseModel = InferSelectModel; + +export const databaseRelations = relations(databaseModel, ({ one }) => ({ + server: one(serverModel, { + fields: [databaseModel.serverId], + references: [serverModel.id], + }), +})); + +export const backupTypeEnum = ["backup", "restore"] as const; + +export const backupStatusEnum = [ + "pending", + "running", + "success", + "failed", +] as const; + +export const backupModel = sqliteTable("backups", { + id: text("id") + .primaryKey() + .$defaultFn(() => nanoid()), + serverId: text("server_id") + .references(() => serverModel.id, { + onUpdate: "cascade", + onDelete: "cascade", + }) + .notNull(), + databaseId: text("database_id") + .references(() => databaseModel.id, { + 
onUpdate: "cascade", + onDelete: "cascade", + }) + .notNull(), + type: text("type", { enum: backupTypeEnum }).default("backup"), + status: text("status", { enum: backupStatusEnum }).default("pending"), + output: text("output"), + key: text("key"), + hash: text("hash"), + size: integer("size"), + createdAt: text("created_at") + .notNull() + .default(sql`CURRENT_TIMESTAMP`), +}); + +export const backupRelations = relations(backupModel, ({ one }) => ({ + server: one(serverModel, { + fields: [backupModel.serverId], + references: [serverModel.id], + }), + database: one(databaseModel, { + fields: [backupModel.databaseId], + references: [databaseModel.id], + }), +})); diff --git a/backend/src/db/schema.ts b/backend/src/db/schema.ts index 934c779..d2c9d37 100644 --- a/backend/src/db/schema.ts +++ b/backend/src/db/schema.ts @@ -1,9 +1,22 @@ -import { databaseModel, serverModel, userModel } from "./models"; +import { + backupModel, + backupRelations, + databaseModel, + databaseRelations, + serverModel, + serverRelations, + userModel, +} from "./models"; const schema = { users: userModel, servers: serverModel, database: databaseModel, + backup: backupModel, + + serverRelations, + databaseRelations, + backupRelations, }; export default schema; diff --git a/backend/src/main.ts b/backend/src/main.ts index 25c5d17..d7e905f 100644 --- a/backend/src/main.ts +++ b/backend/src/main.ts @@ -1,5 +1,8 @@ import routers from "./routers"; +import { initScheduler } from "./schedulers"; console.log("Starting app.."); +initScheduler(); + export default routers; diff --git a/backend/src/middlewares/error-handler.ts b/backend/src/middlewares/error-handler.ts new file mode 100644 index 0000000..88c4928 --- /dev/null +++ b/backend/src/middlewares/error-handler.ts @@ -0,0 +1,19 @@ +import { type Context } from "hono"; +import { HTTPException } from "hono/http-exception"; + +export const handleError = (err: Error, c: Context) => { + let statusCode: number = 400; + + if (err instanceof HTTPException) { + statusCode = err.status; + } + + return c.json( + { + success: false, + error: err, + message: err.message || "An error occured.", + }, + statusCode as never + ); +}; diff --git a/backend/src/routers/backup.router.ts b/backend/src/routers/backup.router.ts new file mode 100644 index 0000000..1adab5c --- /dev/null +++ b/backend/src/routers/backup.router.ts @@ -0,0 +1,29 @@ +import { + createBackupSchema, + getAllBackupQuery, + restoreBackupSchema, +} from "@/schemas/backup.schema"; +import BackupService from "@/services/backup.service"; +import { zValidator } from "@hono/zod-validator"; +import { Hono } from "hono"; + +const backupService = new BackupService(); +const router = new Hono() + + .get("/", zValidator("query", getAllBackupQuery), async (c) => { + const query = c.req.valid("query"); + const result = await backupService.getAll(query); + return c.json(result); + }) + + .post("/", zValidator("json", createBackupSchema), async (c) => { + const body = c.req.valid("json"); + return c.json(await backupService.create(body)); + }) + + .post("/restore", zValidator("json", restoreBackupSchema), async (c) => { + const body = c.req.valid("json"); + return c.json(await backupService.restore(body)); + }); + +export default router; diff --git a/backend/src/routers/index.ts b/backend/src/routers/index.ts index df5cecf..40cb87c 100644 --- a/backend/src/routers/index.ts +++ b/backend/src/routers/index.ts @@ -1,17 +1,17 @@ -import { Hono, type Context } from "hono"; +import { Hono } from "hono"; +import { handleError } from 
"@/middlewares/error-handler"; import server from "./server.router"; - -const handleError = (err: Error, c: Context) => { - return c.json({ - success: false, - error: err, - message: err.message, - }); -}; +import backup from "./backup.router"; const routers = new Hono() + // Middlewares .onError(handleError) + + // App health check .get("/health-check", (c) => c.text("OK")) - .route("/servers", server); + + // Routes + .route("/servers", server) + .route("/backups", backup); export default routers; diff --git a/backend/src/routers/server.router.ts b/backend/src/routers/server.router.ts index e9de634..9901452 100644 --- a/backend/src/routers/server.router.ts +++ b/backend/src/routers/server.router.ts @@ -1,38 +1,56 @@ import { Hono } from "hono"; import { zValidator } from "@hono/zod-validator"; -import { createServerSchema } from "@/schemas/server.schema"; -import db from "@/db"; -import { asc, eq } from "drizzle-orm"; +import { checkServerSchema, createServerSchema } from "@/schemas/server.schema"; import { HTTPException } from "hono/http-exception"; -import { serverModel } from "@/db/models"; +import DatabaseUtil from "@/lib/database-util"; +import ServerService from "@/services/server.service"; +const serverService = new ServerService(); const router = new Hono() .get("/", async (c) => { - const servers = await db.query.servers.findMany({ - columns: { connection: false, ssh: false }, - orderBy: asc(serverModel.createdAt), - }); - return c.json(servers); + return c.json(await serverService.getAll()); }) .post("/", zValidator("json", createServerSchema), async (c) => { const data = c.req.valid("json"); - const isExist = await db.query.servers.findFirst({ - where: eq(serverModel.name, data.name), - }); - if (isExist) { - throw new HTTPException(400, { message: "Server name already exists" }); - } - - const dataValue = { - ...data, - connection: data.connection ? JSON.stringify(data.connection) : null, - ssh: data.ssh ? 
diff --git a/backend/src/schedulers/index.ts b/backend/src/schedulers/index.ts
new file mode 100644
index 0000000..abd646b
--- /dev/null
+++ b/backend/src/schedulers/index.ts
@@ -0,0 +1,6 @@
+import scheduler from "node-schedule";
+import { processBackup } from "./process-backup";
+
+export const initScheduler = () => {
+  // Six-field cron spec (with seconds): run every 10 seconds.
+  scheduler.scheduleJob("*/10 * * * * *", processBackup);
+};
diff --git a/backend/src/schedulers/process-backup.ts b/backend/src/schedulers/process-backup.ts
new file mode 100644
index 0000000..ad11c9b
--- /dev/null
+++ b/backend/src/schedulers/process-backup.ts
@@ -0,0 +1,118 @@
+import db from "@/db";
+import fs from "fs";
+import path from "path";
+import { backupModel, databaseModel } from "@/db/models";
+import DatabaseUtil from "@/lib/database-util";
+import ServerService from "@/services/server.service";
+import { asc, eq, sql } from "drizzle-orm";
+import { BACKUP_DIR } from "@/consts";
+import { mkdir } from "@/utility/utils";
+import { hashFile } from "@/utility/hash";
+
+let isRunning = false;
+const serverService = new ServerService();
+
+const runBackup = async (task: PendingTasks[number]) => {
+  try {
+    await db
+      .update(backupModel)
+      .set({ status: "running" })
+      .where(eq(backupModel.id, task.id));
+
+    const server = serverService.parse(task.server as never);
+    const dbName = task.database.name;
+    const dbUtil = new DatabaseUtil(server.connection);
+
+    if (task.type === "backup") {
+      const key = path.join(
+        server.connection.host,
+        dbName,
+        `${Date.now()}.tar`
+      );
+      const outFile = path.join(BACKUP_DIR, key);
+      mkdir(path.dirname(outFile));
+
+      // Run database dump command
+      const output = await dbUtil.dump(dbName, outFile);
+
+      // Get file stats and file checksum
+      const fileStats = fs.statSync(outFile);
+      const sha256Hash = await hashFile(outFile, "sha256");
+
+      await db.transaction(async (tx) => {
+        await tx
+          .update(backupModel)
+          .set({
+            status: "success",
+            output,
+            key,
+            hash: sha256Hash,
+            size: fileStats.size,
+          })
+          .where(eq(backupModel.id, task.id));
+
+        await tx
+          .update(databaseModel)
+          .set({ lastBackupAt: sql`CURRENT_TIMESTAMP` })
+          .where(eq(databaseModel.id, task.databaseId));
+      });
+    }
+
+    if (task.type === "restore") {
+      if (!task.key) {
+        throw new Error("Missing backup file key!");
+      }
+
+      const filePath = path.join(BACKUP_DIR, task.key);
+      if (!fs.existsSync(filePath)) {
+        throw new Error("Backup file not found!");
+      }
+
+      const sha256Hash = await hashFile(filePath, "sha256");
+      if (sha256Hash !== task.hash) {
+        throw new Error("Backup file hash mismatch!");
+      }
+
+      const output = await dbUtil.restore(filePath);
+      await db
+        .update(backupModel)
+        .set({ status: "success", output })
+        .where(eq(backupModel.id, task.id));
+    }
+  } catch (err) {
+    const output = (err as Error)?.message || "An error occurred.";
+    await db
+      .update(backupModel)
+      .set({ status: "failed", output })
+      .where(eq(backupModel.id, task.id));
+  }
+};
+
+const getPendingTasks = async () => {
+  const queue = await db.query.backup.findMany({
+    where: (i) => eq(i.status, "pending"),
+    orderBy: (i) => asc(i.createdAt),
+    with: {
+      server: {
+        columns: { connection: true, ssh: true },
+      },
+      database: {
+        columns: { name: true },
+      },
+    },
+  });
+
+  return queue;
+};
+
+type PendingTasks = Awaited<ReturnType<typeof getPendingTasks>>;
+
+export const processBackup = async () => {
+  if (isRunning) return;
+  isRunning = true;
+
+  try {
+    const queue = await getPendingTasks();
+    await Promise.all(queue.map(runBackup));
+  } finally {
+    // Always release the lock, even if fetching tasks throws,
+    // so one failure does not stall the queue forever.
+    isRunning = false;
+  }
+};
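Note: the isRunning flag prevents overlapping runs, since the job fires every 10 seconds while a dump can take longer. node-schedule also keeps its timers alive until cancelled, so a long-lived process can drain jobs on shutdown. A hedged sketch — the signal handling is an assumption, not part of this diff:

import scheduler from "node-schedule";

process.on("SIGTERM", async () => {
  // Cancels all scheduled jobs and waits for running callbacks to finish.
  await scheduler.gracefulShutdown();
  process.exit(0);
});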
diff --git a/backend/src/schemas/backup.schema.ts b/backend/src/schemas/backup.schema.ts
new file mode 100644
index 0000000..b4a31e2
--- /dev/null
+++ b/backend/src/schemas/backup.schema.ts
@@ -0,0 +1,25 @@
+import { z } from "zod";
+
+export const getAllBackupQuery = z
+  .object({
+    page: z.coerce.number().int(),
+    limit: z.coerce.number().int(),
+    serverId: z.string().nanoid(),
+    databaseId: z.string().nanoid(),
+  })
+  .partial()
+  .optional();
+
+export type GetAllBackupQuery = z.infer<typeof getAllBackupQuery>;
+
+export const createBackupSchema = z.object({
+  databaseId: z.string().nanoid(),
+});
+
+export type CreateBackupSchema = z.infer<typeof createBackupSchema>;
+
+export const restoreBackupSchema = z.object({
+  backupId: z.string().nanoid(),
+});
+
+export type RestoreBackupSchema = z.infer<typeof restoreBackupSchema>;
diff --git a/backend/src/schemas/server.schema.ts b/backend/src/schemas/server.schema.ts
index e79b077..190e2ce 100644
--- a/backend/src/schemas/server.schema.ts
+++ b/backend/src/schemas/server.schema.ts
@@ -1,36 +1,39 @@
 import { z } from "zod";
 
-export const serverTypeEnum = ["postgres"] as const;
+const sshSchema = z
+  .object({
+    host: z.string(),
+    port: z.number().optional(),
+    user: z.string(),
+    pass: z.string().optional(),
+    privateKey: z.string().optional(),
+  })
+  .optional()
+  .nullable();
 
-export const serverSchema = z.object({
-  name: z.string().min(1),
-  ssh: z
-    .object({
-      host: z.string(),
-      port: z.number().optional(),
-      user: z.string(),
-      pass: z.string().optional(),
-      privateKey: z.string().optional(),
-    })
-    .optional()
-    .nullable(),
-  isActive: z.boolean().optional(),
+const postgresSchema = z.object({
+  type: z.literal("postgres"),
+  host: z.string(),
+  port: z.number().optional(),
+  user: z.string(),
+  pass: z.string(),
 });
 
-const postgresSchema = serverSchema.merge(
-  z.object({
-    type: z.literal("postgres"),
-    connection: z.object({
-      host: z.string(),
-      port: z.number().optional(),
-      user: z.string(),
-      pass: z.string().optional(),
-    }),
-  })
-);
+export const connectionSchema = z.discriminatedUnion("type", [postgresSchema]);
 
-export const createServerSchema = z.discriminatedUnion("type", [
-  postgresSchema,
-]);
+export const createServerSchema = z.object({
+  name: z.string().min(1),
+  ssh: sshSchema,
+  connection: connectionSchema,
+  isActive: z.boolean().optional(),
+  databases: z.string().array().min(1),
+});
 
 export type CreateServerSchema = z.infer<typeof createServerSchema>;
+
+export const checkServerSchema = z.object({
+  ssh: sshSchema,
+  connection: connectionSchema,
+});
+
+export type CheckServerSchema = z.infer<typeof checkServerSchema>;
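Note: after this change a server payload nests its credentials under connection, discriminated on type. For reference, a request body that passes createServerSchema — all values made up:

import { createServerSchema } from "@/schemas/server.schema";

const payload = createServerSchema.parse({
  name: "staging-pg",
  connection: {
    type: "postgres",
    host: "10.0.0.5",
    port: 5432,
    user: "postgres",
    pass: "secret",
  },
  ssh: null,
  databases: ["app", "analytics"], // at least one database to track
});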
diff --git a/backend/src/services/backup.service.ts b/backend/src/services/backup.service.ts
new file mode 100644
index 0000000..75d1365
--- /dev/null
+++ b/backend/src/services/backup.service.ts
@@ -0,0 +1,104 @@
+import db from "@/db";
+import { backupModel } from "@/db/models";
+import type {
+  CreateBackupSchema,
+  GetAllBackupQuery,
+  RestoreBackupSchema,
+} from "@/schemas/backup.schema";
+import { and, desc, eq, inArray } from "drizzle-orm";
+import DatabaseService from "./database.service";
+import { HTTPException } from "hono/http-exception";
+
+export default class BackupService {
+  private databaseService = new DatabaseService();
+
+  /**
+   * Get all backups
+   */
+  async getAll(query: GetAllBackupQuery = {}) {
+    const { serverId, databaseId } = query;
+    const page = query.page || 1;
+    const limit = query.limit || 10;
+
+    const backups = await db.query.backup.findMany({
+      where: (i) =>
+        and(
+          serverId ? eq(i.serverId, serverId) : undefined,
+          databaseId ? eq(i.databaseId, databaseId) : undefined
+        ),
+      orderBy: desc(backupModel.createdAt),
+      limit,
+      offset: (page - 1) * limit,
+    });
+
+    return backups;
+  }
+
+  async getOrFail(id: string) {
+    const backup = await db.query.backup.findFirst({
+      where: eq(backupModel.id, id),
+    });
+    if (!backup) {
+      throw new HTTPException(404, { message: "Backup not found." });
+    }
+    return backup;
+  }
+
+  /**
+   * Queue new backup
+   */
+  async create(data: CreateBackupSchema) {
+    const database = await this.databaseService.getOrFail(data.databaseId);
+    await this.checkPendingBackup(database.id);
+
+    const [result] = await db
+      .insert(backupModel)
+      .values({
+        type: "backup",
+        serverId: database.serverId,
+        databaseId: database.id,
+      })
+      .returning();
+
+    return result;
+  }
+
+  async restore(data: RestoreBackupSchema) {
+    const backup = await this.getOrFail(data.backupId);
+    await this.checkPendingBackup(backup.databaseId);
+
+    if (!backup.key) {
+      throw new HTTPException(400, {
+        message: "Cannot restore backup without file key.",
+      });
+    }
+
+    const [result] = await db
+      .insert(backupModel)
+      .values({
+        type: "restore",
+        serverId: backup.serverId,
+        databaseId: backup.databaseId,
+        key: backup.key,
+        hash: backup.hash,
+        size: backup.size,
+      })
+      .returning();
+
+    return result;
+  }
+
+  async checkPendingBackup(databaseId: string) {
+    const hasOngoingBackup = await db.query.backup.findFirst({
+      where: and(
+        eq(backupModel.databaseId, databaseId),
+        inArray(backupModel.status, ["pending", "running"])
+      ),
+    });
+    if (hasOngoingBackup) {
+      throw new HTTPException(400, {
+        message: "There is already an ongoing backup for this database.",
+      });
+    }
+  }
+}
diff --git a/backend/src/services/database.service.ts b/backend/src/services/database.service.ts
new file mode 100644
index 0000000..ba8dfe4
--- /dev/null
+++ b/backend/src/services/database.service.ts
@@ -0,0 +1,23 @@
+import db from "@/db";
+import { databaseModel } from "@/db/models";
+import { desc, eq } from "drizzle-orm";
+import { HTTPException } from "hono/http-exception";
+
+export default class DatabaseService {
+  async getAll() {
+    const databases = await db.query.database.findMany({
+      orderBy: desc(databaseModel.createdAt),
+    });
+    return databases;
+  }
+
+  async getOrFail(id: string) {
+    const data = await db.query.database.findFirst({
+      where: eq(databaseModel.id, id),
+    });
+    if (!data) {
+      throw new HTTPException(404, { message: "Database not found." });
+    }
+    return data;
+  }
+}
diff --git a/backend/src/services/server.service.ts b/backend/src/services/server.service.ts
new file mode 100644
index 0000000..2a9408b
--- /dev/null
+++ b/backend/src/services/server.service.ts
@@ -0,0 +1,85 @@
+import db from "@/db";
+import { databaseModel, serverModel, type ServerModel } from "@/db/models";
+import type { CreateServerSchema } from "@/schemas/server.schema";
+import { asc, desc, eq } from "drizzle-orm";
+import { HTTPException } from "hono/http-exception";
+
+export default class ServerService {
+  async getAll() {
+    const servers = await db.query.servers.findMany({
+      columns: { connection: false, ssh: false },
+      orderBy: asc(serverModel.createdAt),
+      with: {
+        databases: {
+          columns: { id: true, name: true, lastBackupAt: true },
+          orderBy: desc(databaseModel.createdAt),
+        },
+      },
+    });
+    return servers;
+  }
+
+  async getOrFail(id: string) {
+    const server = await db.query.servers.findFirst({
+      where: eq(serverModel.id, id),
+    });
+    if (!server) {
+      throw new HTTPException(404, { message: "Server not found." });
+    }
+    return this.parse(server);
+  }
+
+  async getById(id: string) {
+    const server = await db.query.servers.findFirst({
+      where: eq(serverModel.id, id),
+      with: {
+        databases: true,
+      },
+    });
+    return server;
+  }
+
+  async create(data: CreateServerSchema) {
+    return db.transaction(async (tx) => {
+      const isExist = await tx.query.servers.findFirst({
+        where: eq(serverModel.name, data.name),
+      });
+      if (isExist) {
+        throw new HTTPException(400, { message: "Server name already exists" });
+      }
+
+      const dataValue = {
+        ...data,
+        type: data.connection.type,
+        connection: data.connection ? JSON.stringify(data.connection) : null,
+        ssh: data.ssh ? JSON.stringify(data.ssh) : null,
+      };
+
+      // Create server
+      const [result] = await tx
+        .insert(serverModel)
+        .values(dataValue)
+        .returning();
+
+      // Create databases
+      await tx.insert(databaseModel).values(
+        data.databases.map((i) => ({
+          serverId: result.id,
+          name: i,
+        }))
+      );
+
+      // Return the created row (with its generated id), not the input.
+      return result;
+    });
+  }
+
+  parse(data: ServerModel) {
+    const result = {
+      ...data,
+      connection: data.connection ? JSON.parse(data.connection) : null,
+      ssh: data.ssh ? JSON.parse(data.ssh) : null,
+    };
+
+    return result;
+  }
+}
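Note: parse() above rehydrates the stored JSON with a bare JSON.parse, which is untyped. A hedged alternative sketch that reuses connectionSchema from server.schema.ts to get a validated, typed value back — it assumes the stored string was produced by createServerSchema, so it round-trips:

import { connectionSchema } from "@/schemas/server.schema";
import type { ServerModel } from "@/db/models";

const parseConnection = (server: ServerModel) =>
  server.connection
    ? // Throws if the stored JSON no longer matches the schema.
      connectionSchema.parse(JSON.parse(server.connection))
    : null;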
diff --git a/backend/src/utility/hash.ts b/backend/src/utility/hash.ts
new file mode 100644
index 0000000..9a4918e
--- /dev/null
+++ b/backend/src/utility/hash.ts
@@ -0,0 +1,16 @@
+import crypto from "crypto";
+import fs from "fs";
+
+export const hashFile = (
+  filePath: string,
+  algorithm: "md5" | "sha256"
+): Promise<string> => {
+  return new Promise((resolve, reject) => {
+    const hash = crypto.createHash(algorithm);
+    const stream = fs.createReadStream(filePath);
+
+    stream.on("data", (data) => hash.update(data));
+    stream.on("end", () => resolve(hash.digest("hex")));
+    stream.on("error", (error) => reject(error));
+  });
+};
diff --git a/bun.lockb b/bun.lockb
index 270fe91..b1f7310 100755
Binary files a/bun.lockb and b/bun.lockb differ
diff --git a/package.json b/package.json
index 3934e0c..5448f93 100644
--- a/package.json
+++ b/package.json
@@ -8,10 +8,6 @@
   },
   "private": false,
   "license": "MIT",
-  "workspaces": [
-    "backend",
-    "frontend"
-  ],
   "scripts": {
     "dev": "concurrently \"cd backend && pnpm dev\" \"cd frontend && pnpm dev\""
   },