Mirror of https://github.com/khairul169/db-backup-tool.git (synced 2025-04-29 17:19:35 +07:00)

Commit 68fbe42d6c (parent 387fa38e65): feat: init db, add rest api

This commit initializes the SQLite database layer with drizzle-orm (config, schema models, migration and seed scripts) and adds the first REST API endpoints with Hono (a /health-check route and /servers routes with zod validation), alongside a simplified Bun-based Dockerfile and docker-compose port mapping.

backend/.env.example (new file, empty)

backend/.gitignore (vendored, 1 addition)
@@ -3,3 +3,4 @@ node_modules/
 storage/
 package-lock.json
 bun.lockb
+.env

Dockerfile (modified)
@@ -1,24 +1,11 @@
-FROM alpine:3.19.0
+FROM oven/bun:alpine
 
-ENV GLIBC_VERSION 2.34-r0
-
 WORKDIR /app
 
-# Install bun
-ADD https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip bun-linux-x64.zip
-RUN apk add --no-cache --update unzip curl && \
-  curl -Lo /etc/apk/keys/sgerrand.rsa.pub https://alpine-pkgs.sgerrand.com/sgerrand.rsa.pub && \
-  curl -Lo glibc.apk "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-${GLIBC_VERSION}.apk" && \
-  curl -Lo glibc-bin.apk "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-bin-${GLIBC_VERSION}.apk" && \
-  apk add --force-overwrite glibc-bin.apk glibc.apk && \
-  /usr/glibc-compat/sbin/ldconfig /lib /usr/glibc-compat/lib && \
-  echo 'hosts: files mdns4_minimal [NOTFOUND=return] dns mdns4' >> /etc/nsswitch.conf && \
-  apk del curl && \
-  rm -rf /var/cache/apk/* glibc.apk glibc-bin.apk
-
-RUN unzip bun-linux-x64.zip && chmod +x ./bun-linux-x64/bun && mv ./bun-linux-x64/bun /usr/bin && rm -f bun-linux-x64.zip
+COPY ["package.json", "bun.lockb", "./"]
+RUN bun install
 
 # Add db clients
-RUN apk --no-cache add postgresql16-client
+RUN apk --no-cache --repository=http://dl-cdn.alpinelinux.org/alpine/edge/main add postgresql16-client
 
 ENTRYPOINT ["bun", "run", "dev"]

Dockerfile (deleted file, 28 lines)
@@ -1,28 +0,0 @@
-FROM alpine:3.19
-WORKDIR /app
-
-ENV GLIBC_VERSION 2.35-r1
-
-RUN apk update && \
-  apk add --no-cache --update unzip curl
-# curl -Lo /etc/apk/keys/sgerrand.rsa.pub https://alpine-pkgs.sgerrand.com/sgerrand.rsa.pub && \
-# curl -Lo glibc.apk "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-${GLIBC_VERSION}.apk" && \
-# curl -Lo glibc-bin.apk "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-bin-${GLIBC_VERSION}.apk" && \
-# apk add --force-overwrite glibc-bin.apk glibc.apk && \
-# /usr/glibc-compat/sbin/ldconfig /lib /usr/glibc-compat/lib && \
-# echo 'hosts: files mdns4_minimal [NOTFOUND=return] dns mdns4' >> /etc/nsswitch.conf && \
-# apk del curl && \
-# rm -rf /var/cache/apk/* glibc.apk glibc-bin.apk
-
-ADD https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip bun-linux-x64.zip
-# RUN unzip bun-linux-x64.zip && chmod +x ./bun-linux-x64/bun && mv ./bun-linux-x64/bun /usr/local/bin && rm -rf bun-linux-x64.zip
-RUN unzip bun-linux-x64.zip && ls bun-linux-x64 && ./bun-linux-x64/bun --version
-
-RUN chmod +x /usr/local/bin/bun
-RUN /usr/local/bin/bun --version
-
-# CMD ["bun", "--version"]
-
-# RUN apk --no-cache add postgresql16-client
-
-# ENTRYPOINT ["bun", "run", "dev"]

docker-compose (modified)
@@ -10,3 +10,5 @@ services:
       - ./:/app:rw
     extra_hosts:
       - "host.docker.internal:host-gateway"
+    ports:
+      - "3000:3000"

backend/drizzle.config.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
import { STORAGE_DIR } from "@/consts";
import { defineConfig } from "drizzle-kit";

export default defineConfig({
  dialect: "sqlite",
  dbCredentials: {
    url: STORAGE_DIR + "/database.db",
  },
  schema: "./src/db/models.ts",
  out: "./src/db/migrations",
});

package.json (backend, modified)
@@ -3,15 +3,26 @@
   "module": "index.ts",
   "type": "module",
   "scripts": {
-    "dev": "bun --watch index.ts",
-    "dev:compose": "docker compose -f docker-compose.dev.yml up --build",
+    "dev": "bun --watch src/main.ts",
+    "dev:compose": "cp ../bun.lockb . && docker compose -f docker-compose.dev.yml up --build",
     "build": "bun build index.ts --outdir dist --target bun",
-    "start": "bun dist/index.js"
+    "start": "bun dist/main.js",
+    "generate": "drizzle-kit generate",
+    "migrate": "bun src/db/migrate.ts",
+    "reset": "rm -f storage/database.db && bun run migrate"
   },
   "devDependencies": {
-    "@types/bun": "latest"
+    "@types/bun": "latest",
+    "drizzle-kit": "^0.21.0"
   },
   "peerDependencies": {
     "typescript": "^5.0.0"
+  },
+  "dependencies": {
+    "@hono/zod-validator": "^0.2.1",
+    "drizzle-orm": "^0.30.10",
+    "hono": "^4.3.4",
+    "nanoid": "^5.0.7",
+    "zod": "^3.23.8"
   }
 }

backend/src/consts.ts (new file, 6 lines)
@@ -0,0 +1,6 @@
import path from "path";

export const DOCKER_HOST = "host.docker.internal";
export const STORAGE_DIR = path.resolve(__dirname, "../storage");
export const BACKUP_DIR = STORAGE_DIR + "/backups";
export const DATABASE_PATH = path.join(STORAGE_DIR, "database.db");

backend/src/db/index.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
import path from "path";
import { drizzle } from "drizzle-orm/bun-sqlite";
import { Database } from "bun:sqlite";
import { DATABASE_PATH } from "@/consts";
import { mkdir } from "@/utility/utils";
import schema from "./schema";

// Create database directory if not exists
mkdir(path.dirname(DATABASE_PATH));

// Initialize database
const sqlite = new Database(DATABASE_PATH);
const db = drizzle(sqlite, { schema });

export { sqlite };
export default db;

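The exported db is a drizzle instance created with the schema object, so the relational query API (db.query.users, db.query.servers, ...) is available to the rest of the backend. A minimal usage sketch, not part of this commit, assuming only the modules introduced above:

// Sketch: querying through the drizzle instance from backend/src/db/index.ts.
import db from "@/db";
import { serverModel } from "@/db/models";
import { eq } from "drizzle-orm";

// List only servers that are still marked active.
const activeServers = await db.query.servers.findMany({
  where: eq(serverModel.isActive, true),
});
console.log(activeServers.length, "active servers");
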
backend/src/db/migrate.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
import fs from "fs";
import { migrate } from "drizzle-orm/bun-sqlite/migrator";
import { DATABASE_PATH } from "@/consts";
import db, { sqlite } from ".";
import { seed } from "./seed";

const initializeData = fs.existsSync(DATABASE_PATH);

await migrate(db, {
  migrationsFolder: __dirname + "/migrations",
});

if (initializeData) {
  await seed();
}

await sqlite.close();

backend/src/db/models.ts (new file, 48 lines)
@@ -0,0 +1,48 @@
import type { DatabaseConfig } from "@/types/database.types";
import { sql } from "drizzle-orm";
import { integer, sqliteTable, text } from "drizzle-orm/sqlite-core";
import { nanoid } from "nanoid";

export const userModel = sqliteTable("users", {
  id: text("id")
    .primaryKey()
    .$defaultFn(() => nanoid()),
  username: text("username").notNull().unique(),
  password: text("password").notNull(),
  isActive: integer("is_active", { mode: "boolean" }).notNull().default(true),
  createdAt: text("created_at")
    .notNull()
    .default(sql`CURRENT_TIMESTAMP`),
});

export const serverModel = sqliteTable("servers", {
  id: text("id")
    .primaryKey()
    .$defaultFn(() => nanoid()),
  name: text("name").notNull(),
  type: text("type", { enum: ["postgres"] }).notNull(),
  connection: text("connection"),
  ssh: text("ssh"),
  isActive: integer("is_active", { mode: "boolean" }).notNull().default(true),
  createdAt: text("created_at")
    .notNull()
    .default(sql`CURRENT_TIMESTAMP`),
});

export const databaseModel = sqliteTable("databases", {
  id: text("id")
    .primaryKey()
    .$defaultFn(() => nanoid()),
  serverId: text("server_id")
    .references(() => serverModel.id, {
      onUpdate: "cascade",
      onDelete: "cascade",
    })
    .notNull(),
  name: text("name").notNull(),
  isActive: integer("is_active", { mode: "boolean" }).notNull().default(true),
  lastBackupAt: text("last_backup_at"),
  createdAt: text("created_at")
    .notNull()
    .default(sql`CURRENT_TIMESTAMP`),
});

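Row and insert types can be derived directly from these table definitions with drizzle's standard type helpers. A small sketch, not part of the commit; the literal values below are placeholders:

// Sketch: deriving types from the tables above.
import { serverModel, databaseModel } from "@/db/models";

type Server = typeof serverModel.$inferSelect; // a row as read from "servers"
type NewDatabase = typeof databaseModel.$inferInsert; // a row as written to "databases"

// id, isActive and createdAt have defaults, so only serverId and name are required.
const draft: NewDatabase = {
  serverId: "placeholder-server-id",
  name: "app_db",
};
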
backend/src/db/schema.ts (new file, 9 lines)
@@ -0,0 +1,9 @@
import { databaseModel, serverModel, userModel } from "./models";

const schema = {
  users: userModel,
  servers: serverModel,
  database: databaseModel,
};

export default schema;

backend/src/db/seed.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
import db from ".";
import { userModel } from "./models";

export const seed = async () => {
  await db
    .insert(userModel)
    .values({
      username: "admin",
      password: await Bun.password.hash("admin", { algorithm: "bcrypt" }),
    })
    .execute();
};

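The seeded password is a bcrypt hash produced by Bun's built-in hasher, so a later credential check would go through Bun.password.verify. A minimal sketch, not part of this commit:

// Sketch: verifying the seeded admin credentials against the stored bcrypt hash.
import db from "@/db";
import { userModel } from "@/db/models";
import { eq } from "drizzle-orm";

const user = await db.query.users.findFirst({
  where: eq(userModel.username, "admin"),
});
const valid = user ? await Bun.password.verify("admin", user.password) : false;
console.log("admin credentials valid:", valid);
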
DatabaseUtil module (modified)
@@ -1,5 +1,5 @@
-import BaseDbms from "../dbms/base";
-import PostgresDbms from "../dbms/postgres";
+import BaseDbms from "./dbms/base";
+import PostgresDbms from "./dbms/postgres";
 import type { DatabaseConfig, DatabaseListItem } from "../types/database.types";
 
 class DatabaseUtil {

BaseDbms module (modified)
@@ -1,4 +1,4 @@
-import type { DatabaseListItem } from "../types/database.types";
+import type { DatabaseListItem } from "../../types/database.types";
 
 class BaseDbms {
   async getDatabases(): Promise<DatabaseListItem[]> {

PostgresDbms module (modified)
@@ -1,5 +1,8 @@
-import type { DatabaseListItem, PostgresConfig } from "../types/database.types";
-import { exec } from "../utility/process";
+import type {
+  DatabaseListItem,
+  PostgresConfig,
+} from "../../types/database.types";
+import { exec } from "../../utility/process";
 import BaseDbms from "./base";
 
 class PostgresDbms extends BaseDbms {

backend/src/main.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
import routers from "./routers";

console.log("Starting app..");

export default routers;

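Exporting the Hono app as the default export is what lets "bun --watch src/main.ts" serve it: Bun starts an HTTP server for any default export that provides a fetch handler, listening on port 3000 unless configured otherwise. An equivalent, more explicit sketch, not part of the commit; the port value is an assumption taken from the docker-compose mapping:

// Sketch: the explicit form of what the default export above relies on.
import routers from "@/routers";

export default {
  port: 3000, // assumed to match the "3000:3000" mapping in docker-compose
  fetch: routers.fetch, // Hono apps expose a standard fetch handler
};
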
backend/src/routers/index.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
import { Hono, type Context } from "hono";
import server from "./server.router";

const handleError = (err: Error, c: Context) => {
  return c.json({
    success: false,
    error: err,
    message: err.message,
  });
};

const routers = new Hono()
  .onError(handleError)
  .get("/health-check", (c) => c.text("OK"))
  .route("/servers", server);

export default routers;

backend/src/routers/server.router.ts (new file, 38 lines)
@@ -0,0 +1,38 @@
import { Hono } from "hono";
import { zValidator } from "@hono/zod-validator";
import { createServerSchema } from "@/schemas/server.schema";
import db from "@/db";
import { asc, eq } from "drizzle-orm";
import { HTTPException } from "hono/http-exception";
import { serverModel } from "@/db/models";

const router = new Hono()

  .get("/", async (c) => {
    const servers = await db.query.servers.findMany({
      columns: { connection: false, ssh: false },
      orderBy: asc(serverModel.createdAt),
    });
    return c.json(servers);
  })

  .post("/", zValidator("json", createServerSchema), async (c) => {
    const data = c.req.valid("json");
    const isExist = await db.query.servers.findFirst({
      where: eq(serverModel.name, data.name),
    });
    if (isExist) {
      throw new HTTPException(400, { message: "Server name already exists" });
    }

    const dataValue = {
      ...data,
      connection: data.connection ? JSON.stringify(data.connection) : null,
      ssh: data.ssh ? JSON.stringify(data.ssh) : null,
    };
    const [result] = await db.insert(serverModel).values(dataValue).returning();

    return c.json(result);
  });

export default router;

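With this router mounted at /servers in routers/index.ts, the two endpoints can be exercised with plain fetch calls. A sketch, not part of the commit; the base URL and the sample connection values are assumptions:

// Sketch: calling the REST API added in this commit.
const base = "http://localhost:3000"; // assumed host and port

// GET /servers: list servers (the router excludes the connection/ssh columns).
const servers = await fetch(`${base}/servers`).then((r) => r.json());
console.log(servers);

// POST /servers: create a postgres entry shaped per createServerSchema.
const created = await fetch(`${base}/servers`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    name: "local-postgres",
    type: "postgres",
    connection: { host: "host.docker.internal", port: 5432, user: "postgres" },
  }),
}).then((r) => r.json());
console.log(created);
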
backend/src/schemas/server.schema.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
import { z } from "zod";

export const serverTypeEnum = ["postgres"] as const;

export const serverSchema = z.object({
  name: z.string().min(1),
  ssh: z
    .object({
      host: z.string(),
      port: z.number().optional(),
      user: z.string(),
      pass: z.string().optional(),
      privateKey: z.string().optional(),
    })
    .optional()
    .nullable(),
  isActive: z.boolean().optional(),
});

const postgresSchema = serverSchema.merge(
  z.object({
    type: z.literal("postgres"),
    connection: z.object({
      host: z.string(),
      port: z.number().optional(),
      user: z.string(),
      pass: z.string().optional(),
    }),
  })
);

export const createServerSchema = z.discriminatedUnion("type", [
  postgresSchema,
]);

export type CreateServerSchema = z.infer<typeof createServerSchema>;

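createServerSchema is a discriminated union keyed on type, currently with a single postgres variant that additionally requires a connection object. A quick validation sketch, not part of the commit:

// Sketch: payloads as seen by createServerSchema.safeParse.
import { createServerSchema } from "@/schemas/server.schema";

// Valid: the "postgres" variant with its required connection; ssh stays optional.
const ok = createServerSchema.safeParse({
  name: "db-1",
  type: "postgres",
  connection: { host: "127.0.0.1", user: "postgres" },
});
console.log(ok.success); // true

// Invalid: missing the connection object required by the postgres variant.
const bad = createServerSchema.safeParse({ name: "db-2", type: "postgres" });
console.log(bad.success); // false
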
main entry (modified)
@@ -1,5 +1,5 @@
-import DatabaseUtil from "@/lib/database";
-import { DOCKER_HOST, STORAGE_DIR } from "@/utility/consts";
+import DatabaseUtil from "@/lib/database-util";
+import { DOCKER_HOST, BACKUP_DIR } from "@/consts";
 import { mkdir } from "@/utility/utils";
 import path from "path";
 
@@ -19,7 +19,7 @@ const main = async () => {
   const dbName = "test";
 
   // Create backup
-  const outDir = path.join(STORAGE_DIR, db.config.host, dbName);
+  const outDir = path.join(BACKUP_DIR, db.config.host, dbName);
   mkdir(outDir);
   const outFile = path.join(outDir, `/${Date.now()}.tar`);
   console.log(await db.dump(dbName, outFile));

consts module (deleted file, 4 lines)
@@ -1,4 +0,0 @@
-import path from "path";
-
-export const DOCKER_HOST = "host.docker.internal";
-export const STORAGE_DIR = path.resolve(__dirname, "../../storage");

package.json (root, modified)
@@ -8,6 +8,10 @@
   },
   "private": false,
   "license": "MIT",
+  "workspaces": [
+    "backend",
+    "frontend"
+  ],
   "scripts": {
     "dev": "concurrently \"cd backend && pnpm dev\" \"cd frontend && pnpm dev\""
   },

pnpm workspace config (deleted file, 3 lines)
@@ -1,3 +0,0 @@
-packages:
-  - backend
-  - frontend