feat: init db, add rest api

Khairul Hidayat 2024-05-10 23:09:41 +07:00
parent 387fa38e65
commit 68fbe42d6c
26 changed files with 253 additions and 65 deletions

BIN
.DS_Store vendored Normal file

Binary file not shown.

0
backend/.env.example Normal file
View File

1
backend/.gitignore vendored
View File

@ -3,3 +3,4 @@ node_modules/
storage/
package-lock.json
bun.lockb
.env

View File

@ -1,24 +1,11 @@
FROM alpine:3.19.0
ENV GLIBC_VERSION 2.34-r0
FROM oven/bun:alpine
WORKDIR /app
# Install bun
ADD https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip bun-linux-x64.zip
RUN apk add --no-cache --update unzip curl && \
curl -Lo /etc/apk/keys/sgerrand.rsa.pub https://alpine-pkgs.sgerrand.com/sgerrand.rsa.pub && \
curl -Lo glibc.apk "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-${GLIBC_VERSION}.apk" && \
curl -Lo glibc-bin.apk "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-bin-${GLIBC_VERSION}.apk" && \
apk add --force-overwrite glibc-bin.apk glibc.apk && \
/usr/glibc-compat/sbin/ldconfig /lib /usr/glibc-compat/lib && \
echo 'hosts: files mdns4_minimal [NOTFOUND=return] dns mdns4' >> /etc/nsswitch.conf && \
apk del curl && \
rm -rf /var/cache/apk/* glibc.apk glibc-bin.apk
RUN unzip bun-linux-x64.zip && chmod +x ./bun-linux-x64/bun && mv ./bun-linux-x64/bun /usr/bin && rm -f bun-linux-x64.zip
COPY ["package.json", "bun.lockb", "./"]
RUN bun install
# Add db clients
RUN apk --no-cache add postgresql16-client
RUN apk --no-cache --repository=http://dl-cdn.alpinelinux.org/alpine/edge/main add postgresql16-client
ENTRYPOINT ["bun", "run", "dev"]

View File

@ -1,28 +0,0 @@
FROM alpine:3.19
WORKDIR /app
ENV GLIBC_VERSION 2.35-r1
RUN apk update && \
apk add --no-cache --update unzip curl
# curl -Lo /etc/apk/keys/sgerrand.rsa.pub https://alpine-pkgs.sgerrand.com/sgerrand.rsa.pub && \
# curl -Lo glibc.apk "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-${GLIBC_VERSION}.apk" && \
# curl -Lo glibc-bin.apk "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-bin-${GLIBC_VERSION}.apk" && \
# apk add --force-overwrite glibc-bin.apk glibc.apk && \
# /usr/glibc-compat/sbin/ldconfig /lib /usr/glibc-compat/lib && \
# echo 'hosts: files mdns4_minimal [NOTFOUND=return] dns mdns4' >> /etc/nsswitch.conf && \
# apk del curl && \
# rm -rf /var/cache/apk/* glibc.apk glibc-bin.apk
ADD https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip bun-linux-x64.zip
# RUN unzip bun-linux-x64.zip && chmod +x ./bun-linux-x64/bun && mv ./bun-linux-x64/bun /usr/local/bin && rm -rf bun-linux-x64.zip
RUN unzip bun-linux-x64.zip && ls bun-linux-x64 && ./bun-linux-x64/bun --version
RUN chmod +x /usr/local/bin/bun
RUN /usr/local/bin/bun --version
# CMD ["bun", "--version"]
# RUN apk --no-cache add postgresql16-client
# ENTRYPOINT ["bun", "run", "dev"]

View File

@ -10,3 +10,5 @@ services:
- ./:/app:rw
extra_hosts:
- "host.docker.internal:host-gateway"
ports:
- "3000:3000"

11
backend/drizzle.config.ts Normal file
View File

@ -0,0 +1,11 @@
import { STORAGE_DIR } from "@/consts";
import { defineConfig } from "drizzle-kit";
export default defineConfig({
dialect: "sqlite",
dbCredentials: {
url: STORAGE_DIR + "/database.db",
},
schema: "./src/db/models.ts",
out: "./src/db/migrations",
});

View File

@ -3,15 +3,26 @@
"module": "index.ts",
"type": "module",
"scripts": {
"dev": "bun --watch index.ts",
"dev:compose": "docker compose -f docker-compose.dev.yml up --build",
"dev": "bun --watch src/main.ts",
"dev:compose": "cp ../bun.lockb . && docker compose -f docker-compose.dev.yml up --build",
"build": "bun build index.ts --outdir dist --target bun",
"start": "bun dist/index.js"
"start": "bun dist/main.js",
"generate": "drizzle-kit generate",
"migrate": "bun src/db/migrate.ts",
"reset": "rm -f storage/database.db && bun run migrate"
},
"devDependencies": {
"@types/bun": "latest"
"@types/bun": "latest",
"drizzle-kit": "^0.21.0"
},
"peerDependencies": {
"typescript": "^5.0.0"
},
"dependencies": {
"@hono/zod-validator": "^0.2.1",
"drizzle-orm": "^0.30.10",
"hono": "^4.3.4",
"nanoid": "^5.0.7",
"zod": "^3.23.8"
}
}
}

6
backend/src/consts.ts Normal file
View File

@ -0,0 +1,6 @@
import path from "path";
export const DOCKER_HOST = "host.docker.internal";
export const STORAGE_DIR = path.resolve(__dirname, "../storage");
export const BACKUP_DIR = STORAGE_DIR + "/backups";
export const DATABASE_PATH = path.join(STORAGE_DIR, "database.db");

16
backend/src/db/index.ts Normal file
View File

@ -0,0 +1,16 @@
import path from "path";
import { drizzle } from "drizzle-orm/bun-sqlite";
import { Database } from "bun:sqlite";
import { DATABASE_PATH } from "@/consts";
import { mkdir } from "@/utility/utils";
import schema from "./schema";
// Create the database directory if it does not exist
mkdir(path.dirname(DATABASE_PATH));
// Initialize database
const sqlite = new Database(DATABASE_PATH);
const db = drizzle(sqlite, { schema });
export { sqlite };
export default db;
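
For reference, a minimal sketch (not part of this commit) of querying through the exported drizzle instance: the query keys ("users", "servers") come from schema.ts, and eq is the standard drizzle-orm helper, mirroring the lookups in server.router.ts further down.

import { eq } from "drizzle-orm";
import db from "@/db";
import { serverModel } from "@/db/models";

// Relational query API: keys registered in schema.ts become db.query.* properties
const activeServers = await db.query.servers.findMany({
  where: eq(serverModel.isActive, true),
});
console.log(activeServers.length);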

17
backend/src/db/migrate.ts Normal file
View File

@ -0,0 +1,17 @@
import fs from "fs";
import { migrate } from "drizzle-orm/bun-sqlite/migrator";
import { DATABASE_PATH } from "@/consts";
import db, { sqlite } from ".";
import { seed } from "./seed";
// Seed only when the database file does not exist yet (fresh install);
// check before migrate() runs, since migrate() creates the file.
const initializeData = !fs.existsSync(DATABASE_PATH);
await migrate(db, {
  migrationsFolder: __dirname + "/migrations",
});
if (initializeData) {
  await seed();
}
await sqlite.close();

48
backend/src/db/models.ts Normal file
View File

@ -0,0 +1,48 @@
import type { DatabaseConfig } from "@/types/database.types";
import { sql } from "drizzle-orm";
import { integer, sqliteTable, text } from "drizzle-orm/sqlite-core";
import { nanoid } from "nanoid";
export const userModel = sqliteTable("users", {
id: text("id")
.primaryKey()
.$defaultFn(() => nanoid()),
username: text("username").notNull().unique(),
password: text("password").notNull(),
isActive: integer("is_active", { mode: "boolean" }).notNull().default(true),
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
export const serverModel = sqliteTable("servers", {
id: text("id")
.primaryKey()
.$defaultFn(() => nanoid()),
name: text("name").notNull(),
type: text("type", { enum: ["postgres"] }).notNull(),
connection: text("connection"),
ssh: text("ssh"),
isActive: integer("is_active", { mode: "boolean" }).notNull().default(true),
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
export const databaseModel = sqliteTable("databases", {
id: text("id")
.primaryKey()
.$defaultFn(() => nanoid()),
serverId: text("server_id")
.references(() => serverModel.id, {
onUpdate: "cascade",
onDelete: "cascade",
})
.notNull(),
name: text("name").notNull(),
isActive: integer("is_active", { mode: "boolean" }).notNull().default(true),
lastBackupAt: text("last_backup_at"),
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
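
For reference, drizzle can derive row types from these table definitions; a small sketch (not part of this commit) using the built-in $inferSelect / $inferInsert helpers:

import { serverModel, userModel } from "@/db/models";

// Row shape returned by selects, and the shape accepted by insert()
type User = typeof userModel.$inferSelect;
type NewServer = typeof serverModel.$inferInsert;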

9
backend/src/db/schema.ts Normal file
View File

@ -0,0 +1,9 @@
import { databaseModel, serverModel, userModel } from "./models";
const schema = {
users: userModel,
servers: serverModel,
database: databaseModel,
};
export default schema;

12
backend/src/db/seed.ts Normal file
View File

@ -0,0 +1,12 @@
import db from ".";
import { userModel } from "./models";
export const seed = async () => {
await db
.insert(userModel)
.values({
username: "admin",
password: await Bun.password.hash("admin", { algorithm: "bcrypt" }),
})
.execute();
};
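
A quick way to sanity-check the seed (a sketch, not part of this commit), assuming the migration and seed above have run; Bun.password.verify is the counterpart of the Bun.password.hash call used here.

import { eq } from "drizzle-orm";
import db from "@/db";
import { userModel } from "@/db/models";

// Fetch the seeded admin row and verify the bcrypt hash created in seed()
const admin = await db.query.users.findFirst({
  where: eq(userModel.username, "admin"),
});
console.log(admin && (await Bun.password.verify("admin", admin.password))); // expected: true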

View File

@ -1,5 +1,5 @@
import BaseDbms from "../dbms/base";
import PostgresDbms from "../dbms/postgres";
import BaseDbms from "./dbms/base";
import PostgresDbms from "./dbms/postgres";
import type { DatabaseConfig, DatabaseListItem } from "../types/database.types";
class DatabaseUtil {

View File

@ -1,4 +1,4 @@
import type { DatabaseListItem } from "../types/database.types";
import type { DatabaseListItem } from "../../types/database.types";
class BaseDbms {
async getDatabases(): Promise<DatabaseListItem[]> {

View File

@ -1,5 +1,8 @@
import type { DatabaseListItem, PostgresConfig } from "../types/database.types";
import { exec } from "../utility/process";
import type {
DatabaseListItem,
PostgresConfig,
} from "../../types/database.types";
import { exec } from "../../utility/process";
import BaseDbms from "./base";
class PostgresDbms extends BaseDbms {

5
backend/src/main.ts Normal file
View File

@ -0,0 +1,5 @@
import routers from "./routers";
console.log("Starting app..");
export default routers;

View File

@ -0,0 +1,17 @@
import { Hono, type Context } from "hono";
import { HTTPException } from "hono/http-exception";
import server from "./server.router";
const handleError = (err: Error, c: Context) => {
  // Preserve the status of HTTPException errors (e.g. the 400 thrown in
  // server.router.ts); otherwise fall back to 500 instead of returning 200.
  const status = err instanceof HTTPException ? err.status : 500;
  return c.json(
    {
      success: false,
      error: err.name, // Error instances serialize to {} in JSON, so send the name
      message: err.message,
    },
    status
  );
};
const routers = new Hono()
.onError(handleError)
.get("/health-check", (c) => c.text("OK"))
.route("/servers", server);
export default routers;
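
Since the composed Hono app is the default export, it can be exercised directly with Hono's request() test helper; a sketch (not part of this commit), assuming the "@/" alias resolves to src/ as in the other imports.

import routers from "@/routers";

// No server needed: Hono apps can handle requests in-process
const res = await routers.request("/health-check");
console.log(res.status, await res.text()); // 200 "OK"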

View File

@ -0,0 +1,38 @@
import { Hono } from "hono";
import { zValidator } from "@hono/zod-validator";
import { createServerSchema } from "@/schemas/server.schema";
import db from "@/db";
import { asc, eq } from "drizzle-orm";
import { HTTPException } from "hono/http-exception";
import { serverModel } from "@/db/models";
const router = new Hono()
.get("/", async (c) => {
const servers = await db.query.servers.findMany({
columns: { connection: false, ssh: false },
orderBy: asc(serverModel.createdAt),
});
return c.json(servers);
})
.post("/", zValidator("json", createServerSchema), async (c) => {
const data = c.req.valid("json");
const isExist = await db.query.servers.findFirst({
where: eq(serverModel.name, data.name),
});
if (isExist) {
throw new HTTPException(400, { message: "Server name already exists" });
}
const dataValue = {
...data,
connection: data.connection ? JSON.stringify(data.connection) : null,
ssh: data.ssh ? JSON.stringify(data.ssh) : null,
};
const [result] = await db.insert(serverModel).values(dataValue).returning();
return c.json(result);
});
export default router;
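
The validator can also be exercised against this router in isolation (a sketch; the file path is inferred from the "./server.router" import above and the usual "@/" alias): posting a body without the "type" discriminator is rejected by zValidator before the handler runs.

import router from "@/routers/server.router";

// Missing "type" fails the discriminated-union check; zValidator responds with 400
const bad = await router.request("/", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ name: "local-postgres" }),
});
console.log(bad.status); // 400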

View File

@ -0,0 +1,36 @@
import { z } from "zod";
export const serverTypeEnum = ["postgres"] as const;
export const serverSchema = z.object({
name: z.string().min(1),
ssh: z
.object({
host: z.string(),
port: z.number().optional(),
user: z.string(),
pass: z.string().optional(),
privateKey: z.string().optional(),
})
.optional()
.nullable(),
isActive: z.boolean().optional(),
});
const postgresSchema = serverSchema.merge(
z.object({
type: z.literal("postgres"),
connection: z.object({
host: z.string(),
port: z.number().optional(),
user: z.string(),
pass: z.string().optional(),
}),
})
);
export const createServerSchema = z.discriminatedUnion("type", [
postgresSchema,
]);
export type CreateServerSchema = z.infer<typeof createServerSchema>;
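
The same rules can be checked directly with zod's safeParse (a sketch, not part of this commit); port and pass stay optional while host, user, and the "postgres" discriminator are required.

import { createServerSchema } from "@/schemas/server.schema";

const parsed = createServerSchema.safeParse({
  name: "local-postgres",
  type: "postgres",
  connection: { host: "host.docker.internal", user: "postgres" },
});
console.log(parsed.success); // true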

View File

@ -1,5 +1,5 @@
import DatabaseUtil from "@/lib/database";
import { DOCKER_HOST, STORAGE_DIR } from "@/utility/consts";
import DatabaseUtil from "@/lib/database-util";
import { DOCKER_HOST, BACKUP_DIR } from "@/consts";
import { mkdir } from "@/utility/utils";
import path from "path";
@ -19,7 +19,7 @@ const main = async () => {
const dbName = "test";
// Create backup
const outDir = path.join(STORAGE_DIR, db.config.host, dbName);
const outDir = path.join(BACKUP_DIR, db.config.host, dbName);
mkdir(outDir);
const outFile = path.join(outDir, `/${Date.now()}.tar`);
console.log(await db.dump(dbName, outFile));

View File

@ -1,4 +0,0 @@
import path from "path";
export const DOCKER_HOST = "host.docker.internal";
export const STORAGE_DIR = path.resolve(__dirname, "../../storage");

BIN
bun.lockb Executable file

Binary file not shown.

View File

@ -8,6 +8,10 @@
},
"private": false,
"license": "MIT",
"workspaces": [
"backend",
"frontend"
],
"scripts": {
"dev": "concurrently \"cd backend && pnpm dev\" \"cd frontend && pnpm dev\""
},

View File

@ -1,3 +0,0 @@
packages:
- backend
- frontend