chore: initial build

This commit is contained in:
Khairul Hidayat 2024-05-13 02:19:53 +07:00
parent 25ffaf93a2
commit bf1476d2fd
30 changed files with 230 additions and 58 deletions

7
.dockerignore Normal file
View File

@ -0,0 +1,7 @@
# Keep dependency trees, runtime data, and build output out of the image context
**/node_modules
**/storage
**/dist
# Frontend bundle is produced inside the build stage, not copied from the host
backend/public
# Non-runtime files
.gitignore
.npmrc
*.md

1
.gitignore vendored
View File

@ -1,2 +1,3 @@
storage/
node_modules/ node_modules/
pnpm-lock.yaml pnpm-lock.yaml

32
Dockerfile Normal file
View File

@ -0,0 +1,32 @@
# ---- Stage 1: build frontend + backend bundles ----
FROM oven/bun:alpine AS build
WORKDIR /app
# Baked into the frontend bundle at build time (Vite env vars are compile-time)
ENV VITE_BACKEND_URL=/api
# Copy manifests/lockfiles first so the install layers are cached until deps change
COPY ["package.json", "bun.lockb", "./"]
COPY ["frontend/package.json", "frontend/bun.lockb", "./frontend/"]
COPY ["backend/package.json", "backend/bun.lockb", "./backend/"]
RUN cd frontend && bun install && cd ../backend && bun install
COPY . .
RUN cd frontend && bun run build
RUN cd backend && bun run build
# ---- Stage 2: slim runtime image ----
FROM oven/bun:alpine AS app
WORKDIR /app
COPY ["backend/package.json", "backend/bun.lockb", "./"]
# Production deps only; drop bun's download cache to keep the layer small
RUN bun install --production && rm -rf ~/.bun/install/cache
# Add db clients
# Needed for pg_dump/psql used by the backup/restore commands
RUN apk --no-cache --repository=http://dl-cdn.alpinelinux.org/alpine/edge/main add postgresql16-client
# Backend dist + migrations, and the built frontend served as static files
COPY --from=build /app/backend .
COPY --from=build /app/frontend/dist ./public/
COPY entrypoint.sh .
EXPOSE 3000
ENTRYPOINT ["sh", "entrypoint.sh"]

9
README.md Normal file
View File

@ -0,0 +1,9 @@
# DB Backup Tool
A web-based tool for scheduling, storing, and restoring database backups.
## Install
```bash
~$ docker run --name db-backup -p 3000:3000 -v ./storage:/app/storage khairul169/db-backup
```

1
backend/.gitignore vendored
View File

@ -1,6 +1,7 @@
dist/ dist/
node_modules/ node_modules/
storage/ storage/
public/
package-lock.json package-lock.json
bun.lockb bun.lockb
.env .env

View File

@ -5,7 +5,7 @@
"scripts": { "scripts": {
"dev": "bun --watch src/main.ts", "dev": "bun --watch src/main.ts",
"dev:compose": "cp ../bun.lockb . && docker compose -f docker-compose.dev.yml up --build", "dev:compose": "cp ../bun.lockb . && docker compose -f docker-compose.dev.yml up --build",
"build": "bun build src/main.ts --outdir dist --target bun", "build": "NODE_ENV=production bun build src/main.ts --outdir dist --target bun",
"start": "bun dist/main.js", "start": "bun dist/main.js",
"generate": "drizzle-kit generate", "generate": "drizzle-kit generate",
"migrate": "bun src/db/migrate.ts", "migrate": "bun src/db/migrate.ts",
@ -14,10 +14,8 @@
"devDependencies": { "devDependencies": {
"@types/bun": "latest", "@types/bun": "latest",
"@types/node-schedule": "^2.1.7", "@types/node-schedule": "^2.1.7",
"drizzle-kit": "^0.21.0" "drizzle-kit": "^0.21.0",
}, "typescript": "^5.4.5"
"peerDependencies": {
"typescript": "^5.0.0"
}, },
"dependencies": { "dependencies": {
"@hono/zod-validator": "^0.2.1", "@hono/zod-validator": "^0.2.1",

View File

@ -1,6 +1,8 @@
import path from "path"; import path from "path";
export const __PROD = process.env.NODE_ENV === "production";
export const __DEV = !__PROD;
export const DOCKER_HOST = "host.docker.internal"; export const DOCKER_HOST = "host.docker.internal";
export const STORAGE_DIR = path.resolve(__dirname, "../storage"); export const STORAGE_DIR = path.resolve(process.cwd(), "storage");
export const BACKUP_DIR = STORAGE_DIR + "/backups"; export const BACKUP_DIR = STORAGE_DIR + "/backups";
export const DATABASE_PATH = path.join(STORAGE_DIR, "database.db"); export const DATABASE_PATH = path.join(STORAGE_DIR, "database.db");

View File

@ -1,17 +1,21 @@
import fs from "fs"; import fs from "fs";
import { migrate } from "drizzle-orm/bun-sqlite/migrator"; import { migrate as migrator } from "drizzle-orm/bun-sqlite/migrator";
import { DATABASE_PATH } from "../consts"; import { DATABASE_PATH } from "../consts";
import db, { sqlite } from "."; import db, { sqlite } from ".";
import { seed } from "./seed"; import { seed } from "./seed";
const initializeData = fs.existsSync(DATABASE_PATH); const initializeData = !fs.existsSync(DATABASE_PATH);
await migrate(db, { const migrate = async () => {
migrator(db, {
migrationsFolder: __dirname + "/migrations", migrationsFolder: __dirname + "/migrations",
}); });
if (initializeData) { if (initializeData) {
await seed(); await seed();
} }
await sqlite.close(); sqlite.close();
};
migrate();

View File

@ -1,6 +1,10 @@
import BaseDbms from "./dbms/base"; import BaseDbms from "./dbms/base";
import PostgresDbms from "./dbms/postgres"; import PostgresDbms from "./dbms/postgres";
import type { DatabaseConfig, DatabaseListItem } from "../types/database.types"; import type {
DatabaseConfig,
DatabaseListItem,
DumpOptions,
} from "../types/database.types";
class DatabaseUtil { class DatabaseUtil {
private db = new BaseDbms(); private db = new BaseDbms();
@ -19,8 +23,12 @@ class DatabaseUtil {
return this.db.getDatabases(); return this.db.getDatabases();
} }
async dump(dbName: string, path: string): Promise<string> { async dump(
return this.db.dump(dbName, path); dbName: string,
path: string,
options?: DumpOptions
): Promise<string> {
return this.db.dump(dbName, path, options);
} }
async restore(path: string): Promise<string> { async restore(path: string): Promise<string> {

View File

@ -1,11 +1,15 @@
import type { DatabaseListItem } from "../../types/database.types"; import type { DatabaseListItem, DumpOptions } from "../../types/database.types";
class BaseDbms { class BaseDbms {
async getDatabases(): Promise<DatabaseListItem[]> { async getDatabases(): Promise<DatabaseListItem[]> {
return []; return [];
} }
async dump(_dbName: string, _path: string): Promise<string> { async dump(
_dbName: string,
_path: string,
_options?: DumpOptions
): Promise<string> {
return ""; return "";
} }

View File

@ -1,7 +1,9 @@
import type { import type {
DatabaseListItem, DatabaseListItem,
DumpOptions,
PostgresConfig, PostgresConfig,
} from "../../types/database.types"; } from "../../types/database.types";
import path from "path";
import { exec } from "../../utility/process"; import { exec } from "../../utility/process";
import { urlencode } from "../../utility/utils"; import { urlencode } from "../../utility/utils";
import BaseDbms from "./base"; import BaseDbms from "./base";
@ -18,21 +20,33 @@ class PostgresDbms extends BaseDbms {
); );
} }
async dump(dbName: string, path: string) { async dump(dbName: string, path: string, options: DumpOptions = {}) {
return exec(["pg_dump", this.dbUrl + `/${dbName}`, "-Z9", "-f", path]); const { compress } = options;
const ext = compress ? ".gz" : ".sql";
const filename = path + ext;
await exec([
"pg_dump",
this.dbUrl + `/${dbName}`,
"-Cc",
compress ? "-Z9" : null,
"-f",
filename,
]);
return filename;
} }
async restore(path: string) { async restore(backupFile: string) {
return exec([ const ext = path.extname(backupFile);
"pg_restore", const isCompressed = ext === ".gz";
"-d", let cmd = `psql ${this.dbUrl} < ${backupFile}`;
this.dbUrl,
"-cC", if (isCompressed) {
"--if-exists", cmd = `zcat ${backupFile} | psql ${this.dbUrl}`;
"--exit-on-error", }
// "-Ftar",
path, return exec(["sh", "-c", cmd]);
]);
} }
private async sql<T = any>(query: string) { private async sql<T = any>(query: string) {

View File

@ -1,8 +1,22 @@
import { Hono } from "hono";
import routers from "./routers"; import routers from "./routers";
import { initScheduler } from "./schedulers"; import { initScheduler } from "./schedulers";
import { __PROD } from "./consts";
import { serveStatic } from "hono/bun";
console.log("Starting app.."); const app = new Hono();
initScheduler(); initScheduler();
export default routers; // Add API routes
app.route(__PROD ? "/api" : "/", routers);
// Serve frontend
if (__PROD) {
app.use(serveStatic({ root: "./public" }));
app.use("*", serveStatic({ path: "./public/index.html" }));
const PORT = Number(process.env.PORT) || 3000;
console.log(`App listening on http://localhost:${PORT}`);
}
export default app;

View File

@ -1,3 +1,4 @@
import { processBackup } from "../schedulers/process-backup";
import { import {
createBackupSchema, createBackupSchema,
getAllBackupQuery, getAllBackupQuery,
@ -18,12 +19,22 @@ const router = new Hono()
.post("/", zValidator("json", createBackupSchema), async (c) => { .post("/", zValidator("json", createBackupSchema), async (c) => {
const body = c.req.valid("json"); const body = c.req.valid("json");
return c.json(await backupService.create(body)); const result = await backupService.create(body);
// start backup scheduler
processBackup();
return c.json(result);
}) })
.post("/restore", zValidator("json", restoreBackupSchema), async (c) => { .post("/restore", zValidator("json", restoreBackupSchema), async (c) => {
const body = c.req.valid("json"); const body = c.req.valid("json");
return c.json(await backupService.restore(body)); const result = await backupService.restore(body);
// start restore scheduler
processBackup();
return c.json(result);
}); });
export default router; export default router;

View File

@ -46,7 +46,7 @@ const router = new Hono()
return c.json({ success: true, databases }); return c.json({ success: true, databases });
} catch (err) { } catch (err) {
throw new HTTPException(400, { throw new HTTPException(400, {
message: "Cannot connect to the database.", message: (err as any).message || "Cannot connect to the database.",
}); });
} }
}) })

View File

@ -4,6 +4,5 @@ import { backupScheduler } from "./backup-scheduler";
export const initScheduler = () => { export const initScheduler = () => {
scheduler.scheduleJob("*/10 * * * * *", processBackup); scheduler.scheduleJob("*/10 * * * * *", processBackup);
// scheduler.scheduleJob("* * * * * *", backupScheduler); scheduler.scheduleJob("* * * * * *", backupScheduler);
backupScheduler();
}; };

View File

@ -25,11 +25,15 @@ const runBackup = async (task: PendingTasks[number]) => {
if (task.type === "backup") { if (task.type === "backup") {
const key = path.join(server.connection.host, dbName, `${Date.now()}`); const key = path.join(server.connection.host, dbName, `${Date.now()}`);
const outFile = path.join(BACKUP_DIR, key); let outFile = path.join(BACKUP_DIR, key);
mkdir(path.dirname(outFile)); mkdir(path.dirname(outFile));
// Run database dump command // Run database dump command
const output = await dbUtil.dump(dbName, outFile); const filename = await dbUtil.dump(dbName, outFile, {
compress: task.server.backup?.compress,
});
const ext = path.extname(filename);
outFile = outFile + ext;
// Get file stats and file checksum // Get file stats and file checksum
const fileStats = fs.statSync(outFile); const fileStats = fs.statSync(outFile);
@ -40,8 +44,8 @@ const runBackup = async (task: PendingTasks[number]) => {
.update(backupModel) .update(backupModel)
.set({ .set({
status: "success", status: "success",
output, output: "",
key, key: key + ext,
hash: sha256Hash, hash: sha256Hash,
size: fileStats.size, size: fileStats.size,
}) })
@ -90,7 +94,7 @@ const getPendingTasks = async () => {
orderBy: (i) => asc(i.createdAt), orderBy: (i) => asc(i.createdAt),
with: { with: {
server: { server: {
columns: { connection: true, ssh: true }, columns: { connection: true, ssh: true, backup: true },
}, },
database: { database: {
columns: { name: true }, columns: { name: true },

View File

@ -106,6 +106,12 @@ export default class BackupService {
const backup = await this.getOrFail(data.backupId); const backup = await this.getOrFail(data.backupId);
await this.checkPendingBackup(backup.databaseId); await this.checkPendingBackup(backup.databaseId);
if (backup.status !== "success") {
throw new HTTPException(400, {
message: "Cannot restore backup that is not success.",
});
}
if (!backup.key) { if (!backup.key) {
throw new HTTPException(400, { throw new HTTPException(400, {
message: "Cannot restore backup without file key.", message: "Cannot restore backup without file key.",

View File

@ -82,7 +82,12 @@ export default class ServerService {
})) }))
); );
return data; const server = this.parse(result);
if (server.connection?.pass) {
delete server.connection.pass;
}
return server;
}); });
} }

View File

@ -12,3 +12,7 @@ export type DatabaseListItem = {
name: string; name: string;
size: string; size: string;
}; };
export type DumpOptions = Partial<{
compress: boolean;
}>;

View File

@ -5,10 +5,10 @@ type ExecOptions = {
}; };
export const exec = async ( export const exec = async (
cmds: string[], cmds: (string | null | undefined)[],
options: Partial<ExecOptions> = {} options: Partial<ExecOptions> = {}
) => { ) => {
const proc = Bun.spawn(cmds, { const proc = Bun.spawn(cmds.filter((i) => i != null) as string[], {
env: options.env, env: options.env,
stderr: "pipe", stderr: "pipe",
}); });

13
docker-compose.yml Normal file
View File

@ -0,0 +1,13 @@
# Compose stack for the DB backup web app.
# NOTE(review): the 'version' key is obsolete under the Compose Specification
# (Compose v2 ignores it) — safe to drop when convenient.
version: "3"
services:
  app:
    container_name: db-backup
    build:
      context: .
    volumes:
      # Persist backup dumps and the SQLite database across container restarts
      - ./storage:/app/storage:rw
    extra_hosts:
      # Lets the container reach databases running on the Docker host
      - "host.docker.internal:host-gateway"
    ports:
      - "3000:3000"

11
entrypoint.sh Normal file
View File

@ -0,0 +1,11 @@
#!/bin/sh
# Container entrypoint: run pending DB migrations, then start the app.
set -e

# Run migration synchronously; with `set -e` a failed migration aborts
# startup instead of launching the app against a bad schema.
# (The original `cmd & PID=$!; wait $PID` dance was equivalent to just
# running the command in the foreground.)
bun run migrate

# Start app. `exec` replaces this shell so bun becomes PID 1 and receives
# SIGTERM/SIGINT directly from Docker, allowing a clean, prompt shutdown.
exec bun start

View File

@ -1,7 +1,8 @@
import { ClientResponse, hc } from "hono/client"; import { ClientResponse, hc } from "hono/client";
import type { AppRouter } from "@backend/routers"; import type { AppRouter } from "@backend/routers";
const api = hc<AppRouter>("http://localhost:3000/"); const BACKEND_URL = import.meta.env.VITE_BACKEND_URL;
const api = hc<AppRouter>(BACKEND_URL || "http://localhost:3000/");
export const parseJson = async <T>(res: ClientResponse<T>) => { export const parseJson = async <T>(res: ClientResponse<T>) => {
const json = await res.json(); const json = await res.json();

View File

@ -16,7 +16,13 @@ const ServerSection = () => {
return ( return (
<section> <section>
<PageTitle setTitle={false}>Servers</PageTitle> <div className="flex items-center gap-2 mt-2 md:mt-4">
<PageTitle className="flex-1">Servers</PageTitle>
<Button onClick={() => serverFormDlg.onOpen({ ...initialServerData })}>
Add Server
</Button>
</div>
{isLoading ? ( {isLoading ? (
<div>Loading...</div> <div>Loading...</div>

View File

@ -45,7 +45,7 @@ const BackupStatus = ({ status, output }: Props) => {
<Popover> <Popover>
<PopoverTrigger <PopoverTrigger
disabled={!output} disabled={!output}
title={output} title={output?.substring(0, 120)}
className={cn( className={cn(
"flex items-center gap-2 px-2 py-1 rounded-lg text-white shrink-0", "flex items-center gap-2 px-2 py-1 rounded-lg text-white shrink-0",
colors[status] colors[status]
@ -57,8 +57,10 @@ const BackupStatus = ({ status, output }: Props) => {
<p className="text-sm">{labels[status]}</p> <p className="text-sm">{labels[status]}</p>
</PopoverTrigger> </PopoverTrigger>
<PopoverContent className="max-w-lg w-screen"> <PopoverContent className="max-w-lg w-screen p-0">
<p className="font-mono text-sm">{output}</p> <textarea className="font-mono text-sm w-full h-[200px] border-none outline-none p-4">
{output}
</textarea>
</PopoverContent> </PopoverContent>
</Popover> </Popover>
); );

View File

@ -54,7 +54,11 @@ const BackupTab = () => {
return ( return (
<TabsContent value="backup" className="mt-4"> <TabsContent value="backup" className="mt-4">
<CheckboxField form={form} name="backup.compress" label="Compressed" /> <CheckboxField
form={form}
name="backup.compress"
label="Enable Compression"
/>
<CheckboxField <CheckboxField
className="mt-4" className="mt-4"

View File

@ -18,9 +18,12 @@ import ConnectionTab from "./server-form-connection-tab";
import { Tabs, TabsList, TabsTrigger } from "@/components/ui/tabs"; import { Tabs, TabsList, TabsTrigger } from "@/components/ui/tabs";
import BackupTab from "./server-form-backup-tab"; import BackupTab from "./server-form-backup-tab";
import { ServerFormSchema, serverFormSchema } from "../schema"; import { ServerFormSchema, serverFormSchema } from "../schema";
import { useNavigate } from "react-router-dom";
import { toast } from "sonner";
const ServerFormDialog = () => { const ServerFormDialog = () => {
const { isOpen, data } = serverFormDlg.useState(); const { isOpen, data } = serverFormDlg.useState();
const navigate = useNavigate();
const form = useForm({ const form = useForm({
resolver: zodResolver(serverFormSchema), resolver: zodResolver(serverFormSchema),
defaultValues: data, defaultValues: data,
@ -44,9 +47,13 @@ const ServerFormDialog = () => {
return parseJson(res); return parseJson(res);
} }
}, },
onSuccess: () => { onSuccess: (data) => {
serverFormDlg.onClose(); serverFormDlg.onClose();
queryClient.invalidateQueries("servers"); queryClient.invalidateQueries("servers");
navigate(`/servers/${data.id}`);
},
onError: (err) => {
toast.error((err as Error)?.message || "Failed to save server");
}, },
}); });
@ -78,7 +85,7 @@ const ServerFormDialog = () => {
</Button> </Button>
<Button <Button
type="submit" type="submit"
disabled={!databases.length} disabled={!databases?.length}
isLoading={saveServer.isLoading} isLoading={saveServer.isLoading}
> >
Submit Submit

View File

@ -25,6 +25,7 @@ const BackupSection = ({ databases }: BackupSectionProps) => {
const [query, setQuery] = useQueryParams<QueryParams>({ const [query, setQuery] = useQueryParams<QueryParams>({
page: 1, page: 1,
limit: 10, limit: 10,
databaseId: undefined,
}); });
const backups = useQuery({ const backups = useQuery({

View File

@ -158,6 +158,7 @@ export const backupsColumns: TableColumn<BackupType>[] = [
</DropdownMenuTrigger> </DropdownMenuTrigger>
<DropdownMenuContent> <DropdownMenuContent>
<DropdownMenuItem <DropdownMenuItem
disabled={row.status !== "success"}
onClick={() => { onClick={() => {
confirmDlg.onOpen({ confirmDlg.onOpen({
title: "Restore Backup", title: "Restore Backup",

View File

@ -9,7 +9,10 @@
"private": false, "private": false,
"license": "MIT", "license": "MIT",
"scripts": { "scripts": {
"dev": "concurrently \"cd backend && pnpm dev\" \"cd frontend && pnpm dev\"" "dev": "concurrently \"cd backend && npm run dev:compose\" \"cd frontend && npm run dev\"",
"build:frontend": "cd frontend && VITE_BACKEND_URL=/api npm run build",
"build:backend": "cd backend && npm run build",
"build": "npm run build:frontend && npm run build:backend"
}, },
"devDependencies": { "devDependencies": {
"concurrently": "^8.2.2" "concurrently": "^8.2.2"