mirror of https://github.com/khairul169/db-backup-tool.git (synced 2025-05-14 16:39:33 +07:00)

chore: initial build

parent 25ffaf93a2 · commit bf1476d2fd
.dockerignore (new file, +7)
@@ -0,0 +1,7 @@
+**/node_modules
+**/storage
+**/dist
+backend/public
+.gitignore
+.npmrc
+*.md
.gitignore (vendored, +1)
@@ -1,2 +1,3 @@
 storage/
 node_modules/
+pnpm-lock.yaml
Dockerfile (new file, +32)
@@ -0,0 +1,32 @@
+FROM oven/bun:alpine AS build
+WORKDIR /app
+
+ENV VITE_BACKEND_URL=/api
+
+COPY ["package.json", "bun.lockb", "./"]
+COPY ["frontend/package.json", "frontend/bun.lockb", "./frontend/"]
+COPY ["backend/package.json", "backend/bun.lockb", "./backend/"]
+
+RUN cd frontend && bun install && cd ../backend && bun install
+
+COPY . .
+
+RUN cd frontend && bun run build
+RUN cd backend && bun run build
+
+FROM oven/bun:alpine AS app
+WORKDIR /app
+
+COPY ["backend/package.json", "backend/bun.lockb", "./"]
+RUN bun install --production && rm -rf ~/.bun/install/cache
+
+# Add db clients
+RUN apk --no-cache --repository=http://dl-cdn.alpinelinux.org/alpine/edge/main add postgresql16-client
+
+COPY --from=build /app/backend .
+COPY --from=build /app/frontend/dist ./public/
+COPY entrypoint.sh .
+
+EXPOSE 3000
+
+ENTRYPOINT ["sh", "entrypoint.sh"]
README.md (new file, +9)
@@ -0,0 +1,9 @@
+# DB Backup Tool
+
+Web-based Database Backup Tool
+
+## Install
+
+```bash
+~$ docker run --name db-backup -p 3000:3000 -v ./storage:/app/storage khairul169/db-backup
+```
backend/.gitignore (vendored, +1)
@@ -1,6 +1,7 @@
 dist/
 node_modules/
 storage/
 public/
 package-lock.json
+bun.lockb
 .env
backend/package.json
@@ -5,7 +5,7 @@
   "scripts": {
     "dev": "bun --watch src/main.ts",
     "dev:compose": "cp ../bun.lockb . && docker compose -f docker-compose.dev.yml up --build",
-    "build": "bun build src/main.ts --outdir dist --target bun",
+    "build": "NODE_ENV=production bun build src/main.ts --outdir dist --target bun",
     "start": "bun dist/main.js",
     "generate": "drizzle-kit generate",
     "migrate": "bun src/db/migrate.ts",
@@ -14,10 +14,8 @@
   "devDependencies": {
     "@types/bun": "latest",
     "@types/node-schedule": "^2.1.7",
-    "drizzle-kit": "^0.21.0"
-  },
-  "peerDependencies": {
-    "typescript": "^5.0.0"
+    "drizzle-kit": "^0.21.0",
+    "typescript": "^5.4.5"
   },
   "dependencies": {
     "@hono/zod-validator": "^0.2.1",
backend/src/consts.ts
@@ -1,6 +1,8 @@
 import path from "path";
 
+export const __PROD = process.env.NODE_ENV === "production";
+export const __DEV = !__PROD;
 export const DOCKER_HOST = "host.docker.internal";
-export const STORAGE_DIR = path.resolve(__dirname, "../storage");
+export const STORAGE_DIR = path.resolve(process.cwd(), "storage");
 export const BACKUP_DIR = STORAGE_DIR + "/backups";
 export const DATABASE_PATH = path.join(STORAGE_DIR, "database.db");
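Note on the STORAGE_DIR change: `bun build` bundles the backend into `dist/`, so a `__dirname`-relative path resolves against the build output, not the app root. A minimal sketch of the difference (paths illustrative, not from the repo):

```ts
import path from "path";

// Bundled to dist/main.js, __dirname is .../dist, so "../storage"
// would land beside dist/ and move whenever the bundle moves.
const fromDirname = path.resolve(__dirname, "../storage");

// process.cwd() tracks where the app is started (/app in the container),
// matching the ./storage volume mount in docker-compose.yml.
const fromCwd = path.resolve(process.cwd(), "storage");

console.log({ fromDirname, fromCwd });
```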
backend/src/db/migrate.ts
@@ -1,17 +1,21 @@
 import fs from "fs";
-import { migrate } from "drizzle-orm/bun-sqlite/migrator";
+import { migrate as migrator } from "drizzle-orm/bun-sqlite/migrator";
 import { DATABASE_PATH } from "../consts";
 import db, { sqlite } from ".";
 import { seed } from "./seed";
 
-const initializeData = fs.existsSync(DATABASE_PATH);
+const initializeData = !fs.existsSync(DATABASE_PATH);
 
-await migrate(db, {
-  migrationsFolder: __dirname + "/migrations",
-});
+const migrate = async () => {
+  migrator(db, {
+    migrationsFolder: __dirname + "/migrations",
+  });
 
-if (initializeData) {
-  await seed();
-}
+  if (initializeData) {
+    await seed();
+  }
 
-await sqlite.close();
+  sqlite.close();
+};
+
+migrate();
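The inverted `existsSync` check is the functional fix here: the flag is computed before migrations run, so seeding happens only on a first boot, when the SQLite file does not exist yet. A one-line sketch (path illustrative):

```ts
import fs from "fs";

// true only before the first migration creates storage/database.db
const isFreshInstall = !fs.existsSync("storage/database.db");
```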
backend/src/… (class DatabaseUtil)
@@ -1,6 +1,10 @@
 import BaseDbms from "./dbms/base";
 import PostgresDbms from "./dbms/postgres";
-import type { DatabaseConfig, DatabaseListItem } from "../types/database.types";
+import type {
+  DatabaseConfig,
+  DatabaseListItem,
+  DumpOptions,
+} from "../types/database.types";
 
 class DatabaseUtil {
   private db = new BaseDbms();
@@ -19,8 +23,12 @@ class DatabaseUtil {
     return this.db.getDatabases();
   }
 
-  async dump(dbName: string, path: string): Promise<string> {
-    return this.db.dump(dbName, path);
+  async dump(
+    dbName: string,
+    path: string,
+    options?: DumpOptions
+  ): Promise<string> {
+    return this.db.dump(dbName, path, options);
   }
 
   async restore(path: string): Promise<string> {
backend/src/…/dbms/base.ts
@@ -1,11 +1,15 @@
-import type { DatabaseListItem } from "../../types/database.types";
+import type { DatabaseListItem, DumpOptions } from "../../types/database.types";
 
 class BaseDbms {
   async getDatabases(): Promise<DatabaseListItem[]> {
     return [];
   }
 
-  async dump(_dbName: string, _path: string): Promise<string> {
+  async dump(
+    _dbName: string,
+    _path: string,
+    _options?: DumpOptions
+  ): Promise<string> {
     return "";
   }
 
backend/src/…/dbms/postgres.ts
@@ -1,7 +1,9 @@
 import type {
   DatabaseListItem,
+  DumpOptions,
   PostgresConfig,
 } from "../../types/database.types";
+import path from "path";
 import { exec } from "../../utility/process";
 import { urlencode } from "../../utility/utils";
 import BaseDbms from "./base";
@@ -18,21 +20,33 @@ class PostgresDbms extends BaseDbms {
     );
   }
 
-  async dump(dbName: string, path: string) {
-    return exec(["pg_dump", this.dbUrl + `/${dbName}`, "-Z9", "-f", path]);
+  async dump(dbName: string, path: string, options: DumpOptions = {}) {
+    const { compress } = options;
+    const ext = compress ? ".gz" : ".sql";
+    const filename = path + ext;
+
+    await exec([
+      "pg_dump",
+      this.dbUrl + `/${dbName}`,
+      "-Cc",
+      compress ? "-Z9" : null,
+      "-f",
+      filename,
+    ]);
+
+    return filename;
   }
 
-  async restore(path: string) {
-    return exec([
-      "pg_restore",
-      "-d",
-      this.dbUrl,
-      "-cC",
-      "--if-exists",
-      "--exit-on-error",
-      // "-Ftar",
-      path,
-    ]);
+  async restore(backupFile: string) {
+    const ext = path.extname(backupFile);
+    const isCompressed = ext === ".gz";
+    let cmd = `psql ${this.dbUrl} < ${backupFile}`;
+
+    if (isCompressed) {
+      cmd = `zcat ${backupFile} | psql ${this.dbUrl}`;
+    }
+
+    return exec(["sh", "-c", cmd]);
   }
 
   private async sql<T = any>(query: string) {
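Background on the restore rewrite: with the default plain-text format, `pg_dump -Z9` emits gzip-compressed SQL, which `pg_restore` cannot read (it only accepts custom/tar archives), so restoring now pipes the dump into `psql`. Roughly the commands the new code assembles (sketch; URL and paths are invented):

```ts
// Illustrative values only, not from the repo.
const dbUrl = "postgres://user:pass@host.docker.internal:5432";
const out = "/app/storage/backups/host/mydb/1714000000000";

// compress: true  -> pg_dump <url>/mydb -Cc -Z9 -f <out>.gz
// compress: false -> pg_dump <url>/mydb -Cc -f <out>.sql
const dumpArgs = ["pg_dump", `${dbUrl}/mydb`, "-Cc", "-Z9", "-f", `${out}.gz`];

// Plain dumps stream straight into psql; gzipped ones go through zcat.
const restorePlain = `psql ${dbUrl} < ${out}.sql`;
const restoreCompressed = `zcat ${out}.gz | psql ${dbUrl}`;
```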
backend/src/main.ts
@@ -1,8 +1,22 @@
 import { Hono } from "hono";
 import routers from "./routers";
 import { initScheduler } from "./schedulers";
+import { __PROD } from "./consts";
+import { serveStatic } from "hono/bun";
 
 console.log("Starting app..");
 
+const app = new Hono();
 initScheduler();
 
-export default routers;
+// Add API routes
+app.route(__PROD ? "/api" : "/", routers);
+
+// Serve frontend
+if (__PROD) {
+  app.use(serveStatic({ root: "./public" }));
+  app.use("*", serveStatic({ path: "./public/index.html" }));
+
+  const PORT = Number(process.env.PORT) || 3000;
+  console.log(`App listening on http://localhost:${PORT}`);
+}
+
+export default app;
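The net routing behavior after this change, summarized (sketch, not code from the repo):

```ts
// Production (__PROD):
//   /api/*        -> Hono routers (mounted under /api)
//   static files  -> served from ./public (built frontend)
//   anything else -> ./public/index.html (SPA fallback)
// Development:
//   /*            -> Hono routers directly; the frontend runs on Vite's
//                    dev server and reaches the API via VITE_BACKEND_URL.
```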
backend/src/routers/… (backup routes)
@@ -1,3 +1,4 @@
+import { processBackup } from "../schedulers/process-backup";
 import {
   createBackupSchema,
   getAllBackupQuery,
@@ -18,12 +19,22 @@ const router = new Hono()
 
   .post("/", zValidator("json", createBackupSchema), async (c) => {
     const body = c.req.valid("json");
-    return c.json(await backupService.create(body));
+    const result = await backupService.create(body);
+
+    // start backup scheduler
+    processBackup();
+
+    return c.json(result);
   })
 
   .post("/restore", zValidator("json", restoreBackupSchema), async (c) => {
     const body = c.req.valid("json");
-    return c.json(await backupService.restore(body));
+    const result = await backupService.restore(body);
+
+    // start restore scheduler
+    processBackup();
+
+    return c.json(result);
   });
 
 export default router;
backend/src/routers/… (server routes)
@@ -46,7 +46,7 @@ const router = new Hono()
       return c.json({ success: true, databases });
     } catch (err) {
       throw new HTTPException(400, {
-        message: "Cannot connect to the database.",
+        message: (err as any).message || "Cannot connect to the database.",
       });
     }
   })
backend/src/schedulers/index.ts
@@ -4,6 +4,5 @@ import { backupScheduler } from "./backup-scheduler";
 
 export const initScheduler = () => {
   scheduler.scheduleJob("*/10 * * * * *", processBackup);
-  // scheduler.scheduleJob("* * * * * *", backupScheduler);
-  backupScheduler();
+  scheduler.scheduleJob("* * * * * *", backupScheduler);
 };
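node-schedule accepts six-field cron expressions with a leading seconds field, so both jobs run at sub-minute intervals:

```ts
// "*/10 * * * * *" -> processBackup every 10 seconds
// "* * * * * *"    -> backupScheduler every second
```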
backend/src/schedulers/process-backup.ts
@@ -25,11 +25,15 @@ const runBackup = async (task: PendingTasks[number]) => {
 
   if (task.type === "backup") {
     const key = path.join(server.connection.host, dbName, `${Date.now()}`);
-    const outFile = path.join(BACKUP_DIR, key);
+    let outFile = path.join(BACKUP_DIR, key);
     mkdir(path.dirname(outFile));
 
     // Run database dump command
-    const output = await dbUtil.dump(dbName, outFile);
+    const filename = await dbUtil.dump(dbName, outFile, {
+      compress: task.server.backup?.compress,
+    });
+    const ext = path.extname(filename);
+    outFile = outFile + ext;
 
     // Get file stats and file checksum
     const fileStats = fs.statSync(outFile);
@@ -40,8 +44,8 @@ const runBackup = async (task: PendingTasks[number]) => {
       .update(backupModel)
       .set({
         status: "success",
-        output,
-        key,
+        output: "",
+        key: key + ext,
         hash: sha256Hash,
         size: fileStats.size,
       })
@@ -90,7 +94,7 @@ const getPendingTasks = async () => {
     orderBy: (i) => asc(i.createdAt),
     with: {
       server: {
-        columns: { connection: true, ssh: true },
+        columns: { connection: true, ssh: true, backup: true },
       },
       database: {
         columns: { name: true },
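Since `dump()` now chooses the file extension, the caller re-derives the stored key from the returned filename. A sketch with illustrative values:

```ts
import path from "path";

const key = "host/mydb/1714000000000";  // illustrative
const filename = key + ".gz";           // as returned by dbUtil.dump(...)
const ext = path.extname(filename);     // ".gz"
const storedKey = key + ext;            // value written to backupModel.key
```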
backend/src/… (class BackupService)
@@ -106,6 +106,12 @@ export default class BackupService {
     const backup = await this.getOrFail(data.backupId);
     await this.checkPendingBackup(backup.databaseId);
 
+    if (backup.status !== "success") {
+      throw new HTTPException(400, {
+        message: "Cannot restore backup that is not success.",
+      });
+    }
+
     if (!backup.key) {
       throw new HTTPException(400, {
         message: "Cannot restore backup without file key.",
backend/src/… (class ServerService)
@@ -82,7 +82,12 @@ export default class ServerService {
       }))
     );
 
-    return data;
+    const server = this.parse(result);
+    if (server.connection?.pass) {
+      delete server.connection.pass;
+    }
+
+    return server;
   });
 }
 
backend/src/types/database.types.ts
@@ -12,3 +12,7 @@ export type DatabaseListItem = {
   name: string;
   size: string;
 };
+
+export type DumpOptions = Partial<{
+  compress: boolean;
+}>;
backend/src/utility/process.ts
@@ -5,10 +5,10 @@ type ExecOptions = {
 };
 
 export const exec = async (
-  cmds: string[],
+  cmds: (string | null | undefined)[],
   options: Partial<ExecOptions> = {}
 ) => {
-  const proc = Bun.spawn(cmds, {
+  const proc = Bun.spawn(cmds.filter((i) => i != null) as string[], {
     env: options.env,
     stderr: "pipe",
   });
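Accepting `null`/`undefined` entries lets callers inline optional flags, as the Postgres `dump()` above does with `compress ? "-Z9" : null`. A sketch of the filtering:

```ts
const compress = false;
const args = ["pg_dump", "db", compress ? "-Z9" : null, "-f", "out.sql"];
const argv = args.filter((i): i is string => i != null);
// argv === ["pg_dump", "db", "-f", "out.sql"]
```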
docker-compose.yml (new file, +13)
@@ -0,0 +1,13 @@
+version: "3"
+
+services:
+  app:
+    container_name: db-backup
+    build:
+      context: .
+    volumes:
+      - ./storage:/app/storage:rw
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
+    ports:
+      - "3000:3000"
entrypoint.sh (new file, +11)
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+set -e
+
+# Run migration
+bun run migrate & PID=$!
+wait $PID
+
+# Start app
+bun start & PID=$!
+wait $PID
frontend/src/… (API client)
@@ -1,7 +1,8 @@
 import { ClientResponse, hc } from "hono/client";
 import type { AppRouter } from "@backend/routers";
 
-const api = hc<AppRouter>("http://localhost:3000/");
+const BACKEND_URL = import.meta.env.VITE_BACKEND_URL;
+const api = hc<AppRouter>(BACKEND_URL || "http://localhost:3000/");
 
 export const parseJson = async <T>(res: ClientResponse<T>) => {
   const json = await res.json();
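`import.meta.env.*` is substituted by Vite at build time, which is why the Dockerfile sets `ENV VITE_BACKEND_URL=/api` before building: the prefix is baked into the bundle rather than read at runtime. Effectively (sketch):

```ts
// Built with VITE_BACKEND_URL=/api, the bundle contains:
//   const api = hc<AppRouter>("/api");
// With the variable unset (local dev), it falls back to:
//   const api = hc<AppRouter>("http://localhost:3000/");
```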
frontend/src/… (ServerSection)
@@ -16,7 +16,13 @@ const ServerSection = () => {
 
   return (
     <section>
-      <PageTitle setTitle={false}>Servers</PageTitle>
+      <div className="flex items-center gap-2 mt-2 md:mt-4">
+        <PageTitle className="flex-1">Servers</PageTitle>
+
+        <Button onClick={() => serverFormDlg.onOpen({ ...initialServerData })}>
+          Add Server
+        </Button>
+      </div>
 
       {isLoading ? (
         <div>Loading...</div>
frontend/src/… (BackupStatus)
@@ -45,7 +45,7 @@ const BackupStatus = ({ status, output }: Props) => {
     <Popover>
       <PopoverTrigger
         disabled={!output}
-        title={output}
+        title={output?.substring(0, 120)}
         className={cn(
           "flex items-center gap-2 px-2 py-1 rounded-lg text-white shrink-0",
           colors[status]
@@ -57,8 +57,10 @@ const BackupStatus = ({ status, output }: Props) => {
         <p className="text-sm">{labels[status]}</p>
       </PopoverTrigger>
 
-      <PopoverContent className="max-w-lg w-screen">
-        <p className="font-mono text-sm">{output}</p>
+      <PopoverContent className="max-w-lg w-screen p-0">
+        <textarea className="font-mono text-sm w-full h-[200px] border-none outline-none p-4">
+          {output}
+        </textarea>
       </PopoverContent>
     </Popover>
   );
frontend/src/…/server-form-backup-tab (BackupTab)
@@ -54,7 +54,11 @@ const BackupTab = () => {
 
   return (
     <TabsContent value="backup" className="mt-4">
-      <CheckboxField form={form} name="backup.compress" label="Compressed" />
+      <CheckboxField
+        form={form}
+        name="backup.compress"
+        label="Enable Compression"
+      />
 
       <CheckboxField
         className="mt-4"
frontend/src/… (ServerFormDialog)
@@ -18,9 +18,12 @@ import ConnectionTab from "./server-form-connection-tab";
 import { Tabs, TabsList, TabsTrigger } from "@/components/ui/tabs";
 import BackupTab from "./server-form-backup-tab";
 import { ServerFormSchema, serverFormSchema } from "../schema";
+import { useNavigate } from "react-router-dom";
+import { toast } from "sonner";
 
 const ServerFormDialog = () => {
   const { isOpen, data } = serverFormDlg.useState();
+  const navigate = useNavigate();
   const form = useForm({
     resolver: zodResolver(serverFormSchema),
     defaultValues: data,
@@ -44,9 +47,13 @@ const ServerFormDialog = () => {
         return parseJson(res);
       }
     },
-    onSuccess: () => {
+    onSuccess: (data) => {
       serverFormDlg.onClose();
       queryClient.invalidateQueries("servers");
+      navigate(`/servers/${data.id}`);
     },
+    onError: (err) => {
+      toast.error((err as Error)?.message || "Failed to save server");
+    },
   });
 
@@ -78,7 +85,7 @@ const ServerFormDialog = () => {
         </Button>
         <Button
           type="submit"
-          disabled={!databases.length}
+          disabled={!databases?.length}
           isLoading={saveServer.isLoading}
         >
           Submit
frontend/src/… (BackupSection)
@@ -25,6 +25,7 @@ const BackupSection = ({ databases }: BackupSectionProps) => {
   const [query, setQuery] = useQueryParams<QueryParams>({
     page: 1,
     limit: 10,
+    databaseId: undefined,
   });
 
   const backups = useQuery({
frontend/src/… (backups table columns)
@@ -158,6 +158,7 @@ export const backupsColumns: TableColumn<BackupType>[] = [
       </DropdownMenuTrigger>
       <DropdownMenuContent>
         <DropdownMenuItem
+          disabled={row.status !== "success"}
           onClick={() => {
             confirmDlg.onOpen({
               title: "Restore Backup",
package.json
@@ -9,7 +9,10 @@
   "private": false,
   "license": "MIT",
   "scripts": {
-    "dev": "concurrently \"cd backend && pnpm dev\" \"cd frontend && pnpm dev\""
+    "dev": "concurrently \"cd backend && npm run dev:compose\" \"cd frontend && npm run dev\"",
+    "build:frontend": "cd frontend && VITE_BACKEND_URL=/api npm run build",
+    "build:backend": "cd backend && npm run build",
+    "build": "npm run build:frontend && npm run build:backend"
   },
   "devDependencies": {
     "concurrently": "^8.2.2"