Mirror of https://github.com/fosrl/pangolin.git
Initial pass at migrations

commit 2a265e5cdd (parent 29bd88ebdf)

9 changed files with 191 additions and 26 deletions
@@ -8,7 +8,7 @@ RUN npm install --legacy-peer-deps

 COPY . .

-RUN npx drizzle-kit generate --dialect sqlite --schema ./server/db/schema.ts --out migrations
+RUN npx drizzle-kit generate --dialect sqlite --schema ./server/db/schema.ts --out init

 RUN npm run build
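The drizzle-kit generate step changed above now writes its generated SQL to an init folder instead of migrations. For reference, the same generation settings could also be expressed as a config file rather than CLI flags; a minimal sketch, assuming a hypothetical drizzle.config.ts at the repo root (not part of this commit, which keeps the inline flags):

// drizzle.config.ts -- hypothetical equivalent of the CLI flags above
import { defineConfig } from "drizzle-kit";

export default defineConfig({
    dialect: "sqlite",                // --dialect sqlite
    schema: "./server/db/schema.ts",  // --schema ./server/db/schema.ts
    out: "./init"                     // --out init
});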
@@ -65,6 +65,7 @@
         "react-dom": "19.0.0-rc.1",
         "react-hook-form": "7.53.0",
         "rebuild": "0.1.2",
+        "semver": "7.6.3",
         "tailwind-merge": "2.5.3",
         "tailwindcss-animate": "1.0.7",
         "vaul": "1.1.1",
@@ -86,6 +87,7 @@
         "@types/nodemailer": "6.4.16",
         "@types/react": "npm:types-react@19.0.0-rc.1",
         "@types/react-dom": "npm:types-react-dom@19.0.0-rc.1",
+        "@types/semver": "7.5.8",
         "@types/ws": "8.5.13",
         "@types/yargs": "17.0.33",
         "drizzle-kit": "0.24.2",
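The semver dependency added above is what the new migration runner (server/setup/migrations.ts below) uses to validate and order versioned script filenames. A minimal sketch of the calls it relies on, with illustrative version strings:

import semver from "semver";

// Illustrative values; real versions come from script filenames like "1.0.0.ts".
semver.valid("1.0.0");                    // "1.0.0"  -> accepted as a script version
semver.valid("not-a-version");            // null     -> such a file is skipped
semver.gt("1.1.0", "1.0.0");              // true     -> newer than the last executed version
["1.1.0", "1.0.0"].sort(semver.compare);  // ["1.0.0", "1.1.0"] -> execution order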
@@ -1,33 +1,12 @@
 import { drizzle } from "drizzle-orm/better-sqlite3";
 import Database from "better-sqlite3";
 import * as schema from "@server/db/schema";
-import { __DIRNAME, APP_PATH } from "@server/config";
+import { APP_PATH } from "@server/config";
 import path from "path";
-import fs from "fs";
-import logger from "@server/logger";
-import { migrate } from "drizzle-orm/better-sqlite3/migrator";

-const location = path.join(APP_PATH, "db", "db.sqlite");
+export const location = path.join(APP_PATH, "db", "db.sqlite");

-let dbExists = true;
-
-if (!fs.existsSync(location)) {
-    dbExists = false;
-}
-
 const sqlite = new Database(location);
 export const db = drizzle(sqlite, { schema });

-if (!dbExists && process.env.ENVIRONMENT === "prod") {
-    logger.info("Running migrations...");
-    try {
-        migrate(db, {
-            migrationsFolder: path.join(__DIRNAME, "migrations"),
-        });
-        logger.info("Migrations completed successfully.");
-    } catch (error) {
-        logger.error("Error running migrations:", error);
-        process.exit(1);
-    }
-}
 export default db;
@@ -356,6 +356,11 @@ export const resourceOtp = sqliteTable("resourceOtp", {
     expiresAt: integer("expiresAt").notNull()
 });

+export const versionMigrations = sqliteTable("versionMigrations", {
+    version: text("version").primaryKey(),
+    executedAt: integer("executedAt").notNull()
+});
+
 export type Org = InferSelectModel<typeof orgs>;
 export type User = InferSelectModel<typeof users>;
 export type Site = InferSelectModel<typeof sites>;
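The versionMigrations table added above is the runner's bookkeeping record: read to find the last executed version, written after each script completes. A minimal sketch of that usage with drizzle-orm, mirroring what server/setup/migrations.ts below does (the version value here is illustrative):

import { desc } from "drizzle-orm";
import db from "@server/db";
import { versionMigrations } from "@server/db/schema";

// Last executed version, if any (undefined on a fresh database).
const [latest] = await db
    .select()
    .from(versionMigrations)
    .orderBy(desc(versionMigrations.version))
    .limit(1);

// Record that a version's script has run.
await db.insert(versionMigrations).values({
    version: "1.0.0",       // illustrative
    executedAt: Date.now()
});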
@@ -9,8 +9,8 @@ async function startServers() {

     // Start all servers
     const apiServer = createApiServer();
-    const nextServer = await createNextServer();
     const internalServer = createInternalServer();
+    const nextServer = await createNextServer();

     return {
         apiServer,
@@ -11,5 +11,5 @@ export async function copyInConfig() {
     // update the domain on all of the orgs where the domain is not equal to the new domain
     // TODO: eventually each org could have a unique domain that we do not want to overwrite, so this will be unnecessary
     await db.update(orgs).set({ domain }).where(ne(orgs.domain, domain));
-    logger.info("Updated orgs with new domain");
+    logger.debug("Updated orgs with new domain");
 }
@@ -1,7 +1,11 @@
 import { ensureActions } from "./ensureActions";
 import { copyInConfig } from "./copyInConfig";
+import logger from "@server/logger";
+import { runMigrations } from "./migrations";

 export async function runSetupFunctions() {
+    logger.info(`Setup for version ${process.env.APP_VERSION}`);
+    await runMigrations(); // run the migrations
     await ensureActions(); // make sure all of the actions are in the db and the roles
     await copyInConfig(); // copy in the config to the db as needed
 }
server/setup/migrations.ts (new file, 168 lines)
@@ -0,0 +1,168 @@
+import logger from "@server/logger";
+import { __DIRNAME } from "@server/config";
+import { migrate } from "drizzle-orm/better-sqlite3/migrator";
+import db, { location } from "@server/db";
+import path from "path";
+import * as fs from "fs/promises";
+import semver from "semver";
+import { versionMigrations } from "@server/db/schema";
+import { desc, eq } from "drizzle-orm";
+
+export async function runMigrations() {
+    if (!process.env.APP_VERSION) {
+        throw new Error("APP_VERSION is not set in the environment");
+    }
+
+    if (process.env.ENVIRONMENT !== "prod") {
+        logger.info("Skipping migrations in non-prod environment");
+        return;
+    }
+
+    if (await checkFileExists(location)) {
+        try {
+            const directoryPath = path.join(__DIRNAME, "setup/scripts");
+
+            // Get the last executed version from the database
+            const lastExecuted = await db
+                .select()
+                .from(versionMigrations)
+                .orderBy(desc(versionMigrations.version))
+                .limit(1);
+
+            // Use provided baseVersion or last executed version
+            const startVersion = lastExecuted[0]?.version;
+
+            // Read all files in directory
+            const files = await fs.readdir(directoryPath);
+
+            // Filter for .ts files and extract versions
+            const versionedFiles = files
+                .filter((file) => file.endsWith(".ts"))
+                .map((file) => {
+                    const version = path.parse(file).name;
+                    return {
+                        version,
+                        path: path.join(directoryPath, file)
+                    };
+                })
+                .filter((file) => {
+                    // Validate that filename is a valid semver
+                    if (!semver.valid(file.version)) {
+                        console.warn(
+                            `Skipping invalid semver filename: ${file.path}`
+                        );
+                        return false;
+                    }
+                    // Filter versions based on startVersion if provided
+                    if (startVersion) {
+                        return semver.gt(file.version, startVersion);
+                    }
+                    return true;
+                });
+
+            // Sort files by semver
+            const sortedFiles = versionedFiles.sort((a, b) =>
+                semver.compare(a.version, b.version)
+            );
+
+            const results: FileExecutionResult[] = [];
+
+            // Execute files in order
+            for (const file of sortedFiles) {
+                try {
+                    // Start a transaction for each file execution
+                    await db.transaction(async (tx) => {
+                        // Check if version was already executed (double-check within transaction)
+                        const executed = await tx
+                            .select()
+                            .from(versionMigrations)
+                            .where(eq(versionMigrations.version, file.version));
+
+                        if (executed.length > 0) {
+                            throw new Error(
+                                `Version ${file.version} was already executed`
+                            );
+                        }
+
+                        // Dynamic import of the TypeScript file
+                        const module = await import(file.path);
+
+                        // Execute default export if it's a function
+                        if (typeof module.default === "function") {
+                            await module.default();
+                        } else {
+                            throw new Error(
+                                `No default export function in ${file.path}`
+                            );
+                        }
+
+                        // Record successful execution
+                        const executedAt = Date.now();
+                        await tx.insert(versionMigrations).values({
+                            version: file.version,
+                            executedAt: executedAt
+                        });
+
+                        results.push({
+                            version: file.version,
+                            success: true,
+                            executedAt
+                        });
+                    });
+                } catch (error) {
+                    const executedAt = Date.now();
+                    results.push({
+                        version: file.version,
+                        success: false,
+                        executedAt,
+                        error:
+                            error instanceof Error
+                                ? error
+                                : new Error(String(error))
+                    });
+
+                    // Log error but continue processing other files
+                    console.error(`Error executing ${file.path}:`, error);
+                }
+            }
+
+            return results;
+        } catch (error) {
+            throw new Error(`Failed to process directory: ${error}`);
+        }
+    } else {
+        logger.info("Running migrations...");
+        try {
+            migrate(db, {
+                migrationsFolder: path.join(__DIRNAME, "init")
+            });
+            logger.info("Migrations completed successfully.");
+        } catch (error) {
+            logger.error("Error running migrations:", error);
+        }
+
+        // insert process.env.APP_VERSION into the versionMigrations table
+        await db
+            .insert(versionMigrations)
+            .values({
+                version: process.env.APP_VERSION,
+                executedAt: Date.now()
+            })
+            .execute();
+    }
+}
+
+async function checkFileExists(filePath: string): Promise<boolean> {
+    try {
+        await fs.access(filePath);
+        return true;
+    } catch {
+        return false;
+    }
+}
+
+interface FileExecutionResult {
+    version: string;
+    success: boolean;
+    executedAt: number;
+    error?: Error;
+}
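For orientation, runMigrations() is a no-op outside prod and throws if APP_VERSION is unset; when the database file already exists it runs versioned scripts newer than the last recorded version, otherwise it applies the bundled "init" migrations and records APP_VERSION. A minimal invocation sketch (the env values here are illustrative; in the app it is called from runSetupFunctions):

import { runMigrations } from "./migrations"; // as imported in server/setup/index.ts

process.env.APP_VERSION = "1.0.0";  // illustrative; normally provided by the build/runtime
process.env.ENVIRONMENT = "prod";   // anything else skips migrations entirely

const results = await runMigrations();
// results: per-script outcomes when versioned scripts ran,
// undefined when the initial "init" migration path was taken.
console.log(results);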
server/setup/scripts/1.0.0.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
+import db from "@server/db";
+import logger from "@server/logger";
+
+export default async function run() {
+    logger.info("Running setup script 1.0.0");
+    logger.info("Done...");
+}
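A later versioned script would follow the same shape: a semver filename under server/setup/scripts and a default-exported async function. A hypothetical sketch (the 1.1.0.ts filename and the backfill it performs are invented for illustration and are not part of this commit):

// server/setup/scripts/1.1.0.ts -- hypothetical future migration script
import db from "@server/db";
import { orgs } from "@server/db/schema";
import logger from "@server/logger";
import { isNull } from "drizzle-orm";

export default async function run() {
    logger.info("Running setup script 1.1.0");
    // Hypothetical data fix: backfill a placeholder domain on orgs missing one.
    await db
        .update(orgs)
        .set({ domain: "example.com" })
        .where(isNull(orgs.domain));
    logger.info("Done...");
}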