Revert "Revert "Root encrypted data to kms encryption"" #3109

Merged: 1 commit on Feb 12, 2025
34 changes: 23 additions & 11 deletions backend/e2e-test/vitest-environment-knex.ts
@@ -23,14 +23,14 @@ export default {
name: "knex-env",
transformMode: "ssr",
async setup() {
- const logger = await initLogger();
- const cfg = initEnvConfig(logger);
+ const logger = initLogger();
+ const envConfig = initEnvConfig(logger);
const db = initDbConnection({
- dbConnectionUri: cfg.DB_CONNECTION_URI,
- dbRootCert: cfg.DB_ROOT_CERT
+ dbConnectionUri: envConfig.DB_CONNECTION_URI,
+ dbRootCert: envConfig.DB_ROOT_CERT
});

- const redis = new Redis(cfg.REDIS_URL);
+ const redis = new Redis(envConfig.REDIS_URL);
await redis.flushdb("SYNC");

try {
@@ -42,6 +42,7 @@ export default {
},
true
);

await db.migrate.latest({
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
@@ -52,14 +53,24 @@ export default {
directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
});

const smtp = mockSmtpServer();
- const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
- const keyStore = keyStoreFactory(cfg.REDIS_URL);
+ const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
+ const keyStore = keyStoreFactory(envConfig.REDIS_URL);

- const hsmModule = initializeHsmModule();
+ const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();

- const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });
+ const server = await main({
+   db,
+   smtp,
+   logger,
+   queue,
+   keyStore,
+   hsmModule: hsmModule.getModule(),
+   redis,
+   envConfig
+ });

// @ts-expect-error type
globalThis.testServer = server;
@@ -73,8 +84,8 @@ export default {
organizationId: seedData1.organization.id,
accessVersion: 1
},
- cfg.AUTH_SECRET,
- { expiresIn: cfg.JWT_AUTH_LIFETIME }
+ envConfig.AUTH_SECRET,
+ { expiresIn: envConfig.JWT_AUTH_LIFETIME }
);
} catch (error) {
// eslint-disable-next-line
@@ -109,3 +120,4 @@ export default {
};
}
};

28 changes: 14 additions & 14 deletions backend/package.json
@@ -45,21 +45,21 @@
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
"generate:component": "tsx ./scripts/create-backend-file.ts",
"generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
"auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
"auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
"auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
"auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.mjs --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:status",
"auditlog-migration:unlock": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:unlock",
"auditlog-migration:rollback": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:rollback",
"migration:new": "tsx ./scripts/create-migration.ts",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:up",
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:down",
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:list",
"migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && npm run auditlog-migration:latest && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
"migrate:org": "tsx ./scripts/migrate-organization.ts",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
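The migration scripts now point knex at compiled .mjs knexfiles under dist rather than the TypeScript sources, and migration:latest first runs ./dist/db/rename-migrations-to-mjs.mjs. That script's contents are not part of this diff; the sketch below is purely hypothetical, an illustration of what a helper with that name might plausibly do (rename compiled migration files so they match the .mjs extension knex is told to load), written here in TypeScript for readability:

import { readdir, rename } from "node:fs/promises";
import path from "node:path";

// Hypothetical sketch only: the PR's actual script is not shown in this diff.
// Idea: compiled migrations may land as .js; renaming them to .mjs lets the
// loadExtensions: [".mjs"] filter in the knexfiles pick them up.
const migrationsDir = path.join(process.cwd(), "dist/db/migrations");

export const renameMigrationsToMjs = async () => {
  const files = await readdir(migrationsDir);
  await Promise.all(
    files
      .filter((file) => file.endsWith(".js"))
      .map((file) =>
        rename(path.join(migrationsDir, file), path.join(migrationsDir, file.replace(/\.js$/, ".mjs")))
      )
  );
};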
6 changes: 6 additions & 0 deletions backend/src/@types/fastify.d.ts
@@ -93,6 +93,12 @@ import { TUserEngagementServiceFactory } from "@app/services/user-engagement/use
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";

declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
}
}

declare module "fastify" {
interface Session {
callbackPort: string;
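The module augmentation above gives @fastify/request-context a typed reqId field. How Infisical sets and reads it is outside this diff; the following is a hedged sketch of the general pattern, with the hook and route purely illustrative:

import { fastifyRequestContext, requestContext } from "@fastify/request-context";
import { randomUUID } from "node:crypto";
import Fastify from "fastify";

const buildExampleServer = async () => {
  const app = Fastify();
  await app.register(fastifyRequestContext);

  app.addHook("onRequest", async () => {
    // typed by the RequestContextData augmentation above
    requestContext.set("reqId", randomUUID());
  });

  app.get("/health", async () => ({
    // readable anywhere downstream without threading the id through arguments
    reqId: requestContext.get("reqId")
  }));

  return app;
};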
105 changes: 105 additions & 0 deletions backend/src/auto-start-migrations.ts
@@ -0,0 +1,105 @@
import path from "node:path";

import dotenv from "dotenv";
import { Knex } from "knex";
import { Logger } from "pino";

import { PgSqlLock } from "./keystore/keystore";

dotenv.config();

type TArgs = {
auditLogDb?: Knex;
applicationDb: Knex;
logger: Logger;
};

const isProduction = process.env.NODE_ENV === "production";
const migrationConfig = {
directory: path.join(__dirname, "./db/migrations"),
loadExtensions: [".mjs", ".ts"],
tableName: "infisical_migrations"
};

const migrationStatusCheckErrorHandler = (err: Error) => {
// happens on first boot, when the migration table itself has not been created yet
// error: select * from "infisical_migrations" - relation "infisical_migrations" does not exist
if (err?.message?.includes("does not exist")) {
return true;
}
throw err;
};

export const runMigrations = async ({ applicationDb, auditLogDb, logger }: TArgs) => {
try {
// akhilmhdh(Feb 10 2025): 2 years from now remove this
if (isProduction) {
const migrationTable = migrationConfig.tableName;
const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
if (hasMigrationTable) {
const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await applicationDb(migrationTable).update({
name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
if (auditLogDb) {
const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
if (hasMigrationTableInAuditLog) {
const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await auditLogDb(migrationTable).update({
name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
}
}

const shouldRunMigration = Boolean(
await applicationDb.migrate.status(migrationConfig).catch(migrationStatusCheckErrorHandler)
); // migrate.status() resolves to (migrations applied in db) - (migration files in code); non-zero means pending work
if (!shouldRunMigration) {
logger.info("No migrations pending: Skipping migration process.");
return;
}

if (auditLogDb) {
await auditLogDb.transaction(async (tx) => {
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
logger.info("Running audit log migrations.");

const didPreviousInstanceRunMigration = !(await auditLogDb.migrate
.status(migrationConfig)
.catch(migrationStatusCheckErrorHandler));
if (didPreviousInstanceRunMigration) {
logger.info("No audit log migrations pending: Applied by previous instance. Skipping migration process.");
return;
}

await auditLogDb.migrate.latest(migrationConfig);
logger.info("Finished audit log migrations.");
});
}

await applicationDb.transaction(async (tx) => {
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
logger.info("Running application migrations.");

const didPreviousInstanceRunMigration = !(await applicationDb.migrate
.status(migrationConfig)
.catch(migrationStatusCheckErrorHandler));
if (didPreviousInstanceRunMigration) {
logger.info("No application migrations pending: Applied by previous instance. Skipping migration process.");
return;
}

await applicationDb.migrate.latest(migrationConfig);
logger.info("Finished application migrations.");
});
} catch (err) {
logger.error(err, "Boot up migration failed");
process.exit(1);
}
};
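runMigrations wraps each database in a transaction that holds a Postgres advisory lock (pg_advisory_xact_lock with PgSqlLock.BootUpMigration), so when several replicas boot at once only one of them applies the pending migrations and the rest re-check the status and skip. How the server entrypoint calls it is not shown in this diff; below is a minimal sketch of one plausible wiring, where the import paths and the audit-log environment variable names are assumptions:

import { runMigrations } from "./auto-start-migrations";
import { initAuditLogDbConnection, initDbConnection } from "./db/instance";
import { initEnvConfig } from "./lib/config/env";
import { initLogger } from "./lib/logger";

const bootstrapMigrations = async () => {
  const logger = initLogger();
  const envConfig = initEnvConfig(logger);

  const applicationDb = initDbConnection({
    dbConnectionUri: envConfig.DB_CONNECTION_URI,
    dbRootCert: envConfig.DB_ROOT_CERT
  });

  // the audit log database is optional; only migrate it when configured
  const auditLogDb = envConfig.AUDIT_LOGS_DB_CONNECTION_URI
    ? initAuditLogDbConnection({
        dbConnectionUri: envConfig.AUDIT_LOGS_DB_CONNECTION_URI,
        dbRootCert: envConfig.AUDIT_LOGS_DB_ROOT_CERT
      })
    : undefined;

  // exits the process with code 1 if any migration fails
  await runMigrations({ applicationDb, auditLogDb, logger });
};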
9 changes: 9 additions & 0 deletions backend/src/db/instance.ts
@@ -49,6 +49,9 @@ export const initDbConnection = ({
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
+ },
+ migrations: {
+   tableName: "infisical_migrations"
}
});

@@ -64,6 +67,9 @@
ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
}
: false
+ },
+ migrations: {
+   tableName: "infisical_migrations"
}
});
});
@@ -98,6 +104,9 @@ export const initAuditLogDbConnection = ({
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
+ },
+ migrations: {
+   tableName: "infisical_migrations"
}
});

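All three connection factories now pin migrations.tableName. Knex otherwise falls back to its default bookkeeping table (knex_migrations), so without this the programmatic connections, the CLI knexfiles, and the boot-time runner could end up tracking applied migrations in different tables. A minimal sketch of the idea, with the connection string as a placeholder:

import knex from "knex";

// Every entry point that touches the database declares the same bookkeeping
// table, so "which migrations have already run" is answered consistently everywhere.
const db = knex({
  client: "pg",
  connection: process.env.DB_CONNECTION_URI,
  migrations: {
    tableName: "infisical_migrations"
  }
});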
6 changes: 4 additions & 2 deletions backend/src/db/knexfile.ts
@@ -38,7 +38,8 @@ export default {
directory: "./seeds"
},
migrations: {
tableName: "infisical_migrations"
tableName: "infisical_migrations",
loadExtensions: [".mjs"]
}
},
production: {
@@ -62,7 +63,8 @@
max: 10
},
migrations: {
tableName: "infisical_migrations"
tableName: "infisical_migrations",
loadExtensions: [".mjs"]
}
}
} as Knex.Config;
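loadExtensions narrows which files knex treats as migrations. With builds emitting .mjs migrations into dist, this keeps knex from trying to load sibling artifacts (declaration files, source maps) sitting in the same directory. A hedged sketch of the programmatic equivalent, with the directory path as an assumption:

import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

const migrateCompiledOutput = async () => {
  // Only files ending in .mjs inside the compiled migrations directory are
  // considered; anything else in dist/db/migrations is ignored.
  await db.migrate.latest({
    directory: "./dist/db/migrations",
    loadExtensions: [".mjs"],
    tableName: "infisical_migrations"
  });
};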