feat: sql-tools overrides (#19796)

Jason Rasmussen authored on 2025-07-08 08:17:40 -04:00, committed by GitHub
parent 1f9813a28e
commit df4a27e8a7
114 changed files with 775 additions and 289 deletions

src/sql-tools/readers/column.reader.ts

@@ -1,8 +1,8 @@
import { sql } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';
-import { ColumnType, DatabaseColumn, DatabaseReader } from 'src/sql-tools/types';
+import { ColumnType, DatabaseColumn, Reader } from 'src/sql-tools/types';
-export const readColumns: DatabaseReader = async (schema, db) => {
+export const readColumns: Reader = async (ctx, db) => {
const columns = await db
.selectFrom('information_schema.columns as c')
.leftJoin('information_schema.element_types as o', (join) =>
@@ -42,13 +42,13 @@ export const readColumns: DatabaseReader = async (schema, db) => {
// data type for ARRAYs
'o.data_type as array_type',
])
-.where('table_schema', '=', schema.schemaName)
+.where('table_schema', '=', ctx.schemaName)
.execute();
const enumRaw = await db
.selectFrom('pg_type')
.innerJoin('pg_namespace', (join) =>
-join.onRef('pg_namespace.oid', '=', 'pg_type.typnamespace').on('pg_namespace.nspname', '=', schema.schemaName),
+join.onRef('pg_namespace.oid', '=', 'pg_type.typnamespace').on('pg_namespace.nspname', '=', ctx.schemaName),
)
.where('typtype', '=', sql.lit('e'))
.select((eb) => [
@@ -61,13 +61,13 @@ export const readColumns: DatabaseReader = async (schema, db) => {
const enums = enumRaw.map((item) => ({ name: item.name, values: item.values.map(({ value }) => value) }));
for (const { name, values } of enums) {
-schema.enums.push({ name, values, synchronize: true });
+ctx.enums.push({ name, values, synchronize: true });
}
const enumMap = Object.fromEntries(enums.map((e) => [e.name, e.values]));
// add columns to tables
for (const column of columns) {
-const table = schema.tables.find((table) => table.name === column.table_name);
+const table = ctx.getTableByName(column.table_name);
if (!table) {
continue;
}
@@ -93,7 +93,7 @@ export const readColumns: DatabaseReader = async (schema, db) => {
// array types
case 'ARRAY': {
if (!column.array_type) {
-schema.warnings.push(`Unable to find type for ${columnLabel} (ARRAY)`);
+ctx.warnings.push(`Unable to find type for ${columnLabel} (ARRAY)`);
continue;
}
item.type = column.array_type as ColumnType;
@@ -103,7 +103,7 @@ export const readColumns: DatabaseReader = async (schema, db) => {
// enum types
case 'USER-DEFINED': {
if (!enumMap[column.udt_name]) {
-schema.warnings.push(`Unable to find type for ${columnLabel} (ENUM)`);
+ctx.warnings.push(`Unable to find type for ${columnLabel} (ENUM)`);
continue;
}
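Note: across these readers the inline `schema.tables.find(...)` lookup is replaced with `ctx.getTableByName(...)`. The context implementation is not included in this diff; below is a minimal, self-contained sketch of what the helper presumably does, assuming the context still carries the same `tables` array the readers previously searched by hand.

// Sketch only; the real helper lives on the reader context in src/sql-tools and is not shown in this commit.
type TableLike = { name: string };

const getTableByName = (tables: TableLike[], name: string): TableLike | undefined =>
  tables.find((table) => table.name === name);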

src/sql-tools/readers/comment.reader.ts

@@ -1,6 +1,6 @@
-import { DatabaseReader } from 'src/sql-tools/types';
+import { Reader } from 'src/sql-tools/types';
-export const readComments: DatabaseReader = async (schema, db) => {
+export const readComments: Reader = async (ctx, db) => {
const comments = await db
.selectFrom('pg_description as d')
.innerJoin('pg_class as c', 'd.objoid', 'c.oid')
@@ -20,7 +20,7 @@ export const readComments: DatabaseReader = async (schema, db) => {
for (const comment of comments) {
if (comment.object_type === 'r') {
-const table = schema.tables.find((table) => table.name === comment.object_name);
+const table = ctx.getTableByName(comment.object_name);
if (!table) {
continue;
}

src/sql-tools/readers/constraint.reader.ts

@@ -1,7 +1,7 @@
import { sql } from 'kysely';
-import { ActionType, ConstraintType, DatabaseReader } from 'src/sql-tools/types';
+import { ActionType, ConstraintType, Reader } from 'src/sql-tools/types';
-export const readConstraints: DatabaseReader = async (schema, db) => {
+export const readConstraints: Reader = async (ctx, db) => {
const constraints = await db
.selectFrom('pg_constraint')
.innerJoin('pg_namespace', 'pg_namespace.oid', 'pg_constraint.connamespace') // namespace
@@ -40,11 +40,11 @@ export const readConstraints: DatabaseReader = async (schema, db) => {
.as('reference_column_names'),
eb.fn<string>('pg_get_constraintdef', ['pg_constraint.oid']).as('expression'),
])
-.where('pg_namespace.nspname', '=', schema.schemaName)
+.where('pg_namespace.nspname', '=', ctx.schemaName)
.execute();
for (const constraint of constraints) {
-const table = schema.tables.find((table) => table.name === constraint.table_name);
+const table = ctx.getTableByName(constraint.table_name);
if (!table) {
continue;
}
@@ -55,7 +55,7 @@ export const readConstraints: DatabaseReader = async (schema, db) => {
// primary key constraint
case 'p': {
if (!constraint.column_names) {
-schema.warnings.push(`Skipping CONSTRAINT "${constraintName}", no columns found`);
+ctx.warnings.push(`Skipping CONSTRAINT "${constraintName}", no columns found`);
continue;
}
table.constraints.push({
@@ -71,7 +71,7 @@ export const readConstraints: DatabaseReader = async (schema, db) => {
// foreign key constraint
case 'f': {
if (!constraint.column_names || !constraint.reference_table_name || !constraint.reference_column_names) {
-schema.warnings.push(
+ctx.warnings.push(
`Skipping CONSTRAINT "${constraintName}", missing either columns, referenced table, or referenced columns,`,
);
continue;

src/sql-tools/readers/extension.reader.ts

@@ -1,6 +1,6 @@
-import { DatabaseReader } from 'src/sql-tools/types';
+import { Reader } from 'src/sql-tools/types';
-export const readExtensions: DatabaseReader = async (schema, db) => {
+export const readExtensions: Reader = async (ctx, db) => {
const extensions = await db
.selectFrom('pg_catalog.pg_extension')
// .innerJoin('pg_namespace', 'pg_namespace.oid', 'pg_catalog.pg_extension.extnamespace')
@@ -9,6 +9,6 @@ export const readExtensions: DatabaseReader = async (schema, db) => {
.execute();
for (const { name } of extensions) {
-schema.extensions.push({ name, synchronize: true });
+ctx.extensions.push({ name, synchronize: true });
}
};

src/sql-tools/readers/function.reader.ts

@@ -1,14 +1,14 @@
import { sql } from 'kysely';
-import { DatabaseReader } from 'src/sql-tools/types';
+import { Reader } from 'src/sql-tools/types';
-export const readFunctions: DatabaseReader = async (schema, db) => {
+export const readFunctions: Reader = async (ctx, db) => {
const routines = await db
.selectFrom('pg_proc as p')
.innerJoin('pg_namespace', 'pg_namespace.oid', 'p.pronamespace')
.leftJoin('pg_depend as d', (join) => join.onRef('d.objid', '=', 'p.oid').on('d.deptype', '=', sql.lit('e')))
.where('d.objid', 'is', sql.lit(null))
.where('p.prokind', '=', sql.lit('f'))
-.where('pg_namespace.nspname', '=', schema.schemaName)
+.where('pg_namespace.nspname', '=', ctx.schemaName)
.select((eb) => [
'p.proname as name',
eb.fn<string>('pg_get_function_identity_arguments', ['p.oid']).as('arguments'),
@@ -17,7 +17,7 @@ export const readFunctions: DatabaseReader = async (schema, db) => {
.execute();
for (const { name, expression } of routines) {
-schema.functions.push({
+ctx.functions.push({
name,
// TODO read expression from the overrides table
expression,

src/sql-tools/readers/index.reader.ts

@@ -1,7 +1,7 @@
import { sql } from 'kysely';
-import { DatabaseReader } from 'src/sql-tools/types';
+import { Reader } from 'src/sql-tools/types';
-export const readIndexes: DatabaseReader = async (schema, db) => {
+export const readIndexes: Reader = async (ctx, db) => {
const indexes = await db
.selectFrom('pg_index as ix')
// matching index, which has column information
@@ -34,12 +34,12 @@ export const readIndexes: DatabaseReader = async (schema, db) => {
.select((eb) => eb.fn<string[]>('json_agg', ['a.attname']).as('column_name'))
.as('column_names'),
])
-.where('pg_namespace.nspname', '=', schema.schemaName)
+.where('pg_namespace.nspname', '=', ctx.schemaName)
.where('ix.indisprimary', '=', sql.lit(false))
.execute();
for (const index of indexes) {
-const table = schema.tables.find((table) => table.name === index.table_name);
+const table = ctx.getTableByName(index.table_name);
if (!table) {
continue;
}

src/sql-tools/readers/index.ts

@@ -5,13 +5,13 @@ import { readExtensions } from 'src/sql-tools/readers/extension.reader';
import { readFunctions } from 'src/sql-tools/readers/function.reader';
import { readIndexes } from 'src/sql-tools/readers/index.reader';
import { readName } from 'src/sql-tools/readers/name.reader';
+import { readOverrides } from 'src/sql-tools/readers/override.reader';
import { readParameters } from 'src/sql-tools/readers/parameter.reader';
import { readTables } from 'src/sql-tools/readers/table.reader';
import { readTriggers } from 'src/sql-tools/readers/trigger.reader';
-import { DatabaseReader } from 'src/sql-tools/types';
+import { Reader } from 'src/sql-tools/types';
-export const readers: DatabaseReader[] = [
-//
+export const readers: Reader[] = [
readName,
readParameters,
readExtensions,
@@ -22,4 +22,5 @@ export const readers: DatabaseReader[] = [
readConstraints,
readTriggers,
readComments,
+readOverrides,
];
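The rename from `DatabaseReader`/`schema` to `Reader`/`ctx` suggests the readers now receive a context object rather than a bare schema. Its real definition is not part of this excerpt; the sketch below lists only the members the readers in this commit actually use, with types inferred from the values they push (names such as `ReaderContextSketch` are illustrative, not from the codebase).

// Inferred from usage in the readers above; the actual Reader type in src/sql-tools/types may differ.
type ReaderContextSketch = {
  databaseName: string;
  schemaName: string;
  overrideTableName: string;
  warnings: string[];
  tables: { name: string; columns: unknown[]; indexes: unknown[] }[];
  enums: { name: string; values: string[]; synchronize: boolean }[];
  extensions: { name: string; synchronize: boolean }[];
  functions: { name: string; expression: string }[];
  parameters: { name: string; value: unknown; databaseName: string; scope: string; synchronize: boolean }[];
  // value uses OverrideType in the real types; a plain string stands in here
  overrides: { name: string; value: { type: string; name: string; sql: string }; synchronize: boolean }[];
  getTableByName(name: string): { name: string } | undefined;
  warn(source: string, message: string): void;
};

// Each reader receives the shared context and a live database connection.
type ReaderSketch = (ctx: ReaderContextSketch, db: unknown) => Promise<void>;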

src/sql-tools/readers/name.reader.ts

@@ -1,8 +1,8 @@
import { QueryResult, sql } from 'kysely';
-import { DatabaseReader } from 'src/sql-tools/types';
+import { Reader } from 'src/sql-tools/types';
-export const readName: DatabaseReader = async (schema, db) => {
+export const readName: Reader = async (ctx, db) => {
const result = (await sql`SELECT current_database() as name`.execute(db)) as QueryResult<{ name: string }>;
-schema.databaseName = result.rows[0].name;
+ctx.databaseName = result.rows[0].name;
};

src/sql-tools/readers/override.reader.ts

@@ -0,0 +1,19 @@
+import { sql } from 'kysely';
+import { OverrideType, Reader } from 'src/sql-tools/types';
+export const readOverrides: Reader = async (ctx, db) => {
+try {
+const result = await sql
+.raw<{
+name: string;
+value: { type: OverrideType; name: string; sql: string };
+}>(`SELECT name, value FROM "${ctx.overrideTableName}"`)
+.execute(db);
+for (const { name, value } of result.rows) {
+ctx.overrides.push({ name, value, synchronize: true });
+}
+} catch (error) {
+ctx.warn('Overrides', `Error reading override table: ${error}`);
+}
+};
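The new reader expects an overrides table named by `ctx.overrideTableName`, with a `name` column and a JSON `value` column holding `{ type, name, sql }`. The DDL that creates that table lives elsewhere in the commit and is not shown here; below is a hedged Kysely sketch of a table definition the `SELECT name, value` query above could read, with the column types assumed.

// Assumed shape only -- chosen so the raw SELECT in readOverrides would succeed; the real migration/DDL is not shown here.
import { Kysely } from 'kysely';

export const createOverrideTableSketch = (db: Kysely<any>, tableName: string) =>
  db.schema
    .createTable(tableName)
    .ifNotExists()
    .addColumn('name', 'text', (col) => col.primaryKey())
    .addColumn('value', 'jsonb', (col) => col.notNull())
    .execute();

// Illustrative row (the actual OverrideType values are not shown in this diff):
// name = 'some_override', value = { "type": "function", "name": "some_function", "sql": "CREATE OR REPLACE FUNCTION ..." }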

src/sql-tools/readers/parameter.reader.ts

@@ -1,7 +1,7 @@
import { sql } from 'kysely';
-import { DatabaseReader, ParameterScope } from 'src/sql-tools/types';
+import { ParameterScope, Reader } from 'src/sql-tools/types';
-export const readParameters: DatabaseReader = async (schema, db) => {
+export const readParameters: Reader = async (ctx, db) => {
const parameters = await db
.selectFrom('pg_settings')
.where('source', 'in', [sql.lit('database'), sql.lit('user')])
@@ -9,10 +9,10 @@ export const readParameters: DatabaseReader = async (schema, db) => {
.execute();
for (const parameter of parameters) {
-schema.parameters.push({
+ctx.parameters.push({
name: parameter.name,
value: parameter.value,
-databaseName: schema.databaseName,
+databaseName: ctx.databaseName,
scope: parameter.scope as ParameterScope,
synchronize: true,
});

src/sql-tools/readers/table.reader.ts

@@ -1,16 +1,16 @@
import { sql } from 'kysely';
-import { DatabaseReader } from 'src/sql-tools/types';
+import { Reader } from 'src/sql-tools/types';
-export const readTables: DatabaseReader = async (schema, db) => {
+export const readTables: Reader = async (ctx, db) => {
const tables = await db
.selectFrom('information_schema.tables')
-.where('table_schema', '=', schema.schemaName)
+.where('table_schema', '=', ctx.schemaName)
.where('table_type', '=', sql.lit('BASE TABLE'))
.selectAll()
.execute();
for (const table of tables) {
-schema.tables.push({
+ctx.tables.push({
name: table.table_name,
columns: [],
indexes: [],

src/sql-tools/readers/trigger.reader.ts

@@ -1,6 +1,6 @@
-import { DatabaseReader, TriggerAction, TriggerScope, TriggerTiming } from 'src/sql-tools/types';
+import { Reader, TriggerAction, TriggerScope, TriggerTiming } from 'src/sql-tools/types';
-export const readTriggers: DatabaseReader = async (schema, db) => {
+export const readTriggers: Reader = async (ctx, db) => {
const triggers = await db
.selectFrom('pg_trigger as t')
.innerJoin('pg_proc as p', 't.tgfoid', 'p.oid')
@@ -21,12 +21,12 @@ export const readTriggers: DatabaseReader = async (schema, db) => {
'c.relname as table_name',
])
.where('t.tgisinternal', '=', false) // Exclude internal system triggers
-.where('n.nspname', '=', schema.schemaName)
+.where('n.nspname', '=', ctx.schemaName)
.execute();
// add triggers to tables
for (const trigger of triggers) {
-const table = schema.tables.find((table) => table.name === trigger.table_name);
+const table = ctx.getTableByName(trigger.table_name);
if (!table) {
continue;
}
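Every entry in the `readers` array shares the same `(ctx, db)` signature, so the introspection step presumably runs them in order against one shared context, each reader filling in its slice (tables, columns, constraints, overrides, and so on). A rough, self-contained sketch of such a driver loop follows; the real orchestration code is not part of this commit.

// Hypothetical driver; shown only to illustrate how Reader functions compose.
const runReadersSketch = async <Ctx>(
  ctx: Ctx,
  db: unknown,
  readers: Array<(ctx: Ctx, db: unknown) => Promise<void>>,
): Promise<Ctx> => {
  for (const reader of readers) {
    await reader(ctx, db); // each reader mutates the shared context in place
  }
  return ctx;
};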