From 9c765f9167a05b8ca89587f4ab0b52b7de91311d Mon Sep 17 00:00:00 2001 From: Rob Orton Date: Fri, 6 Mar 2026 17:54:47 -0700 Subject: [PATCH 1/4] feat: add rollback/down migrations support Implements bidirectional migrations across all dialects and drivers. drizzle-kit: - `writeResult()` auto-generates and writes `.down.sql` alongside each `.sql` by calling the diff function with prev/cur arguments swapped (no-rename auto-resolvers used so it is non-interactive) - `embeddedMigrations()` bundles `.down.sql` imports for expo-sqlite, op-sqlite, and durable-sqlite - `Journal` entry type gains optional `hasDown: boolean` flag drizzle-orm/src/migrator.ts: - `MigrationMeta` gains `downSql?: string[]` - `readMigrationFiles()` reads `.down.sql` alongside each migration if it exists Dialect rollback() methods (pg-core, mysql-core, singlestore-core, sqlite-core sync+async): - Query the last `steps` rows from `__drizzle_migrations` DESC - Match each by hash against the in-memory migration list - Throw a descriptive error if the migration file or down SQL is missing - Execute down SQL statements in reverse order inside a transaction - DELETE the rolled-back rows from `__drizzle_migrations` Per-driver rollback() exports added to all 25 driver migrator files: - Standard drivers (node-postgres, postgres-js, pglite, neon-serverless, vercel-postgres, aws-data-api/pg, mysql2, planetscale-serverless, tidb-serverless, singlestore, better-sqlite3, bun-sqlite, bun-sql, sql-js): delegate to dialect.rollback() - Proxy drivers (pg-proxy, mysql-proxy, sqlite-proxy, singlestore-proxy): build queries array and pass to existing ProxyMigrator callback - Batch drivers (libsql, d1): collect statements and use session.migrate()/session.batch() - Embedded drivers (expo-sqlite, op-sqlite, durable-sqlite): MigrationConfig gains optional `downMigrations` bundle field; rollback works with the same embedded bundle pattern as migrate() - neon-http, xata-http: custom implementations without transactions 
(noted in JSDoc) --- drizzle-kit/src/cli/commands/migrate.ts | 134 +++++++++++++++++- drizzle-kit/src/utils.ts | 1 + drizzle-orm/src/aws-data-api/pg/migrator.ts | 9 ++ drizzle-orm/src/better-sqlite3/migrator.ts | 9 ++ drizzle-orm/src/bun-sql/migrator.ts | 9 ++ drizzle-orm/src/bun-sqlite/migrator.ts | 9 ++ drizzle-orm/src/d1/migrator.ts | 49 +++++++ drizzle-orm/src/durable-sqlite/migrator.ts | 63 +++++++- drizzle-orm/src/expo-sqlite/migrator.ts | 23 ++- drizzle-orm/src/libsql/migrator.ts | 45 ++++++ drizzle-orm/src/migrator.ts | 11 ++ drizzle-orm/src/mysql-core/dialect.ts | 41 ++++++ drizzle-orm/src/mysql-proxy/migrator.ts | 44 ++++++ drizzle-orm/src/mysql2/migrator.ts | 9 ++ drizzle-orm/src/neon-http/migrator.ts | 47 ++++++ drizzle-orm/src/neon-serverless/migrator.ts | 9 ++ drizzle-orm/src/node-postgres/migrator.ts | 9 ++ drizzle-orm/src/op-sqlite/migrator.ts | 23 ++- drizzle-orm/src/pg-core/dialect.ts | 46 ++++++ drizzle-orm/src/pg-proxy/migrator.ts | 45 ++++++ drizzle-orm/src/pglite/migrator.ts | 9 ++ .../src/planetscale-serverless/migrator.ts | 9 ++ drizzle-orm/src/postgres-js/migrator.ts | 9 ++ drizzle-orm/src/singlestore-core/dialect.ts | 41 ++++++ drizzle-orm/src/singlestore-proxy/migrator.ts | 44 ++++++ drizzle-orm/src/singlestore/migrator.ts | 9 ++ drizzle-orm/src/sql-js/migrator.ts | 9 ++ drizzle-orm/src/sqlite-core/dialect.ts | 95 +++++++++++++ drizzle-orm/src/sqlite-proxy/migrator.ts | 45 ++++++ drizzle-orm/src/tidb-serverless/migrator.ts | 9 ++ drizzle-orm/src/vercel-postgres/migrator.ts | 9 ++ drizzle-orm/src/xata-http/migrator.ts | 48 +++++++ 32 files changed, 964 insertions(+), 7 deletions(-) diff --git a/drizzle-kit/src/cli/commands/migrate.ts b/drizzle-kit/src/cli/commands/migrate.ts index 8c62a5edb2..faed90ad5b 100644 --- a/drizzle-kit/src/cli/commands/migrate.ts +++ b/drizzle-kit/src/cli/commands/migrate.ts @@ -65,6 +65,48 @@ export type NamedWithSchema = { schema: string; }; +// Non-interactive resolvers used for auto-generating down SQL (no 
rename detection) +const autoResolverNoRename = async ( + input: ResolverInput, +): Promise> => ({ created: input.created, deleted: input.deleted, renamed: [] }); + +const autoResolverWithMovedNoRename = async ( + input: ResolverInput, +): Promise> => ({ + created: input.created, + deleted: input.deleted, + moved: [], + renamed: [], +}); + +const autoColumnsResolverNoRename = async ( + input: ColumnsResolverInput, +): Promise> => ({ + tableName: input.tableName, + schema: input.schema, + created: input.created, + deleted: input.deleted, + renamed: [], +}); + +const autoPolicyResolverNoRename = async ( + input: PolicyResolverInput, +): Promise> => ({ created: input.created, deleted: input.deleted, renamed: [] }); + +const autoTablePolicyResolverNoRename = async ( + input: TablePolicyResolverInput, +): Promise> => ({ + tableName: input.tableName, + schema: input.schema, + created: input.created, + deleted: input.deleted, + renamed: [], +}); + +const autoRolesResolverNoRename = async ( + input: RolesResolverInput, +): Promise> => ({ created: input.created, deleted: input.deleted, renamed: [] }); + export const schemasResolver = async ( input: ResolverInput, ): Promise> => { @@ -354,9 +396,26 @@ export const prepareAndMigratePg = async (config: GenerateConfig) => { validatedCur, ); + const { sqlStatements: downSqlStatements } = await applyPgSnapshotsDiff( + squashedCur, + squashedPrev, + autoResolverNoRename, + autoResolverWithMovedNoRename, + autoResolverWithMovedNoRename, + autoTablePolicyResolverNoRename, + autoPolicyResolverNoRename, + autoRolesResolverNoRename, + autoResolverWithMovedNoRename, + autoColumnsResolverNoRename, + autoResolverWithMovedNoRename, + validatedCur, + validatedPrev, + ); + writeResult({ cur, sqlStatements, + downSqlStatements, journal, outFolder, name: config.name, @@ -570,9 +629,20 @@ export const prepareAndMigrateMysql = async (config: GenerateConfig) => { validatedCur, ); + const { sqlStatements: downSqlStatements } = await 
applyMysqlSnapshotsDiff( + squashedCur, + squashedPrev, + autoResolverWithMovedNoRename, + autoColumnsResolverNoRename, + autoResolverWithMovedNoRename, + validatedCur, + validatedPrev, + ); + writeResult({ cur, sqlStatements, + downSqlStatements, journal, _meta, outFolder, @@ -720,9 +790,20 @@ export const prepareAndMigrateSingleStore = async (config: GenerateConfig) => { validatedCur, ); + const { sqlStatements: downSqlStatements } = await applySingleStoreSnapshotsDiff( + squashedCur, + squashedPrev, + autoResolverWithMovedNoRename, + autoColumnsResolverNoRename, + /* singleStoreViewsResolver, */ + validatedCur, + validatedPrev, + ); + writeResult({ cur, sqlStatements, + downSqlStatements, journal, _meta, outFolder, @@ -845,9 +926,20 @@ export const prepareAndMigrateSqlite = async (config: GenerateConfig) => { validatedCur, ); + const { sqlStatements: downSqlStatements } = await applySqliteSnapshotsDiff( + squashedCur, + squashedPrev, + autoResolverWithMovedNoRename, + autoColumnsResolverNoRename, + autoResolverWithMovedNoRename, + validatedCur, + validatedPrev, + ); + writeResult({ cur, sqlStatements, + downSqlStatements, journal, _meta, outFolder, @@ -940,9 +1032,20 @@ export const prepareAndMigrateLibSQL = async (config: GenerateConfig) => { validatedCur, ); + const { sqlStatements: downSqlStatements } = await applyLibSQLSnapshotsDiff( + squashedCur, + squashedPrev, + autoResolverWithMovedNoRename, + autoColumnsResolverNoRename, + autoResolverWithMovedNoRename, + validatedCur, + validatedPrev, + ); + writeResult({ cur, sqlStatements, + downSqlStatements, journal, _meta, outFolder, @@ -1356,6 +1459,7 @@ export const BREAKPOINT = '--> statement-breakpoint\n'; export const writeResult = ({ cur, sqlStatements, + downSqlStatements, journal, _meta = { columns: {}, @@ -1372,6 +1476,7 @@ export const writeResult = ({ }: { cur: CommonSchema; sqlStatements: string[]; + downSqlStatements?: string[]; journal: Journal; _meta?: any; outFolder: string; @@ -1426,18 +1531,27 
@@ export const writeResult = ({ sql = '-- Custom SQL migration file, put your code below! --'; } + const hasDown = downSqlStatements !== undefined && downSqlStatements.length > 0; + journal.entries.push({ idx, version: cur.version, when: +new Date(), tag, breakpoints: breakpoints, + ...(hasDown ? { hasDown: true } : {}), }); fs.writeFileSync(metaJournal, JSON.stringify(journal, null, 2)); fs.writeFileSync(`${outFolder}/${tag}.sql`, sql); + if (downSqlStatements !== undefined) { + const downSqlDelimiter = breakpoints ? BREAKPOINT : '\n'; + const downSql = downSqlStatements.join(downSqlDelimiter); + fs.writeFileSync(`${outFolder}/${tag}.down.sql`, downSql); + } + // js file with .sql imports for React Native / Expo and Durable Sqlite Objects if (bundle) { const js = embeddedMigrations(journal, driver); @@ -1467,6 +1581,24 @@ export const embeddedMigrations = (journal: Journal, driver?: Driver) => { content += `import m${entry.idx.toString().padStart(4, '0')} from './${entry.tag}.sql';\n`; }); + const hasAnyDown = journal.entries.some((e) => e.hasDown); + if (hasAnyDown) { + journal.entries.forEach((entry) => { + if (entry.hasDown) { + content += `import d${entry.idx.toString().padStart(4, '0')} from './${entry.tag}.down.sql';\n`; + } + }); + } + + const downMigrationsBlock = hasAnyDown + ? 
`,\n downMigrations: {\n ${ + journal.entries + .filter((it) => it.hasDown) + .map((it) => `d${it.idx.toString().padStart(4, '0')}`) + .join(',\n ') + }\n }` + : ''; + content += ` export default { journal, @@ -1476,7 +1608,7 @@ export const embeddedMigrations = (journal: Journal, driver?: Driver) => { .map((it) => `m${it.idx.toString().padStart(4, '0')}`) .join(',\n') } - } + }${downMigrationsBlock} } `; return content; diff --git a/drizzle-kit/src/utils.ts b/drizzle-kit/src/utils.ts index bcea01c249..30819e5849 100644 --- a/drizzle-kit/src/utils.ts +++ b/drizzle-kit/src/utils.ts @@ -69,6 +69,7 @@ export type Journal = { when: number; tag: string; breakpoints: boolean; + hasDown?: boolean; }[]; }; diff --git a/drizzle-orm/src/aws-data-api/pg/migrator.ts b/drizzle-orm/src/aws-data-api/pg/migrator.ts index c58ba7ab1a..e3d38a0ced 100644 --- a/drizzle-orm/src/aws-data-api/pg/migrator.ts +++ b/drizzle-orm/src/aws-data-api/pg/migrator.ts @@ -9,3 +9,12 @@ export async function migrate>( const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } + +export async function rollback>( + db: AwsDataApiPgDatabase, + config: MigrationConfig, + steps?: number, +) { + const migrations = readMigrationFiles(config); + await db.dialect.rollback(migrations, db.session, config, steps); +} diff --git a/drizzle-orm/src/better-sqlite3/migrator.ts b/drizzle-orm/src/better-sqlite3/migrator.ts index cea1982574..5ac86e6082 100644 --- a/drizzle-orm/src/better-sqlite3/migrator.ts +++ b/drizzle-orm/src/better-sqlite3/migrator.ts @@ -9,3 +9,12 @@ export function migrate>( const migrations = readMigrationFiles(config); db.dialect.migrate(migrations, db.session, config); } + +export function rollback>( + db: BetterSQLite3Database, + config: MigrationConfig, + steps?: number, +) { + const migrations = readMigrationFiles(config); + db.dialect.rollback(migrations, db.session, config, steps); +} diff --git a/drizzle-orm/src/bun-sql/migrator.ts 
b/drizzle-orm/src/bun-sql/migrator.ts index 48be01318a..c037a6cee8 100644 --- a/drizzle-orm/src/bun-sql/migrator.ts +++ b/drizzle-orm/src/bun-sql/migrator.ts @@ -9,3 +9,12 @@ export async function migrate>( const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } + +export async function rollback>( + db: BunSQLDatabase, + config: MigrationConfig, + steps?: number, +) { + const migrations = readMigrationFiles(config); + await db.dialect.rollback(migrations, db.session, config, steps); +} diff --git a/drizzle-orm/src/bun-sqlite/migrator.ts b/drizzle-orm/src/bun-sqlite/migrator.ts index 785dabae95..5638c5feb4 100644 --- a/drizzle-orm/src/bun-sqlite/migrator.ts +++ b/drizzle-orm/src/bun-sqlite/migrator.ts @@ -9,3 +9,12 @@ export function migrate>( const migrations = readMigrationFiles(config); db.dialect.migrate(migrations, db.session, config); } + +export function rollback>( + db: BunSQLiteDatabase, + config: MigrationConfig, + steps?: number, +) { + const migrations = readMigrationFiles(config); + db.dialect.rollback(migrations, db.session, config, steps); +} diff --git a/drizzle-orm/src/d1/migrator.ts b/drizzle-orm/src/d1/migrator.ts index 2259516bf8..543a1b7d49 100644 --- a/drizzle-orm/src/d1/migrator.ts +++ b/drizzle-orm/src/d1/migrator.ts @@ -1,3 +1,4 @@ +import { DrizzleError } from '~/errors.ts'; import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import { sql } from '~/sql/sql.ts'; @@ -47,3 +48,51 @@ export async function migrate>( await db.session.batch(statementToBatch); } } + +export async function rollback>( + db: DrizzleD1Database, + config: MigrationConfig, + steps: number = 1, +) { + const migrations = readMigrationFiles(config); + const migrationsTable = config.migrationsTable ?? 
'__drizzle_migrations'; + + const dbMigrations = await db.values<[number, string, string]>( + sql`SELECT id, hash, created_at FROM ${ + sql.identifier(migrationsTable) + } ORDER BY created_at DESC LIMIT ${sql.raw(String(steps))}`, + ); + + if (dbMigrations.length === 0) { + return; + } + + const statementToBatch = []; + for (const dbMigration of dbMigrations) { + const meta = migrations.find((m) => m.hash === dbMigration[1]); + if (!meta) { + throw new DrizzleError({ + message: `Cannot rollback migration with hash ${dbMigration[1]}: migration file not found`, + }); + } + if (!meta.downSql || meta.downSql.length === 0) { + throw new DrizzleError({ + message: `Cannot rollback migration ${dbMigration[1]}: no down SQL available. Add a .down.sql file alongside the migration.`, + }); + } + for (const stmt of [...meta.downSql].reverse()) { + statementToBatch.push(db.run(sql.raw(stmt))); + } + statementToBatch.push( + db.run( + sql`DELETE FROM ${sql.identifier(migrationsTable)} WHERE hash = ${ + sql.raw(`'${dbMigration[1]}'`) + }`, + ), + ); + } + + if (statementToBatch.length > 0) { + await db.session.batch(statementToBatch); + } +} diff --git a/drizzle-orm/src/durable-sqlite/migrator.ts b/drizzle-orm/src/durable-sqlite/migrator.ts index 8410b2900f..24809c66e1 100644 --- a/drizzle-orm/src/durable-sqlite/migrator.ts +++ b/drizzle-orm/src/durable-sqlite/migrator.ts @@ -1,3 +1,4 @@ +import { DrizzleError } from '~/errors.ts'; import type { MigrationMeta } from '~/migrator.ts'; import { sql } from '~/sql/index.ts'; import type { DrizzleSqliteDODatabase } from './driver.ts'; @@ -7,13 +8,15 @@ interface MigrationConfig { entries: { idx: number; when: number; tag: string; breakpoints: boolean }[]; }; migrations: Record; + downMigrations?: Record; } -function readMigrationFiles({ journal, migrations }: MigrationConfig): MigrationMeta[] { +function readMigrationFiles({ journal, migrations, downMigrations }: MigrationConfig): MigrationMeta[] { const migrationQueries: 
MigrationMeta[] = []; for (const journalEntry of journal.entries) { - const query = migrations[`m${journalEntry.idx.toString().padStart(4, '0')}`]; + const key = `m${journalEntry.idx.toString().padStart(4, '0')}`; + const query = migrations[key]; if (!query) { throw new Error(`Missing migration: ${journalEntry.tag}`); @@ -24,8 +27,15 @@ function readMigrationFiles({ journal, migrations }: MigrationConfig): Migration return it; }); + let downSql: string[] | undefined; + const downQuery = downMigrations?.[key]; + if (downQuery?.trim()) { + downSql = downQuery.trim().split('--> statement-breakpoint').map((it) => it); + } + migrationQueries.push({ sql: result, + downSql, bps: journalEntry.breakpoints, folderMillis: journalEntry.when, hash: '', @@ -83,3 +93,52 @@ export async function migrate< } }); } + +export async function rollback< + TSchema extends Record, +>( + db: DrizzleSqliteDODatabase, + config: MigrationConfig, + steps: number = 1, +): Promise { + const migrations = readMigrationFiles(config); + + db.transaction((tx) => { + try { + const migrationsTable = '__drizzle_migrations'; + + const dbMigrations = db.values<[number, string, string]>( + sql`SELECT id, hash, created_at FROM ${ + sql.identifier(migrationsTable) + } ORDER BY created_at DESC LIMIT ${sql.raw(String(steps))}`, + ); + + if (dbMigrations.length === 0) { + return; + } + + for (const dbMigration of dbMigrations) { + const meta = migrations.find((m) => m.hash === dbMigration[1]); + if (!meta) { + throw new DrizzleError({ + message: `Cannot rollback migration with hash ${dbMigration[1]}: migration file not found`, + }); + } + if (!meta.downSql || meta.downSql.length === 0) { + throw new DrizzleError({ + message: `Cannot rollback migration ${dbMigration[1]}: no down SQL available. 
Add down SQL to your migrations bundle.`, + }); + } + for (const stmt of [...meta.downSql].reverse()) { + db.run(sql.raw(stmt)); + } + db.run( + sql`DELETE FROM ${sql.identifier(migrationsTable)} WHERE hash = ${dbMigration[1]}`, + ); + } + } catch (error: any) { + tx.rollback(); + throw error; + } + }); +} diff --git a/drizzle-orm/src/expo-sqlite/migrator.ts b/drizzle-orm/src/expo-sqlite/migrator.ts index 8906121cfa..5b3f367658 100644 --- a/drizzle-orm/src/expo-sqlite/migrator.ts +++ b/drizzle-orm/src/expo-sqlite/migrator.ts @@ -1,4 +1,5 @@ import { useEffect, useReducer } from 'react'; +import { DrizzleError } from '~/errors.ts'; import type { MigrationMeta } from '~/migrator.ts'; import type { ExpoSQLiteDatabase } from './driver.ts'; @@ -7,13 +8,15 @@ interface MigrationConfig { entries: { idx: number; when: number; tag: string; breakpoints: boolean }[]; }; migrations: Record; + downMigrations?: Record; } -async function readMigrationFiles({ journal, migrations }: MigrationConfig): Promise { +async function readMigrationFiles({ journal, migrations, downMigrations }: MigrationConfig): Promise { const migrationQueries: MigrationMeta[] = []; for await (const journalEntry of journal.entries) { - const query = migrations[`m${journalEntry.idx.toString().padStart(4, '0')}`]; + const key = `m${journalEntry.idx.toString().padStart(4, '0')}`; + const query = migrations[key]; if (!query) { throw new Error(`Missing migration: ${journalEntry.tag}`); @@ -24,8 +27,15 @@ async function readMigrationFiles({ journal, migrations }: MigrationConfig): Pro return it; }); + let downSql: string[] | undefined; + const downQuery = downMigrations?.[key]; + if (downQuery?.trim()) { + downSql = downQuery.trim().split('--> statement-breakpoint').map((it) => it); + } + migrationQueries.push({ sql: result, + downSql, bps: journalEntry.breakpoints, folderMillis: journalEntry.when, hash: '', @@ -46,6 +56,15 @@ export async function migrate>( return db.dialect.migrate(migrations, db.session); } 
+export async function rollback>( + db: ExpoSQLiteDatabase, + config: MigrationConfig, + steps: number = 1, +) { + const migrations = await readMigrationFiles(config); + await db.dialect.rollback(migrations, db.session, undefined, steps); +} + interface State { success: boolean; error?: Error; diff --git a/drizzle-orm/src/libsql/migrator.ts b/drizzle-orm/src/libsql/migrator.ts index 373a8aab4a..5b3eaf8466 100644 --- a/drizzle-orm/src/libsql/migrator.ts +++ b/drizzle-orm/src/libsql/migrator.ts @@ -1,3 +1,4 @@ +import { DrizzleError } from '~/errors.ts'; import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import { sql } from '~/sql/sql.ts'; @@ -45,3 +46,47 @@ export async function migrate>( await db.session.migrate(statementToBatch); } + +export async function rollback>( + db: LibSQLDatabase, + config: MigrationConfig, + steps: number = 1, +) { + const migrations = readMigrationFiles(config); + const migrationsTable = config.migrationsTable ?? '__drizzle_migrations'; + + const dbMigrations = await db.values<[number, string, string]>( + sql`SELECT id, hash, created_at FROM ${ + sql.identifier(migrationsTable) + } ORDER BY created_at DESC LIMIT ${sql.raw(String(steps))}`, + ); + + if (dbMigrations.length === 0) { + return; + } + + const statementToBatch = []; + for (const dbMigration of dbMigrations) { + const meta = migrations.find((m) => m.hash === dbMigration[1]); + if (!meta) { + throw new DrizzleError({ + message: `Cannot rollback migration with hash ${dbMigration[1]}: migration file not found`, + }); + } + if (!meta.downSql || meta.downSql.length === 0) { + throw new DrizzleError({ + message: `Cannot rollback migration ${dbMigration[1]}: no down SQL available. 
Add a .down.sql file alongside the migration.`, + }); + } + for (const stmt of [...meta.downSql].reverse()) { + statementToBatch.push(db.run(sql.raw(stmt))); + } + statementToBatch.push( + db.run( + sql`DELETE FROM ${sql.identifier(migrationsTable)} WHERE hash = ${dbMigration[1]}`, + ), + ); + } + + await db.session.migrate(statementToBatch); +} diff --git a/drizzle-orm/src/migrator.ts b/drizzle-orm/src/migrator.ts index 8b7636a44e..a58fa2d482 100644 --- a/drizzle-orm/src/migrator.ts +++ b/drizzle-orm/src/migrator.ts @@ -14,6 +14,7 @@ export interface MigrationConfig { export interface MigrationMeta { sql: string[]; + downSql?: string[]; folderMillis: number; hash: string; bps: boolean; @@ -45,8 +46,18 @@ export function readMigrationFiles(config: MigrationConfig): MigrationMeta[] { return it; }); + let downSql: string[] | undefined; + const downPath = `${migrationFolderTo}/${journalEntry.tag}.down.sql`; + if (fs.existsSync(downPath)) { + const downQuery = fs.readFileSync(downPath).toString().trim(); + if (downQuery) { + downSql = downQuery.split('--> statement-breakpoint').map((it) => it); + } + } + migrationQueries.push({ sql: result, + downSql, bps: journalEntry.breakpoints, folderMillis: journalEntry.when, hash: crypto.createHash('sha256').update(query).digest('hex'), diff --git a/drizzle-orm/src/mysql-core/dialect.ts b/drizzle-orm/src/mysql-core/dialect.ts index 053ddc0ce4..6b057a9a21 100644 --- a/drizzle-orm/src/mysql-core/dialect.ts +++ b/drizzle-orm/src/mysql-core/dialect.ts @@ -91,6 +91,47 @@ export class MySqlDialect { }); } + async rollback( + migrations: MigrationMeta[], + session: MySqlSession, + config: Omit, + steps: number = 1, + ): Promise { + const migrationsTable = config.migrationsTable ?? 
'__drizzle_migrations'; + + const dbMigrations = await session.all<{ id: number; hash: string; created_at: string }>( + sql`select id, hash, created_at from ${sql.identifier(migrationsTable)} order by created_at desc limit ${ + sql.raw(String(steps)) + }`, + ); + + if (dbMigrations.length === 0) { + return; + } + + await session.transaction(async (tx) => { + for (const dbMigration of dbMigrations) { + const meta = migrations.find((m) => m.hash === dbMigration.hash); + if (!meta) { + throw new DrizzleError({ + message: `Cannot rollback migration with hash ${dbMigration.hash}: migration file not found`, + }); + } + if (!meta.downSql || meta.downSql.length === 0) { + throw new DrizzleError({ + message: `Cannot rollback migration ${dbMigration.hash}: no down SQL available. Add a .down.sql file alongside the migration.`, + }); + } + for (const stmt of [...meta.downSql].reverse()) { + await tx.execute(sql.raw(stmt)); + } + await tx.execute( + sql`delete from ${sql.identifier(migrationsTable)} where \`hash\` = ${dbMigration.hash}`, + ); + } + }); + } + escapeName(name: string): string { return `\`${name}\``; } diff --git a/drizzle-orm/src/mysql-proxy/migrator.ts b/drizzle-orm/src/mysql-proxy/migrator.ts index d0bbae51b6..8c79c053b9 100644 --- a/drizzle-orm/src/mysql-proxy/migrator.ts +++ b/drizzle-orm/src/mysql-proxy/migrator.ts @@ -1,3 +1,4 @@ +import { DrizzleError } from '~/errors.ts'; import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import { sql } from '~/sql/sql.ts'; @@ -50,3 +51,46 @@ export async function migrate>( await callback(queriesToRun); } + +export async function rollback>( + db: MySqlRemoteDatabase, + callback: ProxyMigrator, + config: MigrationConfig, + steps: number = 1, +) { + const migrations = readMigrationFiles(config); + const migrationsTable = config.migrationsTable ?? 
'__drizzle_migrations'; + + const dbMigrations = await db.select({ + id: sql.raw('id'), + hash: sql.raw('hash'), + created_at: sql.raw('created_at'), + }).from(sql.identifier(migrationsTable).getSQL()).orderBy( + sql.raw('created_at desc'), + ).limit(steps); + + if (dbMigrations.length === 0) { + return; + } + + const queriesToRun: string[] = []; + for (const dbMigration of dbMigrations) { + const meta = migrations.find((m) => m.hash === String(dbMigration.hash)); + if (!meta) { + throw new DrizzleError({ + message: `Cannot rollback migration with hash ${dbMigration.hash}: migration file not found`, + }); + } + if (!meta.downSql || meta.downSql.length === 0) { + throw new DrizzleError({ + message: `Cannot rollback migration ${dbMigration.hash}: no down SQL available. Add a .down.sql file alongside the migration.`, + }); + } + queriesToRun.push( + ...[...meta.downSql].reverse(), + `delete from ${sql.identifier(migrationsTable).value} where \`hash\` = '${dbMigration.hash}'`, + ); + } + + await callback(queriesToRun); +} diff --git a/drizzle-orm/src/mysql2/migrator.ts b/drizzle-orm/src/mysql2/migrator.ts index 2f3c9c3dcf..a636695bc1 100644 --- a/drizzle-orm/src/mysql2/migrator.ts +++ b/drizzle-orm/src/mysql2/migrator.ts @@ -9,3 +9,12 @@ export async function migrate>( const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } + +export async function rollback>( + db: MySql2Database, + config: MigrationConfig, + steps?: number, +) { + const migrations = readMigrationFiles(config); + await db.dialect.rollback(migrations, db.session, config, steps); +} diff --git a/drizzle-orm/src/neon-http/migrator.ts b/drizzle-orm/src/neon-http/migrator.ts index db87842487..10e2d0df53 100644 --- a/drizzle-orm/src/neon-http/migrator.ts +++ b/drizzle-orm/src/neon-http/migrator.ts @@ -1,3 +1,4 @@ +import { DrizzleError } from '~/errors.ts'; import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from 
'~/migrator.ts'; import { type SQL, sql } from '~/sql/sql.ts'; @@ -57,3 +58,49 @@ export async function migrate>( await db.session.execute(rowToInsert); } } + +/** + * NOTE: The Neon HTTP driver does not support transactions. This means that if any part of a rollback fails, + * no automatic rollback of the rollback will be executed. Partially rolled-back state is possible. + */ +export async function rollback>( + db: NeonHttpDatabase, + config: MigrationConfig, + steps: number = 1, +) { + const migrations = readMigrationFiles(config); + const migrationsTable = config.migrationsTable ?? '__drizzle_migrations'; + const migrationsSchema = config.migrationsSchema ?? 'drizzle'; + + const dbMigrations = await db.session.all<{ id: number; hash: string; created_at: string }>( + sql`select id, hash, created_at from ${sql.identifier(migrationsSchema)}.${ + sql.identifier(migrationsTable) + } order by created_at desc limit ${sql.raw(String(steps))}`, + ); + + if (dbMigrations.length === 0) { + return; + } + + for (const dbMigration of dbMigrations) { + const meta = migrations.find((m) => m.hash === dbMigration.hash); + if (!meta) { + throw new DrizzleError({ + message: `Cannot rollback migration with hash ${dbMigration.hash}: migration file not found`, + }); + } + if (!meta.downSql || meta.downSql.length === 0) { + throw new DrizzleError({ + message: `Cannot rollback migration ${dbMigration.hash}: no down SQL available. 
Add a .down.sql file alongside the migration.`, + }); + } + for (const stmt of [...meta.downSql].reverse()) { + await db.session.execute(sql.raw(stmt)); + } + await db.session.execute( + sql`delete from ${sql.identifier(migrationsSchema)}.${ + sql.identifier(migrationsTable) + } where hash = ${dbMigration.hash}`, + ); + } +} diff --git a/drizzle-orm/src/neon-serverless/migrator.ts b/drizzle-orm/src/neon-serverless/migrator.ts index 9948f245ca..8a97146cae 100644 --- a/drizzle-orm/src/neon-serverless/migrator.ts +++ b/drizzle-orm/src/neon-serverless/migrator.ts @@ -9,3 +9,12 @@ export async function migrate>( const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } + +export async function rollback>( + db: NeonDatabase, + config: MigrationConfig, + steps?: number, +) { + const migrations = readMigrationFiles(config); + await db.dialect.rollback(migrations, db.session, config, steps); +} diff --git a/drizzle-orm/src/node-postgres/migrator.ts b/drizzle-orm/src/node-postgres/migrator.ts index 933e5a4862..3d59decdfb 100644 --- a/drizzle-orm/src/node-postgres/migrator.ts +++ b/drizzle-orm/src/node-postgres/migrator.ts @@ -9,3 +9,12 @@ export async function migrate>( const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } + +export async function rollback>( + db: NodePgDatabase, + config: MigrationConfig, + steps?: number, +) { + const migrations = readMigrationFiles(config); + await db.dialect.rollback(migrations, db.session, config, steps); +} diff --git a/drizzle-orm/src/op-sqlite/migrator.ts b/drizzle-orm/src/op-sqlite/migrator.ts index 317b6b5df5..ea389647bc 100644 --- a/drizzle-orm/src/op-sqlite/migrator.ts +++ b/drizzle-orm/src/op-sqlite/migrator.ts @@ -1,4 +1,5 @@ import { useEffect, useReducer } from 'react'; +import { DrizzleError } from '~/errors.ts'; import type { MigrationMeta } from '~/migrator.ts'; import type { OPSQLiteDatabase } from './driver.ts'; @@ -7,13 
+8,15 @@ interface MigrationConfig { entries: { idx: number; when: number; tag: string; breakpoints: boolean }[]; }; migrations: Record; + downMigrations?: Record; } -async function readMigrationFiles({ journal, migrations }: MigrationConfig): Promise { +async function readMigrationFiles({ journal, migrations, downMigrations }: MigrationConfig): Promise { const migrationQueries: MigrationMeta[] = []; for await (const journalEntry of journal.entries) { - const query = migrations[`m${journalEntry.idx.toString().padStart(4, '0')}`]; + const key = `m${journalEntry.idx.toString().padStart(4, '0')}`; + const query = migrations[key]; if (!query) { throw new Error(`Missing migration: ${journalEntry.tag}`); @@ -24,8 +27,15 @@ async function readMigrationFiles({ journal, migrations }: MigrationConfig): Pro return it; }); + let downSql: string[] | undefined; + const downQuery = downMigrations?.[key]; + if (downQuery?.trim()) { + downSql = downQuery.trim().split('--> statement-breakpoint').map((it) => it); + } + migrationQueries.push({ sql: result, + downSql, bps: journalEntry.breakpoints, folderMillis: journalEntry.when, hash: '', @@ -46,6 +56,15 @@ export async function migrate>( return db.dialect.migrate(migrations, db.session); } +export async function rollback>( + db: OPSQLiteDatabase, + config: MigrationConfig, + steps: number = 1, +) { + const migrations = await readMigrationFiles(config); + await db.dialect.rollback(migrations, db.session, undefined, steps); +} + interface State { success: boolean; error?: Error; diff --git a/drizzle-orm/src/pg-core/dialect.ts b/drizzle-orm/src/pg-core/dialect.ts index be5ecdb237..fe513f5e5f 100644 --- a/drizzle-orm/src/pg-core/dialect.ts +++ b/drizzle-orm/src/pg-core/dialect.ts @@ -111,6 +111,52 @@ export class PgDialect { }); } + async rollback( + migrations: MigrationMeta[], + session: PgSession, + config: string | MigrationConfig, + steps: number = 1, + ): Promise { + const migrationsTable = typeof config === 'string' + ? 
'__drizzle_migrations' + : config.migrationsTable ?? '__drizzle_migrations'; + const migrationsSchema = typeof config === 'string' ? 'drizzle' : config.migrationsSchema ?? 'drizzle'; + + const dbMigrations = await session.all<{ id: number; hash: string; created_at: string }>( + sql`select id, hash, created_at from ${sql.identifier(migrationsSchema)}.${ + sql.identifier(migrationsTable) + } order by created_at desc limit ${sql.raw(String(steps))}`, + ); + + if (dbMigrations.length === 0) { + return; + } + + await session.transaction(async (tx) => { + for (const dbMigration of dbMigrations) { + const meta = migrations.find((m) => m.hash === dbMigration.hash); + if (!meta) { + throw new DrizzleError({ + message: `Cannot rollback migration with hash ${dbMigration.hash}: migration file not found`, + }); + } + if (!meta.downSql || meta.downSql.length === 0) { + throw new DrizzleError({ + message: `Cannot rollback migration ${dbMigration.hash}: no down SQL available. Add a .down.sql file alongside the migration.`, + }); + } + for (const stmt of [...meta.downSql].reverse()) { + await tx.execute(sql.raw(stmt)); + } + await tx.execute( + sql`delete from ${sql.identifier(migrationsSchema)}.${ + sql.identifier(migrationsTable) + } where hash = ${dbMigration.hash}`, + ); + } + }); + } + escapeName(name: string): string { return `"${name}"`; } diff --git a/drizzle-orm/src/pg-proxy/migrator.ts b/drizzle-orm/src/pg-proxy/migrator.ts index 68214ca58e..a5377ec9e4 100644 --- a/drizzle-orm/src/pg-proxy/migrator.ts +++ b/drizzle-orm/src/pg-proxy/migrator.ts @@ -1,3 +1,4 @@ +import { DrizzleError } from '~/errors.ts'; import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import { sql } from '~/sql/sql.ts'; @@ -49,3 +50,47 @@ export async function migrate>( await callback(queriesToRun); } + +export async function rollback>( + db: PgRemoteDatabase, + callback: ProxyMigrator, + config: MigrationConfig, + steps: number = 1, +) { + const 
migrations = readMigrationFiles(config); + + const dbMigrations = await db.execute<{ + id: number; + hash: string; + created_at: string; + }>( + sql`SELECT id, hash, created_at FROM "drizzle"."__drizzle_migrations" ORDER BY created_at DESC LIMIT ${ + sql.raw(String(steps)) + }`, + ); + + if (dbMigrations.length === 0) { + return; + } + + const queriesToRun: string[] = []; + for (const dbMigration of dbMigrations) { + const meta = migrations.find((m) => m.hash === dbMigration.hash); + if (!meta) { + throw new DrizzleError({ + message: `Cannot rollback migration with hash ${dbMigration.hash}: migration file not found`, + }); + } + if (!meta.downSql || meta.downSql.length === 0) { + throw new DrizzleError({ + message: `Cannot rollback migration ${dbMigration.hash}: no down SQL available. Add a .down.sql file alongside the migration.`, + }); + } + queriesToRun.push( + ...[...meta.downSql].reverse(), + `DELETE FROM "drizzle"."__drizzle_migrations" WHERE hash = '${dbMigration.hash}'`, + ); + } + + await callback(queriesToRun); +} diff --git a/drizzle-orm/src/pglite/migrator.ts b/drizzle-orm/src/pglite/migrator.ts index f43d9f6184..624a06a82e 100644 --- a/drizzle-orm/src/pglite/migrator.ts +++ b/drizzle-orm/src/pglite/migrator.ts @@ -9,3 +9,12 @@ export async function migrate>( const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } + +export async function rollback>( + db: PgliteDatabase, + config: MigrationConfig, + steps?: number, +) { + const migrations = readMigrationFiles(config); + await db.dialect.rollback(migrations, db.session, config, steps); +} diff --git a/drizzle-orm/src/planetscale-serverless/migrator.ts b/drizzle-orm/src/planetscale-serverless/migrator.ts index c6b964051f..e66c5d145c 100644 --- a/drizzle-orm/src/planetscale-serverless/migrator.ts +++ b/drizzle-orm/src/planetscale-serverless/migrator.ts @@ -10,3 +10,12 @@ export async function migrate>( await db.dialect.migrate(migrations, db.session, 
config); } + +export async function rollback>( + db: PlanetScaleDatabase, + config: MigrationConfig, + steps?: number, +) { + const migrations = readMigrationFiles(config); + await db.dialect.rollback(migrations, db.session, config, steps); +} diff --git a/drizzle-orm/src/postgres-js/migrator.ts b/drizzle-orm/src/postgres-js/migrator.ts index 7930129f40..cb9e2bc3fc 100644 --- a/drizzle-orm/src/postgres-js/migrator.ts +++ b/drizzle-orm/src/postgres-js/migrator.ts @@ -9,3 +9,12 @@ export async function migrate>( const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } + +export async function rollback>( + db: PostgresJsDatabase, + config: MigrationConfig, + steps?: number, +) { + const migrations = readMigrationFiles(config); + await db.dialect.rollback(migrations, db.session, config, steps); +} diff --git a/drizzle-orm/src/singlestore-core/dialect.ts b/drizzle-orm/src/singlestore-core/dialect.ts index b0791c35f3..24f5a246f1 100644 --- a/drizzle-orm/src/singlestore-core/dialect.ts +++ b/drizzle-orm/src/singlestore-core/dialect.ts @@ -90,6 +90,47 @@ export class SingleStoreDialect { }); } + async rollback( + migrations: MigrationMeta[], + session: SingleStoreSession, + config: Omit, + steps: number = 1, + ): Promise { + const migrationsTable = config.migrationsTable ?? 
'__drizzle_migrations'; + + const dbMigrations = await session.all<{ id: number; hash: string; created_at: string }>( + sql`select id, hash, created_at from ${sql.identifier(migrationsTable)} order by created_at desc limit ${ + sql.raw(String(steps)) + }`, + ); + + if (dbMigrations.length === 0) { + return; + } + + await session.transaction(async (tx) => { + for (const dbMigration of dbMigrations) { + const meta = migrations.find((m) => m.hash === dbMigration.hash); + if (!meta) { + throw new DrizzleError({ + message: `Cannot rollback migration with hash ${dbMigration.hash}: migration file not found`, + }); + } + if (!meta.downSql || meta.downSql.length === 0) { + throw new DrizzleError({ + message: `Cannot rollback migration ${dbMigration.hash}: no down SQL available. Add a .down.sql file alongside the migration.`, + }); + } + for (const stmt of [...meta.downSql].reverse()) { + await tx.execute(sql.raw(stmt)); + } + await tx.execute( + sql`delete from ${sql.identifier(migrationsTable)} where \`hash\` = ${dbMigration.hash}`, + ); + } + }); + } + escapeName(name: string): string { return `\`${name}\``; } diff --git a/drizzle-orm/src/singlestore-proxy/migrator.ts b/drizzle-orm/src/singlestore-proxy/migrator.ts index 2ed0172fb3..bff5e34f7d 100644 --- a/drizzle-orm/src/singlestore-proxy/migrator.ts +++ b/drizzle-orm/src/singlestore-proxy/migrator.ts @@ -1,3 +1,4 @@ +import { DrizzleError } from '~/errors.ts'; import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import { sql } from '~/sql/sql.ts'; @@ -50,3 +51,46 @@ export async function migrate>( await callback(queriesToRun); } + +export async function rollback>( + db: SingleStoreRemoteDatabase, + callback: ProxyMigrator, + config: MigrationConfig, + steps: number = 1, +) { + const migrations = readMigrationFiles(config); + const migrationsTable = config.migrationsTable ?? 
'__drizzle_migrations'; + + const dbMigrations = await db.select({ + id: sql.raw('id'), + hash: sql.raw('hash'), + created_at: sql.raw('created_at'), + }).from(sql.identifier(migrationsTable).getSQL()).orderBy( + sql.raw('created_at desc'), + ).limit(steps); + + if (dbMigrations.length === 0) { + return; + } + + const queriesToRun: string[] = []; + for (const dbMigration of dbMigrations) { + const meta = migrations.find((m) => m.hash === String(dbMigration.hash)); + if (!meta) { + throw new DrizzleError({ + message: `Cannot rollback migration with hash ${dbMigration.hash}: migration file not found`, + }); + } + if (!meta.downSql || meta.downSql.length === 0) { + throw new DrizzleError({ + message: `Cannot rollback migration ${dbMigration.hash}: no down SQL available. Add a .down.sql file alongside the migration.`, + }); + } + queriesToRun.push( + ...[...meta.downSql].reverse(), + `delete from ${sql.identifier(migrationsTable).value} where \`hash\` = '${dbMigration.hash}'`, + ); + } + + await callback(queriesToRun); +} diff --git a/drizzle-orm/src/singlestore/migrator.ts b/drizzle-orm/src/singlestore/migrator.ts index 6f342c0c58..9466ca01e5 100644 --- a/drizzle-orm/src/singlestore/migrator.ts +++ b/drizzle-orm/src/singlestore/migrator.ts @@ -9,3 +9,12 @@ export async function migrate>( const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } + +export async function rollback>( + db: SingleStoreDriverDatabase, + config: MigrationConfig, + steps?: number, +) { + const migrations = readMigrationFiles(config); + await db.dialect.rollback(migrations, db.session, config, steps); +} diff --git a/drizzle-orm/src/sql-js/migrator.ts b/drizzle-orm/src/sql-js/migrator.ts index 1b3d5faa39..f674ffd67d 100644 --- a/drizzle-orm/src/sql-js/migrator.ts +++ b/drizzle-orm/src/sql-js/migrator.ts @@ -9,3 +9,12 @@ export function migrate>( const migrations = readMigrationFiles(config); db.dialect.migrate(migrations, db.session, config); } 
+ +export function rollback>( + db: SQLJsDatabase, + config: MigrationConfig, + steps?: number, +) { + const migrations = readMigrationFiles(config); + db.dialect.rollback(migrations, db.session, config, steps); +} diff --git a/drizzle-orm/src/sqlite-core/dialect.ts b/drizzle-orm/src/sqlite-core/dialect.ts index 317c8df12c..412a89c76c 100644 --- a/drizzle-orm/src/sqlite-core/dialect.ts +++ b/drizzle-orm/src/sqlite-core/dialect.ts @@ -877,6 +877,56 @@ export class SQLiteSyncDialect extends SQLiteDialect { throw e; } } + + rollback( + migrations: MigrationMeta[], + session: SQLiteSession<'sync', unknown, Record, TablesRelationalConfig>, + config?: string | MigrationConfig, + steps: number = 1, + ): void { + const migrationsTable = config === undefined + ? '__drizzle_migrations' + : typeof config === 'string' + ? '__drizzle_migrations' + : config.migrationsTable ?? '__drizzle_migrations'; + + const dbMigrations = session.values<[number, string, string]>( + sql`SELECT id, hash, created_at FROM ${ + sql.identifier(migrationsTable) + } ORDER BY created_at DESC LIMIT ${sql.raw(String(steps))}`, + ); + + if (dbMigrations.length === 0) { + return; + } + + session.run(sql`BEGIN`); + try { + for (const dbMigration of dbMigrations) { + const meta = migrations.find((m) => m.hash === dbMigration[1]); + if (!meta) { + throw new DrizzleError({ + message: `Cannot rollback migration with hash ${dbMigration[1]}: migration file not found`, + }); + } + if (!meta.downSql || meta.downSql.length === 0) { + throw new DrizzleError({ + message: `Cannot rollback migration ${dbMigration[1]}: no down SQL available. 
Add a .down.sql file alongside the migration.`, + }); + } + for (const stmt of [...meta.downSql].reverse()) { + session.run(sql.raw(stmt)); + } + session.run( + sql`DELETE FROM ${sql.identifier(migrationsTable)} WHERE hash = ${dbMigration[1]}`, + ); + } + session.run(sql`COMMIT`); + } catch (e) { + session.run(sql`ROLLBACK`); + throw e; + } + } } export class SQLiteAsyncDialect extends SQLiteDialect { @@ -923,4 +973,49 @@ export class SQLiteAsyncDialect extends SQLiteDialect { } }); } + + async rollback( + migrations: MigrationMeta[], + session: SQLiteSession<'async', any, any, any>, + config?: string | MigrationConfig, + steps: number = 1, + ): Promise { + const migrationsTable = config === undefined + ? '__drizzle_migrations' + : typeof config === 'string' + ? '__drizzle_migrations' + : config.migrationsTable ?? '__drizzle_migrations'; + + const dbMigrations = await session.values<[number, string, string]>( + sql`SELECT id, hash, created_at FROM ${ + sql.identifier(migrationsTable) + } ORDER BY created_at DESC LIMIT ${sql.raw(String(steps))}`, + ); + + if (dbMigrations.length === 0) { + return; + } + + await session.transaction(async (tx) => { + for (const dbMigration of dbMigrations) { + const meta = migrations.find((m) => m.hash === dbMigration[1]); + if (!meta) { + throw new DrizzleError({ + message: `Cannot rollback migration with hash ${dbMigration[1]}: migration file not found`, + }); + } + if (!meta.downSql || meta.downSql.length === 0) { + throw new DrizzleError({ + message: `Cannot rollback migration ${dbMigration[1]}: no down SQL available. 
Add a .down.sql file alongside the migration.`, + }); + } + for (const stmt of [...meta.downSql].reverse()) { + await tx.run(sql.raw(stmt)); + } + await tx.run( + sql`DELETE FROM ${sql.identifier(migrationsTable)} WHERE hash = ${dbMigration[1]}`, + ); + } + }); + } } diff --git a/drizzle-orm/src/sqlite-proxy/migrator.ts b/drizzle-orm/src/sqlite-proxy/migrator.ts index cc4a7f71c3..6010a3b0d5 100644 --- a/drizzle-orm/src/sqlite-proxy/migrator.ts +++ b/drizzle-orm/src/sqlite-proxy/migrator.ts @@ -1,3 +1,4 @@ +import { DrizzleError } from '~/errors.ts'; import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import { sql } from '~/sql/sql.ts'; @@ -47,3 +48,47 @@ export async function migrate>( await callback(queriesToRun); } + +export async function rollback>( + db: SqliteRemoteDatabase, + callback: ProxyMigrator, + config: MigrationConfig, + steps: number = 1, +) { + const migrations = readMigrationFiles(config); + + const migrationsTable = typeof config === 'string' + ? '__drizzle_migrations' + : config.migrationsTable ?? '__drizzle_migrations'; + + const dbMigrations = await db.values<[number, string, string]>( + sql`SELECT id, hash, created_at FROM ${ + sql.identifier(migrationsTable) + } ORDER BY created_at DESC LIMIT ${sql.raw(String(steps))}`, + ); + + if (dbMigrations.length === 0) { + return; + } + + const queriesToRun: string[] = []; + for (const dbMigration of dbMigrations) { + const meta = migrations.find((m) => m.hash === dbMigration[1]); + if (!meta) { + throw new DrizzleError({ + message: `Cannot rollback migration with hash ${dbMigration[1]}: migration file not found`, + }); + } + if (!meta.downSql || meta.downSql.length === 0) { + throw new DrizzleError({ + message: `Cannot rollback migration ${dbMigration[1]}: no down SQL available. 
Add a .down.sql file alongside the migration.`, + }); + } + queriesToRun.push( + ...[...meta.downSql].reverse(), + `DELETE FROM \`${migrationsTable}\` WHERE hash = '${dbMigration[1]}'`, + ); + } + + await callback(queriesToRun); +} diff --git a/drizzle-orm/src/tidb-serverless/migrator.ts b/drizzle-orm/src/tidb-serverless/migrator.ts index 7fa0729451..d998fe3c0b 100644 --- a/drizzle-orm/src/tidb-serverless/migrator.ts +++ b/drizzle-orm/src/tidb-serverless/migrator.ts @@ -9,3 +9,12 @@ export async function migrate>( const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } + +export async function rollback>( + db: TiDBServerlessDatabase, + config: MigrationConfig, + steps?: number, +) { + const migrations = readMigrationFiles(config); + await db.dialect.rollback(migrations, db.session, config, steps); +} diff --git a/drizzle-orm/src/vercel-postgres/migrator.ts b/drizzle-orm/src/vercel-postgres/migrator.ts index 88c922271c..a1f42cebbd 100644 --- a/drizzle-orm/src/vercel-postgres/migrator.ts +++ b/drizzle-orm/src/vercel-postgres/migrator.ts @@ -9,3 +9,12 @@ export async function migrate>( const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } + +export async function rollback>( + db: VercelPgDatabase, + config: MigrationConfig, + steps?: number, +) { + const migrations = readMigrationFiles(config); + await db.dialect.rollback(migrations, db.session, config, steps); +} diff --git a/drizzle-orm/src/xata-http/migrator.ts b/drizzle-orm/src/xata-http/migrator.ts index 0eb261d88c..feba097194 100644 --- a/drizzle-orm/src/xata-http/migrator.ts +++ b/drizzle-orm/src/xata-http/migrator.ts @@ -1,3 +1,4 @@ +import { DrizzleError } from '~/errors.ts'; import { readMigrationFiles } from '~/migrator.ts'; import { sql } from '~/sql/sql.ts'; import type { XataHttpDatabase } from './driver.ts'; @@ -53,3 +54,50 @@ export interface MigrationConfig { } } } + +/** + * NOTE: The Xata HTTP 
driver does not support transactions. This means that if any part of a rollback fails, + * no automatic rollback of the rollback will be executed. + */ +export async function rollback>( + db: XataHttpDatabase, + config: MigrationConfig, + steps: number = 1, +) { + const migrations = readMigrationFiles(config); + const migrationsTable = config.migrationsTable ?? '__drizzle_migrations'; + + const dbMigrations = await db.session.all<{ + id: number; + hash: string; + created_at: string; + }>( + sql`select id, hash, created_at from ${ + sql.identifier(migrationsTable) + } order by created_at desc limit ${sql.raw(String(steps))}`, + ); + + if (dbMigrations.length === 0) { + return; + } + + for (const dbMigration of dbMigrations) { + const meta = migrations.find((m) => m.hash === dbMigration.hash); + if (!meta) { + throw new DrizzleError({ + message: `Cannot rollback migration with hash ${dbMigration.hash}: migration file not found`, + }); + } + if (!meta.downSql || meta.downSql.length === 0) { + throw new DrizzleError({ + message: `Cannot rollback migration ${dbMigration.hash}: no down SQL available. 
Add a .down.sql file alongside the migration.`, + }); + } + for (const stmt of [...meta.downSql].reverse()) { + await db.session.execute(sql.raw(stmt)); + } + await db.session.execute( + sql`delete from ${sql.identifier(migrationsTable)} where hash = ${dbMigration.hash}`, + ); + } +} From f893e4704cae5fba1587823fab2f86f12f1058a0 Mon Sep 17 00:00:00 2001 From: Rob Orton Date: Fri, 6 Mar 2026 18:52:40 -0700 Subject: [PATCH 2/4] test: add tests for down migrations / rollback drizzle-kit/tests/migrate/down-sql.test.ts (10 tests): - writeResult() writes .down.sql when downSqlStatements are provided - writeResult() does not write .down.sql when downSqlStatements is undefined - writeResult() sets hasDown: true in journal entry when down SQL is provided - writeResult() does not set hasDown when downSqlStatements is undefined - writeResult() does not set hasDown when downSqlStatements is empty array - writeResult() respects breakpoints delimiter in .down.sql - embeddedMigrations() includes downMigrations block when entries have hasDown - embeddedMigrations() omits downMigrations block when no entries have hasDown - embeddedMigrations() only imports .down.sql for entries that have hasDown - embeddedMigrations() adds expo header for expo driver integration-tests/tests/sqlite/rollback.test.ts (13 tests): - readMigrationFiles() populates downSql when .down.sql exists - readMigrationFiles() leaves downSql undefined when .down.sql is absent - readMigrationFiles() leaves downSql undefined when .down.sql is empty - readMigrationFiles() leaves downSql undefined when .down.sql is whitespace only - readMigrationFiles() splits downSql on statement-breakpoint delimiter - readMigrationFiles() reads downSql independently per migration - rollback(1) removes last migration table and tracking row - rollback(2) undoes both migrations - rollback with default steps=1 rolls back one migration - rollback when no migrations applied is a no-op - rollback then migrate re-applies the migration - 
rollback throws when migration file not found by hash - rollback throws when migration has no down SQL Adds migration fixtures in integration-tests/drizzle2/sqlite-rollback/ with two migrations each having .down.sql files. --- drizzle-kit/tests/migrate/down-sql.test.ts | 216 ++++++++++++++++ .../0000_create_rollback_users.down.sql | 1 + .../0000_create_rollback_users.sql | 4 + .../0001_create_rollback_posts.down.sql | 1 + .../0001_create_rollback_posts.sql | 4 + .../sqlite-rollback/meta/_journal.json | 22 ++ .../tests/sqlite/rollback.test.ts | 232 ++++++++++++++++++ 7 files changed, 480 insertions(+) create mode 100644 drizzle-kit/tests/migrate/down-sql.test.ts create mode 100644 integration-tests/drizzle2/sqlite-rollback/0000_create_rollback_users.down.sql create mode 100644 integration-tests/drizzle2/sqlite-rollback/0000_create_rollback_users.sql create mode 100644 integration-tests/drizzle2/sqlite-rollback/0001_create_rollback_posts.down.sql create mode 100644 integration-tests/drizzle2/sqlite-rollback/0001_create_rollback_posts.sql create mode 100644 integration-tests/drizzle2/sqlite-rollback/meta/_journal.json create mode 100644 integration-tests/tests/sqlite/rollback.test.ts diff --git a/drizzle-kit/tests/migrate/down-sql.test.ts b/drizzle-kit/tests/migrate/down-sql.test.ts new file mode 100644 index 0000000000..e8228dec26 --- /dev/null +++ b/drizzle-kit/tests/migrate/down-sql.test.ts @@ -0,0 +1,216 @@ +import fs from 'fs'; +import os from 'os'; +import path from 'path'; +import { afterEach, beforeEach, describe, expect, test } from 'vitest'; +import { embeddedMigrations, writeResult } from 'src/cli/commands/migrate'; +import type { Journal } from 'src/utils'; + +// Minimal CommonSchema stub accepted by writeResult +const minimalSchema: any = { + version: '7', + dialect: 'sqlite', + tables: {}, + views: {}, + enums: {}, + _meta: { columns: {}, schemas: {}, tables: {} }, +}; + +let tmpDir: string; + +beforeEach(() => { + tmpDir = 
fs.mkdtempSync(path.join(os.tmpdir(), 'drizzle-down-sql-test-')); + fs.mkdirSync(path.join(tmpDir, 'meta')); +}); + +afterEach(() => { + fs.rmSync(tmpDir, { recursive: true, force: true }); +}); + +function makeJournal(dialect: string = 'sqlite'): Journal { + return { + version: '7', + dialect: dialect as any, + entries: [], + }; +} + +describe('writeResult — down SQL file generation', () => { + test('writes .down.sql file when downSqlStatements are provided', () => { + const journal = makeJournal(); + writeResult({ + cur: minimalSchema, + sqlStatements: ['CREATE TABLE users (id INTEGER PRIMARY KEY)'], + downSqlStatements: ['DROP TABLE users'], + journal, + outFolder: tmpDir, + breakpoints: true, + prefixMode: 'index', + name: 'create_users', + }); + + const entries = journal.entries; + expect(entries).toHaveLength(1); + const tag = entries[0]!.tag; + + expect(fs.existsSync(path.join(tmpDir, `${tag}.sql`))).toBe(true); + expect(fs.existsSync(path.join(tmpDir, `${tag}.down.sql`))).toBe(true); + + const downContent = fs.readFileSync(path.join(tmpDir, `${tag}.down.sql`), 'utf8'); + expect(downContent).toContain('DROP TABLE users'); + }); + + test('does NOT write .down.sql file when downSqlStatements is undefined', () => { + const journal = makeJournal(); + writeResult({ + cur: minimalSchema, + sqlStatements: ['CREATE TABLE users (id INTEGER PRIMARY KEY)'], + journal, + outFolder: tmpDir, + breakpoints: true, + prefixMode: 'index', + name: 'test_migration', + }); + + const tag = journal.entries[0]!.tag; + expect(fs.existsSync(path.join(tmpDir, `${tag}.sql`))).toBe(true); + expect(fs.existsSync(path.join(tmpDir, `${tag}.down.sql`))).toBe(false); + }); + + test('sets hasDown: true in journal entry when downSqlStatements are provided', () => { + const journal = makeJournal(); + writeResult({ + cur: minimalSchema, + sqlStatements: ['CREATE TABLE t (id INTEGER)'], + downSqlStatements: ['DROP TABLE t'], + journal, + outFolder: tmpDir, + breakpoints: true, + prefixMode: 
'index', + name: 'test_migration', + }); + + const entry = journal.entries[0]!; + expect(entry.hasDown).toBe(true); + }); + + test('does NOT set hasDown when downSqlStatements is undefined', () => { + const journal = makeJournal(); + writeResult({ + cur: minimalSchema, + sqlStatements: ['CREATE TABLE t (id INTEGER)'], + journal, + outFolder: tmpDir, + breakpoints: true, + prefixMode: 'index', + name: 'test_migration', + }); + + const entry = journal.entries[0]!; + expect(entry.hasDown).toBeUndefined(); + }); + + test('does NOT set hasDown when downSqlStatements is empty array', () => { + const journal = makeJournal(); + writeResult({ + cur: minimalSchema, + sqlStatements: ['CREATE TABLE t (id INTEGER)'], + downSqlStatements: [], + journal, + outFolder: tmpDir, + breakpoints: true, + prefixMode: 'index', + name: 'test_migration', + }); + + const entry = journal.entries[0]!; + expect(entry.hasDown).toBeUndefined(); + + const tag = entry.tag; + // Empty downSqlStatements: file is written (empty content is valid) + const downPath = path.join(tmpDir, `${tag}.down.sql`); + if (fs.existsSync(downPath)) { + expect(fs.readFileSync(downPath, 'utf8')).toBe(''); + } + }); + + test('respects breakpoints delimiter in .down.sql', () => { + const journal = makeJournal(); + writeResult({ + cur: minimalSchema, + sqlStatements: ['CREATE TABLE a (id INTEGER)', 'CREATE TABLE b (id INTEGER)'], + downSqlStatements: ['DROP TABLE b', 'DROP TABLE a'], + journal, + outFolder: tmpDir, + breakpoints: true, + prefixMode: 'index', + name: 'test_migration', + }); + + const tag = journal.entries[0]!.tag; + const downContent = fs.readFileSync(path.join(tmpDir, `${tag}.down.sql`), 'utf8'); + expect(downContent).toContain('--> statement-breakpoint'); + }); +}); + +describe('embeddedMigrations — down SQL bundling', () => { + test('includes downMigrations block when entries have hasDown', () => { + const journal: Journal = { + version: '7', + dialect: 'sqlite', + entries: [ + { idx: 0, version: '7', 
when: 1000, tag: '0000_test', breakpoints: true, hasDown: true }, + ], + }; + + const output = embeddedMigrations(journal); + + expect(output).toContain("import d0000 from './0000_test.down.sql'"); + expect(output).toContain('downMigrations'); + expect(output).toContain('d0000'); + }); + + test('omits downMigrations block when no entries have hasDown', () => { + const journal: Journal = { + version: '7', + dialect: 'sqlite', + entries: [ + { idx: 0, version: '7', when: 1000, tag: '0000_test', breakpoints: true }, + ], + }; + + const output = embeddedMigrations(journal); + + expect(output).not.toContain('downMigrations'); + expect(output).not.toContain('.down.sql'); + }); + + test('only imports down SQL for entries that have hasDown', () => { + const journal: Journal = { + version: '7', + dialect: 'sqlite', + entries: [ + { idx: 0, version: '7', when: 1000, tag: '0000_no_down', breakpoints: true }, + { idx: 1, version: '7', when: 2000, tag: '0001_has_down', breakpoints: true, hasDown: true }, + ], + }; + + const output = embeddedMigrations(journal); + + expect(output).toContain("import d0001 from './0001_has_down.down.sql'"); + expect(output).not.toContain("import d0000 from './0000_no_down.down.sql'"); + expect(output).toContain('downMigrations'); + expect(output).toContain('d0001'); + expect(output).not.toContain('d0000'); + }); + + test('adds expo header for expo driver', () => { + const journal: Journal = { + version: '7', + dialect: 'sqlite', + entries: [], + }; + + const output = embeddedMigrations(journal, 'expo'); + expect(output).toContain('Expo/React Native'); + }); +}); diff --git a/integration-tests/drizzle2/sqlite-rollback/0000_create_rollback_users.down.sql b/integration-tests/drizzle2/sqlite-rollback/0000_create_rollback_users.down.sql new file mode 100644 index 0000000000..e794968282 --- /dev/null +++ b/integration-tests/drizzle2/sqlite-rollback/0000_create_rollback_users.down.sql @@ -0,0 +1 @@ +DROP TABLE `rollback_users`; diff --git 
a/integration-tests/drizzle2/sqlite-rollback/0000_create_rollback_users.sql b/integration-tests/drizzle2/sqlite-rollback/0000_create_rollback_users.sql new file mode 100644 index 0000000000..4870baa764 --- /dev/null +++ b/integration-tests/drizzle2/sqlite-rollback/0000_create_rollback_users.sql @@ -0,0 +1,4 @@ +CREATE TABLE `rollback_users` ( + `id` integer PRIMARY KEY NOT NULL, + `name` text NOT NULL +); diff --git a/integration-tests/drizzle2/sqlite-rollback/0001_create_rollback_posts.down.sql b/integration-tests/drizzle2/sqlite-rollback/0001_create_rollback_posts.down.sql new file mode 100644 index 0000000000..88a3f39b46 --- /dev/null +++ b/integration-tests/drizzle2/sqlite-rollback/0001_create_rollback_posts.down.sql @@ -0,0 +1 @@ +DROP TABLE `rollback_posts`; diff --git a/integration-tests/drizzle2/sqlite-rollback/0001_create_rollback_posts.sql b/integration-tests/drizzle2/sqlite-rollback/0001_create_rollback_posts.sql new file mode 100644 index 0000000000..569c778bcf --- /dev/null +++ b/integration-tests/drizzle2/sqlite-rollback/0001_create_rollback_posts.sql @@ -0,0 +1,4 @@ +CREATE TABLE `rollback_posts` ( + `id` integer PRIMARY KEY NOT NULL, + `title` text NOT NULL +); diff --git a/integration-tests/drizzle2/sqlite-rollback/meta/_journal.json b/integration-tests/drizzle2/sqlite-rollback/meta/_journal.json new file mode 100644 index 0000000000..2a2b4b3a71 --- /dev/null +++ b/integration-tests/drizzle2/sqlite-rollback/meta/_journal.json @@ -0,0 +1,22 @@ +{ + "version": "5", + "dialect": "sqlite", + "entries": [ + { + "idx": 0, + "version": "5", + "when": 1700000000000, + "tag": "0000_create_rollback_users", + "breakpoints": true, + "hasDown": true + }, + { + "idx": 1, + "version": "5", + "when": 1700000001000, + "tag": "0001_create_rollback_posts", + "breakpoints": true, + "hasDown": true + } + ] +} diff --git a/integration-tests/tests/sqlite/rollback.test.ts b/integration-tests/tests/sqlite/rollback.test.ts new file mode 100644 index 0000000000..21fc8bfbff 
--- /dev/null +++ b/integration-tests/tests/sqlite/rollback.test.ts @@ -0,0 +1,232 @@ +import crypto from 'node:crypto'; +import fs from 'node:fs'; +import os from 'node:os'; +import path from 'node:path'; +import Database from 'better-sqlite3'; +import { drizzle } from 'drizzle-orm/better-sqlite3'; +import { migrate, rollback } from 'drizzle-orm/better-sqlite3/migrator'; +import { readMigrationFiles } from 'drizzle-orm/migrator'; +import { afterEach, beforeEach, describe, expect, test } from 'vitest'; + +// ─── helpers ───────────────────────────────────────────────────────────────── + +function tableExists(client: Database.Database, name: string): boolean { + const row = client + .prepare(`SELECT name FROM sqlite_master WHERE type='table' AND name=?`) + .get(name) as { name: string } | undefined; + return row !== undefined; +} + +function appliedMigrations(client: Database.Database): string[] { + try { + return ( + client + .prepare(`SELECT hash FROM __drizzle_migrations ORDER BY created_at ASC`) + .all() as { hash: string }[] + ).map((r) => r.hash); + } catch { + return []; + } +} + +// ─── readMigrationFiles — unit tests ───────────────────────────────────────── + +describe('readMigrationFiles', () => { + let tmpDir: string; + + beforeEach(() => { + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'drizzle-read-test-')); + fs.mkdirSync(path.join(tmpDir, 'meta')); + }); + + afterEach(() => { + fs.rmSync(tmpDir, { recursive: true, force: true }); + }); + + function writeFixture(tag: string, sql: string, downSql?: string) { + fs.writeFileSync(path.join(tmpDir, `${tag}.sql`), sql); + if (downSql !== undefined) { + fs.writeFileSync(path.join(tmpDir, `${tag}.down.sql`), downSql); + } + } + + function writeJournal(tags: string[]) { + const entries = tags.map((tag, idx) => ({ + idx, + version: '5', + when: 1700000000000 + idx * 1000, + tag, + breakpoints: true, + })); + fs.writeFileSync( + path.join(tmpDir, 'meta', '_journal.json'), + JSON.stringify({ version: '5', 
dialect: 'sqlite', entries }), + ); + } + + test('populates downSql when .down.sql file exists', () => { + writeJournal(['0000_test']); + writeFixture('0000_test', 'CREATE TABLE t (id INTEGER)', 'DROP TABLE t'); + + const metas = readMigrationFiles({ migrationsFolder: tmpDir }); + expect(metas).toHaveLength(1); + expect(metas[0]!.downSql).toEqual(['DROP TABLE t']); + }); + + test('leaves downSql undefined when .down.sql file is absent', () => { + writeJournal(['0000_test']); + writeFixture('0000_test', 'CREATE TABLE t (id INTEGER)'); + + const metas = readMigrationFiles({ migrationsFolder: tmpDir }); + expect(metas[0]!.downSql).toBeUndefined(); + }); + + test('leaves downSql undefined when .down.sql file is empty', () => { + writeJournal(['0000_test']); + writeFixture('0000_test', 'CREATE TABLE t (id INTEGER)', ''); + + const metas = readMigrationFiles({ migrationsFolder: tmpDir }); + expect(metas[0]!.downSql).toBeUndefined(); + }); + + test('leaves downSql undefined when .down.sql file is whitespace only', () => { + writeJournal(['0000_test']); + writeFixture('0000_test', 'CREATE TABLE t (id INTEGER)', ' \n '); + + const metas = readMigrationFiles({ migrationsFolder: tmpDir }); + expect(metas[0]!.downSql).toBeUndefined(); + }); + + test('splits downSql on statement-breakpoint delimiter', () => { + writeJournal(['0000_test']); + writeFixture( + '0000_test', + 'CREATE TABLE a (id INTEGER)', + 'DROP TABLE b\n--> statement-breakpoint\nDROP TABLE a', + ); + + const metas = readMigrationFiles({ migrationsFolder: tmpDir }); + expect(metas[0]!.downSql).toHaveLength(2); + expect(metas[0]!.downSql![0]).toContain('DROP TABLE b'); + expect(metas[0]!.downSql![1]).toContain('DROP TABLE a'); + }); + + test('reads downSql for multiple migrations independently', () => { + writeJournal(['0000_first', '0001_second']); + writeFixture('0000_first', 'CREATE TABLE a (id INTEGER)', 'DROP TABLE a'); + writeFixture('0001_second', 'CREATE TABLE b (id INTEGER)'); // no down SQL + + const 
metas = readMigrationFiles({ migrationsFolder: tmpDir }); + expect(metas[0]!.downSql).toEqual(['DROP TABLE a']); + expect(metas[1]!.downSql).toBeUndefined(); + }); +}); + +// ─── rollback integration tests ────────────────────────────────────────────── + +describe('rollback — better-sqlite3', () => { + const MIGRATIONS_FOLDER = './drizzle2/sqlite-rollback'; + let client: Database.Database; + let db: ReturnType; + + beforeEach(() => { + client = new Database(':memory:'); + db = drizzle(client); + client.exec(`DROP TABLE IF EXISTS rollback_users`); + client.exec(`DROP TABLE IF EXISTS rollback_posts`); + client.exec(`DROP TABLE IF EXISTS __drizzle_migrations`); + }); + + afterEach(() => { + client.close(); + }); + + test('migrate then rollback(1) removes the last migration table', () => { + migrate(db, { migrationsFolder: MIGRATIONS_FOLDER }); + + expect(tableExists(client, 'rollback_users')).toBe(true); + expect(tableExists(client, 'rollback_posts')).toBe(true); + expect(appliedMigrations(client)).toHaveLength(2); + + rollback(db, { migrationsFolder: MIGRATIONS_FOLDER }, 1); + + expect(tableExists(client, 'rollback_posts')).toBe(false); + expect(tableExists(client, 'rollback_users')).toBe(true); + expect(appliedMigrations(client)).toHaveLength(1); + }); + + test('rollback(2) undoes both migrations', () => { + migrate(db, { migrationsFolder: MIGRATIONS_FOLDER }); + rollback(db, { migrationsFolder: MIGRATIONS_FOLDER }, 2); + + expect(tableExists(client, 'rollback_users')).toBe(false); + expect(tableExists(client, 'rollback_posts')).toBe(false); + expect(appliedMigrations(client)).toHaveLength(0); + }); + + test('rollback with default steps=1 rolls back one migration', () => { + migrate(db, { migrationsFolder: MIGRATIONS_FOLDER }); + rollback(db, { migrationsFolder: MIGRATIONS_FOLDER }); + + expect(appliedMigrations(client)).toHaveLength(1); + }); + + test('rollback when no migrations applied is a no-op', () => { + // create the tracking table but no migrations applied 
+ client.exec(`CREATE TABLE __drizzle_migrations (id INTEGER PRIMARY KEY, hash TEXT, created_at NUMERIC)`); + + expect(() => rollback(db, { migrationsFolder: MIGRATIONS_FOLDER })).not.toThrow(); + expect(appliedMigrations(client)).toHaveLength(0); + }); + + test('rollback then migrate re-applies the migration', () => { + migrate(db, { migrationsFolder: MIGRATIONS_FOLDER }); + rollback(db, { migrationsFolder: MIGRATIONS_FOLDER }, 1); + + expect(tableExists(client, 'rollback_posts')).toBe(false); + + migrate(db, { migrationsFolder: MIGRATIONS_FOLDER }); + + expect(tableExists(client, 'rollback_posts')).toBe(true); + expect(appliedMigrations(client)).toHaveLength(2); + }); + + test('rollback throws when migration file not found by hash', () => { + migrate(db, { migrationsFolder: MIGRATIONS_FOLDER }); + + // Corrupt the hash in the tracking table so it won't match + client.exec(`UPDATE __drizzle_migrations SET hash='deadbeef' WHERE rowid=(SELECT MAX(rowid) FROM __drizzle_migrations)`); + + expect(() => rollback(db, { migrationsFolder: MIGRATIONS_FOLDER })).toThrowError( + /migration file not found/i, + ); + }); + + test('rollback throws when migration has no down SQL', () => { + // Set up a migration folder with no .down.sql files + const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'drizzle-nodown-')); + fs.mkdirSync(path.join(tmpDir, 'meta')); + + const upSql = `CREATE TABLE nodown_table (id INTEGER PRIMARY KEY)`; + fs.writeFileSync(path.join(tmpDir, '0000_nodown.sql'), upSql); + + const hash = crypto.createHash('sha256').update(upSql).digest('hex'); + const journal = { + version: '5', + dialect: 'sqlite', + entries: [{ idx: 0, version: '5', when: 1700000000000, tag: '0000_nodown', breakpoints: true }], + }; + fs.writeFileSync(path.join(tmpDir, 'meta', '_journal.json'), JSON.stringify(journal)); + + try { + migrate(db, { migrationsFolder: tmpDir }); + expect(tableExists(client, 'nodown_table')).toBe(true); + + expect(() => rollback(db, { migrationsFolder: tmpDir 
})).toThrowError( + /no down SQL available/i, + ); + } finally { + fs.rmSync(tmpDir, { recursive: true, force: true }); + } + }); +}); From e51d69e6db2afca67726688fc0a1ed468e0d9194 Mon Sep 17 00:00:00 2001 From: Rob Orton Date: Sat, 7 Mar 2026 14:24:38 -0700 Subject: [PATCH 3/4] feat: support renames in down SQL generation for SQLite and LibSQL Wrap the forward resolvers with capture helpers so rename decisions (table, column, view) made during `generate` are inverted for the down SQL diff, matching the rename-aware approach already applied to PostgreSQL, MySQL, and SingleStore. --- drizzle-kit/src/cli/commands/migrate.ts | 300 +++++++++++++++++------- 1 file changed, 217 insertions(+), 83 deletions(-) diff --git a/drizzle-kit/src/cli/commands/migrate.ts b/drizzle-kit/src/cli/commands/migrate.ts index faed90ad5b..e5e3f29116 100644 --- a/drizzle-kit/src/cli/commands/migrate.ts +++ b/drizzle-kit/src/cli/commands/migrate.ts @@ -65,47 +65,141 @@ export type NamedWithSchema = { schema: string; }; -// Non-interactive resolvers used for auto-generating down SQL (no rename detection) -const autoResolverNoRename = async ( - input: ResolverInput, -): Promise> => ({ created: input.created, deleted: input.deleted, renamed: [] }); - -const autoResolverWithMovedNoRename = async ( - input: ResolverInput, -): Promise> => ({ - created: input.created, - deleted: input.deleted, - moved: [], - renamed: [], -}); - -const autoColumnsResolverNoRename = async ( - input: ColumnsResolverInput, -): Promise> => ({ - tableName: input.tableName, - schema: input.schema, - created: input.created, - deleted: input.deleted, - renamed: [], -}); - -const autoPolicyResolverNoRename = async ( - input: PolicyResolverInput, -): Promise> => ({ created: input.created, deleted: input.deleted, renamed: [] }); - -const autoTablePolicyResolverNoRename = async ( - input: TablePolicyResolverInput, -): Promise> => ({ - tableName: input.tableName, - schema: input.schema, - created: input.created, - deleted: 
input.deleted, - renamed: [], -}); - -const autoRolesResolverNoRename = async ( - input: RolesResolverInput, -): Promise> => ({ created: input.created, deleted: input.deleted, renamed: [] }); +// ─── helpers for rename-aware down SQL generation ──────────────────────────── + +function invertRenames( + forwardRenames: { from: T; to: T }[], + inputCreated: T[], + inputDeleted: T[], +): { renamed: { from: T; to: T }[]; created: T[]; deleted: T[] } { + const created = [...inputCreated]; + const deleted = [...inputDeleted]; + const renamed: { from: T; to: T }[] = []; + for (const { from, to } of forwardRenames) { + // In the down diff, 'to' (new name) appears in deleted and 'from' (old name) in created. + const delIdx = deleted.findIndex((d) => d.name === to.name); + const creIdx = created.findIndex((c) => c.name === from.name); + if (delIdx !== -1 && creIdx !== -1) { + renamed.push({ from: deleted[delIdx]!, to: created[creIdx]! }); + deleted.splice(delIdx, 1); + created.splice(creIdx, 1); + } + } + return { renamed, created, deleted }; +} + +function withCapture< + T extends { name: string }, + TIn extends { created: T[]; deleted: T[] }, + TOut extends { renamed: { from: T; to: T }[]; created: T[]; deleted: T[] }, +>( + resolver: (input: TIn) => Promise, + store: { from: T; to: T }[], +): (input: TIn) => Promise { + return async (input) => { + const result = await resolver(input); + store.push(...result.renamed); + return result; + }; +} + +function withCaptureWithMoved< + T extends { name: string }, + TIn extends { created: T[]; deleted: T[] }, + TOut extends { + renamed: { from: T; to: T }[]; + created: T[]; + deleted: T[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + }, +>( + resolver: (input: TIn) => Promise, + renames: { from: T; to: T }[], + moved: { name: string; schemaFrom: string; schemaTo: string }[], +): (input: TIn) => Promise { + return async (input) => { + const result = await resolver(input); + renames.push(...result.renamed); + 
moved.push(...result.moved); + return result; + }; +} + +function withCaptureColumns( + resolver: (input: ColumnsResolverInput) => Promise>, + store: Map, +): (input: ColumnsResolverInput) => Promise> { + return async (input) => { + const result = await resolver(input); + if (result.renamed.length > 0) { + store.set(input.tableName, [...(store.get(input.tableName) ?? []), ...result.renamed]); + } + return result; + }; +} + +function withCaptureTablePolicy( + resolver: (input: TablePolicyResolverInput) => Promise>, + store: Map, +): (input: TablePolicyResolverInput) => Promise> { + return async (input) => { + const result = await resolver(input); + if (result.renamed.length > 0) { + store.set(input.tableName, [...(store.get(input.tableName) ?? []), ...result.renamed]); + } + return result; + }; +} + +function makeInverseResolver( + renames: { from: T; to: T }[], +): (input: ResolverInput) => Promise> { + return async (input) => { + const r = invertRenames(renames, input.created, input.deleted); + return { renamed: r.renamed, created: r.created, deleted: r.deleted }; + }; +} + +function makeInverseResolverWithMoved( + renames: { from: T; to: T }[], + moved: { name: string; schemaFrom: string; schemaTo: string }[], +): (input: ResolverInput) => Promise> { + return async (input) => { + const r = invertRenames(renames, input.created, input.deleted); + const inverseMoved = moved + .filter((m) => input.deleted.some((d) => d.name === m.name)) + .map((m) => ({ name: m.name, schemaFrom: m.schemaTo, schemaTo: m.schemaFrom })); + return { renamed: r.renamed, created: r.created, deleted: r.deleted, moved: inverseMoved }; + }; +} + +function makeInverseColumnsResolver( + store: Map, + tableRenames: { from: { name: string }; to: { name: string } }[], +): (input: ColumnsResolverInput) => Promise> { + return async (input) => { + // The down diff calls columnsResolver with the new table name. 
The forward resolver + // may have stored column renames under either the old or the new table name. + const oldName = + tableRenames.find((r) => r.to.name === input.tableName)?.from.name ?? input.tableName; + const renames = store.get(input.tableName) ?? store.get(oldName) ?? []; + const r = invertRenames(renames, input.created, input.deleted); + return { tableName: input.tableName, schema: input.schema, renamed: r.renamed, created: r.created, deleted: r.deleted }; + }; +} + +function makeInverseTablePolicyResolver( + store: Map, + tableRenames: { from: { name: string }; to: { name: string } }[], +): (input: TablePolicyResolverInput) => Promise> { + return async (input) => { + const oldName = + tableRenames.find((r) => r.to.name === input.tableName)?.from.name ?? input.tableName; + const renames = store.get(input.tableName) ?? store.get(oldName) ?? []; + const r = invertRenames(renames, input.created, input.deleted); + return { tableName: input.tableName, schema: input.schema, renamed: r.renamed, created: r.created, deleted: r.deleted }; + }; +} export const schemasResolver = async ( input: ResolverInput
, @@ -380,18 +474,32 @@ export const prepareAndMigratePg = async (config: GenerateConfig) => { const squashedPrev = squashPgScheme(validatedPrev); const squashedCur = squashPgScheme(validatedCur); + const pgSchemaRenames: { from: any; to: any }[] = []; + const pgEnumRenames: { from: any; to: any }[] = []; + const pgEnumMoved: { name: string; schemaFrom: string; schemaTo: string }[] = []; + const pgSeqRenames: { from: any; to: any }[] = []; + const pgSeqMoved: { name: string; schemaFrom: string; schemaTo: string }[] = []; + const pgTableRenames: { from: any; to: any }[] = []; + const pgTableMoved: { name: string; schemaFrom: string; schemaTo: string }[] = []; + const pgColRenames = new Map(); + const pgViewRenames: { from: any; to: any }[] = []; + const pgViewMoved: { name: string; schemaFrom: string; schemaTo: string }[] = []; + const pgRoleRenames: { from: any; to: any }[] = []; + const pgPolicyRenames: { from: any; to: any }[] = []; + const pgTblPolicyRenames = new Map(); + const { sqlStatements, _meta } = await applyPgSnapshotsDiff( squashedPrev, squashedCur, - schemasResolver, - enumsResolver, - sequencesResolver, - policyResolver, - indPolicyResolver, - roleResolver, - tablesResolver, - columnsResolver, - viewsResolver, + withCapture(schemasResolver, pgSchemaRenames), + withCaptureWithMoved(enumsResolver, pgEnumRenames, pgEnumMoved), + withCaptureWithMoved(sequencesResolver, pgSeqRenames, pgSeqMoved), + withCaptureTablePolicy(policyResolver, pgTblPolicyRenames), + withCapture(indPolicyResolver, pgPolicyRenames), + withCapture(roleResolver, pgRoleRenames), + withCaptureWithMoved(tablesResolver, pgTableRenames, pgTableMoved), + withCaptureColumns(columnsResolver, pgColRenames), + withCaptureWithMoved(viewsResolver, pgViewRenames, pgViewMoved), validatedPrev, validatedCur, ); @@ -399,15 +507,15 @@ export const prepareAndMigratePg = async (config: GenerateConfig) => { const { sqlStatements: downSqlStatements } = await applyPgSnapshotsDiff( squashedCur, 
squashedPrev, - autoResolverNoRename, - autoResolverWithMovedNoRename, - autoResolverWithMovedNoRename, - autoTablePolicyResolverNoRename, - autoPolicyResolverNoRename, - autoRolesResolverNoRename, - autoResolverWithMovedNoRename, - autoColumnsResolverNoRename, - autoResolverWithMovedNoRename, + makeInverseResolver(pgSchemaRenames), + makeInverseResolverWithMoved(pgEnumRenames, pgEnumMoved), + makeInverseResolverWithMoved(pgSeqRenames, pgSeqMoved), + makeInverseTablePolicyResolver(pgTblPolicyRenames, pgTableRenames), + makeInverseResolver(pgPolicyRenames), + makeInverseResolver(pgRoleRenames), + makeInverseResolverWithMoved(pgTableRenames, pgTableMoved), + makeInverseColumnsResolver(pgColRenames, pgTableRenames), + makeInverseResolverWithMoved(pgViewRenames, pgViewMoved), validatedCur, validatedPrev, ); @@ -619,12 +727,18 @@ export const prepareAndMigrateMysql = async (config: GenerateConfig) => { const squashedPrev = squashMysqlScheme(validatedPrev); const squashedCur = squashMysqlScheme(validatedCur); + const mysqlTableRenames: { from: any; to: any }[] = []; + const mysqlTableMoved: { name: string; schemaFrom: string; schemaTo: string }[] = []; + const mysqlColRenames = new Map(); + const mysqlViewRenames: { from: any; to: any }[] = []; + const mysqlViewMoved: { name: string; schemaFrom: string; schemaTo: string }[] = []; + const { sqlStatements, statements, _meta } = await applyMysqlSnapshotsDiff( squashedPrev, squashedCur, - tablesResolver, - columnsResolver, - mySqlViewsResolver, + withCaptureWithMoved(tablesResolver, mysqlTableRenames, mysqlTableMoved), + withCaptureColumns(columnsResolver, mysqlColRenames), + withCaptureWithMoved(mySqlViewsResolver, mysqlViewRenames, mysqlViewMoved), validatedPrev, validatedCur, ); @@ -632,9 +746,9 @@ export const prepareAndMigrateMysql = async (config: GenerateConfig) => { const { sqlStatements: downSqlStatements } = await applyMysqlSnapshotsDiff( squashedCur, squashedPrev, - autoResolverWithMovedNoRename, - 
autoColumnsResolverNoRename, - autoResolverWithMovedNoRename, + makeInverseResolverWithMoved(mysqlTableRenames, mysqlTableMoved), + makeInverseColumnsResolver(mysqlColRenames, mysqlTableRenames), + makeInverseResolverWithMoved(mysqlViewRenames, mysqlViewMoved), validatedCur, validatedPrev, ); @@ -780,11 +894,15 @@ export const prepareAndMigrateSingleStore = async (config: GenerateConfig) => { const squashedPrev = squashSingleStoreScheme(validatedPrev); const squashedCur = squashSingleStoreScheme(validatedCur); + const ssTableRenames: { from: any; to: any }[] = []; + const ssTableMoved: { name: string; schemaFrom: string; schemaTo: string }[] = []; + const ssColRenames = new Map(); + const { sqlStatements, _meta } = await applySingleStoreSnapshotsDiff( squashedPrev, squashedCur, - tablesResolver, - columnsResolver, + withCaptureWithMoved(tablesResolver, ssTableRenames, ssTableMoved), + withCaptureColumns(columnsResolver, ssColRenames), /* singleStoreViewsResolver, */ validatedPrev, validatedCur, @@ -793,8 +911,8 @@ export const prepareAndMigrateSingleStore = async (config: GenerateConfig) => { const { sqlStatements: downSqlStatements } = await applySingleStoreSnapshotsDiff( squashedCur, squashedPrev, - autoResolverWithMovedNoRename, - autoColumnsResolverNoRename, + makeInverseResolverWithMoved(ssTableRenames, ssTableMoved), + makeInverseColumnsResolver(ssColRenames, ssTableRenames), /* singleStoreViewsResolver, */ validatedCur, validatedPrev, @@ -916,12 +1034,18 @@ export const prepareAndMigrateSqlite = async (config: GenerateConfig) => { const squashedPrev = squashSqliteScheme(validatedPrev); const squashedCur = squashSqliteScheme(validatedCur); + const sqliteTableRenames: { from: any; to: any }[] = []; + const sqliteTableMoved: { name: string; schemaFrom: string; schemaTo: string }[] = []; + const sqliteColRenames = new Map(); + const sqliteViewRenames: { from: any; to: any }[] = []; + const sqliteViewMoved: { name: string; schemaFrom: string; schemaTo: string }[] 
= []; + const { sqlStatements, _meta } = await applySqliteSnapshotsDiff( squashedPrev, squashedCur, - tablesResolver, - columnsResolver, - sqliteViewsResolver, + withCaptureWithMoved(tablesResolver, sqliteTableRenames, sqliteTableMoved), + withCaptureColumns(columnsResolver, sqliteColRenames), + withCaptureWithMoved(sqliteViewsResolver, sqliteViewRenames, sqliteViewMoved), validatedPrev, validatedCur, ); @@ -929,9 +1053,9 @@ export const prepareAndMigrateSqlite = async (config: GenerateConfig) => { const { sqlStatements: downSqlStatements } = await applySqliteSnapshotsDiff( squashedCur, squashedPrev, - autoResolverWithMovedNoRename, - autoColumnsResolverNoRename, - autoResolverWithMovedNoRename, + makeInverseResolverWithMoved(sqliteTableRenames, sqliteTableMoved), + makeInverseColumnsResolver(sqliteColRenames, sqliteTableRenames), + makeInverseResolverWithMoved(sqliteViewRenames, sqliteViewMoved), validatedCur, validatedPrev, ); @@ -1022,12 +1146,18 @@ export const prepareAndMigrateLibSQL = async (config: GenerateConfig) => { const squashedPrev = squashSqliteScheme(validatedPrev); const squashedCur = squashSqliteScheme(validatedCur); + const libsqlTableRenames: { from: any; to: any }[] = []; + const libsqlTableMoved: { name: string; schemaFrom: string; schemaTo: string }[] = []; + const libsqlColRenames = new Map(); + const libsqlViewRenames: { from: any; to: any }[] = []; + const libsqlViewMoved: { name: string; schemaFrom: string; schemaTo: string }[] = []; + const { sqlStatements, _meta } = await applyLibSQLSnapshotsDiff( squashedPrev, squashedCur, - tablesResolver, - columnsResolver, - sqliteViewsResolver, + withCaptureWithMoved(tablesResolver, libsqlTableRenames, libsqlTableMoved), + withCaptureColumns(columnsResolver, libsqlColRenames), + withCaptureWithMoved(sqliteViewsResolver, libsqlViewRenames, libsqlViewMoved), validatedPrev, validatedCur, ); @@ -1035,9 +1165,9 @@ export const prepareAndMigrateLibSQL = async (config: GenerateConfig) => { const { 
sqlStatements: downSqlStatements } = await applyLibSQLSnapshotsDiff( squashedCur, squashedPrev, - autoResolverWithMovedNoRename, - autoColumnsResolverNoRename, - autoResolverWithMovedNoRename, + makeInverseResolverWithMoved(libsqlTableRenames, libsqlTableMoved), + makeInverseColumnsResolver(libsqlColRenames, libsqlTableRenames), + makeInverseResolverWithMoved(libsqlViewRenames, libsqlViewMoved), validatedCur, validatedPrev, ); @@ -1546,7 +1676,7 @@ export const writeResult = ({ fs.writeFileSync(`${outFolder}/${tag}.sql`, sql); - if (downSqlStatements !== undefined) { + if (downSqlStatements !== undefined && downSqlStatements.length > 0) { const downSqlDelimiter = breakpoints ? BREAKPOINT : '\n'; const downSql = downSqlStatements.join(downSqlDelimiter); fs.writeFileSync(`${outFolder}/${tag}.down.sql`, downSql); @@ -1594,7 +1724,11 @@ export const embeddedMigrations = (journal: Journal, driver?: Driver) => { ? `,\n downMigrations: {\n ${ journal.entries .filter((it) => it.hasDown) - .map((it) => `d${it.idx.toString().padStart(4, '0')}`) + .map((it) => { + const key = `m${it.idx.toString().padStart(4, '0')}`; + const val = `d${it.idx.toString().padStart(4, '0')}`; + return `${key}: ${val}`; + }) .join(',\n ') }\n }` : ''; From 575519704dcebaaf86302361d21d56cc431b1e31 Mon Sep 17 00:00:00 2001 From: Rob Orton Date: Thu, 12 Mar 2026 13:54:53 -0600 Subject: [PATCH 4/4] fix: use rowid for rollback deletion and add PG/bundled migration tests Use rowid instead of hash for SQLite migration deletion to fix hash collision bugs with bundled migrations (empty hash). Add PGlite rollback integration tests and bundled migration rollback tests. Fix down-sql test assertions for updated key format. 
--- drizzle-kit/tests/migrate/down-sql.test.ts | 10 +-- drizzle-orm/src/durable-sqlite/migrator.ts | 8 +- drizzle-orm/src/sqlite-core/dialect.ts | 16 ++-- .../0000_create_rollback_users.down.sql | 1 + .../0000_create_rollback_users.sql | 4 + .../0001_create_rollback_posts.down.sql | 1 + .../0001_create_rollback_posts.sql | 4 + .../drizzle2/pg-rollback/meta/_journal.json | 22 +++++ integration-tests/tests/pg/pglite.test.ts | 79 ++++++++++++++++- .../tests/sqlite/rollback.test.ts | 88 +++++++++++++++++++ 10 files changed, 216 insertions(+), 17 deletions(-) create mode 100644 integration-tests/drizzle2/pg-rollback/0000_create_rollback_users.down.sql create mode 100644 integration-tests/drizzle2/pg-rollback/0000_create_rollback_users.sql create mode 100644 integration-tests/drizzle2/pg-rollback/0001_create_rollback_posts.down.sql create mode 100644 integration-tests/drizzle2/pg-rollback/0001_create_rollback_posts.sql create mode 100644 integration-tests/drizzle2/pg-rollback/meta/_journal.json diff --git a/drizzle-kit/tests/migrate/down-sql.test.ts b/drizzle-kit/tests/migrate/down-sql.test.ts index e8228dec26..c4432043c9 100644 --- a/drizzle-kit/tests/migrate/down-sql.test.ts +++ b/drizzle-kit/tests/migrate/down-sql.test.ts @@ -126,11 +126,7 @@ describe('writeResult — down SQL file generation', () => { expect(entry.hasDown).toBeUndefined(); const tag = entry.tag; - // Empty downSqlStatements: file is written (empty content is valid) - const downPath = path.join(tmpDir, `${tag}.down.sql`); - if (fs.existsSync(downPath)) { - expect(fs.readFileSync(downPath, 'utf8')).toBe(''); - } + expect(fs.existsSync(path.join(tmpDir, `${tag}.down.sql`))).toBe(false); }); test('respects breakpoints delimiter in .down.sql', () => { @@ -166,7 +162,7 @@ describe('embeddedMigrations — down SQL bundling', () => { expect(output).toContain("import d0000 from './0000_test.down.sql'"); expect(output).toContain('downMigrations'); - expect(output).toContain('d0000'); + 
expect(output).toContain('m0000: d0000'); }); test('omits downMigrations block when no entries have hasDown', () => { @@ -199,7 +195,7 @@ describe('embeddedMigrations — down SQL bundling', () => { expect(output).toContain("import d0001 from './0001_has_down.down.sql'"); expect(output).not.toContain("import d0000 from './0000_no_down.down.sql'"); expect(output).toContain('downMigrations'); - expect(output).toContain('d0001'); + expect(output).toContain('m0001: d0001'); expect(output).not.toContain('d0000'); }); diff --git a/drizzle-orm/src/durable-sqlite/migrator.ts b/drizzle-orm/src/durable-sqlite/migrator.ts index 24809c66e1..6ea39e351a 100644 --- a/drizzle-orm/src/durable-sqlite/migrator.ts +++ b/drizzle-orm/src/durable-sqlite/migrator.ts @@ -108,7 +108,7 @@ export async function rollback< const migrationsTable = '__drizzle_migrations'; const dbMigrations = db.values<[number, string, string]>( - sql`SELECT id, hash, created_at FROM ${ + sql`SELECT rowid, hash, created_at FROM ${ sql.identifier(migrationsTable) } ORDER BY created_at DESC LIMIT ${sql.raw(String(steps))}`, ); @@ -118,7 +118,9 @@ export async function rollback< } for (const dbMigration of dbMigrations) { - const meta = migrations.find((m) => m.hash === dbMigration[1]); + const meta = migrations.find((m) => + m.hash ? 
m.hash === dbMigration[1] : m.folderMillis === Number(dbMigration[2]) + ); if (!meta) { throw new DrizzleError({ message: `Cannot rollback migration with hash ${dbMigration[1]}: migration file not found`, @@ -133,7 +135,7 @@ export async function rollback< db.run(sql.raw(stmt)); } db.run( - sql`DELETE FROM ${sql.identifier(migrationsTable)} WHERE hash = ${dbMigration[1]}`, + sql`DELETE FROM ${sql.identifier(migrationsTable)} WHERE rowid = ${dbMigration[0]}`, ); } } catch (error: any) { diff --git a/drizzle-orm/src/sqlite-core/dialect.ts b/drizzle-orm/src/sqlite-core/dialect.ts index 412a89c76c..3339f12581 100644 --- a/drizzle-orm/src/sqlite-core/dialect.ts +++ b/drizzle-orm/src/sqlite-core/dialect.ts @@ -891,7 +891,7 @@ export class SQLiteSyncDialect extends SQLiteDialect { : config.migrationsTable ?? '__drizzle_migrations'; const dbMigrations = session.values<[number, string, string]>( - sql`SELECT id, hash, created_at FROM ${ + sql`SELECT rowid, hash, created_at FROM ${ sql.identifier(migrationsTable) } ORDER BY created_at DESC LIMIT ${sql.raw(String(steps))}`, ); @@ -903,7 +903,9 @@ export class SQLiteSyncDialect extends SQLiteDialect { session.run(sql`BEGIN`); try { for (const dbMigration of dbMigrations) { - const meta = migrations.find((m) => m.hash === dbMigration[1]); + const meta = migrations.find((m) => + m.hash ? m.hash === dbMigration[1] : m.folderMillis === Number(dbMigration[2]) + ); if (!meta) { throw new DrizzleError({ message: `Cannot rollback migration with hash ${dbMigration[1]}: migration file not found`, @@ -918,7 +920,7 @@ export class SQLiteSyncDialect extends SQLiteDialect { session.run(sql.raw(stmt)); } session.run( - sql`DELETE FROM ${sql.identifier(migrationsTable)} WHERE hash = ${dbMigration[1]}`, + sql`DELETE FROM ${sql.identifier(migrationsTable)} WHERE rowid = ${dbMigration[0]}`, ); } session.run(sql`COMMIT`); @@ -987,7 +989,7 @@ export class SQLiteAsyncDialect extends SQLiteDialect { : config.migrationsTable ?? 
'__drizzle_migrations'; const dbMigrations = await session.values<[number, string, string]>( - sql`SELECT id, hash, created_at FROM ${ + sql`SELECT rowid, hash, created_at FROM ${ sql.identifier(migrationsTable) } ORDER BY created_at DESC LIMIT ${sql.raw(String(steps))}`, ); @@ -998,7 +1000,9 @@ export class SQLiteAsyncDialect extends SQLiteDialect { await session.transaction(async (tx) => { for (const dbMigration of dbMigrations) { - const meta = migrations.find((m) => m.hash === dbMigration[1]); + const meta = migrations.find((m) => + m.hash ? m.hash === dbMigration[1] : m.folderMillis === Number(dbMigration[2]) + ); if (!meta) { throw new DrizzleError({ message: `Cannot rollback migration with hash ${dbMigration[1]}: migration file not found`, @@ -1013,7 +1017,7 @@ export class SQLiteAsyncDialect extends SQLiteDialect { await tx.run(sql.raw(stmt)); } await tx.run( - sql`DELETE FROM ${sql.identifier(migrationsTable)} WHERE hash = ${dbMigration[1]}`, + sql`DELETE FROM ${sql.identifier(migrationsTable)} WHERE rowid = ${dbMigration[0]}`, ); } }); diff --git a/integration-tests/drizzle2/pg-rollback/0000_create_rollback_users.down.sql b/integration-tests/drizzle2/pg-rollback/0000_create_rollback_users.down.sql new file mode 100644 index 0000000000..a339ef865b --- /dev/null +++ b/integration-tests/drizzle2/pg-rollback/0000_create_rollback_users.down.sql @@ -0,0 +1 @@ +DROP TABLE "rollback_users"; diff --git a/integration-tests/drizzle2/pg-rollback/0000_create_rollback_users.sql b/integration-tests/drizzle2/pg-rollback/0000_create_rollback_users.sql new file mode 100644 index 0000000000..5c781f7d79 --- /dev/null +++ b/integration-tests/drizzle2/pg-rollback/0000_create_rollback_users.sql @@ -0,0 +1,4 @@ +CREATE TABLE "rollback_users" ( + "id" serial PRIMARY KEY NOT NULL, + "name" text NOT NULL +); diff --git a/integration-tests/drizzle2/pg-rollback/0001_create_rollback_posts.down.sql b/integration-tests/drizzle2/pg-rollback/0001_create_rollback_posts.down.sql new file 
mode 100644 index 0000000000..83bd1279a1 --- /dev/null +++ b/integration-tests/drizzle2/pg-rollback/0001_create_rollback_posts.down.sql @@ -0,0 +1 @@ +DROP TABLE "rollback_posts"; diff --git a/integration-tests/drizzle2/pg-rollback/0001_create_rollback_posts.sql b/integration-tests/drizzle2/pg-rollback/0001_create_rollback_posts.sql new file mode 100644 index 0000000000..83a9aa2f6c --- /dev/null +++ b/integration-tests/drizzle2/pg-rollback/0001_create_rollback_posts.sql @@ -0,0 +1,4 @@ +CREATE TABLE "rollback_posts" ( + "id" serial PRIMARY KEY NOT NULL, + "title" text NOT NULL +); diff --git a/integration-tests/drizzle2/pg-rollback/meta/_journal.json b/integration-tests/drizzle2/pg-rollback/meta/_journal.json new file mode 100644 index 0000000000..345e0413b5 --- /dev/null +++ b/integration-tests/drizzle2/pg-rollback/meta/_journal.json @@ -0,0 +1,22 @@ +{ + "version": "5", + "dialect": "pg", + "entries": [ + { + "idx": 0, + "version": "5", + "when": 1700000000000, + "tag": "0000_create_rollback_users", + "breakpoints": true, + "hasDown": true + }, + { + "idx": 1, + "version": "5", + "when": 1700000001000, + "tag": "0001_create_rollback_posts", + "breakpoints": true, + "hasDown": true + } + ] +} diff --git a/integration-tests/tests/pg/pglite.test.ts b/integration-tests/tests/pg/pglite.test.ts index 560b24490b..6bf8f5d320 100644 --- a/integration-tests/tests/pg/pglite.test.ts +++ b/integration-tests/tests/pg/pglite.test.ts @@ -1,7 +1,7 @@ import { PGlite } from '@electric-sql/pglite'; import { Name, sql } from 'drizzle-orm'; import { drizzle, type PgliteDatabase } from 'drizzle-orm/pglite'; -import { migrate } from 'drizzle-orm/pglite/migrator'; +import { migrate, rollback } from 'drizzle-orm/pglite/migrator'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { skipTests } from '~/common'; import { tests, usersMigratorTable, usersTable } from './pg-common'; @@ -61,6 +61,83 @@ test('migrator : default migration strategy', async () => { 
await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); }); +test('migrator : rollback(1) removes last migration', async () => { + await db.execute(sql`drop table if exists "rollback_users"`); + await db.execute(sql`drop table if exists "rollback_posts"`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg-rollback' }); + + const afterMigrate = await db.execute(sql` + select table_name from information_schema.tables + where table_schema = 'public' and table_name in ('rollback_users', 'rollback_posts') + order by table_name + `); + expect(afterMigrate.rows).toHaveLength(2); + + await rollback(db, { migrationsFolder: './drizzle2/pg-rollback' }, 1); + + const afterRollback = await db.execute(sql` + select table_name from information_schema.tables + where table_schema = 'public' and table_name in ('rollback_users', 'rollback_posts') + order by table_name + `); + expect(afterRollback.rows.map((r: any) => r.table_name)).toEqual(['rollback_users']); + + const applied = await db.execute(sql`select hash from "drizzle"."__drizzle_migrations" order by created_at`); + expect(applied.rows).toHaveLength(1); + + await db.execute(sql`drop table if exists "rollback_users"`); + await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); +}); + +test('migrator : rollback(2) undoes all migrations', async () => { + await db.execute(sql`drop table if exists "rollback_users"`); + await db.execute(sql`drop table if exists "rollback_posts"`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg-rollback' }); + await rollback(db, { migrationsFolder: './drizzle2/pg-rollback' }, 2); + + const tables = await db.execute(sql` + select table_name from information_schema.tables + where table_schema = 'public' and table_name in ('rollback_users', 'rollback_posts') + `); + expect(tables.rows).toHaveLength(0); + + 
const applied = await db.execute(sql`select hash from "drizzle"."__drizzle_migrations" order by created_at`); + expect(applied.rows).toHaveLength(0); + + await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); +}); + +test('migrator : rollback then migrate re-applies', async () => { + await db.execute(sql`drop table if exists "rollback_users"`); + await db.execute(sql`drop table if exists "rollback_posts"`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg-rollback' }); + await rollback(db, { migrationsFolder: './drizzle2/pg-rollback' }, 1); + + const afterRollback = await db.execute(sql` + select table_name from information_schema.tables + where table_schema = 'public' and table_name = 'rollback_posts' + `); + expect(afterRollback.rows).toHaveLength(0); + + await migrate(db, { migrationsFolder: './drizzle2/pg-rollback' }); + + const afterReapply = await db.execute(sql` + select table_name from information_schema.tables + where table_schema = 'public' and table_name = 'rollback_posts' + `); + expect(afterReapply.rows).toHaveLength(1); + + await db.execute(sql`drop table "rollback_users"`); + await db.execute(sql`drop table "rollback_posts"`); + await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); +}); + test('insert via db.execute + select via db.execute', async () => { await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); diff --git a/integration-tests/tests/sqlite/rollback.test.ts b/integration-tests/tests/sqlite/rollback.test.ts index 21fc8bfbff..86e2d37647 100644 --- a/integration-tests/tests/sqlite/rollback.test.ts +++ b/integration-tests/tests/sqlite/rollback.test.ts @@ -5,6 +5,7 @@ import path from 'node:path'; import Database from 'better-sqlite3'; import { drizzle } from 'drizzle-orm/better-sqlite3'; import { migrate, rollback } from 'drizzle-orm/better-sqlite3/migrator'; +import type { 
MigrationMeta } from 'drizzle-orm/migrator'; import { readMigrationFiles } from 'drizzle-orm/migrator'; import { afterEach, beforeEach, describe, expect, test } from 'vitest'; @@ -171,6 +172,18 @@ describe('rollback — better-sqlite3', () => { expect(appliedMigrations(client)).toHaveLength(1); }); + test('rollback with steps > applied migrations rolls back all applied migrations', () => { + migrate(db, { migrationsFolder: MIGRATIONS_FOLDER }); + expect(appliedMigrations(client)).toHaveLength(2); + + // Request more rollbacks than applied migrations — should clamp silently + rollback(db, { migrationsFolder: MIGRATIONS_FOLDER }, 99); + + expect(tableExists(client, 'rollback_users')).toBe(false); + expect(tableExists(client, 'rollback_posts')).toBe(false); + expect(appliedMigrations(client)).toHaveLength(0); + }); + test('rollback when no migrations applied is a no-op', () => { // create the tracking table but no migrations applied client.exec(`CREATE TABLE __drizzle_migrations (id INTEGER PRIMARY KEY, hash TEXT, created_at NUMERIC)`); @@ -230,3 +243,78 @@ describe('rollback — better-sqlite3', () => { } }); }); + +// ─── rollback with bundled (empty hash) migrations ─────────────────────────── + +describe('rollback — bundled migrations (hash: "")', () => { + // Simulates expo-sqlite / op-sqlite / durable-sqlite where readMigrationFiles + // sets hash: '' for every entry. Before the fix, find() always matched the + // first meta and DELETE WHERE hash='' removed all tracking rows at once. 
+
+	const WHEN_USERS = 1700000000000;
+	const WHEN_POSTS = 1700000001000;
+
+	const bundledMetas: MigrationMeta[] = [
+		{
+			sql: ['CREATE TABLE bundled_users (id INTEGER PRIMARY KEY)'],
+			downSql: ['DROP TABLE bundled_users'],
+			bps: true,
+			folderMillis: WHEN_USERS,
+			hash: '',
+		},
+		{
+			sql: ['CREATE TABLE bundled_posts (id INTEGER PRIMARY KEY)'],
+			downSql: ['DROP TABLE bundled_posts'],
+			bps: true,
+			folderMillis: WHEN_POSTS,
+			hash: '',
+		},
+	];
+
+	let client: Database.Database;
+	let db: ReturnType<typeof drizzle>;
+
+	beforeEach(() => {
+		client = new Database(':memory:');
+		db = drizzle(client);
+		client.exec(
+			`CREATE TABLE __drizzle_migrations (id INTEGER PRIMARY KEY AUTOINCREMENT, hash TEXT, created_at NUMERIC)`,
+		);
+		// Simulate what bundled migrate() writes
+		client.exec(`INSERT INTO __drizzle_migrations (hash, created_at) VALUES ('', ${WHEN_USERS})`);
+		client.exec(`CREATE TABLE bundled_users (id INTEGER PRIMARY KEY)`);
+		client.exec(`INSERT INTO __drizzle_migrations (hash, created_at) VALUES ('', ${WHEN_POSTS})`);
+		client.exec(`CREATE TABLE bundled_posts (id INTEGER PRIMARY KEY)`);
+	});
+
+	afterEach(() => {
+		client.close();
+	});
+
+	test('rollback(1) targets the most recent bundled migration, not the first', () => {
+		db.dialect.rollback(bundledMetas, db.session, undefined, 1);
+
+		expect(tableExists(client, 'bundled_posts')).toBe(false);
+		expect(tableExists(client, 'bundled_users')).toBe(true);
+		expect(appliedMigrations(client)).toHaveLength(1);
+	});
+
+	test('DELETE by rowid removes only the targeted row', () => {
+		db.dialect.rollback(bundledMetas, db.session, undefined, 1);
+
+		// Exactly one tracking row must remain (the users migration)
+		const remaining = client
+			.prepare(`SELECT created_at FROM __drizzle_migrations`)
+			.all() as { created_at: number }[];
+		expect(remaining).toHaveLength(1);
+		expect(remaining[0]!.created_at).toBe(WHEN_USERS);
+	});
+
+	test('rollback(2) removes both bundled migration rows', () => {
+		
db.dialect.rollback(bundledMetas, db.session, undefined, 2); + + expect(tableExists(client, 'bundled_users')).toBe(false); + expect(tableExists(client, 'bundled_posts')).toBe(false); + expect(appliedMigrations(client)).toHaveLength(0); + }); +});