diff --git a/.mocharc.js b/.mocharc.js index 6fd536d09..0752a0322 100644 --- a/.mocharc.js +++ b/.mocharc.js @@ -9,6 +9,7 @@ module.exports = { ignore: [ + 'test/db-migrations/**', 'test/e2e/**', ], }; diff --git a/Makefile b/Makefile index 40791f049..d85149f0e 100644 --- a/Makefile +++ b/Makefile @@ -87,6 +87,12 @@ test: lint test-ci: lint BCRYPT=insecure npx mocha --recursive --reporter test/ci-mocha-reporter.js +.PHONY: test-db-migrations +test-db-migrations: + NODE_CONFIG_ENV=db-migration-test npx mocha --bail --sort --timeout=20000 \ + --require test/db-migrations/mocha-setup.db-migrations.js \ + ./test/db-migrations/**/*.spec.js + .PHONY: test-fast test-fast: node_version BCRYPT=insecure npx mocha --recursive --fgrep @slow --invert diff --git a/config/db-migration-test.json b/config/db-migration-test.json new file mode 100644 index 000000000..29e5cd52e --- /dev/null +++ b/config/db-migration-test.json @@ -0,0 +1,10 @@ +{ + "default": { + "database": { + "host": "localhost", + "user": "jubilant", + "password": "jubilant", + "database": "jubilant_test" + } + } +} diff --git a/test/db-migrations/.eslintrc.js b/test/db-migrations/.eslintrc.js new file mode 100644 index 000000000..1c8be7708 --- /dev/null +++ b/test/db-migrations/.eslintrc.js @@ -0,0 +1,19 @@ +module.exports = { + extends: '../.eslintrc.js', + rules: { + 'key-spacing': 'off', + 'keyword-spacing': 'off', + 'no-console': 'off', + 'no-multi-spaces': 'off', + 'no-plusplus': 'off', + 'no-use-before-define': 'off', + 'object-curly-newline': 'off', + 'prefer-arrow-callback': 'off', + }, + globals: { + assert: false, + db: false, + log: false, + sql: false, + }, +}; diff --git a/test/db-migrations/.gitignore b/test/db-migrations/.gitignore new file mode 100644 index 000000000..307c98fa3 --- /dev/null +++ b/test/db-migrations/.gitignore @@ -0,0 +1 @@ +/.holding-pen/ diff --git a/test/db-migrations/1900-test-first.spec.js b/test/db-migrations/1900-test-first.spec.js new file mode 100644 index 
000000000..b1e800ccf --- /dev/null +++ b/test/db-migrations/1900-test-first.spec.js @@ -0,0 +1,13 @@ +// Test order is very important, so if this test fails then the whole suite may +// be doing unexpected things. + +describe('1900-test-first', () => { + after(() => { + global.firstHasBeenRun = true; + }); + + it('should be run first', () => { + // expect + assert.equal(global.firstHasBeenRun, undefined); + }); +}); diff --git a/test/db-migrations/20241008-01-add-user_preferences.spec.js b/test/db-migrations/20241008-01-add-user_preferences.spec.js new file mode 100644 index 000000000..976fc8eaa --- /dev/null +++ b/test/db-migrations/20241008-01-add-user_preferences.spec.js @@ -0,0 +1,46 @@ +const { + assertIndexExists, + assertTableDoesNotExist, + assertTableSchema, + describeMigration, +} = require('./utils'); + +describeMigration('20241008-01-add-user_preferences', ({ runMigrationBeingTested }) => { + before(async () => { + await assertTableDoesNotExist('user_site_preferences'); + await assertTableDoesNotExist('user_project_preferences'); + + await runMigrationBeingTested(); + }); + + it('should create user_site_preferences table', async () => { + await assertTableSchema('user_site_preferences', + { column_name:'userId', is_nullable:'NO', data_type:'integer' }, + { column_name:'propertyName', is_nullable:'NO', data_type:'text' }, + { column_name:'propertyValue', is_nullable:'NO', data_type:'jsonb' }, + ); + }); + + it('should create user_site_preferences userId index', async () => { + await assertIndexExists( + 'user_site_preferences', + 'CREATE INDEX "user_site_preferences_userId_idx" ON public.user_site_preferences USING btree ("userId")', + ); + }); + + it('should create user_project_preferences table', async () => { + await assertTableSchema('user_project_preferences', + { column_name:'userId', is_nullable:'NO', data_type:'integer' }, + { column_name:'projectId', is_nullable:'NO', data_type:'integer' }, + { column_name:'propertyName', is_nullable:'NO', 
data_type:'text' }, + { column_name:'propertyValue', is_nullable:'NO', data_type:'jsonb' }, + ); + }); + + it('should create user_project_preferences userId index', async () => { + await assertIndexExists( + 'user_project_preferences', + 'CREATE INDEX "user_project_preferences_userId_idx" ON public.user_project_preferences USING btree ("userId")', + ); + }); +}); diff --git a/test/db-migrations/3000-test-last.spec.js b/test/db-migrations/3000-test-last.spec.js new file mode 100644 index 000000000..c28f8f705 --- /dev/null +++ b/test/db-migrations/3000-test-last.spec.js @@ -0,0 +1,13 @@ +// Test order is very important, so if this test fails then the whole suite may +// be doing unexpected things. + +describe('3000-test-last', () => { + it('should NOT be run first', () => { + // expect + assert.equal(global.firstHasBeenRun, true); + }); + + it('should be LAST run', function() { + // FIXME work out some way to test this + }); +}); diff --git a/test/db-migrations/migrator.js b/test/db-migrations/migrator.js new file mode 100644 index 000000000..91d3ed70c --- /dev/null +++ b/test/db-migrations/migrator.js @@ -0,0 +1,96 @@ +module.exports = { + exists, + runBefore, + runIncluding, + restoreMigrations, +}; + +const fs = require('node:fs'); +const { execSync } = require('node:child_process'); + +// Horrible hacks. Without this: +// +// 1. production migration code needs modifying, and +// 2. 
it takes 3 mins+ just to run the migrations + +const migrationsDir = './lib/model/migrations'; +const holdingPen = './test/db-migrations/.holding-pen'; + +fs.mkdirSync(holdingPen, { recursive:true }); + +restoreMigrations(); +const allMigrations = loadMigrationsList(); +moveMigrationsToHoldingPen(); + +let lastRunIdx = -1; + +function runBefore(migrationName) { + const idx = getIndex(migrationName); + if(idx === 0) return; + + const previousMigration = allMigrations[idx - 1]; + + log('previousMigration:', previousMigration); + + return runIncluding(previousMigration); +} + +function runIncluding(lastMigrationToRun) { + const finalIdx = getIndex(lastMigrationToRun); + + for(let restoreIdx=lastRunIdx+1; restoreIdx<=finalIdx; ++restoreIdx) { + const f = allMigrations[restoreIdx] + '.js'; + fs.renameSync(`${holdingPen}/${f}`, `${migrationsDir}/${f}`); + } + + log('Running migrations until:', lastMigrationToRun, '...'); + const res = execSync(`node ./lib/bin/run-migrations.js`, { encoding:'utf8' }); + + lastRunIdx = finalIdx; + + log(`Ran migrations up-to-and-including ${lastMigrationToRun}:\n`, res); +} + +function getIndex(migrationName) { + const idx = allMigrations.indexOf(migrationName); + log('getIndex()', migrationName, 'found at', idx); + if(idx === -1) throw new Error(`Unknown migration: ${migrationName}`); + return idx; +} + +function restoreMigrations() { + moveAll(holdingPen, migrationsDir); +} + +function moveMigrationsToHoldingPen() { + moveAll(migrationsDir, holdingPen); +} + +function moveAll(src, tgt) { + fs.readdirSync(src) + .forEach(f => fs.renameSync(`${src}/${f}`, `${tgt}/${f}`)); +} + +function loadMigrationsList() { + const migrations = fs.readdirSync(migrationsDir) + .filter(f => f.endsWith('.js')) + .map(f => f.replace(/\.js$/, '')) + .sort(); // TODO check that this is how knex sorts migration files + log(); + log('All migrations:'); + log(); + migrations.forEach(m => log('*', m)); + log(); + log('Total:', migrations.length); + log(); + return 
migrations; +} + +function exists(migrationName) { + try { + getIndex(migrationName); + return true; + } catch(err) { + return false; + } +} diff --git a/test/db-migrations/mocha-setup.db-migrations.js b/test/db-migrations/mocha-setup.db-migrations.js new file mode 100644 index 000000000..ddf128ffa --- /dev/null +++ b/test/db-migrations/mocha-setup.db-migrations.js @@ -0,0 +1,52 @@ +global.assert = require('node:assert'); +const fs = require('node:fs'); +const slonik = require('slonik'); +const migrator = require('./migrator'); + +const _log = level => (...args) => console.log(level, ...args); +global.log = _log('[INFO]'); + +async function mochaGlobalSetup() { + log('mochaGlobalSetup() :: ENTRY'); + + global.assert = assert; + + global.sql = slonik.sql; + + const { user, password, host, database } = jsonFile('./config/db-migration-test.json').default.database; + const dbUrl = `postgres://${user}:${password}@${host}/${database}`; + log('dbUrl:', dbUrl); + global.db = slonik.createPool(dbUrl); + + const existingTables = await db.oneFirst(sql`SELECT COUNT(*) FROM information_schema.tables WHERE table_schema='public'`); + if(existingTables) { + console.log(` + Existing tables were found in the public database schema. Reset the database before running migration tests. 
+ + If you are using odk-postgres14 docker, try: + + docker exec odk-postgres14 psql -U postgres ${database} -c " + DROP SCHEMA public CASCADE; + CREATE SCHEMA public; + GRANT ALL ON SCHEMA public TO postgres; + GRANT ALL ON SCHEMA public TO public; + " + `); + process.exit(1); + } + + log('mochaGlobalSetup() :: EXIT'); +} + +function mochaGlobalTeardown() { + log('mochaGlobalTeardown() :: ENTRY'); + db?.end(); + migrator.restoreMigrations(); + log('mochaGlobalTeardown() :: EXIT'); +} + +module.exports = { mochaGlobalSetup, mochaGlobalTeardown }; + +function jsonFile(path) { + return JSON.parse(fs.readFileSync(path, { encoding:'utf8' })); +} diff --git a/test/db-migrations/utils.js b/test/db-migrations/utils.js new file mode 100644 index 000000000..a9bf30fa0 --- /dev/null +++ b/test/db-migrations/utils.js @@ -0,0 +1,117 @@ +module.exports = { + assertIndexExists, + assertTableDoesNotExist, + assertTableSchema, + describeMigration, +}; + +const _ = require('lodash'); +const migrator = require('./migrator'); + +function describeMigration(migrationName, fn) { + assert.ok(migrator.exists(migrationName)); + + assert.strictEqual(typeof fn, 'function'); + assert.strictEqual(fn.length, 1); + + assert.strictEqual(arguments.length, 2); + + const runMigrationBeingTested = (() => { + let alreadyRun; + return async () => { + if(alreadyRun) throw new Error('Migration has already run! 
Check your test structure.'); + alreadyRun = true; + migrator.runIncluding(migrationName); + }; + })(); + + return describe(`database migration: ${migrationName}`, () => { + before(async () => { + migrator.runBefore(migrationName); + }); + return fn({ runMigrationBeingTested }); + }); +} + +async function assertIndexExists(tableName, expected) { + if(arguments.length !== 2) throw new Error('Incorrect arg count.'); + const actualIndexes = await db.anyFirst(sql`SELECT indexdef FROM pg_indexes WHERE tablename=${tableName}`); + + if(actualIndexes.includes(expected)) return true; + assert.fail( + 'Could not find expected index:\njson=' + + JSON.stringify({ expected, actualIndexes, }), + ); +} + +async function assertTableExists(tableName) { + const count = await db.oneFirst(sql`SELECT COUNT(*) FROM information_schema.tables WHERE table_name=${tableName}`); + assert.strictEqual(count, 1, `Table not found: ${tableName}`); +} + +async function assertTableDoesNotExist(tableName) { + const count = await db.oneFirst(sql`SELECT COUNT(*) FROM information_schema.tables WHERE table_name=${tableName}`); + assert.strictEqual(count, 0, `Table should not exist: ${tableName}`); +} + +async function assertTableSchema(tableName, ...expectedCols) { + await assertTableExists(tableName); + + expectedCols.forEach((def, idx) => { + if(!def.column_name) throw new Error(`Expected column definition is missing required prop: .column_name at index ${idx}`); + }); + + const actualCols = await db.any(sql`SELECT * FROM information_schema.columns WHERE table_name=${tableName}`); + console.log('actualCols:', actualCols); + + assertEqualInAnyOrder( + expectedCols.map(col => col.column_name), + actualCols.map(col => col.column_name), + 'Expected columns did not match returned columns!', + ); + + assertRowsMatch(actualCols, expectedCols); +} + +function assertRowsMatch(actualRows, expectedRows) { + assert.strictEqual(actualRows.length, expectedRows.length, 'row count mismatch'); + + const remainingRows = 
[...actualRows];
+  for(let i=0; i<expectedRows.length; ++i) {
+    const x = expectedRows[i];
+    // NOTE(review): this region was truncated in the patch as received; the
+    // matching logic below is reconstructed from the surviving variable names
+    // (filteredRemainingRows, expectedRow:x, assertIncludes) — confirm against
+    // the upstream commit before applying.
+    const matchingIdx = remainingRows.findIndex(r => {
+      try {
+        assertIncludes(r, x);
+        return true;
+      } catch(err) {
+        return false;
+      }
+    });
+    if(matchingIdx !== -1) remainingRows.splice(matchingIdx, 1);
+    else {
+      const filteredRemainingRows = remainingRows.map(r => _.pick(r, Object.keys(x)));
+      assert.fail(
+        `Expected row ${i} not found:\njson=` +
+        JSON.stringify({ remainingRows, filteredRemainingRows, expectedRow:x }),
+      );
+    }
+  }
+}
+
+function assertEqualInAnyOrder(a, b, message) {
+  if(!Array.isArray(a)) throw new Error('IllegalArgument: first arg is not an array');
+  if(!Array.isArray(b)) throw new Error('IllegalArgument: second arg is not an array');
+  assert.deepEqual([...a].sort(), [...b].sort(), message);
+}
+
+function assertIncludes(actual, expected) {
+  for(const [k, expectedVal] of Object.entries(expected)) {
+    const actualVal = actual[k];
+    try {
+      assert.deepEqual(actualVal, expectedVal);
+    } catch(err) {
+      assert.fail(`Could not find all properties of ${expected} in ${actual}`);
+    }
+  }
+}