diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 37637e8..c2ca3e5 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -148,16 +148,16 @@ constructor(@Inject(DATABASE_TOKEN) private db: DatabaseService) {} ```typescript // ✅ Use specific NestJS exceptions -throw new NotFoundException("User not found"); -throw new BadRequestException("Invalid input"); -throw new ConflictException("Email already exists"); -throw new InternalServerErrorException("Database error"); +throw new NotFoundException('User not found'); +throw new BadRequestException('Invalid input'); +throw new ConflictException('Email already exists'); +throw new InternalServerErrorException('Database error'); // ✅ Log errors with context try { await this.operation(); } catch (error) { - this.logger.error("Operation failed", error); + this.logger.error('Operation failed', error); throw error; } @@ -175,11 +175,11 @@ try { // ✅ Environment-driven configuration const uri = process.env.MONGO_URI; if (!uri) { - throw new Error("MONGO_URI not configured"); + throw new Error('MONGO_URI not configured'); } // ❌ Never hardcode values -const uri = "mongodb://localhost:27017/mydb"; +const uri = 'mongodb://localhost:27017/mydb'; ``` ### 4. Type Safety @@ -235,7 +235,7 @@ export class UserService { async getUser(id: string): Promise { const user = await this.users.findById(id); if (!user) { - throw new NotFoundException("User not found"); + throw new NotFoundException('User not found'); } return user; } @@ -253,7 +253,7 @@ export class UserService { class MongoAdapter { async createUser(data: CreateUserDto) { if (await this.exists({ email: data.email })) { - throw new ConflictException("Email exists"); // Business logic! + throw new ConflictException('Email exists'); // Business logic! 
} return this.model.create(data); } @@ -275,19 +275,19 @@ const poolSize = 10; const timeout = 5000; // ✅ GOOD -const poolSize = parseInt(process.env.POOL_SIZE || "10", 10); -const timeout = parseInt(process.env.TIMEOUT || "5000", 10); +const poolSize = parseInt(process.env.POOL_SIZE || '10', 10); +const timeout = parseInt(process.env.TIMEOUT || '5000', 10); ``` ### 3. Leaking Internal Types ```typescript // ❌ BAD - Exporting internal implementation -export { MongoAdapter } from "./adapters/mongo.adapter"; +export { MongoAdapter } from './adapters/mongo.adapter'; // ✅ GOOD - Only export public API -export { DatabaseService } from "./services/database.service"; -export { Repository } from "./contracts/database.contracts"; +export { DatabaseService } from './services/database.service'; +export { Repository } from './contracts/database.contracts'; ``` ### 4. Direct Model Access in Services @@ -323,7 +323,7 @@ export class UserService { ### Test Structure ```typescript -describe("DatabaseService", () => { +describe('DatabaseService', () => { let service: DatabaseService; let mockAdapter: jest.Mocked; @@ -343,8 +343,8 @@ describe("DatabaseService", () => { service = module.get(DatabaseService); }); - describe("connect", () => { - it("should connect to database", async () => { + describe('connect', () => { + it('should connect to database', async () => { await service.connect(); expect(mockAdapter.connect).toHaveBeenCalled(); }); @@ -362,34 +362,34 @@ describe("DatabaseService", () => { // index.ts - Only these should be exported // Module (primary) -export { DatabaseKitModule } from "./database-kit.module"; +export { DatabaseKitModule } from './database-kit.module'; // Services (for direct injection) -export { DatabaseService } from "./services/database.service"; +export { DatabaseService } from './services/database.service'; // Decorators (for DI) -export { InjectDatabase } from "./middleware/database.decorators"; +export { InjectDatabase } from 
'./middleware/database.decorators'; // Filters (for app-wide use) -export { DatabaseExceptionFilter } from "./filters/database-exception.filter"; +export { DatabaseExceptionFilter } from './filters/database-exception.filter'; // Types (for consumers) export { Repository, PageResult, DatabaseConfig, -} from "./contracts/database.contracts"; +} from './contracts/database.contracts'; // Utilities (for convenience) -export { isValidMongoId } from "./utils/validation.utils"; +export { isValidMongoId } from './utils/validation.utils'; ``` ### ❌ DON'T Export ```typescript // These should NOT be in index.ts -export { MongoAdapter } from "./adapters/mongo.adapter"; // Internal -export { PostgresAdapter } from "./adapters/postgres.adapter"; // Internal +export { MongoAdapter } from './adapters/mongo.adapter'; // Internal +export { PostgresAdapter } from './adapters/postgres.adapter'; // Internal ``` --- diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..f1c6c67 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,34 @@ +version: 2 +updates: + # npm dependencies + - package-ecosystem: npm + directory: '/' + schedule: + interval: weekly + day: monday + time: '03:00' + open-pull-requests-limit: 5 + assignees: + - CISCODE-MA/cloud-devops + labels: + - 'dependencies' + - 'npm' + commit-message: + prefix: 'chore(deps)' + include: 'scope' + rebase-strategy: auto + + # GitHub Actions + - package-ecosystem: github-actions + directory: '/' + schedule: + interval: weekly + day: sunday + time: '03:00' + assignees: + - CISCODE-MA/cloud-devops + labels: + - 'dependencies' + - 'github-actions' + commit-message: + prefix: 'ci(deps)' diff --git a/.github/instructions/adapters.instructions.md b/.github/instructions/adapters.instructions.md index ffad981..4042450 100644 --- a/.github/instructions/adapters.instructions.md +++ b/.github/instructions/adapters.instructions.md @@ -100,7 +100,7 @@ const model = mongoose.model(name, schema); } // Direct 
pass-through { status: { - $in: ["active", "pending"]; + $in: ['active', 'pending']; } } ``` @@ -126,7 +126,7 @@ try { ```typescript // MongoDB uses ObjectId -import { Types } from "mongoose"; +import { Types } from 'mongoose'; const objectId = new Types.ObjectId(id); ``` @@ -156,8 +156,8 @@ const table = knex(tableName); ```typescript // Use Knex transaction await knex.transaction(async (trx) => { - await trx("users").insert(data); - await trx("orders").insert(orderData); + await trx('users').insert(data); + await trx('orders').insert(orderData); }); ``` @@ -166,7 +166,7 @@ await knex.transaction(async (trx) => { ```typescript // PostgreSQL uses auto-increment or UUID // Return inserted row to get ID -const [inserted] = await knex("users").insert(data).returning("*"); +const [inserted] = await knex('users').insert(data).returning('*'); ``` --- diff --git a/.github/instructions/bugfix.instructions.md b/.github/instructions/bugfix.instructions.md index f1c3e98..abe7eeb 100644 --- a/.github/instructions/bugfix.instructions.md +++ b/.github/instructions/bugfix.instructions.md @@ -66,14 +66,14 @@ pool: { min: 2, max: 10 } // MongoDB - Is the filter format correct? { status: { - $eq: "active"; + $eq: 'active'; } } // PostgreSQL - Are operators translated? { status: { - eq: "active"; + eq: 'active'; } } // → .where('status', '=', 'active') @@ -92,7 +92,7 @@ if (this.softDelete) { ```typescript // Is session/transaction passed to all operations? await model.create([data], { session }); // MongoDB -await trx("table").insert(data); // PostgreSQL +await trx('table').insert(data); // PostgreSQL // Is rollback called on error? 
try { @@ -149,10 +149,10 @@ await this.hooks.afterCreate(result); ### Step 1: Create Failing Test ```typescript -describe("Bug #123: Description", () => { - it("should handle the edge case correctly", async () => { +describe('Bug #123: Description', () => { + it('should handle the edge case correctly', async () => { // This test should FAIL initially - const result = await repo.findById(""); + const result = await repo.findById(''); expect(result).toBeNull(); // Currently throws }); }); @@ -270,13 +270,13 @@ async findById(id: string): Promise { ```typescript // BAD -it.skip("should handle edge case", () => { +it.skip('should handle edge case', () => { // "I'll fix this later" }); // GOOD -it("should handle edge case", async () => { - expect(await repo.findById("")).toBeNull(); +it('should handle edge case', async () => { + expect(await repo.findById('')).toBeNull(); }); ``` @@ -307,14 +307,14 @@ async findById(id: string): Promise { ```typescript // In test -it("debug test", async () => { +it('debug test', async () => { // Check actual database state const allRecords = await repo.findAll({}); - console.log("Current records:", allRecords); + console.log('Current records:', allRecords); // Check what query returns - const result = await repo.findById("123"); - console.log("Query result:", result); + const result = await repo.findById('123'); + console.log('Query result:', result); }); ``` @@ -322,11 +322,11 @@ it("debug test", async () => { ```typescript // MongoDB - Enable Mongoose debug -mongoose.set("debug", true); +mongoose.set('debug', true); // PostgreSQL - Knex debug -const knex = require("knex")({ - client: "pg", +const knex = require('knex')({ + client: 'pg', debug: true, // ... 
}); diff --git a/.github/instructions/features.instructions.md b/.github/instructions/features.instructions.md index 547a133..f305e22 100644 --- a/.github/instructions/features.instructions.md +++ b/.github/instructions/features.instructions.md @@ -95,12 +95,12 @@ class PostgresRepository implements Repository { ```typescript // src/adapters/mongo.adapter.spec.ts -describe("newMethod", () => { - it("should perform expected behavior", async () => { +describe('newMethod', () => { + it('should perform expected behavior', async () => { // Test implementation }); - it("should handle edge cases", async () => { + it('should handle edge cases', async () => { // Edge case tests }); }); @@ -113,7 +113,7 @@ describe("newMethod", () => { export { // ... existing exports NewReturnType, // If you added new types -} from "./contracts/database.contracts"; +} from './contracts/database.contracts'; ``` --- @@ -273,20 +273,20 @@ export function newUtility(input: string): string { ```typescript // src/index.ts -export { newUtility } from "./utils/new.utils"; +export { newUtility } from './utils/new.utils'; ``` ### Step 3: Add Tests ```typescript // src/utils/new.utils.spec.ts -describe("newUtility", () => { - it("should transform input correctly", () => { - expect(newUtility("input")).toBe("expected"); +describe('newUtility', () => { + it('should transform input correctly', () => { + expect(newUtility('input')).toBe('expected'); }); - it("should handle edge cases", () => { - expect(newUtility("")).toBe(""); + it('should handle edge cases', () => { + expect(newUtility('')).toBe(''); expect(newUtility(null as any)).toBeNull(); }); }); diff --git a/.github/instructions/general.instructions.md b/.github/instructions/general.instructions.md index 7e9a8af..a0816c0 100644 --- a/.github/instructions/general.instructions.md +++ b/.github/instructions/general.instructions.md @@ -148,16 +148,16 @@ constructor(@Inject(DATABASE_TOKEN) private db: DatabaseService) {} ```typescript // ✅ Use specific 
NestJS exceptions -throw new NotFoundException("User not found"); -throw new BadRequestException("Invalid input"); -throw new ConflictException("Email already exists"); -throw new InternalServerErrorException("Database error"); +throw new NotFoundException('User not found'); +throw new BadRequestException('Invalid input'); +throw new ConflictException('Email already exists'); +throw new InternalServerErrorException('Database error'); // ✅ Log errors with context try { await this.operation(); } catch (error) { - this.logger.error("Operation failed", error); + this.logger.error('Operation failed', error); throw error; } @@ -175,11 +175,11 @@ try { // ✅ Environment-driven configuration const uri = process.env.MONGO_URI; if (!uri) { - throw new Error("MONGO_URI not configured"); + throw new Error('MONGO_URI not configured'); } // ❌ Never hardcode values -const uri = "mongodb://localhost:27017/mydb"; +const uri = 'mongodb://localhost:27017/mydb'; ``` ### 4. Type Safety @@ -235,7 +235,7 @@ export class UserService { async getUser(id: string): Promise { const user = await this.users.findById(id); if (!user) { - throw new NotFoundException("User not found"); + throw new NotFoundException('User not found'); } return user; } @@ -253,7 +253,7 @@ export class UserService { class MongoAdapter { async createUser(data: CreateUserDto) { if (await this.exists({ email: data.email })) { - throw new ConflictException("Email exists"); // Business logic! + throw new ConflictException('Email exists'); // Business logic! } return this.model.create(data); } @@ -275,19 +275,19 @@ const poolSize = 10; const timeout = 5000; // ✅ GOOD -const poolSize = parseInt(process.env.POOL_SIZE || "10", 10); -const timeout = parseInt(process.env.TIMEOUT || "5000", 10); +const poolSize = parseInt(process.env.POOL_SIZE || '10', 10); +const timeout = parseInt(process.env.TIMEOUT || '5000', 10); ``` ### 3. 
Leaking Internal Types ```typescript // ❌ BAD - Exporting internal implementation -export { MongoAdapter } from "./adapters/mongo.adapter"; +export { MongoAdapter } from './adapters/mongo.adapter'; // ✅ GOOD - Only export public API -export { DatabaseService } from "./services/database.service"; -export { Repository } from "./contracts/database.contracts"; +export { DatabaseService } from './services/database.service'; +export { Repository } from './contracts/database.contracts'; ``` ### 4. Direct Model Access in Services @@ -323,7 +323,7 @@ export class UserService { ### Test Structure ```typescript -describe("DatabaseService", () => { +describe('DatabaseService', () => { let service: DatabaseService; let mockAdapter: jest.Mocked; @@ -343,8 +343,8 @@ describe("DatabaseService", () => { service = module.get(DatabaseService); }); - describe("connect", () => { - it("should connect to database", async () => { + describe('connect', () => { + it('should connect to database', async () => { await service.connect(); expect(mockAdapter.connect).toHaveBeenCalled(); }); @@ -362,34 +362,34 @@ describe("DatabaseService", () => { // index.ts - Only these should be exported // Module (primary) -export { DatabaseKitModule } from "./database-kit.module"; +export { DatabaseKitModule } from './database-kit.module'; // Services (for direct injection) -export { DatabaseService } from "./services/database.service"; +export { DatabaseService } from './services/database.service'; // Decorators (for DI) -export { InjectDatabase } from "./middleware/database.decorators"; +export { InjectDatabase } from './middleware/database.decorators'; // Filters (for app-wide use) -export { DatabaseExceptionFilter } from "./filters/database-exception.filter"; +export { DatabaseExceptionFilter } from './filters/database-exception.filter'; // Types (for consumers) export { Repository, PageResult, DatabaseConfig, -} from "./contracts/database.contracts"; +} from './contracts/database.contracts'; // 
Utilities (for convenience) -export { isValidMongoId } from "./utils/validation.utils"; +export { isValidMongoId } from './utils/validation.utils'; ``` ### ❌ DON'T Export ```typescript // These should NOT be in index.ts -export { MongoAdapter } from "./adapters/mongo.adapter"; // Internal -export { PostgresAdapter } from "./adapters/postgres.adapter"; // Internal +export { MongoAdapter } from './adapters/mongo.adapter'; // Internal +export { PostgresAdapter } from './adapters/postgres.adapter'; // Internal ``` --- diff --git a/.github/instructions/sonarqube_mcp.instructions.md b/.github/instructions/sonarqube_mcp.instructions.md new file mode 100644 index 0000000..1e17f37 --- /dev/null +++ b/.github/instructions/sonarqube_mcp.instructions.md @@ -0,0 +1,50 @@ +--- +applyTo: '**/*' +--- + +These are some guidelines when using the SonarQube MCP server. + +# Important Tool Guidelines + +## Basic usage + +- **IMPORTANT**: After you finish generating or modifying any code files at the very end of the task, you MUST call the `analyze_file_list` tool (if it exists) to analyze the files you created or modified. +- **IMPORTANT**: When starting a new task, you MUST disable automatic analysis with the `toggle_automatic_analysis` tool if it exists. +- **IMPORTANT**: When you are done generating code at the very end of the task, you MUST re-enable automatic analysis with the `toggle_automatic_analysis` tool if it exists. 
+ +## Project Keys + +- When a user mentions a project key, use `search_my_sonarqube_projects` first to find the exact project key +- Don't guess project keys - always look them up + +## Code Language Detection + +- When analyzing code snippets, try to detect the programming language from the code syntax +- If unclear, ask the user or make an educated guess based on syntax + +## Branch and Pull Request Context + +- Many operations support branch-specific analysis +- If user mentions working on a feature branch, include the branch parameter + +## Code Issues and Violations + +- After fixing issues, do not attempt to verify them using `search_sonar_issues_in_projects`, as the server will not yet reflect the updates + +# Common Troubleshooting + +## Authentication Issues + +- SonarQube requires USER tokens (not project tokens) +- When the error `SonarQube answered with Not authorized` occurs, verify the token type + +## Project Not Found + +- Use `search_my_sonarqube_projects` to find available projects +- Verify project key spelling and format + +## Code Analysis Issues + +- Ensure programming language is correctly specified +- Remind users that snippet analysis doesn't replace full project scans +- Provide full file content for better analysis results diff --git a/.github/instructions/testing.instructions.md b/.github/instructions/testing.instructions.md index 83d3437..de3d474 100644 --- a/.github/instructions/testing.instructions.md +++ b/.github/instructions/testing.instructions.md @@ -52,9 +52,9 @@ src/ ### Standard Test Template ```typescript -import { Test, TestingModule } from "@nestjs/testing"; +import { Test, TestingModule } from '@nestjs/testing'; -describe("ClassName", () => { +describe('ClassName', () => { let instance: ClassName; let mockDependency: jest.Mocked; @@ -79,8 +79,8 @@ describe("ClassName", () => { jest.clearAllMocks(); }); - describe("methodName", () => { - it("should do expected behavior", async () => { + describe('methodName', () => { + 
it('should do expected behavior', async () => { // Arrange mockDependency.method.mockResolvedValue(expectedData); @@ -92,12 +92,12 @@ describe("ClassName", () => { expect(mockDependency.method).toHaveBeenCalledWith(expectedArgs); }); - it("should throw when condition fails", async () => { + it('should throw when condition fails', async () => { // Arrange - mockDependency.method.mockRejectedValue(new Error("fail")); + mockDependency.method.mockRejectedValue(new Error('fail')); // Act & Assert - await expect(instance.methodName(input)).rejects.toThrow("fail"); + await expect(instance.methodName(input)).rejects.toThrow('fail'); }); }); }); @@ -196,27 +196,27 @@ const mockDatabaseService = { ### Repository Methods ```typescript -describe("Repository", () => { - describe("create", () => { - it("should create and return entity"); - it("should set createdAt when timestamps enabled"); - it("should call beforeCreate hook"); - it("should call afterCreate hook"); - it("should throw on duplicate key"); +describe('Repository', () => { + describe('create', () => { + it('should create and return entity'); + it('should set createdAt when timestamps enabled'); + it('should call beforeCreate hook'); + it('should call afterCreate hook'); + it('should throw on duplicate key'); }); - describe("findById", () => { - it("should return entity when found"); - it("should return null when not found"); - it("should exclude soft-deleted records"); + describe('findById', () => { + it('should return entity when found'); + it('should return null when not found'); + it('should exclude soft-deleted records'); }); - describe("findPage", () => { - it("should return paginated results"); - it("should apply default page and limit"); - it("should apply sorting"); - it("should apply filters"); - it("should calculate total pages correctly"); + describe('findPage', () => { + it('should return paginated results'); + it('should apply default page and limit'); + it('should apply sorting'); + it('should apply 
filters'); + it('should calculate total pages correctly'); }); // ... test all 20+ methods @@ -226,26 +226,26 @@ describe("Repository", () => { ### Error Scenarios ```typescript -describe("Error Handling", () => { - it("should throw NotFoundException when entity not found"); - it("should throw ConflictException on duplicate"); - it("should throw BadRequestException on invalid input"); - it("should handle database connection errors"); - it("should rollback transaction on error"); +describe('Error Handling', () => { + it('should throw NotFoundException when entity not found'); + it('should throw ConflictException on duplicate'); + it('should throw BadRequestException on invalid input'); + it('should handle database connection errors'); + it('should rollback transaction on error'); }); ``` ### Edge Cases ```typescript -describe("Edge Cases", () => { - it("should handle empty array for insertMany"); - it("should handle empty filter for findAll"); - it("should handle page 0 (treat as page 1)"); - it("should handle negative limit"); - it("should handle very large page numbers"); - it("should handle special characters in filters"); - it("should handle null values correctly"); +describe('Edge Cases', () => { + it('should handle empty array for insertMany'); + it('should handle empty filter for findAll'); + it('should handle page 0 (treat as page 1)'); + it('should handle negative limit'); + it('should handle very large page numbers'); + it('should handle special characters in filters'); + it('should handle null values correctly'); }); ``` @@ -254,30 +254,30 @@ describe("Edge Cases", () => { ## 🔄 Transaction Testing ```typescript -describe("Transactions", () => { - it("should commit on success", async () => { +describe('Transactions', () => { + it('should commit on success', async () => { const result = await adapter.withTransaction(async (ctx) => { const repo = ctx.createRepository({ model }); - return repo.create({ name: "test" }); + return repo.create({ name: 'test' }); 
}); expect(result).toBeDefined(); }); - it("should rollback on error", async () => { + it('should rollback on error', async () => { await expect( adapter.withTransaction(async (ctx) => { const repo = ctx.createRepository({ model }); - await repo.create({ name: "test" }); - throw new Error("Intentional failure"); + await repo.create({ name: 'test' }); + throw new Error('Intentional failure'); }), - ).rejects.toThrow("Intentional failure"); + ).rejects.toThrow('Intentional failure'); // Verify rollback - entity should not exist const count = await adapter.createRepository({ model }).count({}); expect(count).toBe(0); }); - it("should retry on transient errors", async () => { + it('should retry on transient errors', async () => { // Test retry logic }); }); @@ -288,8 +288,8 @@ describe("Transactions", () => { ## 🪝 Hook Testing ```typescript -describe("Hooks", () => { - it("should call beforeCreate and modify data", async () => { +describe('Hooks', () => { + it('should call beforeCreate and modify data', async () => { const beforeCreate = jest.fn((ctx) => ({ ...ctx.data, normalized: true, @@ -300,23 +300,23 @@ describe("Hooks", () => { hooks: { beforeCreate }, }); - const result = await repo.create({ name: "test" }); + const result = await repo.create({ name: 'test' }); expect(beforeCreate).toHaveBeenCalled(); expect(result.normalized).toBe(true); }); - it("should call afterCreate with created entity", async () => { + it('should call afterCreate with created entity', async () => { const afterCreate = jest.fn(); const repo = adapter.createRepository({ model, hooks: { afterCreate }, }); - await repo.create({ name: "test" }); + await repo.create({ name: 'test' }); expect(afterCreate).toHaveBeenCalledWith( - expect.objectContaining({ name: "test" }), + expect.objectContaining({ name: 'test' }), ); }); @@ -357,10 +357,10 @@ npm test -- --verbose ```typescript // Pattern: should [expected behavior] when [condition] -it("should return null when entity not found"); -it("should 
throw NotFoundException when id is invalid"); -it("should set updatedAt when updating entity"); -it("should exclude soft-deleted records when softDelete enabled"); +it('should return null when entity not found'); +it('should throw NotFoundException when id is invalid'); +it('should set updatedAt when updating entity'); +it('should exclude soft-deleted records when softDelete enabled'); ``` --- @@ -382,10 +382,10 @@ expect(result).toEqual(expectedEntity); ```typescript // BAD - Shared mutable state let counter = 0; -it("test 1", () => { +it('test 1', () => { counter++; }); -it("test 2", () => { +it('test 2', () => { expect(counter).toBe(1); }); // Fragile! @@ -399,14 +399,14 @@ beforeEach(() => { ```typescript // BAD - Missing await -it("should create", () => { - repo.create({ name: "test" }); // Promise not awaited! +it('should create', () => { + repo.create({ name: 'test' }); // Promise not awaited! expect(mock).toHaveBeenCalled(); // May fail randomly }); // GOOD -it("should create", async () => { - await repo.create({ name: "test" }); +it('should create', async () => { + await repo.create({ name: 'test' }); expect(mock).toHaveBeenCalled(); }); ``` @@ -418,11 +418,11 @@ it("should create", async () => { ```javascript // jest.config.js module.exports = { - preset: "ts-jest", - testEnvironment: "node", - roots: ["/src"], - testMatch: ["**/*.spec.ts"], - collectCoverageFrom: ["src/**/*.ts", "!src/**/*.spec.ts", "!src/index.ts"], + preset: 'ts-jest', + testEnvironment: 'node', + roots: ['/src'], + testMatch: ['**/*.spec.ts'], + collectCoverageFrom: ['src/**/*.ts', '!src/**/*.spec.ts', '!src/index.ts'], coverageThreshold: { global: { branches: 75, diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..bfecdcc --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,57 @@ +# Summary + +- What does this PR change? + +## Why + +- Why is this change needed? +- Does this address a specific issue? 
+ +## Type of Change + +- [ ] 🐛 Bug fix (non-breaking) +- [ ] ✨ New feature (non-breaking) +- [ ] 🔄 Refactor (no behavior change) +- [ ] 📚 Documentation +- [ ] 🔐 Security improvement +- [ ] 💥 Breaking change + +## Testing + +- [ ] Added unit tests +- [ ] Added integration tests +- [ ] Tested locally with both MongoDB and PostgreSQL adapters +- [ ] Tested with real connection pooling + +## Checklist + +- [ ] `npm run lint` passes +- [ ] `npm run format` passes +- [ ] `npm run typecheck` passes +- [ ] `npm test` passes +- [ ] `npm run test:cov` maintains or improves coverage (>80%) +- [ ] `npm run build` passes +- [ ] Added a changeset (`npx changeset`) if this affects consumers +- [ ] Updated README if adding new features +- [ ] Updated JSDoc/TSDoc if changing public APIs +- [ ] No hardcoded credentials or sensitive data + +## Database Testing + +- [ ] Tested with MongoDB adapter +- [ ] Tested with PostgreSQL adapter +- [ ] Tested with connection pooling enabled +- [ ] Verified error handling and sanitization + +## Security + +- [ ] No SQL injection risks (all queries parameterized) +- [ ] No exposed connection strings +- [ ] Error messages sanitized (no internal details) +- [ ] Dependencies audited (`npm audit`) + +## Notes + +- Anything reviewers should pay attention to? +- Any known limitations? +- Any follow-up tasks needed? 
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 57fb5bb..45a6707 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -3,7 +3,7 @@ name: Publish to NPM on: push: tags: - - "v*.*.*" + - 'v*.*.*' workflow_dispatch: jobs: @@ -21,8 +21,8 @@ jobs: - name: Setup Node.js uses: actions/setup-node@v4 with: - node-version: "20" - registry-url: "https://registry.npmjs.org" + node-version: '20' + registry-url: 'https://registry.npmjs.org' - name: Install dependencies run: npm ci diff --git a/.github/workflows/release-check.yml b/.github/workflows/release-check.yml index f95ff11..fff5acf 100644 --- a/.github/workflows/release-check.yml +++ b/.github/workflows/release-check.yml @@ -6,13 +6,13 @@ on: workflow_dispatch: inputs: sonar: - description: "Run SonarCloud analysis" + description: 'Run SonarCloud analysis' required: true - default: "false" + default: 'false' type: choice options: - - "false" - - "true" + - 'false' + - 'true' concurrency: group: ci-release-${{ github.ref }} @@ -24,11 +24,14 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 25 + permissions: + contents: read + # Config stays in the workflow file (token stays in repo secrets) env: - SONAR_HOST_URL: "https://sonarcloud.io" - SONAR_ORGANIZATION: "ciscode" - SONAR_PROJECT_KEY: "CISCODE-MA_DatabaseKit" + SONAR_HOST_URL: 'https://sonarcloud.io' + SONAR_ORGANIZATION: 'ciscode' + SONAR_PROJECT_KEY: 'CISCODE-MA_DatabaseKit' steps: - name: Checkout @@ -39,12 +42,15 @@ jobs: - name: Setup Node uses: actions/setup-node@v4 with: - node-version: "22" - cache: "npm" + node-version: '22' + cache: 'npm' - name: Install run: npm ci + - name: Audit + run: npm audit --omit=dev + - name: Format run: npm run format diff --git a/.husky/pre-commit b/.husky/pre-commit index d24fdfc..2312dc5 100755 --- a/.husky/pre-commit +++ b/.husky/pre-commit @@ -1,4 +1 @@ -#!/usr/bin/env sh -. 
"$(dirname -- "$0")/_/husky.sh" - npx lint-staged diff --git a/.husky/pre-push b/.husky/pre-push index 8ddb6b0..bfe23ec 100755 --- a/.husky/pre-push +++ b/.husky/pre-push @@ -1,2 +1 @@ -npm run typecheck -npm test +npm run typecheck && npm run test diff --git a/.npmignore b/.npmignore new file mode 100644 index 0000000..5e5a732 --- /dev/null +++ b/.npmignore @@ -0,0 +1,64 @@ +# ============================================================================= +# NPM Ignore - Prevent shipping unnecessary files to npm registry +# ============================================================================= + +# Source files (only dist/ is needed) +src/ +test/ +*.spec.ts +*.test.ts + +# Configuration files (not needed for consumers) +jest.config.ts +jest.config.js +tsconfig.json +tsconfig.eslint.json +eslint.config.mjs +eslint.config.js +prettier.config.js +.prettierignore +.prettierrc + +# Environment and secrets (CRITICAL) +.env +.env.* +!.env.example +*.pem +*.key +*.crt +*.secret + +# Build and test artifacts +dist/ +build/ +out/ +coverage/ +.nyc_output/ +.coverage/ +*.tsbuildinfo + +# Development files +.husky/ +.git/ +.github/ +.vscode/ +.idea/ +.DS_Store + +# Dependencies and package managers +node_modules/ +npm-debug.log +yarn-error.log +package-lock.json +yarn.lock +pnpm-lock.yaml + +# Documentation (optional, keep if valuable) +# docs/ +# CHANGELOG.md kept by default (files array in package.json controls this) + +# Other +.changeset/ +.turbo/ +.env.example + diff --git a/CHANGELOG.md b/CHANGELOG.md index b6adbbd..9f79c22 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -201,10 +201,10 @@ If you were using a pre-release version, follow these steps: ```typescript // Before - import { Database } from "@ciscode/database-kit/core/database"; + import { Database } from '@ciscode/database-kit/core/database'; // After - import { DatabaseService } from "@ciscode/database-kit"; + import { DatabaseService } from '@ciscode/database-kit'; ``` 2. 
**Update module configuration:** diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index dd63def..60b8b76 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -165,7 +165,7 @@ chore: update dependencies - **Constants:** `UPPER_SNAKE_CASE` ```typescript - export const DATABASE_TOKEN = "DATABASE_KIT_DEFAULT"; + export const DATABASE_TOKEN = 'DATABASE_KIT_DEFAULT'; export const DEFAULT_PAGE_SIZE = 10; ``` @@ -202,14 +202,14 @@ function parseData(input: any): any { ```typescript // ✅ DO: Use specific NestJS exceptions if (!user) { - throw new NotFoundException("User not found"); + throw new NotFoundException('User not found'); } // ✅ DO: Log errors with context try { await this.repo.create(data); } catch (error) { - this.logger.error("Failed to create user", error); + this.logger.error('Failed to create user', error); throw error; } @@ -226,10 +226,10 @@ try { ```typescript // ✅ DO: Use environment variables const uri = process.env.MONGO_URI; -if (!uri) throw new Error("MONGO_URI not configured"); +if (!uri) throw new Error('MONGO_URI not configured'); // ❌ DON'T: Hardcode values -const uri = "mongodb://localhost:27017/mydb"; +const uri = 'mongodb://localhost:27017/mydb'; ``` --- @@ -252,10 +252,10 @@ src/services/database.service.spec.ts ### Test Structure ```typescript -import { Test, TestingModule } from "@nestjs/testing"; -import { DatabaseService } from "./database.service"; +import { Test, TestingModule } from '@nestjs/testing'; +import { DatabaseService } from './database.service'; -describe("DatabaseService", () => { +describe('DatabaseService', () => { let service: DatabaseService; beforeEach(async () => { @@ -268,14 +268,14 @@ describe("DatabaseService", () => { service = module.get(DatabaseService); }); - describe("connect", () => { - it("should connect to MongoDB", async () => { + describe('connect', () => { + it('should connect to MongoDB', async () => { // Arrange // Act // Assert }); - it("should throw on invalid connection string", async () => { + 
it('should throw on invalid connection string', async () => { // ... }); }); diff --git a/README.md b/README.md index 2af7635..93745e1 100644 --- a/README.md +++ b/README.md @@ -43,8 +43,8 @@ A NestJS-friendly, OOP-style database library providing a unified repository API Every repository (MongoDB or PostgreSQL) implements the **same interface**: ```typescript -const user = await repo.create({ name: "John" }); // Works on both! -const found = await repo.findById("123"); // Works on both! +const user = await repo.create({ name: 'John' }); // Works on both! +const found = await repo.findById('123'); // Works on both! const page = await repo.findPage({ page: 1 }); // Works on both! ``` @@ -114,14 +114,14 @@ npm install pg knex ```typescript // app.module.ts -import { Module } from "@nestjs/common"; -import { DatabaseKitModule } from "@ciscode/database-kit"; +import { Module } from '@nestjs/common'; +import { DatabaseKitModule } from '@ciscode/database-kit'; @Module({ imports: [ DatabaseKitModule.forRoot({ config: { - type: "mongo", // or 'postgres' + type: 'mongo', // or 'postgres' connectionString: process.env.MONGO_URI!, }, }), @@ -134,13 +134,13 @@ export class AppModule {} ```typescript // users.service.ts -import { Injectable } from "@nestjs/common"; +import { Injectable } from '@nestjs/common'; import { InjectDatabase, DatabaseService, Repository, -} from "@ciscode/database-kit"; -import { UserModel } from "./user.model"; +} from '@ciscode/database-kit'; +import { UserModel } from './user.model'; interface User { _id: string; @@ -162,11 +162,11 @@ export class UsersService { hooks: { // Lifecycle hooks beforeCreate: (ctx) => { - console.log("Creating user:", ctx.data); + console.log('Creating user:', ctx.data); return ctx.data; // Can modify data }, afterCreate: (user) => { - console.log("User created:", user._id); + console.log('User created:', user._id); }, }, }); @@ -190,7 +190,7 @@ export class UsersService { return this.usersRepo.findPage({ page, limit, - 
sort: "-createdAt", + sort: '-createdAt', }); } @@ -221,12 +221,12 @@ export class UsersService { // DISTINCT VALUES async getUniqueEmails(): Promise { - return this.usersRepo.distinct("email"); + return this.usersRepo.distinct('email'); } // SELECT SPECIFIC FIELDS - async getUserNames(): Promise[]> { - return this.usersRepo.select({}, ["name", "email"]); + async getUserNames(): Promise[]> { + return this.usersRepo.select({}, ['name', 'email']); } } ``` @@ -290,7 +290,7 @@ const result = await db.getMongoAdapter().withTransaction( const userRepo = ctx.createRepository({ model: UserModel }); const orderRepo = ctx.createRepository({ model: OrderModel }); - const user = await userRepo.create({ name: "John" }); + const user = await userRepo.create({ name: 'John' }); const order = await orderRepo.create({ userId: user._id, total: 99.99 }); return { user, order }; @@ -304,16 +304,16 @@ const result = await db.getMongoAdapter().withTransaction( // PostgreSQL Transaction const result = await db.getPostgresAdapter().withTransaction( async (ctx) => { - const userRepo = ctx.createRepository({ table: "users" }); - const orderRepo = ctx.createRepository({ table: "orders" }); + const userRepo = ctx.createRepository({ table: 'users' }); + const orderRepo = ctx.createRepository({ table: 'orders' }); - const user = await userRepo.create({ name: "John" }); + const user = await userRepo.create({ name: 'John' }); const order = await orderRepo.create({ user_id: user.id, total: 99.99 }); return { user, order }; }, { - isolationLevel: "serializable", + isolationLevel: 'serializable', }, ); ``` @@ -328,7 +328,7 @@ const repo = db.createMongoRepository({ hooks: { // Before create - can modify data beforeCreate: (context) => { - console.log("Creating:", context.data); + console.log('Creating:', context.data); return { ...context.data, normalizedEmail: context.data.email?.toLowerCase(), @@ -342,7 +342,7 @@ const repo = db.createMongoRepository({ // Before update - can modify data 
beforeUpdate: (context) => { - return { ...context.data, updatedBy: "system" }; + return { ...context.data, updatedBy: 'system' }; }, // After update @@ -352,12 +352,12 @@ const repo = db.createMongoRepository({ // Before delete - for validation beforeDelete: (id) => { - console.log("Deleting user:", id); + console.log('Deleting user:', id); }, // After delete afterDelete: (success) => { - if (success) console.log("User deleted"); + if (success) console.log('User deleted'); }, }, }); @@ -371,7 +371,7 @@ Fine-tune database connection pooling: // MongoDB DatabaseKitModule.forRoot({ config: { - type: "mongo", + type: 'mongo', connectionString: process.env.MONGO_URI!, pool: { min: 5, @@ -388,7 +388,7 @@ DatabaseKitModule.forRoot({ // PostgreSQL DatabaseKitModule.forRoot({ config: { - type: "postgres", + type: 'postgres', connectionString: process.env.DATABASE_URL!, pool: { min: 2, @@ -405,7 +405,7 @@ DatabaseKitModule.forRoot({ Monitor database health in production: ```typescript -@Controller("health") +@Controller('health') export class HealthController { constructor(@InjectDatabase() private readonly db: DatabaseService) {} @@ -425,7 +425,7 @@ export class HealthController { // } return { - status: mongoHealth.healthy ? "healthy" : "unhealthy", + status: mongoHealth.healthy ? 
'healthy' : 'unhealthy', database: mongoHealth, }; } @@ -440,11 +440,11 @@ Non-destructive deletion with restore capability: const repo = db.createMongoRepository({ model: UserModel, softDelete: true, // Enable soft delete - softDeleteField: "deletedAt", // Default field name + softDeleteField: 'deletedAt', // Default field name }); // "Delete" - sets deletedAt timestamp -await repo.deleteById("123"); +await repo.deleteById('123'); // Regular queries exclude deleted records await repo.findAll(); // Only non-deleted users @@ -453,7 +453,7 @@ await repo.findAll(); // Only non-deleted users await repo.findWithDeleted!(); // All users including deleted // Restore a deleted record -await repo.restore!("123"); +await repo.restore!('123'); ``` ### Timestamps @@ -464,16 +464,16 @@ Automatic created/updated tracking: const repo = db.createMongoRepository({ model: UserModel, timestamps: true, // Enable timestamps - createdAtField: "createdAt", // Default - updatedAtField: "updatedAt", // Default + createdAtField: 'createdAt', // Default + updatedAtField: 'updatedAt', // Default }); // create() automatically sets createdAt -const user = await repo.create({ name: "John" }); +const user = await repo.create({ name: 'John' }); // user.createdAt = 2026-02-01T12:00:00.000Z // updateById() automatically sets updatedAt -await repo.updateById(user._id, { name: "Johnny" }); +await repo.updateById(user._id, { name: 'Johnny' }); // user.updatedAt = 2026-02-01T12:01:00.000Z ``` @@ -488,7 +488,7 @@ Standard MongoDB query syntax: ```typescript await repo.findAll({ age: { $gte: 18, $lt: 65 }, - status: { $in: ["active", "pending"] }, + status: { $in: ['active', 'pending'] }, name: { $regex: /john/i }, }); ``` @@ -501,18 +501,18 @@ Structured query operators: // Comparison await repo.findAll({ price: { gt: 100, lte: 500 }, // > 100 AND <= 500 - status: { ne: "cancelled" }, // != 'cancelled' + status: { ne: 'cancelled' }, // != 'cancelled' }); // IN / NOT IN await repo.findAll({ - category: { 
in: ["electronics", "books"] }, - brand: { nin: ["unknown"] }, + category: { in: ['electronics', 'books'] }, + brand: { nin: ['unknown'] }, }); // LIKE (case-insensitive) await repo.findAll({ - name: { like: "%widget%" }, + name: { like: '%widget%' }, }); // NULL checks @@ -523,7 +523,7 @@ await repo.findAll({ // Sorting await repo.findPage({ - sort: "-created_at,name", // DESC created_at, ASC name + sort: '-created_at,name', // DESC created_at, ASC name // or: { created_at: -1, name: 1 } }); ``` @@ -546,17 +546,17 @@ await repo.findPage({ ### Async Configuration (Recommended) ```typescript -import { ConfigModule, ConfigService } from "@nestjs/config"; +import { ConfigModule, ConfigService } from '@nestjs/config'; DatabaseKitModule.forRootAsync({ imports: [ConfigModule], useFactory: (config: ConfigService) => ({ config: { - type: config.get("DATABASE_TYPE") as "mongo" | "postgres", - connectionString: config.get("DATABASE_URL")!, + type: config.get('DATABASE_TYPE') as 'mongo' | 'postgres', + connectionString: config.get('DATABASE_URL')!, pool: { - min: config.get("DATABASE_POOL_MIN", 0), - max: config.get("DATABASE_POOL_MAX", 10), + min: config.get('DATABASE_POOL_MIN', 0), + max: config.get('DATABASE_POOL_MAX', 10), }, }, }), @@ -571,11 +571,11 @@ DatabaseKitModule.forRootAsync({ imports: [ // Primary database DatabaseKitModule.forRoot({ - config: { type: "mongo", connectionString: process.env.MONGO_URI! }, + config: { type: 'mongo', connectionString: process.env.MONGO_URI! 
}, }), // Analytics database (PostgreSQL) - DatabaseKitModule.forFeature("ANALYTICS_DB", { - type: "postgres", + DatabaseKitModule.forFeature('ANALYTICS_DB', { + type: 'postgres', connectionString: process.env.ANALYTICS_DB_URL!, }), ], @@ -586,7 +586,7 @@ export class AppModule {} @Injectable() export class AnalyticsService { constructor( - @InjectDatabaseByToken("ANALYTICS_DB") + @InjectDatabaseByToken('ANALYTICS_DB') private readonly analyticsDb: DatabaseService, ) {} } @@ -600,7 +600,7 @@ export class AnalyticsService { ```typescript // main.ts -import { DatabaseExceptionFilter } from "@ciscode/database-kit"; +import { DatabaseExceptionFilter } from '@ciscode/database-kit'; app.useGlobalFilters(new DatabaseExceptionFilter()); ``` @@ -629,12 +629,12 @@ import { parseSortString, calculateOffset, createPageResult, -} from "@ciscode/database-kit"; +} from '@ciscode/database-kit'; const normalized = normalizePaginationOptions({ page: 1 }); // { page: 1, limit: 10, filter: {}, sort: undefined } -const sortObj = parseSortString("-createdAt,name"); +const sortObj = parseSortString('-createdAt,name'); // { createdAt: -1, name: 1 } const offset = calculateOffset(2, 10); // 10 @@ -649,16 +649,16 @@ import { sanitizeFilter, pickFields, omitFields, -} from "@ciscode/database-kit"; +} from '@ciscode/database-kit'; -isValidMongoId("507f1f77bcf86cd799439011"); // true -isValidUuid("550e8400-e29b-41d4-a716-446655440000"); // true +isValidMongoId('507f1f77bcf86cd799439011'); // true +isValidUuid('550e8400-e29b-41d4-a716-446655440000'); // true -const clean = sanitizeFilter({ name: "John", age: undefined }); +const clean = sanitizeFilter({ name: 'John', age: undefined }); // { name: 'John' } -const picked = pickFields(user, ["name", "email"]); -const safe = omitFields(user, ["password", "secret"]); +const picked = pickFields(user, ['name', 'email']); +const safe = omitFields(user, ['password', 'secret']); ``` --- @@ -679,17 +679,17 @@ npm test -- 
--testPathPattern=mongo.adapter.spec ### Mocking in Tests ```typescript -import { Test } from "@nestjs/testing"; -import { DATABASE_TOKEN } from "@ciscode/database-kit"; +import { Test } from '@nestjs/testing'; +import { DATABASE_TOKEN } from '@ciscode/database-kit'; const mockRepository = { - create: jest.fn().mockResolvedValue({ id: "1", name: "Test" }), - findById: jest.fn().mockResolvedValue({ id: "1", name: "Test" }), + create: jest.fn().mockResolvedValue({ id: '1', name: 'Test' }), + findById: jest.fn().mockResolvedValue({ id: '1', name: 'Test' }), findAll: jest.fn().mockResolvedValue([]), findPage: jest .fn() .mockResolvedValue({ data: [], total: 0, page: 1, limit: 10, pages: 0 }), - updateById: jest.fn().mockResolvedValue({ id: "1", name: "Updated" }), + updateById: jest.fn().mockResolvedValue({ id: '1', name: 'Updated' }), deleteById: jest.fn().mockResolvedValue(true), }; diff --git a/SECURITY.md b/SECURITY.md index d65cf88..170013f 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -62,14 +62,14 @@ Please provide as much information as possible: ```typescript // ✅ DO: Use environment variables const config = { - type: "postgres", + type: 'postgres', connectionString: process.env.DATABASE_URL, }; // ❌ DON'T: Hardcode credentials const config = { - type: "postgres", - connectionString: "postgresql://admin:password123@localhost/mydb", + type: 'postgres', + connectionString: 'postgresql://admin:password123@localhost/mydb', }; ``` @@ -103,13 +103,13 @@ await repo.findAll({ name: `%${userInput}%` }); // Risky! 
```typescript // ✅ DO: Explicitly whitelist columns const repo = db.createPostgresRepository({ - table: "users", - columns: ["id", "name", "email"], // Only these columns are queryable + table: 'users', + columns: ['id', 'name', 'email'], // Only these columns are queryable }); // ❌ DON'T: Allow all columns (unless necessary) const repo = db.createPostgresRepository({ - table: "users", + table: 'users', columns: [], // Empty = all columns allowed }); ``` @@ -143,7 +143,7 @@ const repo = db.createPostgresRepository({ ```typescript @UseGuards(AuthGuard) - @Controller("users") + @Controller('users') export class UsersController {} ``` diff --git a/TROUBLESHOOTING.md b/TROUBLESHOOTING.md index 4c53db2..169d2cd 100644 --- a/TROUBLESHOOTING.md +++ b/TROUBLESHOOTING.md @@ -177,7 +177,7 @@ Error: Environment variable DATABASE_URL is not configured. ```typescript // In main.ts (before NestJS bootstrap) - import * as dotenv from "dotenv"; + import * as dotenv from 'dotenv'; dotenv.config(); ``` @@ -231,8 +231,8 @@ Add the column to your repository config: ```typescript const repo = db.createPostgresRepository({ - table: "users", - columns: ["id", "name", "email", "secret_column"], // Add here + table: 'users', + columns: ['id', 'name', 'email', 'secret_column'], // Add here }); ``` @@ -248,10 +248,10 @@ CastError: Cast to ObjectId failed for value "invalid-id" Validate IDs before querying: ```typescript -import { isValidMongoId } from "@ciscode/database-kit"; +import { isValidMongoId } from '@ciscode/database-kit'; if (!isValidMongoId(id)) { - throw new BadRequestException("Invalid ID format"); + throw new BadRequestException('Invalid ID format'); } const user = await repo.findById(id); @@ -272,7 +272,7 @@ MongoServerError: E11000 duplicate key error ```typescript const exists = await repo.exists({ email }); if (exists) { - throw new ConflictException("Email already exists"); + throw new ConflictException('Email already exists'); } ``` @@ -483,10 +483,10 @@ When creating an 
issue, include: Enable debug logging to diagnose issues: ```typescript -import { Logger } from "@nestjs/common"; +import { Logger } from '@nestjs/common'; // Enable all log levels -Logger.overrideLogger(["log", "error", "warn", "debug", "verbose"]); +Logger.overrideLogger(['log', 'error', 'warn', 'debug', 'verbose']); ``` Or set environment variable: diff --git a/eslint.config.js b/eslint.config.js index 5a2fea2..6061e78 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -1,54 +1,54 @@ // @ts-check -import eslint from "@eslint/js"; -import globals from "globals"; -import importPlugin from "eslint-plugin-import"; -import tseslint from "@typescript-eslint/eslint-plugin"; -import tsparser from "@typescript-eslint/parser"; +import eslint from '@eslint/js'; +import globals from 'globals'; +import importPlugin from 'eslint-plugin-import'; +import tseslint from '@typescript-eslint/eslint-plugin'; +import tsparser from '@typescript-eslint/parser'; export default [ - { ignores: ["dist/**", "coverage/**", "node_modules/**"] }, + { ignores: ['dist/**', 'coverage/**', 'node_modules/**'] }, eslint.configs.recommended, // Base TS rules (all TS files) { - files: ["**/*.ts"], + files: ['**/*.ts'], languageOptions: { parser: tsparser, parserOptions: { - project: "./tsconfig.eslint.json", + project: './tsconfig.eslint.json', tsconfigRootDir: import.meta.dirname, - ecmaVersion: "latest", - sourceType: "module", + ecmaVersion: 'latest', + sourceType: 'module', }, globals: { ...globals.node, ...globals.jest }, }, plugins: { - "@typescript-eslint": tseslint, + '@typescript-eslint': tseslint, import: importPlugin, }, rules: { - "no-unused-vars": "off", // Disable base rule to use TypeScript version - "@typescript-eslint/no-unused-vars": [ - "error", + 'no-unused-vars': 'off', // Disable base rule to use TypeScript version + '@typescript-eslint/no-unused-vars': [ + 'error', { - argsIgnorePattern: "^_", - varsIgnorePattern: "^_", - caughtErrorsIgnorePattern: "^_", - 
destructuredArrayIgnorePattern: "^_", + argsIgnorePattern: '^_', + varsIgnorePattern: '^_', + caughtErrorsIgnorePattern: '^_', + destructuredArrayIgnorePattern: '^_', }, ], - "@typescript-eslint/consistent-type-imports": [ - "error", - { prefer: "type-imports" }, + '@typescript-eslint/consistent-type-imports': [ + 'error', + { prefer: 'type-imports' }, ], - "import/no-duplicates": "error", - "import/order": [ - "error", + 'import/no-duplicates': 'error', + 'import/order': [ + 'error', { - "newlines-between": "always", - alphabetize: { order: "asc", caseInsensitive: true }, + 'newlines-between': 'always', + alphabetize: { order: 'asc', caseInsensitive: true }, }, ], }, @@ -56,17 +56,17 @@ export default [ // Test files { - files: ["**/*.spec.ts", "**/*.test.ts"], + files: ['**/*.spec.ts', '**/*.test.ts'], rules: { - "@typescript-eslint/no-explicit-any": "off", + '@typescript-eslint/no-explicit-any': 'off', }, }, // NestJS Controllers can use constructor injection with no-explicit-any { - files: ["**/*.controller.ts"], + files: ['**/*.controller.ts'], rules: { - "@typescript-eslint/no-explicit-any": "off", + '@typescript-eslint/no-explicit-any': 'off', }, }, ]; diff --git a/eslint.config.mjs b/eslint.config.mjs index 373f1ec..fec458b 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -1,18 +1,18 @@ // eslint.config.mjs - ESLint 9 Flat Config -import eslint from "@eslint/js"; -import tseslint from "typescript-eslint"; -import globals from "globals"; +import eslint from '@eslint/js'; +import tseslint from 'typescript-eslint'; +import globals from 'globals'; export default tseslint.config( // Global ignores { ignores: [ - "dist/", - "node_modules/", - "coverage/", - "*.config.js", - "*.config.mjs", - "**/*.spec.ts", + 'dist/', + 'node_modules/', + 'coverage/', + '*.config.js', + '*.config.mjs', + '**/*.spec.ts', ], }, @@ -24,33 +24,33 @@ export default tseslint.config( // Custom configuration for all TypeScript files { - files: ["src/**/*.ts"], + files: 
['src/**/*.ts'], languageOptions: { ecmaVersion: 2022, - sourceType: "module", + sourceType: 'module', globals: { ...globals.node, ...globals.jest, }, parserOptions: { - project: "./tsconfig.json", + project: './tsconfig.json', }, }, rules: { // TypeScript rules - "@typescript-eslint/explicit-function-return-type": "off", - "@typescript-eslint/explicit-module-boundary-types": "off", - "@typescript-eslint/no-explicit-any": "warn", - "@typescript-eslint/no-unused-vars": [ - "error", - { argsIgnorePattern: "^_" }, + '@typescript-eslint/explicit-function-return-type': 'off', + '@typescript-eslint/explicit-module-boundary-types': 'off', + '@typescript-eslint/no-explicit-any': 'warn', + '@typescript-eslint/no-unused-vars': [ + 'error', + { argsIgnorePattern: '^_' }, ], - "@typescript-eslint/no-require-imports": "off", + '@typescript-eslint/no-require-imports': 'off', // General rules - "no-console": "warn", - "prefer-const": "error", - eqeqeq: ["error", "always"], + 'no-console': 'warn', + 'prefer-const': 'error', + eqeqeq: ['error', 'always'], }, }, ); diff --git a/jest.config.ts b/jest.config.ts index c9e7479..3a06a5f 100644 --- a/jest.config.ts +++ b/jest.config.ts @@ -1,25 +1,25 @@ -import type { Config } from "jest"; +import type { Config } from 'jest'; const config: Config = { - testEnvironment: "node", + testEnvironment: 'node', clearMocks: true, testMatch: [ - "/test/**/*.spec.ts", - "/test/**/*.test.ts", - "/src/**/*.spec.ts", + '/test/**/*.spec.ts', + '/test/**/*.test.ts', + '/src/**/*.spec.ts', ], transform: { - "^.+\\.ts$": ["ts-jest", { tsconfig: "tsconfig.json" }], + '^.+\\.ts$': ['ts-jest', { tsconfig: 'tsconfig.json' }], }, moduleNameMapper: { - "^@common/(.*)$": "/src/common/$1", - "^@config/(.*)$": "/src/config/$1", - "^@core/(.*)$": "/src/core/$1", - "^@adapters/(.*)$": "/src/adapters/$1", - "^@controllers/(.*)$": "/src/controllers/$1", + '^@common/(.*)$': '/src/common/$1', + '^@config/(.*)$': '/src/config/$1', + '^@core/(.*)$': '/src/core/$1', + 
'^@adapters/(.*)$': '/src/adapters/$1', + '^@controllers/(.*)$': '/src/controllers/$1', }, - collectCoverageFrom: ["src/**/*.ts", "!src/**/*.d.ts", "!src/**/index.ts"], - coverageDirectory: "coverage", + collectCoverageFrom: ['src/**/*.ts', '!src/**/*.d.ts', '!src/**/index.ts'], + coverageDirectory: 'coverage', coverageThreshold: { global: { branches: 65, diff --git a/package-lock.json b/package-lock.json index 4645370..228b396 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@ciscode/database-kit", - "version": "1.0.0", + "version": "1.0.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@ciscode/database-kit", - "version": "1.0.0", + "version": "1.0.1", "license": "MIT", "dependencies": { "knex": "^3.1.0", diff --git a/package.json b/package.json index fab6f9c..b17d9f3 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@ciscode/database-kit", - "version": "1.0.0", + "version": "1.0.1", "type": "module", "description": "A NestJS-friendly, OOP-style database library providing a unified repository API for MongoDB and PostgreSQL.", "main": "dist/index.js", @@ -19,16 +19,17 @@ "build": "tsc -p tsconfig.json && tsc-alias -p tsconfig.json", "build:watch": "tsc -w -p tsconfig.json", "clean": "rm -rf dist coverage", - "lint": "eslint 'src/**/*.ts'", - "lint:fix": "eslint 'src/**/*.ts' --fix", + "lint": "eslint src/", + "lint:fix": "eslint src/ --fix", "format": "prettier --check .", "format:write": "prettier --write .", "typecheck": "tsc -p tsconfig.json --noEmit", "test": "jest", "test:watch": "jest --watch", "test:cov": "jest --coverage", - "prepublishOnly": "npm run clean && npm run build", - "prepare": "husky || true" + "verify": "npm run lint && npm run typecheck && npm run test:cov", + "prepublishOnly": "npm run verify && npm run build", + "prepare": "husky install" }, "engines": { "node": ">=18" @@ -94,9 +95,9 @@ }, "lint-staged": { "*.{ts,tsx,js,jsx}": [ - "eslint -c eslint.config.mjs 
--fix", + "eslint --fix", "prettier --write" ], - "*.{json,md,css}": "prettier --write" + "*.{json,md,yaml,yml}": "prettier --write" } } diff --git a/src/adapters/mongo.adapter.spec.ts b/src/adapters/mongo.adapter.spec.ts index 1da2c89..53051a7 100644 --- a/src/adapters/mongo.adapter.spec.ts +++ b/src/adapters/mongo.adapter.spec.ts @@ -1,12 +1,18 @@ import type { MongoDatabaseConfig, MongoTransactionContext, -} from "../contracts/database.contracts"; +} from '../contracts/database.contracts'; +import { + createMockMongoModel, + createMockMongoDocs, + testSoftDeleteMethods, + testRepositoryMethods, +} from '../test/test.utils'; -import { MongoAdapter } from "./mongo.adapter"; +import { MongoAdapter } from './mongo.adapter'; // Mock mongoose -jest.mock("mongoose", () => { +jest.mock('mongoose', () => { const mockSession = { startTransaction: jest.fn(), commitTransaction: jest.fn().mockResolvedValue(undefined), @@ -28,11 +34,11 @@ jest.mock("mongoose", () => { }; }); -describe("MongoAdapter", () => { +describe('MongoAdapter', () => { let adapter: MongoAdapter; const mockConfig: MongoDatabaseConfig = { - type: "mongo", - connectionString: "mongodb://localhost:27017/testdb", + type: 'mongo', + connectionString: 'mongodb://localhost:27017/testdb', }; beforeEach(() => { @@ -44,22 +50,22 @@ describe("MongoAdapter", () => { await adapter.disconnect(); }); - describe("constructor", () => { - it("should create adapter instance", () => { + describe('constructor', () => { + it('should create adapter instance', () => { expect(adapter).toBeDefined(); expect(adapter).toBeInstanceOf(MongoAdapter); }); }); - describe("isConnected", () => { - it("should return false when not connected", () => { + describe('isConnected', () => { + it('should return false when not connected', () => { expect(adapter.isConnected()).toBe(false); }); }); - describe("connect", () => { - it("should connect to MongoDB", async () => { - const mongoose = await import("mongoose"); + describe('connect', () => { + 
it('should connect to MongoDB', async () => { + const mongoose = await import('mongoose'); await adapter.connect(); expect(mongoose.connect).toHaveBeenCalledWith( mockConfig.connectionString, @@ -70,95 +76,57 @@ describe("MongoAdapter", () => { ); }); - it("should reuse existing connection", async () => { - const mongoose = await import("mongoose"); + it('should reuse existing connection', async () => { + const mongoose = await import('mongoose'); await adapter.connect(); await adapter.connect(); expect(mongoose.connect).toHaveBeenCalledTimes(1); }); }); - describe("disconnect", () => { - it("should disconnect from MongoDB", async () => { - const mongoose = await import("mongoose"); + describe('disconnect', () => { + it('should disconnect from MongoDB', async () => { + const mongoose = await import('mongoose'); await adapter.connect(); await adapter.disconnect(); expect(mongoose.disconnect).toHaveBeenCalled(); }); }); - describe("createRepository", () => { - it("should create a repository with all CRUD methods", () => { - const mockModel = { - create: jest.fn(), - findById: jest.fn().mockReturnThis(), - find: jest.fn().mockReturnThis(), - findByIdAndUpdate: jest.fn().mockReturnThis(), - findByIdAndDelete: jest.fn().mockReturnThis(), - countDocuments: jest.fn().mockReturnThis(), - exists: jest.fn(), - insertMany: jest.fn(), - updateMany: jest.fn().mockReturnThis(), - deleteMany: jest.fn().mockReturnThis(), - lean: jest.fn().mockReturnThis(), - exec: jest.fn(), - skip: jest.fn().mockReturnThis(), - limit: jest.fn().mockReturnThis(), - sort: jest.fn().mockReturnThis(), - }; + describe('createRepository', () => { + it('should create a repository with all CRUD methods', () => { + const mockModel = createMockMongoModel(); const repo = adapter.createRepository({ model: mockModel }); - expect(repo).toBeDefined(); - expect(typeof repo.create).toBe("function"); - expect(typeof repo.findById).toBe("function"); - expect(typeof repo.findAll).toBe("function"); - expect(typeof 
repo.findPage).toBe("function"); - expect(typeof repo.updateById).toBe("function"); - expect(typeof repo.deleteById).toBe("function"); - expect(typeof repo.count).toBe("function"); - expect(typeof repo.exists).toBe("function"); - // Bulk operations - expect(typeof repo.insertMany).toBe("function"); - expect(typeof repo.updateMany).toBe("function"); - expect(typeof repo.deleteMany).toBe("function"); + testRepositoryMethods(repo); }); - it("should insertMany documents", async () => { - const mockDocs = [ - { - _id: "1", - name: "John", - toObject: () => ({ _id: "1", name: "John" }), - }, - { - _id: "2", - name: "Jane", - toObject: () => ({ _id: "2", name: "Jane" }), - }, - ]; - const mockModel = { + it('should insertMany documents', async () => { + const mockDocs = createMockMongoDocs([ + { _id: '1', name: 'John' }, + { _id: '2', name: 'Jane' }, + ]); + const mockModel = createMockMongoModel({ insertMany: jest.fn().mockResolvedValue(mockDocs), - }; + }); const repo = adapter.createRepository({ model: mockModel }); const result = await repo.insertMany([ - { name: "John" }, - { name: "Jane" }, + { name: 'John' }, + { name: 'Jane' }, ]); expect(mockModel.insertMany).toHaveBeenCalledWith([ - { name: "John" }, - { name: "Jane" }, + { name: 'John' }, + { name: 'Jane' }, ]); expect(result).toHaveLength(2); - expect(result[0]).toEqual({ _id: "1", name: "John" }); + expect(result[0]).toEqual({ _id: '1', name: 'John' }); }); - it("should return empty array when insertMany with empty data", async () => { - const mockModel = { - insertMany: jest.fn(), - }; + it('should return empty array when insertMany with empty data', async () => { + const mockModel = createMockMongoModel(); const repo = adapter.createRepository({ model: mockModel }); const result = await repo.insertMany([]); @@ -167,48 +135,48 @@ describe("MongoAdapter", () => { expect(mockModel.insertMany).not.toHaveBeenCalled(); }); - it("should updateMany documents", async () => { - const mockModel = { + it('should 
updateMany documents', async () => { + const mockModel = createMockMongoModel({ updateMany: jest.fn().mockReturnValue({ exec: jest.fn().mockResolvedValue({ modifiedCount: 5 }), }), - }; + }); const repo = adapter.createRepository({ model: mockModel }); const result = await repo.updateMany( - { status: "active" }, - { status: "inactive" }, + { status: 'active' }, + { status: 'inactive' }, ); expect(mockModel.updateMany).toHaveBeenCalledWith( - { status: "active" }, - { status: "inactive" }, + { status: 'active' }, + { status: 'inactive' }, {}, ); expect(result).toBe(5); }); - it("should deleteMany documents", async () => { - const mockModel = { + it('should deleteMany documents', async () => { + const mockModel = createMockMongoModel({ deleteMany: jest.fn().mockReturnValue({ exec: jest.fn().mockResolvedValue({ deletedCount: 3 }), }), - }; + }); const repo = adapter.createRepository({ model: mockModel }); - const result = await repo.deleteMany({ status: "deleted" }); + const result = await repo.deleteMany({ status: 'deleted' }); expect(mockModel.deleteMany).toHaveBeenCalledWith( - { status: "deleted" }, + { status: 'deleted' }, {}, ); expect(result).toBe(3); }); }); - describe("withTransaction", () => { - it("should execute callback within transaction", async () => { - const mongoose = await import("mongoose"); + describe('withTransaction', () => { + it('should execute callback within transaction', async () => { + const mongoose = await import('mongoose'); const mockCallback = jest.fn().mockResolvedValue({ success: true }); // Need to connect first @@ -225,55 +193,53 @@ describe("MongoAdapter", () => { ); }); - it("should commit transaction on success", async () => { - const mongoose = await import("mongoose"); + it('should commit transaction on success', async () => { + const mongoose = await import('mongoose'); await adapter.connect(); const mockSession = await mongoose.startSession(); - await adapter.withTransaction(async () => "result"); + await 
adapter.withTransaction(async () => 'result'); expect(mockSession.commitTransaction).toHaveBeenCalled(); expect(mockSession.endSession).toHaveBeenCalled(); }); - it("should abort transaction on error", async () => { - const mongoose = await import("mongoose"); + it('should abort transaction on error', async () => { + const mongoose = await import('mongoose'); await adapter.connect(); const mockSession = await mongoose.startSession(); - const error = new Error("Test error"); + const error = new Error('Test error'); await expect( adapter.withTransaction(async () => { throw error; }), - ).rejects.toThrow("Test error"); + ).rejects.toThrow('Test error'); expect(mockSession.abortTransaction).toHaveBeenCalled(); expect(mockSession.endSession).toHaveBeenCalled(); }); - it("should provide transaction context with createRepository", async () => { + it('should provide transaction context with createRepository', async () => { await adapter.connect(); let capturedContext: MongoTransactionContext | undefined; await adapter.withTransaction(async (ctx) => { capturedContext = ctx; - return "done"; + return 'done'; }); expect(capturedContext).toBeDefined(); - expect(capturedContext!.transaction).toBeDefined(); - expect(typeof capturedContext!.createRepository).toBe("function"); }); - it("should respect transaction options", async () => { - const mongoose = await import("mongoose"); + it('should respect transaction options', async () => { + const mongoose = await import('mongoose'); await adapter.connect(); const mockSession = await mongoose.startSession(); - await adapter.withTransaction(async () => "result", { + await adapter.withTransaction(async () => 'result', { timeout: 10000, retries: 0, }); @@ -286,200 +252,170 @@ describe("MongoAdapter", () => { }); }); - describe("healthCheck", () => { - it("should return unhealthy when not connected", async () => { + describe('healthCheck', () => { + it('should return unhealthy when not connected', async () => { const result = await 
adapter.healthCheck(); expect(result.healthy).toBe(false); - expect(result.type).toBe("mongo"); - expect(result.error).toBe("Not connected to MongoDB"); + expect(result.type).toBe('mongo'); + expect(result.error).toBe('Not connected to MongoDB'); expect(result.responseTimeMs).toBeGreaterThanOrEqual(0); }); - it("should have healthCheck method", () => { - expect(typeof adapter.healthCheck).toBe("function"); + it('should have healthCheck method', () => { + expect(typeof adapter.healthCheck).toBe('function'); }); - it("should return response time in result", async () => { + it('should return response time in result', async () => { const result = await adapter.healthCheck(); - expect(typeof result.responseTimeMs).toBe("number"); + expect(typeof result.responseTimeMs).toBe('number'); expect(result.responseTimeMs).toBeGreaterThanOrEqual(0); }); }); - describe("Soft Delete", () => { - it("should not have soft delete methods when softDelete is disabled", () => { - const mockModel = { - find: jest.fn().mockReturnThis(), - lean: jest.fn().mockReturnThis(), - exec: jest.fn(), - }; + describe('Soft Delete', () => { + it('should not have soft delete methods when softDelete is disabled', () => { + const mockModel = createMockMongoModel(); const repo = adapter.createRepository({ model: mockModel, softDelete: false, }); - expect(repo.softDelete).toBeUndefined(); - expect(repo.softDeleteMany).toBeUndefined(); - expect(repo.restore).toBeUndefined(); - expect(repo.restoreMany).toBeUndefined(); - expect(repo.findAllWithDeleted).toBeUndefined(); - expect(repo.findDeleted).toBeUndefined(); + testSoftDeleteMethods(repo, false); }); - it("should have soft delete methods when softDelete is enabled", () => { - const mockModel = { - find: jest.fn().mockReturnThis(), - findById: jest.fn().mockReturnThis(), + it('should have soft delete methods when softDelete is enabled', () => { + const mockModel = createMockMongoModel({ updateOne: jest.fn().mockReturnThis(), updateMany: 
jest.fn().mockReturnThis(), findOneAndUpdate: jest.fn().mockReturnThis(), - lean: jest.fn().mockReturnThis(), - exec: jest.fn(), - }; + }); const repo = adapter.createRepository({ model: mockModel, softDelete: true, }); - expect(typeof repo.softDelete).toBe("function"); - expect(typeof repo.softDeleteMany).toBe("function"); - expect(typeof repo.restore).toBe("function"); - expect(typeof repo.restoreMany).toBe("function"); - expect(typeof repo.findAllWithDeleted).toBe("function"); - expect(typeof repo.findDeleted).toBe("function"); + testSoftDeleteMethods(repo, true); }); - it("should soft delete a record by setting deletedAt", async () => { - const mockModel = { - find: jest.fn().mockReturnThis(), + it('should soft delete a record by setting deletedAt', async () => { + const mockModel = createMockMongoModel({ updateOne: jest.fn().mockReturnValue({ exec: jest.fn().mockResolvedValue({ modifiedCount: 1 }), }), - lean: jest.fn().mockReturnThis(), - exec: jest.fn(), - }; + }); const repo = adapter.createRepository({ model: mockModel, softDelete: true, }); - const result = await repo.softDelete!("123"); + const result = await repo.softDelete?.('123'); expect(result).toBe(true); expect(mockModel.updateOne).toHaveBeenCalledWith( - { _id: "123", deletedAt: { $eq: null } }, + { _id: '123', deletedAt: { $eq: null } }, expect.objectContaining({ deletedAt: expect.any(Date) }), {}, ); }); - it("should use custom softDeleteField", async () => { - const mockModel = { - find: jest.fn().mockReturnThis(), + it('should use custom softDeleteField', async () => { + const mockModel = createMockMongoModel({ updateOne: jest.fn().mockReturnValue({ exec: jest.fn().mockResolvedValue({ modifiedCount: 1 }), }), - lean: jest.fn().mockReturnThis(), - exec: jest.fn(), - }; + }); const repo = adapter.createRepository({ model: mockModel, softDelete: true, - softDeleteField: "removedAt", + softDeleteField: 'removedAt', }); - await repo.softDelete!("123"); + await repo.softDelete?.('123'); 
expect(mockModel.updateOne).toHaveBeenCalledWith( - { _id: "123", removedAt: { $eq: null } }, + { _id: '123', removedAt: { $eq: null } }, expect.objectContaining({ removedAt: expect.any(Date) }), {}, ); }); - it("should restore a soft-deleted record", async () => { - const mockModel = { - find: jest.fn().mockReturnThis(), + it('should restore a soft-deleted record', async () => { + const mockModel = createMockMongoModel({ findOneAndUpdate: jest.fn().mockReturnValue({ lean: jest.fn().mockReturnValue({ - exec: jest.fn().mockResolvedValue({ _id: "123", name: "Test" }), + exec: jest.fn().mockResolvedValue({ _id: '123', name: 'Test' }), }), }), - lean: jest.fn().mockReturnThis(), - exec: jest.fn(), - }; + }); const repo = adapter.createRepository({ model: mockModel, softDelete: true, }); - const result = await repo.restore!("123"); + const result = await repo.restore?.('123'); - expect(result).toEqual({ _id: "123", name: "Test" }); + expect(result).toEqual({ _id: '123', name: 'Test' }); expect(mockModel.findOneAndUpdate).toHaveBeenCalledWith( - { _id: "123", deletedAt: { $ne: null } }, + { _id: '123', deletedAt: { $ne: null } }, { $unset: { deletedAt: 1 } }, { new: true }, ); }); - it("should find only deleted records", async () => { - const mockDocs = [{ _id: "1", deletedAt: new Date() }]; - const mockModel = { + it('should find only deleted records', async () => { + const mockDocs = [{ _id: '1', deletedAt: new Date() }]; + const mockModel = createMockMongoModel({ find: jest.fn().mockReturnValue({ lean: jest.fn().mockReturnValue({ exec: jest.fn().mockResolvedValue(mockDocs), }), }), - }; + }); const repo = adapter.createRepository({ model: mockModel, softDelete: true, }); - const result = await repo.findDeleted!({}); + const result = await repo.findDeleted?.({}); expect(result).toEqual(mockDocs); expect(mockModel.find).toHaveBeenCalledWith({ deletedAt: { $ne: null } }); }); - it("should deleteMany as soft delete when enabled", async () => { - const mockModel = { - 
find: jest.fn().mockReturnThis(), + it('should deleteMany as soft delete when enabled', async () => { + const mockModel = createMockMongoModel({ updateMany: jest.fn().mockReturnValue({ exec: jest.fn().mockResolvedValue({ modifiedCount: 5 }), }), - lean: jest.fn().mockReturnThis(), - exec: jest.fn(), - }; + }); const repo = adapter.createRepository({ model: mockModel, softDelete: true, }); - const result = await repo.deleteMany({ status: "old" }); + const result = await repo.deleteMany({ status: 'old' }); expect(result).toBe(5); expect(mockModel.updateMany).toHaveBeenCalledWith( - expect.objectContaining({ status: "old", deletedAt: { $eq: null } }), + expect.objectContaining({ status: 'old', deletedAt: { $eq: null } }), expect.objectContaining({ deletedAt: expect.any(Date) }), {}, ); }); - it("should filter out soft-deleted records in findAll", async () => { - const mockDocs = [{ _id: "1", name: "Active" }]; - const mockModel = { + it('should filter out soft-deleted records in findAll', async () => { + const mockDocs = [{ _id: '1', name: 'Active' }]; + const mockModel = createMockMongoModel({ find: jest.fn().mockReturnValue({ lean: jest.fn().mockReturnValue({ exec: jest.fn().mockResolvedValue(mockDocs), }), }), - }; + }); const repo = adapter.createRepository({ model: mockModel, @@ -493,223 +429,217 @@ describe("MongoAdapter", () => { }); }); - describe("Timestamps", () => { - it("should add createdAt on create when timestamps enabled", async () => { + describe('Timestamps', () => { + it('should add createdAt on create when timestamps enabled', async () => { const mockDoc = { - _id: "1", - name: "Test", - toObject: () => ({ _id: "1", name: "Test" }), + _id: '1', + name: 'Test', + toObject: () => ({ _id: '1', name: 'Test' }), }; - const mockModel = { + const mockModel = createMockMongoModel({ create: jest.fn().mockResolvedValue(mockDoc), - }; + }); const repo = adapter.createRepository({ model: mockModel, timestamps: true, }); - await repo.create({ name: "Test" }); + 
await repo.create({ name: 'Test' }); expect(mockModel.create).toHaveBeenCalledWith( expect.objectContaining({ - name: "Test", + name: 'Test', createdAt: expect.any(Date), }), ); }); - it("should not add createdAt when timestamps disabled", async () => { + it('should not add createdAt when timestamps disabled', async () => { const mockDoc = { - _id: "1", - name: "Test", - toObject: () => ({ _id: "1", name: "Test" }), + _id: '1', + name: 'Test', + toObject: () => ({ _id: '1', name: 'Test' }), }; - const mockModel = { + const mockModel = createMockMongoModel({ create: jest.fn().mockResolvedValue(mockDoc), - }; + }); const repo = adapter.createRepository({ model: mockModel, timestamps: false, }); - await repo.create({ name: "Test" }); + await repo.create({ name: 'Test' }); - expect(mockModel.create).toHaveBeenCalledWith({ name: "Test" }); + expect(mockModel.create).toHaveBeenCalledWith({ name: 'Test' }); }); - it("should add updatedAt on updateById when timestamps enabled", async () => { - const mockModel = { - find: jest.fn().mockReturnThis(), + it('should add updatedAt on updateById when timestamps enabled', async () => { + const mockModel = createMockMongoModel({ findOneAndUpdate: jest.fn().mockReturnValue({ lean: jest.fn().mockReturnValue({ - exec: jest.fn().mockResolvedValue({ _id: "1", name: "Updated" }), + exec: jest.fn().mockResolvedValue({ _id: '1', name: 'Updated' }), }), }), - lean: jest.fn().mockReturnThis(), - exec: jest.fn(), - }; + }); const repo = adapter.createRepository({ model: mockModel, timestamps: true, }); - await repo.updateById("1", { name: "Updated" }); + await repo.updateById('1', { name: 'Updated' }); expect(mockModel.findOneAndUpdate).toHaveBeenCalledWith( - { _id: "1" }, + { _id: '1' }, expect.objectContaining({ - name: "Updated", + name: 'Updated', updatedAt: expect.any(Date), }), { new: true }, ); }); - it("should use custom timestamp fields", async () => { + it('should use custom timestamp fields', async () => { const mockDoc = { - _id: 
"1", - name: "Test", - toObject: () => ({ _id: "1", name: "Test" }), + _id: '1', + name: 'Test', + toObject: () => ({ _id: '1', name: 'Test' }), }; - const mockModel = { + const mockModel = createMockMongoModel({ create: jest.fn().mockResolvedValue(mockDoc), - }; + }); const repo = adapter.createRepository({ model: mockModel, timestamps: true, - createdAtField: "created", - updatedAtField: "modified", + createdAtField: 'created', + updatedAtField: 'modified', }); - await repo.create({ name: "Test" }); + await repo.create({ name: 'Test' }); expect(mockModel.create).toHaveBeenCalledWith( expect.objectContaining({ - name: "Test", + name: 'Test', created: expect.any(Date), }), ); }); - it("should add createdAt to insertMany items when timestamps enabled", async () => { + it('should add createdAt to insertMany items when timestamps enabled', async () => { const mockDocs = [ { - _id: "1", - name: "John", - toObject: () => ({ _id: "1", name: "John" }), + _id: '1', + name: 'John', + toObject: () => ({ _id: '1', name: 'John' }), }, { - _id: "2", - name: "Jane", - toObject: () => ({ _id: "2", name: "Jane" }), + _id: '2', + name: 'Jane', + toObject: () => ({ _id: '2', name: 'Jane' }), }, ]; - const mockModel = { + const mockModel = createMockMongoModel({ insertMany: jest.fn().mockResolvedValue(mockDocs), - }; + }); const repo = adapter.createRepository({ model: mockModel, timestamps: true, }); - await repo.insertMany([{ name: "John" }, { name: "Jane" }]); + await repo.insertMany([{ name: 'John' }, { name: 'Jane' }]); expect(mockModel.insertMany).toHaveBeenCalledWith([ - expect.objectContaining({ name: "John", createdAt: expect.any(Date) }), - expect.objectContaining({ name: "Jane", createdAt: expect.any(Date) }), + expect.objectContaining({ name: 'John', createdAt: expect.any(Date) }), + expect.objectContaining({ name: 'Jane', createdAt: expect.any(Date) }), ]); }); - it("should add updatedAt to updateMany when timestamps enabled", async () => { - const mockModel = { - find: 
jest.fn().mockReturnThis(), + it('should add updatedAt to updateMany when timestamps enabled', async () => { + const mockModel = createMockMongoModel({ updateMany: jest.fn().mockReturnValue({ exec: jest.fn().mockResolvedValue({ modifiedCount: 3 }), }), - lean: jest.fn().mockReturnThis(), - exec: jest.fn(), - }; + }); const repo = adapter.createRepository({ model: mockModel, timestamps: true, }); - await repo.updateMany({ status: "pending" }, { status: "active" }); + await repo.updateMany({ status: 'pending' }, { status: 'active' }); expect(mockModel.updateMany).toHaveBeenCalledWith( - { status: "pending" }, + { status: 'pending' }, expect.objectContaining({ - status: "active", + status: 'active', updatedAt: expect.any(Date), }), {}, ); }); - it("should soft delete when enabled", async () => { - const mockModel = { + it('should soft delete when enabled', async () => { + const mockModel = createMockMongoModel({ updateOne: jest.fn().mockReturnValue({ exec: jest.fn().mockResolvedValue({ modifiedCount: 1 }), }), - }; + }); const repo = adapter.createRepository({ model: mockModel, softDelete: true, }); - const result = await repo.deleteById("1"); + const result = await repo.deleteById('1'); expect(result).toBe(true); expect(mockModel.updateOne).toHaveBeenCalledWith( - { _id: "1", deletedAt: { $eq: null } }, + { _id: '1', deletedAt: { $eq: null } }, { deletedAt: expect.any(Date) }, {}, ); }); - it("should restore soft deleted item when enabled", async () => { + it('should restore soft deleted item when enabled', async () => { const mockQuery = { lean: jest.fn().mockReturnThis(), - exec: jest.fn().mockResolvedValue({ _id: "1" }), + exec: jest.fn().mockResolvedValue({ _id: '1' }), }; - const mockModel = { + const mockModel = createMockMongoModel({ findOneAndUpdate: jest.fn().mockReturnValue(mockQuery), - }; + }); const repo = adapter.createRepository({ model: mockModel, softDelete: true, }); - const result = await repo.restore?.("1"); + const result = await 
repo.restore?.('1'); - expect(result).toEqual({ _id: "1" }); + expect(result).toEqual({ _id: '1' }); expect(mockModel.findOneAndUpdate).toHaveBeenCalledWith( - { _id: "1", deletedAt: { $ne: null } }, + { _id: '1', deletedAt: { $ne: null } }, { $unset: { deletedAt: 1 } }, { new: true }, ); }); - it("should upsert with timestamps when enabled", async () => { + it('should upsert with timestamps when enabled', async () => { const mockQuery = { lean: jest.fn().mockReturnThis(), - exec: jest.fn().mockResolvedValue({ _id: "1" }), + exec: jest.fn().mockResolvedValue({ _id: '1' }), }; - const mockModel = { + const mockModel = createMockMongoModel({ findOneAndUpdate: jest.fn().mockReturnValue(mockQuery), - }; + }); const repo = adapter.createRepository({ model: mockModel, timestamps: true, }); - await repo.upsert({ email: "a@b.com" }, { name: "John" }); + await repo.upsert({ email: 'a@b.com' }, { name: 'John' }); expect(mockModel.findOneAndUpdate).toHaveBeenCalledWith( - { email: "a@b.com" }, + { email: 'a@b.com' }, expect.objectContaining({ $set: expect.objectContaining({ - name: "John", + name: 'John', updatedAt: expect.any(Date), }), $setOnInsert: expect.objectContaining({ @@ -720,13 +650,13 @@ describe("MongoAdapter", () => { ); }); - it("should return distinct values", async () => { + it('should return distinct values', async () => { const mockQuery = { - exec: jest.fn().mockResolvedValue(["a", "b"]), + exec: jest.fn().mockResolvedValue(['a', 'b']), }; - const mockModel = { + const mockModel = createMockMongoModel({ distinct: jest.fn().mockReturnValue(mockQuery), - }; + }); const repo = adapter.createRepository<{ email: string; @@ -734,91 +664,91 @@ describe("MongoAdapter", () => { }>({ model: mockModel, }); - const result = await repo.distinct("email", { active: true }); + const result = await repo.distinct('email', { active: true }); - expect(result).toEqual(["a", "b"]); - expect(mockModel.distinct).toHaveBeenCalledWith("email", { + expect(result).toEqual(['a', 'b']); 
+ expect(mockModel.distinct).toHaveBeenCalledWith('email', { active: true, }); }); - it("should select projected fields", async () => { + it('should select projected fields', async () => { const mockQuery = { select: jest.fn().mockReturnThis(), lean: jest.fn().mockReturnThis(), - exec: jest.fn().mockResolvedValue([{ name: "John" }]), + exec: jest.fn().mockResolvedValue([{ name: 'John' }]), }; - const mockModel = { + const mockModel = createMockMongoModel({ find: jest.fn().mockReturnValue(mockQuery), - }; + }); const repo = adapter.createRepository<{ name: string; active?: boolean }>( { model: mockModel, }, ); - const result = await repo.select({ active: true }, ["name"]); + const result = await repo.select({ active: true }, ['name']); - expect(result).toEqual([{ name: "John" }]); + expect(result).toEqual([{ name: 'John' }]); expect(mockModel.find).toHaveBeenCalledWith({ active: true }); expect(mockQuery.select).toHaveBeenCalledWith({ name: 1 }); }); - it("should query deleted records when soft delete enabled", async () => { + it('should query deleted records when soft delete enabled', async () => { const mockQuery = { lean: jest.fn().mockReturnThis(), - exec: jest.fn().mockResolvedValue([{ _id: "1" }]), + exec: jest.fn().mockResolvedValue([{ _id: '1' }]), }; - const mockModel = { + const mockModel = createMockMongoModel({ find: jest.fn().mockReturnValue(mockQuery), - }; + }); const repo = adapter.createRepository({ model: mockModel, softDelete: true, }); - const result = await repo.findDeleted?.({ status: "deleted" }); + const result = await repo.findDeleted?.({ status: 'deleted' }); - expect(result).toEqual([{ _id: "1" }]); + expect(result).toEqual([{ _id: '1' }]); expect(mockModel.find).toHaveBeenCalledWith({ - status: "deleted", + status: 'deleted', deletedAt: { $ne: null }, }); }); - it("should include deleted records when requested", async () => { + it('should include deleted records when requested', async () => { const mockQuery = { lean: 
jest.fn().mockReturnThis(), - exec: jest.fn().mockResolvedValue([{ _id: "1" }]), + exec: jest.fn().mockResolvedValue([{ _id: '1' }]), }; - const mockModel = { + const mockModel = createMockMongoModel({ find: jest.fn().mockReturnValue(mockQuery), - }; + }); const repo = adapter.createRepository({ model: mockModel, softDelete: true, }); - const result = await repo.findAllWithDeleted?.({ status: "any" }); + const result = await repo.findAllWithDeleted?.({ status: 'any' }); - expect(result).toEqual([{ _id: "1" }]); - expect(mockModel.find).toHaveBeenCalledWith({ status: "any" }); + expect(result).toEqual([{ _id: '1' }]); + expect(mockModel.find).toHaveBeenCalledWith({ status: 'any' }); }); }); - describe("healthCheck", () => { - it("should return healthy when connected and ping succeeds", async () => { - const mongoose = await import("mongoose"); + describe('healthCheck', () => { + it('should return healthy when connected and ping succeeds', async () => { + const mongoose = await import('mongoose'); - Object.defineProperty(mongoose.connection, "readyState", { + Object.defineProperty(mongoose.connection, 'readyState', { value: 1, writable: true, }); - Object.defineProperty(mongoose.connection, "db", { + Object.defineProperty(mongoose.connection, 'db', { value: { admin: () => ({ ping: jest.fn().mockResolvedValue({ ok: 1 }), - serverInfo: jest.fn().mockResolvedValue({ version: "6.0.0" }), + serverInfo: jest.fn().mockResolvedValue({ version: '6.0.0' }), }), }, writable: true, @@ -827,27 +757,27 @@ describe("MongoAdapter", () => { const result = await adapter.healthCheck(); expect(result.healthy).toBe(true); - expect(result.type).toBe("mongo"); - expect(result.details?.version).toBe("6.0.0"); + expect(result.type).toBe('mongo'); + expect(result.details?.version).toBe('6.0.0'); expect(result.responseTimeMs).toBeGreaterThanOrEqual(0); }); - it.skip("should return unhealthy when not connected", async () => { + it.skip('should return unhealthy when not connected', async () => { 
const result = await adapter.healthCheck(); expect(result.healthy).toBe(false); - expect(result.error).toBe("Not connected to MongoDB"); - expect(result.type).toBe("mongo"); + expect(result.error).toBe('Not connected to MongoDB'); + expect(result.type).toBe('mongo'); }); - it("should return unhealthy when ping fails", async () => { - const mongoose = await import("mongoose"); + it('should return unhealthy when ping fails', async () => { + const mongoose = await import('mongoose'); - Object.defineProperty(mongoose.connection, "readyState", { + Object.defineProperty(mongoose.connection, 'readyState', { value: 1, writable: true, }); - Object.defineProperty(mongoose.connection, "db", { + Object.defineProperty(mongoose.connection, 'db', { value: { admin: () => ({ ping: jest.fn().mockResolvedValue({ ok: 0 }), @@ -859,20 +789,20 @@ describe("MongoAdapter", () => { const result = await adapter.healthCheck(); expect(result.healthy).toBe(false); - expect(result.error).toBe("Ping command failed"); + expect(result.error).toBe('Ping command failed'); }); - it("should return unhealthy when ping throws error", async () => { - const mongoose = await import("mongoose"); + it('should return unhealthy when ping throws error', async () => { + const mongoose = await import('mongoose'); - Object.defineProperty(mongoose.connection, "readyState", { + Object.defineProperty(mongoose.connection, 'readyState', { value: 1, writable: true, }); - Object.defineProperty(mongoose.connection, "db", { + Object.defineProperty(mongoose.connection, 'db', { value: { admin: () => ({ - ping: jest.fn().mockRejectedValue(new Error("Connection lost")), + ping: jest.fn().mockRejectedValue(new Error('Connection lost')), }), }, writable: true, @@ -881,24 +811,24 @@ describe("MongoAdapter", () => { const result = await adapter.healthCheck(); expect(result.healthy).toBe(false); - expect(result.error).toBe("Connection lost"); + expect(result.error).toBe('Connection lost'); }); }); - describe("withTransaction", () 
=> { - it("should execute callback within transaction successfully", async () => { - const mongoose = await import("mongoose"); + describe('withTransaction', () => { + it('should execute callback within transaction successfully', async () => { + const mongoose = await import('mongoose'); await adapter.connect(); const callback = jest.fn(async (ctx: MongoTransactionContext) => { expect(ctx.transaction).toBeDefined(); expect(ctx.createRepository).toBeDefined(); - return { result: "success" }; + return { result: 'success' }; }); const result = await adapter.withTransaction(callback); - expect(result).toEqual({ result: "success" }); + expect(result).toEqual({ result: 'success' }); expect(callback).toHaveBeenCalled(); const mockSession = await mongoose.startSession(); expect(mockSession.startTransaction).toHaveBeenCalled(); @@ -906,14 +836,14 @@ describe("MongoAdapter", () => { expect(mockSession.endSession).toHaveBeenCalled(); }); - it("should retry on transient errors", async () => { + it('should retry on transient errors', async () => { await adapter.connect(); const transientError = { hasErrorLabel: jest.fn( - (label: string) => label === "TransientTransactionError", + (label: string) => label === 'TransientTransactionError', ), - message: "Transient error", + message: 'Transient error', }; let attempt = 0; @@ -922,21 +852,21 @@ describe("MongoAdapter", () => { if (attempt === 1) { throw transientError; } - return { result: "success after retry" }; + return { result: 'success after retry' }; }); const result = await adapter.withTransaction(callback, { retries: 1 }); - expect(result).toEqual({ result: "success after retry" }); + expect(result).toEqual({ result: 'success after retry' }); expect(callback).toHaveBeenCalledTimes(2); }); - it("should retry on specific MongoDB error codes", async () => { + it('should retry on specific MongoDB error codes', async () => { await adapter.connect(); const retryableError = { code: 11600, // InterruptedAtShutdown - message: 
"Server shutting down", + message: 'Server shutting down', }; let attempt = 0; @@ -945,23 +875,23 @@ describe("MongoAdapter", () => { if (attempt === 1) { throw retryableError; } - return { result: "success after retry" }; + return { result: 'success after retry' }; }); const result = await adapter.withTransaction(callback, { retries: 1 }); - expect(result).toEqual({ result: "success after retry" }); + expect(result).toEqual({ result: 'success after retry' }); expect(callback).toHaveBeenCalledTimes(2); }); - it.skip("should throw after exhausting retries", async () => { + it.skip('should throw after exhausting retries', async () => { await adapter.connect(); const persistentError = { hasErrorLabel: jest.fn( - (label: string) => label === "TransientTransactionError", + (label: string) => label === 'TransientTransactionError', ), - message: "Persistent error", + message: 'Persistent error', }; const callback = jest.fn(async () => { @@ -970,22 +900,22 @@ describe("MongoAdapter", () => { await expect( adapter.withTransaction(callback, { retries: 2 }), - ).rejects.toThrow("Persistent error"); + ).rejects.toThrow('Persistent error'); expect(callback).toHaveBeenCalledTimes(3); // initial + 2 retries }); - it("should abort transaction on error", async () => { - const mongoose = await import("mongoose"); + it('should abort transaction on error', async () => { + const mongoose = await import('mongoose'); await adapter.connect(); - const error = new Error("Transaction failed"); + const error = new Error('Transaction failed'); const callback = jest.fn(async () => { throw error; }); await expect(adapter.withTransaction(callback)).rejects.toThrow( - "Transaction failed", + 'Transaction failed', ); const mockSession = await mongoose.startSession(); @@ -993,7 +923,7 @@ describe("MongoAdapter", () => { expect(mockSession.endSession).toHaveBeenCalled(); }); - it("should handle all retryable error codes", async () => { + it('should handle all retryable error codes', async () => { 
await adapter.connect(); const retryableCodes = [11600, 11602, 10107, 13435, 13436, 189, 91]; @@ -1005,7 +935,11 @@ describe("MongoAdapter", () => { const callback = jest.fn(async () => { attempt++; if (attempt === 1) { - throw { code, message: `Error code ${code}` }; + const error = new Error(`Error code ${code}`) as Error & { + code: number; + }; + error.code = code; + throw error; } return { code }; }); @@ -1016,27 +950,27 @@ describe("MongoAdapter", () => { }); }); - describe("connection event handlers", () => { - it("should register connection event handlers", async () => { - const mongoose = await import("mongoose"); + describe('connection event handlers', () => { + it('should register connection event handlers', async () => { + const mongoose = await import('mongoose'); await adapter.connect(); expect(mongoose.connection.on).toHaveBeenCalledWith( - "connected", + 'connected', expect.any(Function), ); expect(mongoose.connection.on).toHaveBeenCalledWith( - "error", + 'error', expect.any(Function), ); expect(mongoose.connection.on).toHaveBeenCalledWith( - "disconnected", + 'disconnected', expect.any(Function), ); }); - it("should apply custom connection options", async () => { - const mongoose = await import("mongoose"); + it('should apply custom connection options', async () => { + const mongoose = await import('mongoose'); const customOptions = { retryWrites: true }; await adapter.connect(customOptions); @@ -1047,8 +981,8 @@ describe("MongoAdapter", () => { ); }); - it("should use custom pool configuration", async () => { - const mongoose = await import("mongoose"); + it('should use custom pool configuration', async () => { + const mongoose = await import('mongoose'); const adapterWithPool = new MongoAdapter({ ...mockConfig, pool: { min: 2, max: 20, idleTimeoutMs: 60000 }, diff --git a/src/adapters/mongo.adapter.ts b/src/adapters/mongo.adapter.ts index 63e3f1f..fb77f4c 100644 --- a/src/adapters/mongo.adapter.ts +++ b/src/adapters/mongo.adapter.ts @@ -1,5 +1,5 
@@ -import { Injectable, Logger } from "@nestjs/common"; -import mongoose, { ConnectOptions, Model, ClientSession } from "mongoose"; +import { Injectable, Logger } from '@nestjs/common'; +import mongoose, { ConnectOptions, Model, ClientSession } from 'mongoose'; import { MongoDatabaseConfig, @@ -12,7 +12,365 @@ import { TransactionCallback, HealthCheckResult, DATABASE_KIT_CONSTANTS, -} from "../contracts/database.contracts"; +} from '../contracts/database.contracts'; +import { + shapePage, + addCreatedAtTimestamp, + addUpdatedAtTimestamp, + createErrorHealthResult, + createSuccessHealthResult, +} from '../utils/adapter.utils'; + +type MongoRepoParams = { + model: Model; + session?: ClientSession; + notDeletedFilter: Record; + softDeleteEnabled: boolean; + softDeleteField: string; + timestampsEnabled: boolean; + createdAtField: string; + updatedAtField: string; + addCreatedAt: >(data: D) => D; + addUpdatedAt: >(data: D) => D; +}; + +function createMongoReadMethods(params: MongoRepoParams) { + const { model, session, notDeletedFilter } = params; + + return { + async findById(id: string | number): Promise { + const mergedFilter = { _id: id, ...notDeletedFilter }; + let query = model.findOne(mergedFilter); + if (session) query = query.session(session); + const doc = await query.lean().exec(); + return doc as T | null; + }, + + async findAll(filter: Record = {}): Promise { + const mergedFilter = { ...filter, ...notDeletedFilter }; + let query = model.find(mergedFilter); + if (session) query = query.session(session); + const docs = await query.lean().exec(); + return docs as T[]; + }, + + async findOne(filter: Record): Promise { + const mergedFilter = { ...filter, ...notDeletedFilter }; + let query = model.findOne(mergedFilter); + if (session) query = query.session(session); + const doc = await query.lean().exec(); + return doc as T | null; + }, + + async findPage(options: PageOptions = {}): Promise> { + const { filter = {}, page = 1, limit = 10, sort } = options; + 
const mergedFilter = { ...filter, ...notDeletedFilter }; + + const skip = Math.max(0, (page - 1) * limit); + let query = model.find(mergedFilter).skip(skip).limit(limit); + + if (sort) { + query = query.sort(sort as Record); + } + if (session) query = query.session(session); + + const [data, total] = await Promise.all([ + query.lean().exec(), + session + ? model.countDocuments(mergedFilter).session(session).exec() + : model.countDocuments(mergedFilter).exec(), + ]); + + return shapePage(data as T[], page, limit, total); + }, + + async count(filter: Record = {}): Promise { + const mergedFilter = { ...filter, ...notDeletedFilter }; + let query = model.countDocuments(mergedFilter); + if (session) query = query.session(session); + return query.exec(); + }, + + async exists(filter: Record = {}): Promise { + const mergedFilter = { ...filter, ...notDeletedFilter }; + if (session) { + const doc = await model + .findOne(mergedFilter) + .session(session) + .select('_id') + .lean() + .exec(); + return !!doc; + } + const res = await model.exists(mergedFilter); + return !!res; + }, + + async distinct( + field: K, + filter: Record = {}, + ): Promise { + const mergedFilter = { ...filter, ...notDeletedFilter }; + let query = model.distinct(String(field), mergedFilter); + if (session) query = query.session(session); + const values = await query.exec(); + return values as T[K][]; + }, + + async select( + filter: Record, + fields: K[], + ): Promise[]> { + const mergedFilter = { ...filter, ...notDeletedFilter }; + const projection = fields.reduce( + (acc, field) => ({ ...acc, [field]: 1 }), + {}, + ); + let query = model.find(mergedFilter).select(projection); + if (session) query = query.session(session); + const docs = await query.lean().exec(); + return docs as Pick[]; + }, + }; +} + +function createMongoWriteMethods(params: MongoRepoParams) { + const { + model, + session, + notDeletedFilter, + softDeleteEnabled, + softDeleteField, + addCreatedAt, + addUpdatedAt, + } = params; + + 
return { + async create(data: Partial): Promise { + const timestampedData = addCreatedAt(data as Record); + const doc = session + ? (await model.create([timestampedData], { session }))[0] + : await model.create(timestampedData); + return (doc as { toObject?: () => T }).toObject?.() ?? (doc as T); + }, + + async updateById( + id: string | number, + update: Partial, + ): Promise { + const mergedFilter = { _id: id, ...notDeletedFilter }; + const timestampedUpdate = addUpdatedAt(update as Record); + let query = model.findOneAndUpdate(mergedFilter, timestampedUpdate, { + new: true, + }); + if (session) query = query.session(session); + const doc = await query.lean().exec(); + return doc as T | null; + }, + + async deleteById(id: string | number): Promise { + if (softDeleteEnabled) { + const mergedFilter = { _id: id, ...notDeletedFilter }; + const options = session ? { session } : {}; + const result = await model + .updateOne(mergedFilter, { [softDeleteField]: new Date() }, options) + .exec(); + return result.modifiedCount > 0; + } + + let query = model.findByIdAndDelete(id); + if (session) query = query.session(session); + const res = await query.lean().exec(); + return !!res; + }, + }; +} + +function createMongoBulkMethods(params: MongoRepoParams) { + const { + model, + session, + notDeletedFilter, + softDeleteEnabled, + softDeleteField, + addCreatedAt, + addUpdatedAt, + } = params; + + return { + async insertMany(data: Partial[]): Promise { + if (data.length === 0) return []; + + const timestampedData = data.map((item) => + addCreatedAt(item as Record), + ); + + const docs = session + ? await model.insertMany(timestampedData, { session }) + : await model.insertMany(timestampedData); + + return docs.map( + (doc: { toObject?: () => T }) => doc.toObject?.() ?? 
(doc as T), + ); + }, + + async updateMany( + filter: Record, + update: Partial, + ): Promise { + const mergedFilter = { ...filter, ...notDeletedFilter }; + const timestampedUpdate = addUpdatedAt(update as Record); + const options = session ? { session } : {}; + const result = await model + .updateMany(mergedFilter, timestampedUpdate, options) + .exec(); + return result.modifiedCount; + }, + + async deleteMany(filter: Record): Promise { + const mergedFilter = { ...filter, ...notDeletedFilter }; + const options = session ? { session } : {}; + + if (softDeleteEnabled) { + const result = await model + .updateMany(mergedFilter, { [softDeleteField]: new Date() }, options) + .exec(); + return result.modifiedCount; + } + + const result = await model.deleteMany(mergedFilter, options).exec(); + return result.deletedCount; + }, + }; +} + +function createMongoAdvancedMethods(params: MongoRepoParams) { + const { + model, + session, + notDeletedFilter, + timestampsEnabled, + createdAtField, + updatedAtField, + } = params; + + return { + async upsert( + filter: Record, + data: Partial, + ): Promise { + const mergedFilter = { ...filter, ...notDeletedFilter }; + const timestampedData = timestampsEnabled + ? { ...data, [updatedAtField]: new Date() } + : data; + + let query = model.findOneAndUpdate( + mergedFilter, + { + $set: timestampedData, + ...(timestampsEnabled + ? 
{ $setOnInsert: { [createdAtField]: new Date() } } + : {}), + }, + { upsert: true, new: true }, + ); + if (session) query = query.session(session); + const doc = await query.lean().exec(); + return doc as T; + }, + }; +} + +function createMongoSoftDeleteMethods(params: MongoRepoParams) { + const { + model, + session, + notDeletedFilter, + softDeleteEnabled, + softDeleteField, + } = params; + + if (!softDeleteEnabled) { + return { + softDelete: undefined, + softDeleteMany: undefined, + restore: undefined, + restoreMany: undefined, + findAllWithDeleted: undefined, + findDeleted: undefined, + }; + } + + return { + softDelete: async (id: string | number): Promise => { + const mergedFilter = { _id: id, ...notDeletedFilter }; + const options = session ? { session } : {}; + const result = await model + .updateOne(mergedFilter, { [softDeleteField]: new Date() }, options) + .exec(); + return result.modifiedCount > 0; + }, + + softDeleteMany: async ( + filter: Record, + ): Promise => { + const mergedFilter = { ...filter, ...notDeletedFilter }; + const options = session ? { session } : {}; + const result = await model + .updateMany(mergedFilter, { [softDeleteField]: new Date() }, options) + .exec(); + return result.modifiedCount; + }, + + restore: async (id: string | number): Promise => { + const deletedFilter = { _id: id, [softDeleteField]: { $ne: null } }; + let query = model.findOneAndUpdate( + deletedFilter, + { $unset: { [softDeleteField]: 1 } }, + { new: true }, + ); + if (session) query = query.session(session); + const doc = await query.lean().exec(); + return doc as T | null; + }, + + restoreMany: async (filter: Record): Promise => { + const deletedFilter = { + ...filter, + [softDeleteField]: { $ne: null }, + }; + const options = session ? 
{ session } : {}; + const result = await model + .updateMany( + deletedFilter, + { $unset: { [softDeleteField]: 1 } }, + options, + ) + .exec(); + return result.modifiedCount; + }, + + findAllWithDeleted: async ( + filter: Record = {}, + ): Promise => { + let query = model.find(filter); + if (session) query = query.session(session); + const docs = await query.lean().exec(); + return docs as T[]; + }, + + findDeleted: async (filter: Record = {}): Promise => { + const deletedFilter = { + ...filter, + [softDeleteField]: { $ne: null }, + }; + let query = model.find(deletedFilter); + if (session) query = query.session(session); + const docs = await query.lean().exec(); + return docs as T[]; + }, + }; +} /** * MongoDB adapter for DatabaseKit. @@ -33,7 +391,7 @@ export class MongoAdapter { constructor(config: MongoDatabaseConfig) { this.config = config; - mongoose.set("strictQuery", false); + mongoose.set('strictQuery', false); } /** @@ -44,8 +402,8 @@ export class MongoAdapter { * @returns Promise resolving to mongoose instance */ async connect(options: ConnectOptions = {}): Promise { - if (!this.connectionPromise) { - this.logger.log("Connecting to MongoDB..."); + if (this.connectionPromise === undefined) { + this.logger.log('Connecting to MongoDB...'); // Apply pool configuration from config const poolConfig = this.config.pool || {}; @@ -65,16 +423,16 @@ export class MongoAdapter { ...options, }); - mongoose.connection.on("connected", () => { - this.logger.log("Successfully connected to MongoDB"); + mongoose.connection.on('connected', () => { + this.logger.log('Successfully connected to MongoDB'); }); - mongoose.connection.on("error", (err) => { - this.logger.error("MongoDB connection error", err?.message || err); + mongoose.connection.on('error', (err) => { + this.logger.error('MongoDB connection error', err?.message || err); }); - mongoose.connection.on("disconnected", () => { - this.logger.warn("MongoDB disconnected"); + mongoose.connection.on('disconnected', () => 
{ + this.logger.warn('MongoDB disconnected'); }); } @@ -87,7 +445,7 @@ export class MongoAdapter { async disconnect(): Promise { await mongoose.disconnect(); this.connectionPromise = undefined; - this.logger.log("Disconnected from MongoDB"); + this.logger.log('Disconnected from MongoDB'); } /** @@ -116,12 +474,11 @@ export class MongoAdapter { try { if (!this.isConnected()) { - return { - healthy: false, - responseTimeMs: Date.now() - startTime, - type: "mongo", - error: "Not connected to MongoDB", - }; + return createErrorHealthResult( + 'mongo', + 'Not connected to MongoDB', + startTime, + ); } // Send ping command to verify connection @@ -129,32 +486,25 @@ export class MongoAdapter { const pingResult = await admin?.ping(); if (!pingResult?.ok) { - return { - healthy: false, - responseTimeMs: Date.now() - startTime, - type: "mongo", - error: "Ping command failed", - }; + return createErrorHealthResult( + 'mongo', + 'Ping command failed', + startTime, + ); } // Get server info for details const serverInfo = await admin?.serverInfo(); - return { - healthy: true, - responseTimeMs: Date.now() - startTime, - type: "mongo", - details: { - version: serverInfo?.version, - }, - }; + return createSuccessHealthResult('mongo', startTime, { + version: serverInfo?.version, + }); } catch (error) { - return { - healthy: false, - responseTimeMs: Date.now() - startTime, - type: "mongo", - error: error instanceof Error ? error.message : "Unknown error", - }; + return createErrorHealthResult( + 'mongo', + error instanceof Error ? error.message : 'Unknown error', + startTime, + ); } } @@ -172,355 +522,40 @@ export class MongoAdapter { ): Repository { const model = opts.model as Model; const softDeleteEnabled = opts.softDelete ?? false; - const softDeleteField = opts.softDeleteField ?? "deletedAt"; - - // Timestamp configuration + const softDeleteField = opts.softDeleteField ?? 'deletedAt'; const timestampsEnabled = opts.timestamps ?? 
false; - const createdAtField = opts.createdAtField ?? "createdAt"; - const updatedAtField = opts.updatedAtField ?? "updatedAt"; - - // Base filter to exclude soft-deleted records + const createdAtField = opts.createdAtField ?? 'createdAt'; + const updatedAtField = opts.updatedAtField ?? 'updatedAt'; const notDeletedFilter = softDeleteEnabled ? { [softDeleteField]: { $eq: null } } : {}; - // Helper to add createdAt timestamp - const addCreatedAt = >(data: D): D => { - if (timestampsEnabled) { - return { ...data, [createdAtField]: new Date() }; - } - return data; - }; - - // Helper to add updatedAt timestamp - const addUpdatedAt = >(data: D): D => { - if (timestampsEnabled) { - return { ...data, [updatedAtField]: new Date() }; - } - return data; + const addCreatedAt = >(data: D): D => + addCreatedAtTimestamp(data, timestampsEnabled, createdAtField); + + const addUpdatedAt = >(data: D): D => + addUpdatedAtTimestamp(data, timestampsEnabled, updatedAtField); + + const params: MongoRepoParams = { + model, + session, + notDeletedFilter, + softDeleteEnabled, + softDeleteField, + timestampsEnabled, + createdAtField, + updatedAtField, + addCreatedAt, + addUpdatedAt, }; - const shapePage = ( - data: T[], - page: number, - limit: number, - total: number, - ): PageResult => { - const pages = Math.max(1, Math.ceil((total || 0) / (limit || 1))); - return { data, page, limit, total, pages }; - }; - - const repo: Repository = { - async create(data: Partial): Promise { - const timestampedData = addCreatedAt(data as Record); - const doc = session - ? (await model.create([timestampedData], { session }))[0] - : await model.create(timestampedData); - return (doc as { toObject?: () => T }).toObject?.() ?? 
(doc as T); - }, - - async findById(id: string | number): Promise { - const mergedFilter = { _id: id, ...notDeletedFilter }; - let query = model.findOne(mergedFilter); - if (session) query = query.session(session); - const doc = await query.lean().exec(); - return doc as T | null; - }, - - async findAll(filter: Record = {}): Promise { - const mergedFilter = { ...filter, ...notDeletedFilter }; - let query = model.find(mergedFilter); - if (session) query = query.session(session); - const docs = await query.lean().exec(); - return docs as T[]; - }, - - async findOne(filter: Record): Promise { - const mergedFilter = { ...filter, ...notDeletedFilter }; - let query = model.findOne(mergedFilter); - if (session) query = query.session(session); - const doc = await query.lean().exec(); - return doc as T | null; - }, - - async findPage(options: PageOptions = {}): Promise> { - const { filter = {}, page = 1, limit = 10, sort } = options; - const mergedFilter = { ...filter, ...notDeletedFilter }; - - const skip = Math.max(0, (page - 1) * limit); - let query = model.find(mergedFilter).skip(skip).limit(limit); - - if (sort) { - query = query.sort(sort as Record); - } - if (session) query = query.session(session); - - const [data, total] = await Promise.all([ - query.lean().exec(), - session - ? 
model.countDocuments(mergedFilter).session(session).exec() - : model.countDocuments(mergedFilter).exec(), - ]); - - return shapePage(data as T[], page, limit, total); - }, - - async updateById( - id: string | number, - update: Partial, - ): Promise { - const mergedFilter = { _id: id, ...notDeletedFilter }; - const timestampedUpdate = addUpdatedAt( - update as Record, - ); - let query = model.findOneAndUpdate(mergedFilter, timestampedUpdate, { - new: true, - }); - if (session) query = query.session(session); - const doc = await query.lean().exec(); - return doc as T | null; - }, - - async deleteById(id: string | number): Promise { - // If soft delete is enabled, use softDelete instead - if (softDeleteEnabled) { - const mergedFilter = { _id: id, ...notDeletedFilter }; - const options = session ? { session } : {}; - const result = await model - .updateOne(mergedFilter, { [softDeleteField]: new Date() }, options) - .exec(); - return result.modifiedCount > 0; - } - - let query = model.findByIdAndDelete(id); - if (session) query = query.session(session); - const res = await query.lean().exec(); - return !!res; - }, - - async count(filter: Record = {}): Promise { - const mergedFilter = { ...filter, ...notDeletedFilter }; - let query = model.countDocuments(mergedFilter); - if (session) query = query.session(session); - return query.exec(); - }, - - async exists(filter: Record = {}): Promise { - const mergedFilter = { ...filter, ...notDeletedFilter }; - // exists() doesn't support session directly, use findOne - if (session) { - const doc = await model - .findOne(mergedFilter) - .session(session) - .select("_id") - .lean() - .exec(); - return !!doc; - } - const res = await model.exists(mergedFilter); - return !!res; - }, - - // ----------------------------- - // Bulk Operations - // ----------------------------- - - async insertMany(data: Partial[]): Promise { - if (data.length === 0) return []; - - // Add createdAt timestamp to each record - const timestampedData = 
data.map((item) => - addCreatedAt(item as Record), - ); - - const docs = session - ? await model.insertMany(timestampedData, { session }) - : await model.insertMany(timestampedData); - - return docs.map( - (doc) => (doc as { toObject?: () => T }).toObject?.() ?? (doc as T), - ); - }, - - async updateMany( - filter: Record, - update: Partial, - ): Promise { - const mergedFilter = { ...filter, ...notDeletedFilter }; - const timestampedUpdate = addUpdatedAt( - update as Record, - ); - const options = session ? { session } : {}; - const result = await model - .updateMany(mergedFilter, timestampedUpdate, options) - .exec(); - return result.modifiedCount; - }, - - async deleteMany(filter: Record): Promise { - const mergedFilter = { ...filter, ...notDeletedFilter }; - const options = session ? { session } : {}; - - // If soft delete is enabled, update instead of delete - if (softDeleteEnabled) { - const result = await model - .updateMany( - mergedFilter, - { [softDeleteField]: new Date() }, - options, - ) - .exec(); - return result.modifiedCount; - } - - const result = await model.deleteMany(mergedFilter, options).exec(); - return result.deletedCount; - }, - - // ----------------------------- - // Advanced Query Operations - // ----------------------------- - - async upsert( - filter: Record, - data: Partial, - ): Promise { - const mergedFilter = { ...filter, ...notDeletedFilter }; - const timestampedData = timestampsEnabled - ? { ...data, [updatedAtField]: new Date() } - : data; - - let query = model.findOneAndUpdate( - mergedFilter, - { - $set: timestampedData, - ...(timestampsEnabled - ? 
{ $setOnInsert: { [createdAtField]: new Date() } } - : {}), - }, - { upsert: true, new: true }, - ); - if (session) query = query.session(session); - const doc = await query.lean().exec(); - return doc as T; - }, - - async distinct( - field: K, - filter: Record = {}, - ): Promise { - const mergedFilter = { ...filter, ...notDeletedFilter }; - let query = model.distinct(String(field), mergedFilter); - if (session) query = query.session(session); - const values = await query.exec(); - return values as T[K][]; - }, - - async select( - filter: Record, - fields: K[], - ): Promise[]> { - const mergedFilter = { ...filter, ...notDeletedFilter }; - const projection = fields.reduce( - (acc, field) => ({ ...acc, [field]: 1 }), - {}, - ); - let query = model.find(mergedFilter).select(projection); - if (session) query = query.session(session); - const docs = await query.lean().exec(); - return docs as Pick[]; - }, - - // ----------------------------- - // Soft Delete Operations - // ----------------------------- - - softDelete: softDeleteEnabled - ? async (id: string | number): Promise => { - const mergedFilter = { _id: id, ...notDeletedFilter }; - const options = session ? { session } : {}; - const result = await model - .updateOne( - mergedFilter, - { [softDeleteField]: new Date() }, - options, - ) - .exec(); - return result.modifiedCount > 0; - } - : undefined, - - softDeleteMany: softDeleteEnabled - ? async (filter: Record): Promise => { - const mergedFilter = { ...filter, ...notDeletedFilter }; - const options = session ? { session } : {}; - const result = await model - .updateMany( - mergedFilter, - { [softDeleteField]: new Date() }, - options, - ) - .exec(); - return result.modifiedCount; - } - : undefined, - - restore: softDeleteEnabled - ? 
async (id: string | number): Promise => { - const deletedFilter = { _id: id, [softDeleteField]: { $ne: null } }; - let query = model.findOneAndUpdate( - deletedFilter, - { $unset: { [softDeleteField]: 1 } }, - { new: true }, - ); - if (session) query = query.session(session); - const doc = await query.lean().exec(); - return doc as T | null; - } - : undefined, - - restoreMany: softDeleteEnabled - ? async (filter: Record): Promise => { - const deletedFilter = { - ...filter, - [softDeleteField]: { $ne: null }, - }; - const options = session ? { session } : {}; - const result = await model - .updateMany( - deletedFilter, - { $unset: { [softDeleteField]: 1 } }, - options, - ) - .exec(); - return result.modifiedCount; - } - : undefined, - - findAllWithDeleted: softDeleteEnabled - ? async (filter: Record = {}): Promise => { - let query = model.find(filter); - if (session) query = query.session(session); - const docs = await query.lean().exec(); - return docs as T[]; - } - : undefined, - - findDeleted: softDeleteEnabled - ? async (filter: Record = {}): Promise => { - const deletedFilter = { - ...filter, - [softDeleteField]: { $ne: null }, - }; - let query = model.find(deletedFilter); - if (session) query = query.session(session); - const docs = await query.lean().exec(); - return docs as T[]; - } - : undefined, + return { + ...createMongoReadMethods(params), + ...createMongoWriteMethods(params), + ...createMongoBulkMethods(params), + ...createMongoAdvancedMethods(params), + ...createMongoSoftDeleteMethods(params), }; - - return repo; } /** @@ -605,21 +640,21 @@ export class MongoAdapter { } } - throw lastError || new Error("Transaction failed"); + throw lastError || new Error('Transaction failed'); } /** * Checks if an error is transient and can be retried. 
*/ private isTransientError(error: unknown): boolean { - if (error && typeof error === "object") { + if (error && typeof error === 'object') { const mongoError = error as { hasErrorLabel?: (label: string) => boolean; code?: number; }; // MongoDB transient transaction errors - if (mongoError.hasErrorLabel?.("TransientTransactionError")) { + if (mongoError.hasErrorLabel?.('TransientTransactionError')) { return true; } diff --git a/src/adapters/postgres.adapter.spec.ts b/src/adapters/postgres.adapter.spec.ts index c4dd3c2..fd0e6b4 100644 --- a/src/adapters/postgres.adapter.spec.ts +++ b/src/adapters/postgres.adapter.spec.ts @@ -1,11 +1,16 @@ -import type { Knex } from "knex"; +import type { Knex } from 'knex'; import type { PostgresDatabaseConfig, PostgresTransactionContext, -} from "../contracts/database.contracts"; +} from '../contracts/database.contracts'; +import { + createMockKnex, + testSoftDeleteMethods, + testRepositoryMethods, +} from '../test/test.utils'; -import { PostgresAdapter } from "./postgres.adapter"; +import { PostgresAdapter } from './postgres.adapter'; // Mock knex const mockTrx = { @@ -15,8 +20,8 @@ const mockTrx = { update: jest.fn().mockReturnThis(), delete: jest.fn().mockReturnThis(), where: jest.fn().mockReturnThis(), - returning: jest.fn().mockResolvedValue([{ id: 1, name: "test" }]), - first: jest.fn().mockResolvedValue({ id: 1, name: "test" }), + returning: jest.fn().mockResolvedValue([{ id: 1, name: 'test' }]), + first: jest.fn().mockResolvedValue({ id: 1, name: 'test' }), }; const mockKnexInstance = jest.fn((_tableName: string) => ({ @@ -37,8 +42,8 @@ const mockKnexInstance = jest.fn((_tableName: string) => ({ count: jest.fn().mockReturnThis(), modify: jest.fn().mockReturnThis(), clone: jest.fn().mockReturnThis(), - returning: jest.fn().mockResolvedValue([{ id: 1, name: "test" }]), - first: jest.fn().mockResolvedValue({ id: 1, name: "test" }), + returning: jest.fn().mockResolvedValue([{ id: 1, name: 'test' }]), + first: 
jest.fn().mockResolvedValue({ id: 1, name: 'test' }), })) as unknown as Knex; // Add transaction method to mock @@ -56,15 +61,15 @@ const mockKnexInstance = jest.fn((_tableName: string) => ({ .fn() .mockResolvedValue(undefined); -jest.mock("knex", () => { +jest.mock('knex', () => { return jest.fn(() => mockKnexInstance); }); -describe("PostgresAdapter", () => { +describe('PostgresAdapter', () => { let adapter: PostgresAdapter; const mockConfig: PostgresDatabaseConfig = { - type: "postgres", - connectionString: "postgresql://localhost:5432/testdb", + type: 'postgres', + connectionString: 'postgresql://localhost:5432/testdb', }; // Test interface for typed repositories @@ -83,133 +88,121 @@ describe("PostgresAdapter", () => { afterEach(async () => { // Reset the knexInstance to avoid disconnect issues with mocks - adapter["knexInstance"] = undefined; + adapter['knexInstance'] = undefined; }); - describe("constructor", () => { - it("should create adapter instance", () => { + describe('constructor', () => { + it('should create adapter instance', () => { expect(adapter).toBeDefined(); expect(adapter).toBeInstanceOf(PostgresAdapter); }); }); - describe("isConnected", () => { - it("should return false when not connected", () => { + describe('isConnected', () => { + it('should return false when not connected', () => { expect(adapter.isConnected()).toBe(false); }); - it("should return true when connected", () => { + it('should return true when connected', () => { adapter.connect(); expect(adapter.isConnected()).toBe(true); }); }); - describe("connect", () => { - it("should create Knex instance", () => { + describe('connect', () => { + it('should create Knex instance', () => { const knex = adapter.connect(); expect(knex).toBeDefined(); }); - it("should reuse existing connection", () => { + it('should reuse existing connection', () => { const knex1 = adapter.connect(); const knex2 = adapter.connect(); expect(knex1).toBe(knex2); }); }); - describe("disconnect", () => { - 
it("should destroy Knex instance", async () => { + describe('disconnect', () => { + it('should destroy Knex instance', async () => { adapter.connect(); await adapter.disconnect(); expect(adapter.isConnected()).toBe(false); }); }); - describe("getKnex", () => { - it("should throw when not connected", () => { - expect(() => adapter.getKnex()).toThrow("PostgreSQL not connected"); + describe('getKnex', () => { + it('should throw when not connected', () => { + expect(() => adapter.getKnex()).toThrow('PostgreSQL not connected'); }); - it("should return Knex instance when connected", () => { + it('should return Knex instance when connected', () => { adapter.connect(); expect(adapter.getKnex()).toBeDefined(); }); }); - describe("createRepository", () => { + describe('createRepository', () => { beforeEach(() => { adapter.connect(); }); - it("should create a repository with all CRUD methods", () => { + it('should create a repository with all CRUD methods', () => { const repo = adapter.createRepository({ - table: "users", - primaryKey: "id", - columns: ["id", "name", "email"], + table: 'users', + primaryKey: 'id', + columns: ['id', 'name', 'email'], }); - expect(repo).toBeDefined(); - expect(typeof repo.create).toBe("function"); - expect(typeof repo.findById).toBe("function"); - expect(typeof repo.findAll).toBe("function"); - expect(typeof repo.findPage).toBe("function"); - expect(typeof repo.updateById).toBe("function"); - expect(typeof repo.deleteById).toBe("function"); - expect(typeof repo.count).toBe("function"); - expect(typeof repo.exists).toBe("function"); - // Bulk operations - expect(typeof repo.insertMany).toBe("function"); - expect(typeof repo.updateMany).toBe("function"); - expect(typeof repo.deleteMany).toBe("function"); - }); - - it("should use default primary key when not specified", () => { + testRepositoryMethods(repo); + }); + + it('should use default primary key when not specified', () => { const repo = adapter.createRepository({ - table: "users", + table: 
'users', }); expect(repo).toBeDefined(); }); - it("should have insertMany method that returns array", async () => { - const repo = adapter.createRepository({ table: "users" }); + it('should have insertMany method that returns array', async () => { + const repo = adapter.createRepository({ table: 'users' }); // Test that insertMany returns an array (mock returns array) const result = await repo.insertMany([ - { name: "John" }, - { name: "Jane" }, + { name: 'John' }, + { name: 'Jane' }, ]); expect(Array.isArray(result)).toBe(true); }); - it("should return empty array when insertMany with empty data", async () => { - const repo = adapter.createRepository({ table: "users" }); + it('should return empty array when insertMany with empty data', async () => { + const repo = adapter.createRepository({ table: 'users' }); const result = await repo.insertMany([]); expect(result).toEqual([]); }); - it("should have updateMany method that returns count", async () => { - const repo = adapter.createRepository({ table: "users" }); + it('should have updateMany method that returns count', async () => { + const repo = adapter.createRepository({ table: 'users' }); // updateMany method exists - expect(typeof repo.updateMany).toBe("function"); + expect(typeof repo.updateMany).toBe('function'); }); - it("should have deleteMany method that returns count", async () => { - const repo = adapter.createRepository({ table: "users" }); + it('should have deleteMany method that returns count', async () => { + const repo = adapter.createRepository({ table: 'users' }); // deleteMany method exists - expect(typeof repo.deleteMany).toBe("function"); + expect(typeof repo.deleteMany).toBe('function'); }); }); - describe("withTransaction", () => { + describe('withTransaction', () => { beforeEach(() => { adapter.connect(); }); - it("should execute callback within transaction", async () => { + it('should execute callback within transaction', async () => { const mockCallback = jest.fn().mockResolvedValue({ 
success: true }); const result = await adapter.withTransaction(mockCallback); @@ -223,86 +216,84 @@ describe("PostgresAdapter", () => { ); }); - it("should set statement timeout in transaction", async () => { - await adapter.withTransaction(async () => "result", { timeout: 15000 }); + it('should set statement timeout in transaction', async () => { + await adapter.withTransaction(async () => 'result', { timeout: 15000 }); expect(mockTrx.raw).toHaveBeenCalledWith( - "SET LOCAL statement_timeout = 15000", + 'SET LOCAL statement_timeout = 15000', ); }); - it("should provide transaction context with createRepository", async () => { + it('should provide transaction context with createRepository', async () => { let capturedContext: PostgresTransactionContext | undefined; await adapter.withTransaction(async (ctx) => { capturedContext = ctx; - return "done"; + return 'done'; }); expect(capturedContext).toBeDefined(); - expect(capturedContext!.transaction).toBeDefined(); - expect(typeof capturedContext!.createRepository).toBe("function"); }); - it("should propagate errors from callback", async () => { - const error = new Error("Test error"); + it('should propagate errors from callback', async () => { + const error = new Error('Test error'); await expect( adapter.withTransaction(async () => { throw error; }), - ).rejects.toThrow("Test error"); + ).rejects.toThrow('Test error'); }); - it("should support isolation levels", async () => { + it('should support isolation levels', async () => { const mockTransaction = ( mockKnexInstance as unknown as { transaction: jest.Mock } ).transaction; - await adapter.withTransaction(async () => "result", { - isolationLevel: "serializable", + await adapter.withTransaction(async () => 'result', { + isolationLevel: 'serializable', }); expect(mockTransaction).toHaveBeenCalledWith(expect.any(Function), { - isolationLevel: "serializable", + isolationLevel: 'serializable', }); }); - it("should use default isolation level when not specified", async 
() => { + it('should use default isolation level when not specified', async () => { const mockTransaction = ( mockKnexInstance as unknown as { transaction: jest.Mock } ).transaction; - await adapter.withTransaction(async () => "result"); + await adapter.withTransaction(async () => 'result'); expect(mockTransaction).toHaveBeenCalledWith(expect.any(Function), { - isolationLevel: "read committed", + isolationLevel: 'read committed', }); }); }); - describe("healthCheck", () => { - it("should return unhealthy when not connected", async () => { + describe('healthCheck', () => { + it('should return unhealthy when not connected', async () => { const result = await adapter.healthCheck(); expect(result.healthy).toBe(false); - expect(result.type).toBe("postgres"); - expect(result.error).toBe("Not connected to PostgreSQL"); + expect(result.type).toBe('postgres'); + expect(result.error).toBe('Not connected to PostgreSQL'); expect(result.responseTimeMs).toBeGreaterThanOrEqual(0); }); - it("should have healthCheck method", () => { - expect(typeof adapter.healthCheck).toBe("function"); + it('should have healthCheck method', () => { + expect(typeof adapter.healthCheck).toBe('function'); }); - it("should return response time in result", async () => { + it('should return response time in result', async () => { const result = await adapter.healthCheck(); - expect(typeof result.responseTimeMs).toBe("number"); + expect(typeof result.responseTimeMs).toBe('number'); expect(result.responseTimeMs).toBeGreaterThanOrEqual(0); }); - it("should return healthy result when connected", async () => { + it('should return healthy result when connected', async () => { // Create a fresh adapter and set up raw mock before health check const freshAdapter = new PostgresAdapter(mockConfig); freshAdapter.connect(); @@ -311,118 +302,106 @@ describe("PostgresAdapter", () => { // that healthCheck returns something when connected const result = await freshAdapter.healthCheck(); - 
expect(result.type).toBe("postgres"); + expect(result.type).toBe('postgres'); expect(result.responseTimeMs).toBeGreaterThanOrEqual(0); // Note: In real tests with actual DB, this would be true // With mocks, we're just verifying the method works }); }); - describe("Soft Delete", () => { - it("should not have soft delete methods when softDelete is disabled", () => { + describe('Soft Delete', () => { + it('should not have soft delete methods when softDelete is disabled', () => { adapter.connect(); const repo = adapter.createRepository({ - table: "users", + table: 'users', softDelete: false, }); - - expect(repo.softDelete).toBeUndefined(); - expect(repo.softDeleteMany).toBeUndefined(); - expect(repo.restore).toBeUndefined(); - expect(repo.restoreMany).toBeUndefined(); - expect(repo.findAllWithDeleted).toBeUndefined(); - expect(repo.findDeleted).toBeUndefined(); + testSoftDeleteMethods(repo, false); }); - it("should have soft delete methods when softDelete is enabled", () => { + it('should have soft delete methods when softDelete is enabled', () => { adapter.connect(); const repo = adapter.createRepository({ - table: "users", + table: 'users', softDelete: true, }); - - expect(typeof repo.softDelete).toBe("function"); - expect(typeof repo.softDeleteMany).toBe("function"); - expect(typeof repo.restore).toBe("function"); - expect(typeof repo.restoreMany).toBe("function"); - expect(typeof repo.findAllWithDeleted).toBe("function"); - expect(typeof repo.findDeleted).toBe("function"); + testSoftDeleteMethods(repo, true); }); - it("should soft delete a record by setting deleted_at", async () => { + it('should soft delete a record by setting deleted_at', async () => { adapter.connect(); const repo = adapter.createRepository({ - table: "users", + table: 'users', softDelete: true, }); - await repo.softDelete!("123"); + await repo.softDelete?.('123'); // Verify that update was called (soft delete sets timestamp instead of deleting) const knexTableMock = mockKnexInstance as unknown 
as jest.Mock; - expect(knexTableMock).toHaveBeenCalledWith("users"); + expect(knexTableMock).toHaveBeenCalledWith('users'); }); - it("should use custom softDeleteField", () => { + it('should use custom softDeleteField', () => { adapter.connect(); const repo = adapter.createRepository({ - table: "users", + table: 'users', softDelete: true, - softDeleteField: "removed_at", + softDeleteField: 'removed_at', }); // Verify soft delete methods are available with custom field - expect(typeof repo.softDelete).toBe("function"); - expect(typeof repo.restore).toBe("function"); + expect(typeof repo.softDelete).toBe('function'); + expect(typeof repo.restore).toBe('function'); }); - it("should provide restore method when soft delete is enabled", () => { + it('should provide restore method when soft delete is enabled', () => { adapter.connect(); const repo = adapter.createRepository({ - table: "users", + table: 'users', softDelete: true, }); - expect(typeof repo.restore).toBe("function"); - expect(typeof repo.restoreMany).toBe("function"); + expect(typeof repo.restore).toBe('function'); + expect(typeof repo.restoreMany).toBe('function'); }); - it("should provide findDeleted method when soft delete is enabled", () => { + it('should provide findDeleted method when soft delete is enabled', () => { adapter.connect(); const repo = adapter.createRepository({ - table: "users", + table: 'users', softDelete: true, }); - expect(typeof repo.findDeleted).toBe("function"); + expect(typeof repo.findDeleted).toBe('function'); }); - it("should provide findAllWithDeleted method when soft delete is enabled", () => { + it('should provide findAllWithDeleted method when soft delete is enabled', () => { adapter.connect(); const repo = adapter.createRepository({ - table: "users", + table: 'users', softDelete: true, }); - expect(typeof repo.findAllWithDeleted).toBe("function"); + expect(typeof repo.findAllWithDeleted).toBe('function'); }); - it("should provide softDeleteMany method when soft delete is 
enabled", () => { + it('should provide softDeleteMany method when soft delete is enabled', () => { adapter.connect(); const repo = adapter.createRepository({ - table: "users", + table: 'users', softDelete: true, }); - expect(typeof repo.softDeleteMany).toBe("function"); + expect(typeof repo.softDeleteMany).toBe('function'); }); - it("should have all soft delete methods defined correctly", () => { + it('should have all soft delete methods defined correctly', () => { adapter.connect(); const repo = adapter.createRepository({ - table: "users", + table: 'users', softDelete: true, - columns: ["id", "name", "deleted_at"], + columns: ['id', 'name', 'deleted_at'], }); // All soft delete methods should be defined @@ -434,77 +413,77 @@ describe("PostgresAdapter", () => { expect(repo.findDeleted).toBeDefined(); // They should all be functions - expect(typeof repo.softDelete).toBe("function"); - expect(typeof repo.softDeleteMany).toBe("function"); - expect(typeof repo.restore).toBe("function"); - expect(typeof repo.restoreMany).toBe("function"); - expect(typeof repo.findAllWithDeleted).toBe("function"); - expect(typeof repo.findDeleted).toBe("function"); + expect(typeof repo.softDelete).toBe('function'); + expect(typeof repo.softDeleteMany).toBe('function'); + expect(typeof repo.restore).toBe('function'); + expect(typeof repo.restoreMany).toBe('function'); + expect(typeof repo.findAllWithDeleted).toBe('function'); + expect(typeof repo.findDeleted).toBe('function'); }); }); - describe("Timestamps", () => { - it("should accept timestamps configuration option", () => { + describe('Timestamps', () => { + it('should accept timestamps configuration option', () => { adapter.connect(); const repo = adapter.createRepository({ - table: "users", + table: 'users', timestamps: true, }); expect(repo).toBeDefined(); - expect(typeof repo.create).toBe("function"); + expect(typeof repo.create).toBe('function'); }); - it("should accept custom timestamp field names", () => { + it('should accept 
custom timestamp field names', () => { adapter.connect(); const repo = adapter.createRepository({ - table: "users", + table: 'users', timestamps: true, - createdAtField: "date_created", - updatedAtField: "date_modified", + createdAtField: 'date_created', + updatedAtField: 'date_modified', }); expect(repo).toBeDefined(); }); - it("should have all CRUD methods when timestamps enabled", () => { + it('should have all CRUD methods when timestamps enabled', () => { adapter.connect(); const repo = adapter.createRepository({ - table: "users", + table: 'users', timestamps: true, }); - expect(typeof repo.create).toBe("function"); - expect(typeof repo.findById).toBe("function"); - expect(typeof repo.findAll).toBe("function"); - expect(typeof repo.findPage).toBe("function"); - expect(typeof repo.updateById).toBe("function"); - expect(typeof repo.deleteById).toBe("function"); - expect(typeof repo.insertMany).toBe("function"); - expect(typeof repo.updateMany).toBe("function"); - expect(typeof repo.deleteMany).toBe("function"); + expect(typeof repo.create).toBe('function'); + expect(typeof repo.findById).toBe('function'); + expect(typeof repo.findAll).toBe('function'); + expect(typeof repo.findPage).toBe('function'); + expect(typeof repo.updateById).toBe('function'); + expect(typeof repo.deleteById).toBe('function'); + expect(typeof repo.insertMany).toBe('function'); + expect(typeof repo.updateMany).toBe('function'); + expect(typeof repo.deleteMany).toBe('function'); }); - it("should work with both timestamps and soft delete enabled", () => { + it('should work with both timestamps and soft delete enabled', () => { adapter.connect(); const repo = adapter.createRepository({ - table: "users", + table: 'users', timestamps: true, softDelete: true, - columns: ["id", "name", "created_at", "updated_at", "deleted_at"], + columns: ['id', 'name', 'created_at', 'updated_at', 'deleted_at'], }); expect(repo).toBeDefined(); - expect(typeof repo.create).toBe("function"); - expect(typeof 
repo.softDelete).toBe("function"); - expect(typeof repo.restore).toBe("function"); + expect(typeof repo.create).toBe('function'); + expect(typeof repo.softDelete).toBe('function'); + expect(typeof repo.restore).toBe('function'); }); - it("should use default field names when not specified", () => { + it('should use default field names when not specified', () => { adapter.connect(); // Default: created_at, updated_at for PostgreSQL const repo = adapter.createRepository({ - table: "users", + table: 'users', timestamps: true, }); @@ -512,10 +491,10 @@ describe("PostgresAdapter", () => { }); }); - describe("Advanced Query Operations", () => { - describe("findOne", () => { - it("should find one row by filter", async () => { - const mockRow = { id: 1, name: "John", email: "john@example.com" }; + describe('Advanced Query Operations', () => { + describe('findOne', () => { + it('should find one row by filter', async () => { + const mockRow = { id: 1, name: 'John', email: 'john@example.com' }; const mockQb = { select: jest.fn().mockReturnThis(), where: jest.fn().mockReturnThis(), @@ -524,46 +503,42 @@ describe("PostgresAdapter", () => { }; const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + adapter['knexInstance'] = mockKnex; const repo = adapter.createRepository({ - table: "users", - columns: ["id", "name", "email"], + table: 'users', + columns: ['id', 'name', 'email'], }); - const result = await repo.findOne({ email: "john@example.com" }); + const result = await repo.findOne({ email: 'john@example.com' }); - expect(mockQb.select).toHaveBeenCalledWith("*"); - expect(mockQb.where).toHaveBeenCalledWith("email", "john@example.com"); + expect(mockQb.select).toHaveBeenCalledWith('*'); + expect(mockQb.where).toHaveBeenCalledWith('email', 'john@example.com'); expect(result).toEqual(mockRow); }); - it("should return null when findOne finds nothing", async () => { - const mockQb = { - select: jest.fn().mockReturnThis(), - where: 
jest.fn().mockReturnThis(), + it('should return null when findOne finds nothing', async () => { + const { mockKnex } = createMockKnex({ first: jest.fn().mockResolvedValue(undefined), - }; - - const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + }); + adapter['knexInstance'] = mockKnex as unknown as Knex; const repo = adapter.createRepository({ - table: "users", - columns: ["email"], + table: 'users', + columns: ['email'], }); - const result = await repo.findOne({ email: "nonexistent@example.com" }); + const result = await repo.findOne({ email: 'nonexistent@example.com' }); expect(result).toBeNull(); }); }); - describe("upsert", () => { - it("should update existing row", async () => { - const existingRow = { id: 1, name: "John", email: "john@example.com" }; + describe('upsert', () => { + it('should update existing row', async () => { + const existingRow = { id: 1, name: 'John', email: 'john@example.com' }; const updatedRow = { id: 1, - name: "John Updated", - email: "john@example.com", + name: 'John Updated', + email: 'john@example.com', }; const mockSelectQb = { @@ -584,22 +559,22 @@ describe("PostgresAdapter", () => { return callCount === 1 ? 
mockSelectQb : mockUpdateQb; }) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + adapter['knexInstance'] = mockKnex; const repo = adapter.createRepository({ - table: "users", - columns: ["id", "name", "email"], + table: 'users', + columns: ['id', 'name', 'email'], }); const result = await repo.upsert( - { email: "john@example.com" }, - { name: "John Updated" }, + { email: 'john@example.com' }, + { name: 'John Updated' }, ); expect(result).toEqual(updatedRow); }); - it("should insert new row when not exists", async () => { - const newRow = { id: 1, name: "New User", email: "new@example.com" }; + it('should insert new row when not exists', async () => { + const newRow = { id: 1, name: 'New User', email: 'new@example.com' }; const mockSelectQb = { select: jest.fn().mockReturnThis(), @@ -618,25 +593,25 @@ describe("PostgresAdapter", () => { return callCount === 1 ? mockSelectQb : mockInsertQb; }) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + adapter['knexInstance'] = mockKnex; const repo = adapter.createRepository({ - table: "users", - columns: ["id", "name", "email"], + table: 'users', + columns: ['id', 'name', 'email'], }); const result = await repo.upsert( - { email: "new@example.com" }, - { name: "New User" }, + { email: 'new@example.com' }, + { name: 'New User' }, ); expect(result).toEqual(newRow); }); }); - describe("distinct", () => { - it("should return distinct values for a column", async () => { - const mockRows = [{ status: "active" }, { status: "pending" }]; - const mockQb = { + describe('distinct', () => { + it('should return distinct values for a column', async () => { + const mockRows = [{ status: 'active' }, { status: 'pending' }]; + const { mockKnex, mockQb } = createMockKnex({ distinct: jest.fn().mockReturnThis(), modify: jest.fn().mockImplementation(function ( this: unknown, @@ -645,28 +620,25 @@ describe("PostgresAdapter", () => { fn(this); return Promise.resolve(mockRows); }), - where: jest.fn().mockReturnThis(), - }; - - 
const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + }); + adapter['knexInstance'] = mockKnex as unknown as Knex; const repo = adapter.createRepository({ - table: "users", - columns: ["status"], + table: 'users', + columns: ['status'], }); - const result = await repo.distinct("status"); + const result = await repo.distinct('status'); - expect(mockQb.distinct).toHaveBeenCalledWith("status"); - expect(result).toEqual(["active", "pending"]); + expect(mockQb.distinct).toHaveBeenCalledWith('status'); + expect(result).toEqual(['active', 'pending']); }); }); - describe("select", () => { - it("should return rows with only selected columns", async () => { + describe('select', () => { + it('should return rows with only selected columns', async () => { const mockRows = [ - { name: "John", email: "john@example.com" }, - { name: "Jane", email: "jane@example.com" }, + { name: 'John', email: 'john@example.com' }, + { name: 'Jane', email: 'jane@example.com' }, ]; const mockQb = { select: jest.fn().mockReturnThis(), @@ -681,137 +653,123 @@ describe("PostgresAdapter", () => { }; const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + adapter['knexInstance'] = mockKnex; const repo = adapter.createRepository({ - table: "users", - columns: ["name", "email", "active"], + table: 'users', + columns: ['name', 'email', 'active'], }); - const result = await repo.select({ active: true }, ["name", "email"]); + const result = await repo.select({ active: true }, ['name', 'email']); - expect(mockQb.select).toHaveBeenCalledWith(["name", "email"]); + expect(mockQb.select).toHaveBeenCalledWith(['name', 'email']); expect(result).toEqual(mockRows); }); }); }); - describe("Repository Hooks", () => { - it("should call beforeCreate hook and use modified data", async () => { - const mockRow = { id: 1, name: "MODIFIED" }; - const mockQb = { - insert: jest.fn().mockReturnThis(), + describe('Repository Hooks', () => { + 
it('should call beforeCreate hook and use modified data', async () => { + const mockRow = { id: 1, name: 'MODIFIED' }; + const { mockKnex, mockQb } = createMockKnex({ returning: jest.fn().mockResolvedValue([mockRow]), - }; - - const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + }); + adapter['knexInstance'] = mockKnex as unknown as Knex; const beforeCreate = jest.fn().mockImplementation((context) => ({ ...context.data, - name: "MODIFIED", + name: 'MODIFIED', })); const repo = adapter.createRepository({ - table: "users", + table: 'users', hooks: { beforeCreate }, }); - await repo.create({ name: "Original" }); + await repo.create({ name: 'Original' }); expect(beforeCreate).toHaveBeenCalledWith({ - data: { name: "Original" }, - operation: "create", + data: { name: 'Original' }, + operation: 'create', isBulk: false, }); expect(mockQb.insert).toHaveBeenCalledWith( - expect.objectContaining({ name: "MODIFIED" }), + expect.objectContaining({ name: 'MODIFIED' }), ); }); - it("should call afterCreate hook with created entity", async () => { - const mockRow = { id: 1, name: "Test" }; + it('should call afterCreate hook with created entity', async () => { + const mockRow = { id: 1, name: 'Test' }; const mockQb = { insert: jest.fn().mockReturnThis(), returning: jest.fn().mockResolvedValue([mockRow]), }; const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + adapter['knexInstance'] = mockKnex; const afterCreate = jest.fn(); const repo = adapter.createRepository({ - table: "users", + table: 'users', hooks: { afterCreate }, }); - await repo.create({ name: "Test" }); + await repo.create({ name: 'Test' }); - expect(afterCreate).toHaveBeenCalledWith({ id: 1, name: "Test" }); + expect(afterCreate).toHaveBeenCalledWith({ id: 1, name: 'Test' }); }); - it("should call beforeUpdate hook and use modified data", async () => { - const mockRow = { id: 1, name: "UPDATED" }; - const mockQb = { - where: 
jest.fn().mockReturnThis(), - update: jest.fn().mockReturnThis(), + it('should call beforeUpdate hook and use modified data', async () => { + const mockRow = { id: 1, name: 'UPDATED' }; + const { mockKnex } = createMockKnex({ returning: jest.fn().mockResolvedValue([mockRow]), - }; - - const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + }); + adapter['knexInstance'] = mockKnex as unknown as Knex; const beforeUpdate = jest.fn().mockImplementation((context) => ({ ...context.data, - name: "UPDATED", + name: 'UPDATED', })); const repo = adapter.createRepository({ - table: "users", + table: 'users', hooks: { beforeUpdate }, }); - await repo.updateById(1, { name: "Original" }); + await repo.updateById(1, { name: 'Original' }); expect(beforeUpdate).toHaveBeenCalledWith({ - data: { name: "Original" }, - operation: "update", + data: { name: 'Original' }, + operation: 'update', isBulk: false, }); }); - it("should call afterUpdate hook with updated entity", async () => { - const mockRow = { id: 1, name: "Updated" }; - const mockQb = { - where: jest.fn().mockReturnThis(), - update: jest.fn().mockReturnThis(), + it('should call afterUpdate hook with updated entity', async () => { + const mockRow = { id: 1, name: 'Updated' }; + const { mockKnex } = createMockKnex({ returning: jest.fn().mockResolvedValue([mockRow]), - }; - - const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + }); + adapter['knexInstance'] = mockKnex as unknown as Knex; const afterUpdate = jest.fn(); const repo = adapter.createRepository({ - table: "users", + table: 'users', hooks: { afterUpdate }, }); - await repo.updateById(1, { name: "Updated" }); + await repo.updateById(1, { name: 'Updated' }); expect(afterUpdate).toHaveBeenCalledWith(mockRow); }); - it("should call beforeDelete hook with entity id", async () => { - const mockQb = { - where: jest.fn().mockReturnThis(), + it('should call beforeDelete hook with entity 
id', async () => { + const { mockKnex } = createMockKnex({ delete: jest.fn().mockResolvedValue(1), - }; - - const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + }); + adapter['knexInstance'] = mockKnex as unknown as Knex; const beforeDelete = jest.fn(); const repo = adapter.createRepository({ - table: "users", + table: 'users', hooks: { beforeDelete }, }); await repo.deleteById(1); @@ -819,19 +777,16 @@ describe("PostgresAdapter", () => { expect(beforeDelete).toHaveBeenCalledWith(1); }); - it("should call afterDelete hook with success status", async () => { - const mockQb = { - where: jest.fn().mockReturnThis(), + it('should call afterDelete hook with success status', async () => { + const { mockKnex } = createMockKnex({ delete: jest.fn().mockResolvedValue(1), - }; - - const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + }); + adapter['knexInstance'] = mockKnex as unknown as Knex; const afterDelete = jest.fn(); const repo = adapter.createRepository({ - table: "users", + table: 'users', hooks: { afterDelete }, }); await repo.deleteById(1); @@ -839,19 +794,16 @@ describe("PostgresAdapter", () => { expect(afterDelete).toHaveBeenCalledWith(true); }); - it("should call afterDelete with false when entity not found", async () => { - const mockQb = { - where: jest.fn().mockReturnThis(), + it('should call afterDelete with false when entity not found', async () => { + const { mockKnex } = createMockKnex({ delete: jest.fn().mockResolvedValue(0), - }; - - const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + }); + adapter['knexInstance'] = mockKnex as unknown as Knex; const afterDelete = jest.fn(); const repo = adapter.createRepository({ - table: "users", + table: 'users', hooks: { afterDelete }, }); await repo.deleteById(999); @@ -859,7 +811,7 @@ describe("PostgresAdapter", () => { expect(afterDelete).toHaveBeenCalledWith(false); }); - 
it("should apply filters and sort in findPage", async () => { + it('should apply filters and sort in findPage', async () => { const mockQb = { select: jest.fn().mockReturnThis(), where: jest.fn().mockReturnThis(), @@ -875,27 +827,27 @@ describe("PostgresAdapter", () => { offset: jest.fn().mockResolvedValue([{ id: 1 }]), }; const mockCount = { - modify: jest.fn().mockResolvedValue([{ count: "2" }]), + modify: jest.fn().mockResolvedValue([{ count: '2' }]), }; (mockQb as unknown as { count: jest.Mock }).count = jest .fn() .mockReturnValue(mockCount); const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + adapter['knexInstance'] = mockKnex; const repo = adapter.createRepository({ - table: "users", - columns: ["id", "name", "email", "status", "active"], + table: 'users', + columns: ['id', 'name', 'email', 'status', 'active'], }); const result = await repo.findPage({ filter: { - status: { in: ["active"] }, - email: { like: "%@test.com" }, + status: { in: ['active'] }, + email: { like: '%@test.com' }, active: { isNull: true }, }, - sort: "-name", + sort: '-name', page: 2, limit: 1, }); @@ -904,96 +856,84 @@ describe("PostgresAdapter", () => { expect(mockQb.whereIn).toHaveBeenCalled(); expect(mockQb.whereILike).toHaveBeenCalled(); expect(mockQb.whereNull).toHaveBeenCalled(); - expect(mockQb.orderBy).toHaveBeenCalledWith("name", "desc"); + expect(mockQb.orderBy).toHaveBeenCalledWith('name', 'desc'); }); - it("should upsert existing records", async () => { - const mockQb = { - select: jest.fn().mockReturnThis(), - where: jest.fn().mockReturnThis(), + it('should upsert existing records', async () => { + const { mockKnex, mockQb } = createMockKnex({ first: jest.fn().mockResolvedValue({ id: 1 }), - update: jest.fn().mockReturnThis(), - returning: jest.fn().mockResolvedValue([{ id: 1, name: "Updated" }]), - }; - const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + returning: 
jest.fn().mockResolvedValue([{ id: 1, name: 'Updated' }]), + }); + adapter['knexInstance'] = mockKnex as unknown as Knex; const repo = adapter.createRepository({ - table: "users", - columns: ["id", "name", "email", "status", "active"], + table: 'users', + columns: ['id', 'name', 'email', 'status', 'active'], timestamps: true, }); - const result = await repo.upsert({ id: 1 }, { name: "Updated" }); + const result = await repo.upsert({ id: 1 }, { name: 'Updated' }); - expect(result).toEqual({ id: 1, name: "Updated" }); + expect(result).toEqual({ id: 1, name: 'Updated' }); expect(mockQb.update).toHaveBeenCalled(); }); - it("should upsert new records when none found", async () => { - const mockQb = { - select: jest.fn().mockReturnThis(), - where: jest.fn().mockReturnThis(), + it('should upsert new records when none found', async () => { + const { mockKnex, mockQb } = createMockKnex({ first: jest.fn().mockResolvedValue(undefined), - insert: jest.fn().mockReturnThis(), - returning: jest.fn().mockResolvedValue([{ id: 2, name: "New" }]), - }; - const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + returning: jest.fn().mockResolvedValue([{ id: 2, name: 'New' }]), + }); + adapter['knexInstance'] = mockKnex as unknown as Knex; const repo = adapter.createRepository({ - table: "users", - columns: ["id", "name", "email", "status", "active"], + table: 'users', + columns: ['id', 'name', 'email', 'status', 'active'], timestamps: true, }); - const result = await repo.upsert({ email: "a@b.com" }, { name: "New" }); + const result = await repo.upsert({ email: 'a@b.com' }, { name: 'New' }); - expect(result).toEqual({ id: 2, name: "New" }); + expect(result).toEqual({ id: 2, name: 'New' }); expect(mockQb.insert).toHaveBeenCalled(); }); - it("should return distinct values", async () => { - const rows = [{ email: "a@b.com" }, { email: "b@b.com" }]; - const mockQb = { - distinct: jest.fn().mockReturnThis(), + it('should return distinct values', 
async () => { + const rows = [{ email: 'a@b.com' }, { email: 'b@b.com' }]; + const { mockKnex, mockQb } = createMockKnex({ modify: jest.fn().mockResolvedValue(rows), - }; - const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + }); + adapter['knexInstance'] = mockKnex as unknown as Knex; const repo = adapter.createRepository({ - table: "users", - columns: ["id", "name", "email", "status", "active"], + table: 'users', + columns: ['id', 'name', 'email', 'status', 'active'], }); - const result = await repo.distinct("email"); + const result = await repo.distinct('email'); - expect(mockQb.distinct).toHaveBeenCalledWith("email"); - expect(result).toEqual(["a@b.com", "b@b.com"]); + expect(mockQb.distinct).toHaveBeenCalledWith('email'); + expect(result).toEqual(['a@b.com', 'b@b.com']); }); - it("should select projected fields", async () => { - const rows = [{ name: "John" }]; - const mockQb = { - select: jest.fn().mockReturnThis(), + it('should select projected fields', async () => { + const rows = [{ name: 'John' }]; + const { mockKnex, mockQb } = createMockKnex({ modify: jest.fn().mockResolvedValue(rows), - }; - const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + }); + adapter['knexInstance'] = mockKnex as unknown as Knex; const repo = adapter.createRepository({ - table: "users", - columns: ["id", "name", "email", "status", "active"], + table: 'users', + columns: ['id', 'name', 'email', 'status', 'active'], }); - const result = await repo.select({}, ["name"]); + const result = await repo.select({}, ['name']); - expect(mockQb.select).toHaveBeenCalledWith(["name"]); - expect(result).toEqual([{ name: "John" }]); + expect(mockQb.select).toHaveBeenCalledWith(['name']); + expect(result).toEqual([{ name: 'John' }]); }); - it("should soft delete and restore records when enabled", async () => { + it('should soft delete and restore records when enabled', async () => { const updateMock = 
jest.fn().mockResolvedValueOnce(1).mockReturnThis(); const mockQb = { select: jest.fn().mockReturnThis(), @@ -1001,14 +941,14 @@ describe("PostgresAdapter", () => { whereNull: jest.fn().mockReturnThis(), whereNotNull: jest.fn().mockReturnThis(), update: updateMock, - returning: jest.fn().mockResolvedValue([{ id: 1, name: "Restored" }]), + returning: jest.fn().mockResolvedValue([{ id: 1, name: 'Restored' }]), }; const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + adapter['knexInstance'] = mockKnex; const repo = adapter.createRepository({ - table: "users", - columns: ["id", "name", "email", "status", "active", "deleted_at"], + table: 'users', + columns: ['id', 'name', 'email', 'status', 'active', 'deleted_at'], softDelete: true, }); @@ -1016,23 +956,23 @@ describe("PostgresAdapter", () => { const restored = await repo.restore?.(1); expect(deleted).toBe(true); - expect(restored).toEqual({ id: 1, name: "Restored" }); + expect(restored).toEqual({ id: 1, name: 'Restored' }); expect(mockQb.update).toHaveBeenCalled(); }); }); - describe("healthCheck", () => { - it("should return unhealthy when not connected", async () => { + describe('healthCheck', () => { + it('should return unhealthy when not connected', async () => { const result = await adapter.healthCheck(); expect(result.healthy).toBe(false); - expect(result.error).toBe("Not connected to PostgreSQL"); - expect(result.type).toBe("postgres"); + expect(result.error).toBe('Not connected to PostgreSQL'); + expect(result.type).toBe('postgres'); }); - it("should return healthy when connected", async () => { + it('should return healthy when connected', async () => { const mockRaw = jest.fn().mockResolvedValue({ - rows: [{ version: "PostgreSQL 14.0", current_database: "testdb" }], + rows: [{ version: 'PostgreSQL 14.0', current_database: 'testdb' }], }); const mockKnex = { raw: mockRaw, @@ -1044,58 +984,58 @@ describe("PostgresAdapter", () => { }, } as unknown as Knex; - 
adapter["knexInstance"] = mockKnex; + adapter['knexInstance'] = mockKnex; const result = await adapter.healthCheck(); expect(result.healthy).toBe(true); - expect(result.type).toBe("postgres"); + expect(result.type).toBe('postgres'); expect(result.details?.activeConnections).toBe(2); expect(result.details?.poolSize).toBe(10); }); - it("should handle error during health check", async () => { + it('should handle error during health check', async () => { const mockRaw = jest .fn() - .mockRejectedValue(new Error("Connection failed")); + .mockRejectedValue(new Error('Connection failed')); const mockKnex = { raw: mockRaw, } as unknown as Knex; - adapter["knexInstance"] = mockKnex; + adapter['knexInstance'] = mockKnex; const result = await adapter.healthCheck(); expect(result.healthy).toBe(false); - expect(result.error).toBe("Connection failed"); + expect(result.error).toBe('Connection failed'); }); }); - describe("withTransaction", () => { - it("should execute callback within transaction successfully", async () => { + describe('withTransaction', () => { + it('should execute callback within transaction successfully', async () => { adapter.connect(); const callback = jest.fn(async (ctx: PostgresTransactionContext) => { expect(ctx.transaction).toBeDefined(); expect(ctx.createRepository).toBeDefined(); - return { result: "success" }; + return { result: 'success' }; }); const result = await adapter.withTransaction(callback); - expect(result).toEqual({ result: "success" }); + expect(result).toEqual({ result: 'success' }); expect(callback).toHaveBeenCalled(); expect(mockTrx.raw).toHaveBeenCalledWith( - expect.stringContaining("statement_timeout"), + expect.stringContaining('statement_timeout'), ); }); - it("should retry on serialization failure", async () => { + it('should retry on serialization failure', async () => { adapter.connect(); const serializationError = { - code: "40001", - message: "Serialization failure", + code: '40001', + message: 'Serialization failure', }; let 
attempt = 0; @@ -1115,20 +1055,20 @@ describe("PostgresAdapter", () => { (mockKnexInstance as unknown as { transaction: jest.Mock }).transaction = mockTransaction; - const callback = jest.fn(async () => ({ result: "success after retry" })); + const callback = jest.fn(async () => ({ result: 'success after retry' })); const result = await adapter.withTransaction(callback, { retries: 1 }); - expect(result).toEqual({ result: "success after retry" }); + expect(result).toEqual({ result: 'success after retry' }); expect(mockTransaction).toHaveBeenCalledTimes(2); }); - it("should retry on deadlock", async () => { + it('should retry on deadlock', async () => { adapter.connect(); const deadlockError = { - code: "40P01", - message: "Deadlock detected", + code: '40P01', + message: 'Deadlock detected', }; let attempt = 0; @@ -1148,19 +1088,19 @@ describe("PostgresAdapter", () => { (mockKnexInstance as unknown as { transaction: jest.Mock }).transaction = mockTransaction; - const callback = jest.fn(async () => ({ result: "success after retry" })); + const callback = jest.fn(async () => ({ result: 'success after retry' })); const result = await adapter.withTransaction(callback, { retries: 1 }); - expect(result).toEqual({ result: "success after retry" }); + expect(result).toEqual({ result: 'success after retry' }); }); - it("should throw after exhausting retries", async () => { + it('should throw after exhausting retries', async () => { adapter.connect(); const persistentError = { - code: "40001", - message: "Persistent serialization failure", + code: '40001', + message: 'Persistent serialization failure', }; const mockTransaction = jest.fn(async () => { @@ -1170,7 +1110,7 @@ describe("PostgresAdapter", () => { (mockKnexInstance as unknown as { transaction: jest.Mock }).transaction = mockTransaction; - const callback = jest.fn(async () => ({ result: "should not reach" })); + const callback = jest.fn(async () => ({ result: 'should not reach' })); await expect( 
adapter.withTransaction(callback, { retries: 2 }), @@ -1179,10 +1119,10 @@ describe("PostgresAdapter", () => { expect(mockTransaction).toHaveBeenCalledTimes(3); // initial + 2 retries }); - it("should handle all retryable error codes", async () => { + it('should handle all retryable error codes', async () => { adapter.connect(); - const retryableCodes = ["40001", "40P01", "55P03", "57P01", "57014"]; + const retryableCodes = ['40001', '40P01', '55P03', '57P01', '57014']; for (const code of retryableCodes) { jest.clearAllMocks(); @@ -1195,7 +1135,11 @@ describe("PostgresAdapter", () => { ) => { attempt++; if (attempt === 1) { - throw { code, message: `Error code ${code}` }; + const error = new Error(`Error code ${code}`) as Error & { + code: string; + }; + error.code = code; + throw error; } return callback(mockTrx); }, @@ -1212,7 +1156,7 @@ describe("PostgresAdapter", () => { } }); - it("should use specified isolation level", async () => { + it('should use specified isolation level', async () => { adapter.connect(); const mockTransaction = jest.fn( @@ -1220,7 +1164,7 @@ describe("PostgresAdapter", () => { callback: (trx: typeof mockTrx) => Promise, options?: { isolationLevel?: string }, ) => { - expect(options?.isolationLevel).toBe("serializable"); + expect(options?.isolationLevel).toBe('serializable'); return callback(mockTrx); }, ); @@ -1228,18 +1172,18 @@ describe("PostgresAdapter", () => { (mockKnexInstance as unknown as { transaction: jest.Mock }).transaction = mockTransaction; - const callback = jest.fn(async () => ({ result: "success" })); + const callback = jest.fn(async () => ({ result: 'success' })); await adapter.withTransaction(callback, { - isolationLevel: "serializable", + isolationLevel: 'serializable', }); expect(mockTransaction).toHaveBeenCalled(); }); }); - describe("complex filter operations", () => { - it("should apply all filter operators", async () => { + describe('complex filter operations', () => { + it('should apply all filter operators', 
async () => { const mockQb = { select: jest.fn().mockReturnThis(), where: jest.fn().mockReturnThis(), @@ -1252,47 +1196,47 @@ describe("PostgresAdapter", () => { first: jest.fn().mockResolvedValue({ id: 1 }), }; const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + adapter['knexInstance'] = mockKnex; const repo = adapter.createRepository({ - table: "users", - columns: ["id", "name", "email", "status", "active"], + table: 'users', + columns: ['id', 'name', 'email', 'status', 'active'], }); await repo.findOne({ id: { gt: 10, lt: 100 }, - status: { ne: "deleted" }, - name: { like: "John%" }, - email: { in: ["a@b.com", "c@d.com"] }, - }); - - expect(mockQb.where).toHaveBeenCalledWith("id", ">", 10); - expect(mockQb.where).toHaveBeenCalledWith("id", "<", 100); - expect(mockQb.whereNot).toHaveBeenCalledWith("status", "deleted"); - expect(mockQb.whereILike).toHaveBeenCalledWith("name", "John%"); - expect(mockQb.whereIn).toHaveBeenCalledWith("email", [ - "a@b.com", - "c@d.com", + status: { ne: 'deleted' }, + name: { like: 'John%' }, + email: { in: ['a@b.com', 'c@d.com'] }, + }); + + expect(mockQb.where).toHaveBeenCalledWith('id', '>', 10); + expect(mockQb.where).toHaveBeenCalledWith('id', '<', 100); + expect(mockQb.whereNot).toHaveBeenCalledWith('status', 'deleted'); + expect(mockQb.whereILike).toHaveBeenCalledWith('name', 'John%'); + expect(mockQb.whereIn).toHaveBeenCalledWith('email', [ + 'a@b.com', + 'c@d.com', ]); }); - it("should reject non-allowed fields", async () => { + it('should reject non-allowed fields', async () => { const mockKnex = jest.fn(() => ({ select: jest.fn().mockReturnThis(), })) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + adapter['knexInstance'] = mockKnex; const repo = adapter.createRepository({ - table: "users", - columns: ["id", "name"], + table: 'users', + columns: ['id', 'name'], }); - await expect(repo.findOne({ email: "test@test.com" })).rejects.toThrow( + await 
expect(repo.findOne({ email: 'test@test.com' })).rejects.toThrow( 'Field "email" is not allowed', ); }); - it("should support string sort format", async () => { + it('should support string sort format', async () => { const mockQb = { select: jest.fn().mockReturnThis(), count: jest.fn().mockReturnThis(), @@ -1304,26 +1248,26 @@ describe("PostgresAdapter", () => { }; const countQb = { count: jest.fn().mockReturnThis(), - modify: jest.fn().mockResolvedValue([{ count: "10" }]), + modify: jest.fn().mockResolvedValue([{ count: '10' }]), }; const mockKnex = jest.fn((tableName: string) => - tableName === "users" ? mockQb : countQb, + tableName === 'users' ? mockQb : countQb, ) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + adapter['knexInstance'] = mockKnex; const repo = adapter.createRepository({ - table: "users", - columns: ["id", "name", "email"], + table: 'users', + columns: ['id', 'name', 'email'], }); - await repo.findPage({ sort: "-name,+email" }); + await repo.findPage({ sort: '-name,+email' }); - expect(mockQb.orderBy).toHaveBeenCalledWith("name", "desc"); - expect(mockQb.orderBy).toHaveBeenCalledWith("email", "asc"); + expect(mockQb.orderBy).toHaveBeenCalledWith('name', 'desc'); + expect(mockQb.orderBy).toHaveBeenCalledWith('email', 'asc'); }); - it("should support object sort format", async () => { + it('should support object sort format', async () => { const mockQb = { select: jest.fn().mockReturnThis(), count: jest.fn().mockReturnThis(), @@ -1335,28 +1279,28 @@ describe("PostgresAdapter", () => { }; const countQb = { count: jest.fn().mockReturnThis(), - modify: jest.fn().mockResolvedValue([{ count: "10" }]), + modify: jest.fn().mockResolvedValue([{ count: '10' }]), }; const mockKnex = jest.fn((tableName: string) => - tableName === "users" ? mockQb : countQb, + tableName === 'users' ? 
mockQb : countQb, ) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + adapter['knexInstance'] = mockKnex; const repo = adapter.createRepository({ - table: "users", - columns: ["id", "name", "email"], + table: 'users', + columns: ['id', 'name', 'email'], }); - await repo.findPage({ sort: { name: -1, email: "asc" } }); + await repo.findPage({ sort: { name: -1, email: 'asc' } }); - expect(mockQb.orderBy).toHaveBeenCalledWith("name", "desc"); - expect(mockQb.orderBy).toHaveBeenCalledWith("email", "asc"); + expect(mockQb.orderBy).toHaveBeenCalledWith('name', 'desc'); + expect(mockQb.orderBy).toHaveBeenCalledWith('email', 'asc'); }); }); - describe("hook execution", () => { - it("should execute beforeCreate and afterCreate hooks", async () => { + describe('hook execution', () => { + it('should execute beforeCreate and afterCreate hooks', async () => { const beforeCreate = jest.fn(async ({ data }) => ({ ...data, modified: true, @@ -1367,23 +1311,23 @@ describe("PostgresAdapter", () => { insert: jest.fn().mockReturnThis(), returning: jest .fn() - .mockResolvedValue([{ id: 1, name: "test", modified: true }]), + .mockResolvedValue([{ id: 1, name: 'test', modified: true }]), }; const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + adapter['knexInstance'] = mockKnex; const repo = adapter.createRepository({ - table: "users", + table: 'users', hooks: { beforeCreate, afterCreate }, }); - const result = await repo.create({ name: "test" } as Partial); + const result = await repo.create({ name: 'test' } as Partial); expect(beforeCreate).toHaveBeenCalled(); expect(afterCreate).toHaveBeenCalledWith(result); }); - it("should execute beforeUpdate and afterUpdate hooks", async () => { + it('should execute beforeUpdate and afterUpdate hooks', async () => { const beforeUpdate = jest.fn(async ({ data }) => ({ ...data, updated: true, @@ -1397,22 +1341,22 @@ describe("PostgresAdapter", () => { returning: jest.fn().mockResolvedValue([{ id: 
1, updated: true }]), }; const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + adapter['knexInstance'] = mockKnex; const repo = adapter.createRepository({ - table: "users", + table: 'users', hooks: { beforeUpdate, afterUpdate }, }); const result = await repo.updateById(1, { - name: "updated", + name: 'updated', } as Partial); expect(beforeUpdate).toHaveBeenCalled(); expect(afterUpdate).toHaveBeenCalledWith(result); }); - it("should execute beforeDelete and afterDelete hooks", async () => { + it('should execute beforeDelete and afterDelete hooks', async () => { const beforeDelete = jest.fn(); const afterDelete = jest.fn(); @@ -1422,10 +1366,10 @@ describe("PostgresAdapter", () => { delete: jest.fn().mockResolvedValue(1), }; const mockKnex = jest.fn(() => mockQb) as unknown as Knex; - adapter["knexInstance"] = mockKnex; + adapter['knexInstance'] = mockKnex; const repo = adapter.createRepository({ - table: "users", + table: 'users', hooks: { beforeDelete, afterDelete }, }); @@ -1436,9 +1380,9 @@ describe("PostgresAdapter", () => { }); }); - describe("connection configuration", () => { - it("should apply custom pool configuration", () => { - const knexMock = require("knex"); + describe('connection configuration', () => { + it('should apply custom pool configuration', () => { + const knexMock = require('knex'); const customAdapter = new PostgresAdapter({ ...mockConfig, pool: { @@ -1464,8 +1408,8 @@ describe("PostgresAdapter", () => { ); }); - it("should apply custom connection overrides", () => { - const knexMock = require("knex"); + it('should apply custom connection overrides', () => { + const knexMock = require('knex'); adapter.connect({ debug: true }); expect(knexMock).toHaveBeenCalledWith( @@ -1474,14 +1418,14 @@ describe("PostgresAdapter", () => { }); }); - describe("getKnex", () => { - it("should throw error when not connected", () => { + describe('getKnex', () => { + it('should throw error when not connected', () => { 
expect(() => adapter.getKnex()).toThrow( - "PostgreSQL not connected. Call connect() first.", + 'PostgreSQL not connected. Call connect() first.', ); }); - it("should return knex instance when connected", () => { + it('should return knex instance when connected', () => { adapter.connect(); const knex = adapter.getKnex(); expect(knex).toBeDefined(); diff --git a/src/adapters/postgres.adapter.ts b/src/adapters/postgres.adapter.ts index 64864e7..be4b828 100644 --- a/src/adapters/postgres.adapter.ts +++ b/src/adapters/postgres.adapter.ts @@ -1,5 +1,5 @@ -import { Injectable, Logger } from "@nestjs/common"; -import knex, { Knex } from "knex"; +import { Injectable, Logger } from '@nestjs/common'; +import knex, { Knex } from 'knex'; import { PostgresDatabaseConfig, @@ -12,7 +12,590 @@ import { TransactionCallback, HealthCheckResult, DATABASE_KIT_CONSTANTS, -} from "../contracts/database.contracts"; +} from '../contracts/database.contracts'; +import { + shapePage, + addCreatedAtTimestamp, + addUpdatedAtTimestamp, + createErrorHealthResult, + createSuccessHealthResult, +} from '../utils/adapter.utils'; + +type FilterOps = Record; + +const comparisonHandlers: Array<{ + key: 'eq' | 'ne' | 'gt' | 'gte' | 'lt' | 'lte'; + apply: (qb: Knex.QueryBuilder, column: string, value: unknown) => void; +}> = [ + { + key: 'eq', + apply: (qb, column, value) => { + qb.where(column, value as any); + }, + }, + { + key: 'ne', + apply: (qb, column, value) => { + qb.whereNot(column, value as any); + }, + }, + { + key: 'gt', + apply: (qb, column, value) => { + qb.where(column, '>', value as any); + }, + }, + { + key: 'gte', + apply: (qb, column, value) => { + qb.where(column, '>=', value as any); + }, + }, + { + key: 'lt', + apply: (qb, column, value) => { + qb.where(column, '<', value as any); + }, + }, + { + key: 'lte', + apply: (qb, column, value) => { + qb.where(column, '<=', value as any); + }, + }, +]; + +function isPlainObject(value: unknown): value is Record { + return !!value && typeof 
value === 'object' && !Array.isArray(value); +} + +function coerceLikeValue(value: unknown): string | null { + if (value === null || value === undefined) { + return null; + } + if (typeof value === 'string' || typeof value === 'number') { + return String(value); + } + return JSON.stringify(value); +} + +function normalizeSortDirection(dir: unknown): 'asc' | 'desc' { + if (typeof dir === 'number') { + return dir === -1 ? 'desc' : 'asc'; + } + if (typeof dir === 'string') { + return dir.toLowerCase() === 'desc' ? 'desc' : 'asc'; + } + return 'asc'; +} + +function applyPostgresFilter( + qb: Knex.QueryBuilder, + filter: Record, + assertFieldAllowed: (field: string) => void, +): void { + Object.entries(filter).forEach(([column, value]) => { + assertFieldAllowed(column); + + if (!isPlainObject(value)) { + qb.where(column, value as any); + return; + } + + const ops = value as FilterOps; + + comparisonHandlers.forEach(({ key, apply }) => { + const opValue = ops[key]; + if (opValue !== undefined) { + apply(qb, column, opValue); + } + }); + + if (ops.in !== undefined) { + const values = Array.isArray(ops.in) ? ops.in : [ops.in]; + qb.whereIn(column, values as readonly any[]); + } + + if (ops.nin !== undefined) { + const values = Array.isArray(ops.nin) ? ops.nin : [ops.nin]; + qb.whereNotIn(column, values as readonly any[]); + } + + const likeValue = coerceLikeValue(ops.like); + if (likeValue !== null) { + qb.whereILike(column, likeValue); + } + + if (ops.isNull === true) qb.whereNull(column); + if (ops.isNotNull === true) qb.whereNotNull(column); + }); +} + +function applyPostgresSort( + qb: Knex.QueryBuilder, + sort: string | Record | undefined, + assertFieldAllowed: (field: string) => void, +): void { + if (!sort) return; + + if (typeof sort === 'string') { + const parts = sort + .split(',') + .map((part) => part.trim()) + .filter(Boolean); + for (const part of parts) { + const direction = part.startsWith('-') ? 
'desc' : 'asc'; + const column = part.replace(/^[-+]/, ''); + assertFieldAllowed(column); + qb.orderBy(column, direction); + } + return; + } + + Object.entries(sort).forEach(([column, dir]) => { + assertFieldAllowed(column); + qb.orderBy(column, normalizeSortDirection(dir)); + }); +} + +type PostgresRepoParams = { + kx: Knex; + table: string; + pk: string; + baseFilter: Record; + notDeletedFilter: Record; + softDeleteEnabled: boolean; + softDeleteField: string; + addCreatedAt: >(data: D) => D; + addUpdatedAt: >(data: D) => D; + hooks?: PostgresEntityConfig['hooks']; + applyFilter: (qb: Knex.QueryBuilder, filter: Record) => void; + applySort: ( + qb: Knex.QueryBuilder, + sort?: string | Record, + ) => void; +}; + +type PostgresHookHandlers = { + runBeforeCreate: (data: Partial) => Promise>; + runAfterCreate: (entity: T) => Promise; + runBeforeUpdate: (data: Partial) => Promise>; + runAfterUpdate: (entity: T | null) => Promise; + runBeforeDelete: (id: string | number) => Promise; + runAfterDelete: (success: boolean) => Promise; +}; + +function createPostgresHookHandlers( + hooks?: PostgresEntityConfig['hooks'], +): PostgresHookHandlers { + return { + runBeforeCreate: async (data) => { + if (hooks?.beforeCreate) { + const result = await hooks.beforeCreate({ + data, + operation: 'create', + isBulk: false, + }); + return result ?? data; + } + return data; + }, + runAfterCreate: async (entity) => { + if (hooks?.afterCreate) { + await hooks.afterCreate(entity); + } + }, + runBeforeUpdate: async (data) => { + if (hooks?.beforeUpdate) { + const result = await hooks.beforeUpdate({ + data, + operation: 'update', + isBulk: false, + }); + return result ?? 
data; + } + return data; + }, + runAfterUpdate: async (entity) => { + if (hooks?.afterUpdate) { + await hooks.afterUpdate(entity); + } + }, + runBeforeDelete: async (id) => { + if (hooks?.beforeDelete) { + await hooks.beforeDelete(id); + } + }, + runAfterDelete: async (success) => { + if (hooks?.afterDelete) { + await hooks.afterDelete(success); + } + }, + }; +} + +function createPostgresReadMethods(params: PostgresRepoParams) { + const { + kx, + table, + pk, + baseFilter, + notDeletedFilter, + applyFilter, + applySort, + } = params; + + return { + async findById(id: string | number): Promise { + const mergedFilter = { ...baseFilter, ...notDeletedFilter }; + const qb = kx(table) + .select('*') + .where({ [pk]: id }); + applyFilter(qb, mergedFilter); + const row = await qb.first(); + return (row as T) || null; + }, + + async findAll(filter: Record = {}): Promise { + const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; + const qb = kx(table).select('*'); + applyFilter(qb, mergedFilter); + const rows = await qb; + return rows as T[]; + }, + + async findOne(filter: Record): Promise { + const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; + const qb = kx(table).select('*'); + applyFilter(qb, mergedFilter); + const row = await qb.first(); + return (row as T) || null; + }, + + async findPage(options: PageOptions = {}): Promise> { + const { filter = {}, page = 1, limit = 10, sort } = options; + const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; + + const offset = Math.max(0, (page - 1) * limit); + const qb = kx(table).select('*'); + applyFilter(qb, mergedFilter); + applySort(qb, sort); + + const data = (await qb.clone().limit(limit).offset(offset)) as T[]; + + const countRow = await kx(table) + .count<{ count: string }[]>({ count: '*' }) + .modify((q) => applyFilter(q, mergedFilter)); + const total = Number(countRow[0]?.count || 0); + + return shapePage(data, page, limit, total); + }, + + async count(filter: Record 
= {}): Promise { + const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; + const [{ count }] = await kx(table) + .count<{ count: string }[]>({ count: '*' }) + .modify((q) => applyFilter(q, mergedFilter)); + return Number(count || 0); + }, + + async exists(filter: Record = {}): Promise { + const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; + const row = await kx(table) + .select([pk]) + .modify((q) => applyFilter(q, mergedFilter)) + .first(); + return !!row; + }, + + async distinct( + field: K, + filter: Record = {}, + ): Promise { + const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; + const qb = kx(table) + .distinct(String(field)) + .modify((q) => applyFilter(q, mergedFilter)); + const rows = await qb; + return rows.map( + (row: Record) => row[String(field)] as T[K], + ); + }, + + async select( + filter: Record, + fields: K[], + ): Promise[]> { + const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; + const qb = kx(table) + .select(fields.map(String)) + .modify((q) => applyFilter(q, mergedFilter)); + const rows = await qb; + return rows as Pick[]; + }, + }; +} + +function createPostgresWriteMethods( + params: PostgresRepoParams, + hooks: PostgresHookHandlers, +) { + const { + kx, + table, + pk, + baseFilter, + notDeletedFilter, + softDeleteEnabled, + softDeleteField, + addCreatedAt, + addUpdatedAt, + applyFilter, + } = params; + + return { + async create(data: Partial): Promise { + let processedData = await hooks.runBeforeCreate(data); + processedData = addCreatedAt( + processedData as Record, + ) as Partial; + + const [row] = await kx(table).insert(processedData).returning('*'); + const entity = row as T; + + await hooks.runAfterCreate(entity); + + return entity; + }, + + async updateById( + id: string | number, + update: Partial, + ): Promise { + let processedUpdate = await hooks.runBeforeUpdate(update); + processedUpdate = addUpdatedAt( + processedUpdate as Record, + ) as Partial; + + 
const mergedFilter = { ...baseFilter, ...notDeletedFilter }; + const qb = kx(table).where({ [pk]: id }); + applyFilter(qb, mergedFilter); + const [row] = await qb.update(processedUpdate).returning('*'); + const entity = (row as T) || null; + + await hooks.runAfterUpdate(entity); + + return entity; + }, + + async deleteById(id: string | number): Promise { + await hooks.runBeforeDelete(id); + + const mergedFilter = { ...baseFilter, ...notDeletedFilter }; + let success: boolean; + + if (softDeleteEnabled) { + const qb = kx(table).where({ [pk]: id }); + applyFilter(qb, mergedFilter); + const affectedRows = await qb.update({ + [softDeleteField]: new Date(), + }); + success = affectedRows > 0; + } else { + const qb = kx(table).where({ [pk]: id }); + applyFilter(qb, mergedFilter); + const affectedRows = await qb.delete(); + success = affectedRows > 0; + } + + await hooks.runAfterDelete(success); + + return success; + }, + }; +} + +function createPostgresBulkMethods(params: PostgresRepoParams) { + const { + kx, + table, + baseFilter, + notDeletedFilter, + softDeleteEnabled, + softDeleteField, + addCreatedAt, + addUpdatedAt, + applyFilter, + } = params; + + return { + async insertMany(data: Partial[]): Promise { + if (data.length === 0) return []; + + const timestampedData = data.map((item) => + addCreatedAt(item as Record), + ); + + const rows = await kx(table).insert(timestampedData).returning('*'); + + return rows as T[]; + }, + + async updateMany( + filter: Record, + update: Partial, + ): Promise { + const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; + const timestampedUpdate = addUpdatedAt(update as Record); + + const affectedRows = await kx(table) + .modify((q) => applyFilter(q, mergedFilter)) + .update(timestampedUpdate); + + return affectedRows; + }, + + async deleteMany(filter: Record): Promise { + const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; + + if (softDeleteEnabled) { + const affectedRows = await kx(table) + 
.modify((q) => applyFilter(q, mergedFilter)) + .update({ [softDeleteField]: new Date() }); + return affectedRows; + } + + const affectedRows = await kx(table) + .modify((q) => applyFilter(q, mergedFilter)) + .delete(); + + return affectedRows; + }, + }; +} + +function createPostgresAdvancedMethods(params: PostgresRepoParams) { + const { + kx, + table, + pk, + baseFilter, + notDeletedFilter, + addCreatedAt, + addUpdatedAt, + applyFilter, + } = params; + + return { + async upsert( + filter: Record, + data: Partial, + ): Promise { + const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; + + const qb = kx(table).select('*'); + applyFilter(qb, mergedFilter); + const existing = await qb.first(); + + if (existing) { + const timestampedUpdate = addUpdatedAt(data as Record); + const updateQb = kx(table).where({ [pk]: existing[pk] }); + const [row] = await updateQb.update(timestampedUpdate).returning('*'); + return row as T; + } + + const timestampedData = addCreatedAt({ ...filter, ...data } as Record< + string, + unknown + >); + const [row] = await kx(table).insert(timestampedData).returning('*'); + return row as T; + }, + }; +} + +function createPostgresSoftDeleteMethods(params: PostgresRepoParams) { + const { + kx, + table, + pk, + baseFilter, + notDeletedFilter, + softDeleteEnabled, + softDeleteField, + applyFilter, + } = params; + + if (!softDeleteEnabled) { + return { + softDelete: undefined, + softDeleteMany: undefined, + restore: undefined, + restoreMany: undefined, + findAllWithDeleted: undefined, + findDeleted: undefined, + }; + } + + return { + softDelete: async (id: string | number): Promise => { + const mergedFilter = { ...baseFilter, ...notDeletedFilter }; + const qb = kx(table).where({ [pk]: id }); + applyFilter(qb, mergedFilter); + const affectedRows = await qb.update({ + [softDeleteField]: new Date(), + }); + return affectedRows > 0; + }, + + softDeleteMany: async ( + filter: Record, + ): Promise => { + const mergedFilter = { ...baseFilter, 
...notDeletedFilter, ...filter }; + const affectedRows = await kx(table) + .modify((q) => applyFilter(q, mergedFilter)) + .update({ [softDeleteField]: new Date() }); + return affectedRows; + }, + + restore: async (id: string | number): Promise => { + const deletedFilter = { [softDeleteField]: { isNotNull: true } }; + const mergedFilter = { ...baseFilter, ...deletedFilter }; + const qb = kx(table).where({ [pk]: id }); + applyFilter(qb, mergedFilter); + const [row] = await qb.update({ [softDeleteField]: null }).returning('*'); + return (row as T) || null; + }, + + restoreMany: async (filter: Record): Promise => { + const deletedFilter = { [softDeleteField]: { isNotNull: true } }; + const mergedFilter = { ...baseFilter, ...deletedFilter, ...filter }; + const affectedRows = await kx(table) + .modify((q) => applyFilter(q, mergedFilter)) + .update({ [softDeleteField]: null }); + return affectedRows; + }, + + findAllWithDeleted: async ( + filter: Record = {}, + ): Promise => { + const mergedFilter = { ...baseFilter, ...filter }; + const qb = kx(table).select('*'); + applyFilter(qb, mergedFilter); + const rows = await qb; + return rows as T[]; + }, + + findDeleted: async (filter: Record = {}): Promise => { + const deletedFilter = { [softDeleteField]: { isNotNull: true } }; + const mergedFilter = { ...baseFilter, ...deletedFilter, ...filter }; + const qb = kx(table).select('*'); + applyFilter(qb, mergedFilter); + const rows = await qb; + return rows as T[]; + }, + }; +} /** * PostgreSQL adapter for DatabaseKit. 
@@ -44,7 +627,7 @@ export class PostgresAdapter { */ connect(overrides: Knex.Config = {}): Knex { if (!this.knexInstance) { - this.logger.log("Creating PostgreSQL connection pool..."); + this.logger.log('Creating PostgreSQL connection pool...'); // Apply pool configuration from config const poolConfig = this.config.pool || {}; @@ -56,14 +639,14 @@ export class PostgresAdapter { }; this.knexInstance = knex({ - client: "pg", + client: 'pg', connection: this.config.connectionString, pool, acquireConnectionTimeout: poolConfig.acquireTimeoutMs ?? 60000, ...overrides, }); - this.logger.log("PostgreSQL connection pool created"); + this.logger.log('PostgreSQL connection pool created'); } return this.knexInstance; @@ -76,7 +659,7 @@ export class PostgresAdapter { if (this.knexInstance) { await this.knexInstance.destroy(); this.knexInstance = undefined; - this.logger.log("PostgreSQL connection pool destroyed"); + this.logger.log('PostgreSQL connection pool destroyed'); } } @@ -86,7 +669,7 @@ export class PostgresAdapter { */ getKnex(): Knex { if (!this.knexInstance) { - throw new Error("PostgreSQL not connected. Call connect() first."); + throw new Error('PostgreSQL not connected. 
Call connect() first.'); } return this.knexInstance; } @@ -117,17 +700,16 @@ export class PostgresAdapter { try { if (!this.knexInstance) { - return { - healthy: false, - responseTimeMs: Date.now() - startTime, - type: "postgres", - error: "Not connected to PostgreSQL", - }; + return createErrorHealthResult( + 'postgres', + 'Not connected to PostgreSQL', + startTime, + ); } // Execute simple query to verify connection const result = await this.knexInstance.raw( - "SELECT version(), current_database()", + 'SELECT version(), current_database()', ); const row = result.rows?.[0]; @@ -138,23 +720,17 @@ export class PostgresAdapter { } ).pool; - return { - healthy: true, - responseTimeMs: Date.now() - startTime, - type: "postgres", - details: { - version: row?.version?.split(" ").slice(0, 2).join(" "), - activeConnections: pool?.numUsed?.() ?? 0, - poolSize: (pool?.numUsed?.() ?? 0) + (pool?.numFree?.() ?? 0), - }, - }; + return createSuccessHealthResult('postgres', startTime, { + version: row?.version?.split(' ').slice(0, 2).join(' '), + activeConnections: pool?.numUsed?.() ?? 0, + poolSize: (pool?.numUsed?.() ?? 0) + (pool?.numFree?.() ?? 0), + }); } catch (error) { - return { - healthy: false, - responseTimeMs: Date.now() - startTime, - type: "postgres", - error: error instanceof Error ? error.message : "Unknown error", - }; + return createErrorHealthResult( + 'postgres', + error instanceof Error ? error.message : 'Unknown error', + startTime, + ); } } @@ -172,91 +748,25 @@ export class PostgresAdapter { ): Repository { const kx = trx || this.getKnex(); const table = cfg.table; - const pk = cfg.primaryKey || "id"; + const pk = cfg.primaryKey || 'id'; const allowed = cfg.columns || []; const baseFilter = cfg.defaultFilter || {}; - - // Soft delete configuration const softDeleteEnabled = cfg.softDelete ?? false; - const softDeleteField = cfg.softDeleteField ?? "deleted_at"; - - // Timestamp configuration + const softDeleteField = cfg.softDeleteField ?? 
'deleted_at'; const timestampsEnabled = cfg.timestamps ?? false; - const createdAtField = cfg.createdAtField ?? "created_at"; - const updatedAtField = cfg.updatedAtField ?? "updated_at"; - - // Hooks configuration + const createdAtField = cfg.createdAtField ?? 'created_at'; + const updatedAtField = cfg.updatedAtField ?? 'updated_at'; const hooks = cfg.hooks; - // Create not-deleted filter for soft delete const notDeletedFilter: Record = softDeleteEnabled ? { [softDeleteField]: { isNull: true } } : {}; - // Helper to add createdAt timestamp - const addCreatedAt = >(data: D): D => { - if (timestampsEnabled) { - return { ...data, [createdAtField]: new Date() }; - } - return data; - }; - - // Helper to add updatedAt timestamp - const addUpdatedAt = >(data: D): D => { - if (timestampsEnabled) { - return { ...data, [updatedAtField]: new Date() }; - } - return data; - }; - - // Hook helper functions - const runBeforeCreate = async (data: Partial): Promise> => { - if (hooks?.beforeCreate) { - const result = await hooks.beforeCreate({ - data, - operation: "create", - isBulk: false, - }); - return result ?? data; - } - return data; - }; + const addCreatedAt = >(data: D): D => + addCreatedAtTimestamp(data, timestampsEnabled, createdAtField); - const runAfterCreate = async (entity: T): Promise => { - if (hooks?.afterCreate) { - await hooks.afterCreate(entity); - } - }; - - const runBeforeUpdate = async (data: Partial): Promise> => { - if (hooks?.beforeUpdate) { - const result = await hooks.beforeUpdate({ - data, - operation: "update", - isBulk: false, - }); - return result ?? 
data; - } - return data; - }; - - const runAfterUpdate = async (entity: T | null): Promise => { - if (hooks?.afterUpdate) { - await hooks.afterUpdate(entity); - } - }; - - const runBeforeDelete = async (id: string | number): Promise => { - if (hooks?.beforeDelete) { - await hooks.beforeDelete(id); - } - }; - - const runAfterDelete = async (success: boolean): Promise => { - if (hooks?.afterDelete) { - await hooks.afterDelete(success); - } - }; + const addUpdatedAt = >(data: D): D => + addUpdatedAtTimestamp(data, timestampsEnabled, updatedAtField); const assertFieldAllowed = (field: string): void => { if (allowed.length && !allowed.includes(field)) { @@ -269,384 +779,37 @@ export class PostgresAdapter { const applyFilter = ( qb: Knex.QueryBuilder, filter: Record, - ): void => { - Object.entries(filter).forEach(([key, value]) => { - assertFieldAllowed(key); - - if (value && typeof value === "object" && !Array.isArray(value)) { - const ops = value as Record; - - if (ops.eq !== undefined) qb.where(key, ops.eq); - if (ops.ne !== undefined) qb.whereNot(key, ops.ne); - if (ops.gt !== undefined) qb.where(key, ">", ops.gt); - if (ops.gte !== undefined) qb.where(key, ">=", ops.gte); - if (ops.lt !== undefined) qb.where(key, "<", ops.lt); - if (ops.lte !== undefined) qb.where(key, "<=", ops.lte); - if (ops.in) qb.whereIn(key, ops.in as readonly string[]); - if (ops.nin) qb.whereNotIn(key, ops.nin as readonly string[]); - if (ops.like) qb.whereILike(key, `${ops.like}`); - if (ops.isNull === true) qb.whereNull(key); - if (ops.isNotNull === true) qb.whereNotNull(key); - } else { - qb.where(key, value as string | number | boolean); - } - }); - }; + ): void => applyPostgresFilter(qb, filter, assertFieldAllowed); const applySort = ( qb: Knex.QueryBuilder, sort?: string | Record, - ): void => { - if (!sort) return; - - if (typeof sort === "string") { - const parts = sort.split(","); - for (const p of parts) { - const dir = p.startsWith("-") ? 
"desc" : "asc"; - const col = p.replace(/^[-+]/, ""); - assertFieldAllowed(col); - qb.orderBy(col, dir); - } - } else { - Object.entries(sort).forEach(([col, dir]) => { - assertFieldAllowed(col); - const direction = - dir === -1 || String(dir).toLowerCase() === "desc" ? "desc" : "asc"; - qb.orderBy(col, direction); - }); - } - }; - - const shapePage = ( - data: T[], - page: number, - limit: number, - total: number, - ): PageResult => { - const pages = Math.max(1, Math.ceil((total || 0) / (limit || 1))); - return { data, page, limit, total, pages }; + ): void => applyPostgresSort(qb, sort, assertFieldAllowed); + + const params: PostgresRepoParams = { + kx, + table, + pk, + baseFilter, + notDeletedFilter, + softDeleteEnabled, + softDeleteField, + addCreatedAt, + addUpdatedAt, + hooks, + applyFilter, + applySort, }; - const repo: Repository = { - async create(data: Partial): Promise { - // Run beforeCreate hook - let processedData = await runBeforeCreate(data); - processedData = addCreatedAt( - processedData as Record, - ) as Partial; - - const [row] = await kx(table).insert(processedData).returning("*"); - const entity = row as T; - - // Run afterCreate hook - await runAfterCreate(entity); - - return entity; - }, - - async findById(id: string | number): Promise { - const mergedFilter = { ...baseFilter, ...notDeletedFilter }; - const qb = kx(table) - .select("*") - .where({ [pk]: id }); - applyFilter(qb, mergedFilter); - const row = await qb.first(); - return (row as T) || null; - }, - - async findAll(filter: Record = {}): Promise { - const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; - const qb = kx(table).select("*"); - applyFilter(qb, mergedFilter); - const rows = await qb; - return rows as T[]; - }, - - async findOne(filter: Record): Promise { - const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; - const qb = kx(table).select("*"); - applyFilter(qb, mergedFilter); - const row = await qb.first(); - return (row as T) || null; 
- }, - - async findPage(options: PageOptions = {}): Promise> { - const { filter = {}, page = 1, limit = 10, sort } = options; - const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; - - const offset = Math.max(0, (page - 1) * limit); - - const qb = kx(table).select("*"); - applyFilter(qb, mergedFilter); - applySort(qb, sort); - - const data = (await qb.clone().limit(limit).offset(offset)) as T[]; - - const countRow = await kx(table) - .count<{ count: string }[]>({ count: "*" }) - .modify((q) => applyFilter(q, mergedFilter)); - const total = Number(countRow[0]?.count || 0); - - return shapePage(data, page, limit, total); - }, - - async updateById( - id: string | number, - update: Partial, - ): Promise { - // Run beforeUpdate hook - let processedUpdate = await runBeforeUpdate(update); - processedUpdate = addUpdatedAt( - processedUpdate as Record, - ) as Partial; - - const mergedFilter = { ...baseFilter, ...notDeletedFilter }; - const qb = kx(table).where({ [pk]: id }); - applyFilter(qb, mergedFilter); - const [row] = await qb.update(processedUpdate).returning("*"); - const entity = (row as T) || null; - - // Run afterUpdate hook - await runAfterUpdate(entity); - - return entity; - }, - - async deleteById(id: string | number): Promise { - // Run beforeDelete hook - await runBeforeDelete(id); - - const mergedFilter = { ...baseFilter, ...notDeletedFilter }; - let success: boolean; - - // If soft delete is enabled, update instead of delete - if (softDeleteEnabled) { - const qb = kx(table).where({ [pk]: id }); - applyFilter(qb, mergedFilter); - const affectedRows = await qb.update({ - [softDeleteField]: new Date(), - }); - success = affectedRows > 0; - } else { - const qb = kx(table).where({ [pk]: id }); - applyFilter(qb, mergedFilter); - const affectedRows = await qb.delete(); - success = affectedRows > 0; - } - - // Run afterDelete hook - await runAfterDelete(success); - - return success; - }, - - async count(filter: Record = {}): Promise { - const 
mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; - const [{ count }] = await kx(table) - .count<{ count: string }[]>({ count: "*" }) - .modify((q) => applyFilter(q, mergedFilter)); - return Number(count || 0); - }, - - async exists(filter: Record = {}): Promise { - const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; - const row = await kx(table) - .select([pk]) - .modify((q) => applyFilter(q, mergedFilter)) - .first(); - return !!row; - }, - - // ----------------------------- - // Bulk Operations - // ----------------------------- + const hookHandlers = createPostgresHookHandlers(hooks); - async insertMany(data: Partial[]): Promise { - if (data.length === 0) return []; - - // Add createdAt timestamp to each record - const timestampedData = data.map((item) => - addCreatedAt(item as Record), - ); - - const rows = await kx(table).insert(timestampedData).returning("*"); - - return rows as T[]; - }, - - async updateMany( - filter: Record, - update: Partial, - ): Promise { - const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; - const timestampedUpdate = addUpdatedAt( - update as Record, - ); - - const affectedRows = await kx(table) - .modify((q) => applyFilter(q, mergedFilter)) - .update(timestampedUpdate); - - return affectedRows; - }, - - async deleteMany(filter: Record): Promise { - const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; - - // If soft delete is enabled, update instead of delete - if (softDeleteEnabled) { - const affectedRows = await kx(table) - .modify((q) => applyFilter(q, mergedFilter)) - .update({ [softDeleteField]: new Date() }); - return affectedRows; - } - - const affectedRows = await kx(table) - .modify((q) => applyFilter(q, mergedFilter)) - .delete(); - - return affectedRows; - }, - - // ----------------------------- - // Advanced Query Operations - // ----------------------------- - - async upsert( - filter: Record, - data: Partial, - ): Promise { - const mergedFilter = 
{ ...baseFilter, ...notDeletedFilter, ...filter }; - - // Try to find existing record - const qb = kx(table).select("*"); - applyFilter(qb, mergedFilter); - const existing = await qb.first(); - - if (existing) { - // Update existing record - const timestampedUpdate = addUpdatedAt( - data as Record, - ); - const updateQb = kx(table).where({ [pk]: existing[pk] }); - const [row] = await updateQb.update(timestampedUpdate).returning("*"); - return row as T; - } else { - // Insert new record - const timestampedData = addCreatedAt({ ...filter, ...data } as Record< - string, - unknown - >); - const [row] = await kx(table).insert(timestampedData).returning("*"); - return row as T; - } - }, - - async distinct( - field: K, - filter: Record = {}, - ): Promise { - const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; - const qb = kx(table) - .distinct(String(field)) - .modify((q) => applyFilter(q, mergedFilter)); - const rows = await qb; - return rows.map( - (row: Record) => row[String(field)] as T[K], - ); - }, - - async select( - filter: Record, - fields: K[], - ): Promise[]> { - const mergedFilter = { ...baseFilter, ...notDeletedFilter, ...filter }; - const qb = kx(table) - .select(fields.map(String)) - .modify((q) => applyFilter(q, mergedFilter)); - const rows = await qb; - return rows as Pick[]; - }, - - // ----------------------------- - // Soft Delete Operations - // ----------------------------- - - softDelete: softDeleteEnabled - ? async (id: string | number): Promise => { - const mergedFilter = { ...baseFilter, ...notDeletedFilter }; - const qb = kx(table).where({ [pk]: id }); - applyFilter(qb, mergedFilter); - const affectedRows = await qb.update({ - [softDeleteField]: new Date(), - }); - return affectedRows > 0; - } - : undefined, - - softDeleteMany: softDeleteEnabled - ? 
async (filter: Record): Promise => { - const mergedFilter = { - ...baseFilter, - ...notDeletedFilter, - ...filter, - }; - const affectedRows = await kx(table) - .modify((q) => applyFilter(q, mergedFilter)) - .update({ [softDeleteField]: new Date() }); - return affectedRows; - } - : undefined, - - restore: softDeleteEnabled - ? async (id: string | number): Promise => { - const deletedFilter = { [softDeleteField]: { isNotNull: true } }; - const mergedFilter = { ...baseFilter, ...deletedFilter }; - const qb = kx(table).where({ [pk]: id }); - applyFilter(qb, mergedFilter); - const [row] = await qb - .update({ [softDeleteField]: null }) - .returning("*"); - return (row as T) || null; - } - : undefined, - - restoreMany: softDeleteEnabled - ? async (filter: Record): Promise => { - const deletedFilter = { [softDeleteField]: { isNotNull: true } }; - const mergedFilter = { ...baseFilter, ...deletedFilter, ...filter }; - const affectedRows = await kx(table) - .modify((q) => applyFilter(q, mergedFilter)) - .update({ [softDeleteField]: null }); - return affectedRows; - } - : undefined, - - findAllWithDeleted: softDeleteEnabled - ? async (filter: Record = {}): Promise => { - // Ignore soft delete filter, include all records - const mergedFilter = { ...baseFilter, ...filter }; - const qb = kx(table).select("*"); - applyFilter(qb, mergedFilter); - const rows = await qb; - return rows as T[]; - } - : undefined, - - findDeleted: softDeleteEnabled - ? 
async (filter: Record = {}): Promise => { - // Only find deleted records - const deletedFilter = { [softDeleteField]: { isNotNull: true } }; - const mergedFilter = { ...baseFilter, ...deletedFilter, ...filter }; - const qb = kx(table).select("*"); - applyFilter(qb, mergedFilter); - const rows = await qb; - return rows as T[]; - } - : undefined, + return { + ...createPostgresReadMethods(params), + ...createPostgresWriteMethods(params, hookHandlers), + ...createPostgresBulkMethods(params), + ...createPostgresAdvancedMethods(params), + ...createPostgresSoftDeleteMethods(params), }; - - return repo; } /** @@ -676,7 +839,7 @@ export class PostgresAdapter { options: TransactionOptions = {}, ): Promise { const { - isolationLevel = "read committed", + isolationLevel = 'read committed', retries = 0, timeout = DATABASE_KIT_CONSTANTS.DEFAULT_TRANSACTION_TIMEOUT, } = options; @@ -725,23 +888,23 @@ export class PostgresAdapter { } } - throw lastError || new Error("Transaction failed"); + throw lastError || new Error('Transaction failed'); } /** * Checks if a PostgreSQL error is retryable. 
*/ private isRetryableError(error: unknown): boolean { - if (error && typeof error === "object") { + if (error && typeof error === 'object') { const pgError = error as { code?: string; routine?: string }; // PostgreSQL serialization failure codes const retryableCodes = [ - "40001", // serialization_failure - "40P01", // deadlock_detected - "55P03", // lock_not_available - "57P01", // admin_shutdown - "57014", // query_canceled (timeout) + '40001', // serialization_failure + '40P01', // deadlock_detected + '55P03', // lock_not_available + '57P01', // admin_shutdown + '57014', // query_canceled (timeout) ]; if (pgError.code && retryableCodes.includes(pgError.code)) { diff --git a/src/config/database.config.spec.ts b/src/config/database.config.spec.ts index 3563893..afc7863 100644 --- a/src/config/database.config.spec.ts +++ b/src/config/database.config.spec.ts @@ -1,9 +1,9 @@ -import { DatabaseConfigHelper } from "./database.config"; -import { DEFAULTS, ENV_KEYS } from "./database.constants"; +import { DatabaseConfigHelper } from './database.config'; +import { DEFAULTS, ENV_KEYS } from './database.constants'; const originalEnv = { ...process.env }; -describe("DatabaseConfigHelper", () => { +describe('DatabaseConfigHelper', () => { beforeEach(() => { process.env = { ...originalEnv }; }); @@ -12,154 +12,154 @@ describe("DatabaseConfigHelper", () => { process.env = { ...originalEnv }; }); - describe("getEnv", () => { - it("should return the environment value when present", () => { - process.env.TEST_ENV = "value"; - expect(DatabaseConfigHelper.getEnv("TEST_ENV")).toBe("value"); + describe('getEnv', () => { + it('should return the environment value when present', () => { + process.env.TEST_ENV = 'value'; + expect(DatabaseConfigHelper.getEnv('TEST_ENV')).toBe('value'); }); - it("should throw when the environment variable is missing", () => { + it('should throw when the environment variable is missing', () => { delete process.env.MISSING_ENV; - expect(() => 
DatabaseConfigHelper.getEnv("MISSING_ENV")).toThrow( - "Environment variable MISSING_ENV is not configured", + expect(() => DatabaseConfigHelper.getEnv('MISSING_ENV')).toThrow( + 'Environment variable MISSING_ENV is not configured', ); }); }); - describe("getEnvOrDefault", () => { - it("should return env value when set", () => { - process.env.OPTIONAL_ENV = "present"; + describe('getEnvOrDefault', () => { + it('should return env value when set', () => { + process.env.OPTIONAL_ENV = 'present'; expect( - DatabaseConfigHelper.getEnvOrDefault("OPTIONAL_ENV", "fallback"), - ).toBe("present"); + DatabaseConfigHelper.getEnvOrDefault('OPTIONAL_ENV', 'fallback'), + ).toBe('present'); }); - it("should return default when env is missing", () => { + it('should return default when env is missing', () => { delete process.env.OPTIONAL_ENV; expect( - DatabaseConfigHelper.getEnvOrDefault("OPTIONAL_ENV", "fallback"), - ).toBe("fallback"); + DatabaseConfigHelper.getEnvOrDefault('OPTIONAL_ENV', 'fallback'), + ).toBe('fallback'); }); }); - describe("getEnvAsNumber", () => { - it("should parse a valid numeric value", () => { - process.env.NUM_ENV = "42"; - expect(DatabaseConfigHelper.getEnvAsNumber("NUM_ENV", 10)).toBe(42); + describe('getEnvAsNumber', () => { + it('should parse a valid numeric value', () => { + process.env.NUM_ENV = '42'; + expect(DatabaseConfigHelper.getEnvAsNumber('NUM_ENV', 10)).toBe(42); }); - it("should return default when missing", () => { + it('should return default when missing', () => { delete process.env.NUM_ENV; - expect(DatabaseConfigHelper.getEnvAsNumber("NUM_ENV", 10)).toBe(10); + expect(DatabaseConfigHelper.getEnvAsNumber('NUM_ENV', 10)).toBe(10); }); - it("should throw on invalid number", () => { - process.env.NUM_ENV = "not-a-number"; - expect(() => DatabaseConfigHelper.getEnvAsNumber("NUM_ENV", 10)).toThrow( - "Environment variable NUM_ENV must be a valid number", + it('should throw on invalid number', () => { + process.env.NUM_ENV = 'not-a-number'; + 
expect(() => DatabaseConfigHelper.getEnvAsNumber('NUM_ENV', 10)).toThrow( + 'Environment variable NUM_ENV must be a valid number', ); }); }); - describe("fromEnv", () => { - it("should build mongo config from env", () => { - process.env[ENV_KEYS.DATABASE_TYPE] = "mongo"; - process.env[ENV_KEYS.MONGO_URI] = "mongodb://localhost:27017/testdb"; + describe('fromEnv', () => { + it('should build mongo config from env', () => { + process.env[ENV_KEYS.DATABASE_TYPE] = 'mongo'; + process.env[ENV_KEYS.MONGO_URI] = 'mongodb://localhost:27017/testdb'; const config = DatabaseConfigHelper.fromEnv(); - expect(config.type).toBe("mongo"); - expect(config.connectionString).toBe("mongodb://localhost:27017/testdb"); + expect(config.type).toBe('mongo'); + expect(config.connectionString).toBe('mongodb://localhost:27017/testdb'); }); - it("should build postgres config from env", () => { - process.env[ENV_KEYS.DATABASE_TYPE] = "postgres"; - process.env[ENV_KEYS.POSTGRES_URI] = "postgresql://localhost:5432/testdb"; + it('should build postgres config from env', () => { + process.env[ENV_KEYS.DATABASE_TYPE] = 'postgres'; + process.env[ENV_KEYS.POSTGRES_URI] = 'postgresql://localhost:5432/testdb'; const config = DatabaseConfigHelper.fromEnv(); - expect(config.type).toBe("postgres"); + expect(config.type).toBe('postgres'); expect(config.connectionString).toBe( - "postgresql://localhost:5432/testdb", + 'postgresql://localhost:5432/testdb', ); }); - it("should throw on invalid database type", () => { - process.env[ENV_KEYS.DATABASE_TYPE] = "sqlite"; + it('should throw on invalid database type', () => { + process.env[ENV_KEYS.DATABASE_TYPE] = 'sqlite'; expect(() => DatabaseConfigHelper.fromEnv()).toThrow( - "Invalid DATABASE_TYPE", + 'Invalid DATABASE_TYPE', ); }); }); - describe("validate", () => { - it("should throw when type is missing", () => { + describe('validate', () => { + it('should throw when type is missing', () => { expect(() => DatabaseConfigHelper.validate( {} as unknown as { - 
type: "mongo"; + type: 'mongo'; connectionString: string; }, ), - ).toThrow("Database configuration must include a type"); + ).toThrow('Database configuration must include a type'); }); - it("should throw on invalid type", () => { + it('should throw on invalid type', () => { expect(() => DatabaseConfigHelper.validate({ - type: "sqlite" as unknown as "mongo", - connectionString: "file::memory:", + type: 'sqlite' as unknown as 'mongo', + connectionString: 'file::memory:', }), - ).toThrow("Invalid database type"); + ).toThrow('Invalid database type'); }); - it("should throw when connectionString is missing", () => { + it('should throw when connectionString is missing', () => { expect(() => DatabaseConfigHelper.validate({ - type: "mongo", - } as unknown as { type: "mongo"; connectionString: string }), - ).toThrow("Database configuration must include a connectionString"); + type: 'mongo', + } as unknown as { type: 'mongo'; connectionString: string }), + ).toThrow('Database configuration must include a connectionString'); }); - it("should reject invalid mongo connection string", () => { + it('should reject invalid mongo connection string', () => { expect(() => DatabaseConfigHelper.validate({ - type: "mongo", - connectionString: "invalid://localhost", + type: 'mongo', + connectionString: 'invalid://localhost', }), - ).toThrow("MongoDB connection string must start with"); + ).toThrow('MongoDB connection string must start with'); }); - it("should reject invalid postgres connection string", () => { + it('should reject invalid postgres connection string', () => { expect(() => DatabaseConfigHelper.validate({ - type: "postgres", - connectionString: "invalid://localhost", + type: 'postgres', + connectionString: 'invalid://localhost', }), - ).toThrow("PostgreSQL connection string must start with"); + ).toThrow('PostgreSQL connection string must start with'); }); - it("should accept valid configs", () => { + it('should accept valid configs', () => { expect(() => 
DatabaseConfigHelper.validate({ - type: "mongo", - connectionString: "mongodb://localhost:27017/testdb", + type: 'mongo', + connectionString: 'mongodb://localhost:27017/testdb', }), ).not.toThrow(); }); }); - describe("pool settings", () => { - it("should return pool size from env", () => { - process.env[ENV_KEYS.POOL_SIZE] = "20"; + describe('pool settings', () => { + it('should return pool size from env', () => { + process.env[ENV_KEYS.POOL_SIZE] = '20'; expect(DatabaseConfigHelper.getPoolSize()).toBe(20); }); - it("should return default pool size when missing", () => { + it('should return default pool size when missing', () => { delete process.env[ENV_KEYS.POOL_SIZE]; expect(DatabaseConfigHelper.getPoolSize()).toBe(DEFAULTS.POOL_SIZE); }); - it("should return connection timeout from env", () => { - process.env[ENV_KEYS.CONNECTION_TIMEOUT] = "7000"; + it('should return connection timeout from env', () => { + process.env[ENV_KEYS.CONNECTION_TIMEOUT] = '7000'; expect(DatabaseConfigHelper.getConnectionTimeout()).toBe(7000); }); }); diff --git a/src/config/database.config.ts b/src/config/database.config.ts index aad0e92..985f47b 100644 --- a/src/config/database.config.ts +++ b/src/config/database.config.ts @@ -3,9 +3,9 @@ import type { DatabaseConfig, DatabaseType, -} from "../contracts/database.contracts"; +} from '../contracts/database.contracts'; -import { ENV_KEYS, DEFAULTS } from "./database.constants"; +import { ENV_KEYS, DEFAULTS } from './database.constants'; /** * Helper class for environment-driven database configuration. @@ -71,21 +71,21 @@ export class DatabaseConfigHelper { static fromEnv(): DatabaseConfig { const type = this.getEnv(ENV_KEYS.DATABASE_TYPE) as DatabaseType; - if (type !== "mongo" && type !== "postgres") { + if (type !== 'mongo' && type !== 'postgres') { throw new Error( `Invalid DATABASE_TYPE: "${String(type)}". 
Must be "mongo" or "postgres".`, ); } - if (type === "mongo") { + if (type === 'mongo') { return { - type: "mongo", + type: 'mongo', connectionString: this.getEnv(ENV_KEYS.MONGO_URI), }; } return { - type: "postgres", + type: 'postgres', connectionString: this.getEnv(ENV_KEYS.POSTGRES_URI), }; } @@ -101,24 +101,24 @@ export class DatabaseConfigHelper { const rawConfig = config as unknown as Record; if (!rawConfig.type) { - throw new Error("Database configuration must include a type"); + throw new Error('Database configuration must include a type'); } - if (rawConfig.type !== "mongo" && rawConfig.type !== "postgres") { + if (rawConfig.type !== 'mongo' && rawConfig.type !== 'postgres') { throw new Error( `Invalid database type: "${rawConfig.type}". Must be "mongo" or "postgres".`, ); } if (!rawConfig.connectionString) { - throw new Error("Database configuration must include a connectionString"); + throw new Error('Database configuration must include a connectionString'); } // Basic connection string validation - if (config.type === "mongo") { + if (config.type === 'mongo') { if ( - !config.connectionString.startsWith("mongodb://") && - !config.connectionString.startsWith("mongodb+srv://") + !config.connectionString.startsWith('mongodb://') && + !config.connectionString.startsWith('mongodb+srv://') ) { throw new Error( 'MongoDB connection string must start with "mongodb://" or "mongodb+srv://"', @@ -126,10 +126,10 @@ export class DatabaseConfigHelper { } } - if (config.type === "postgres") { + if (config.type === 'postgres') { if ( - !config.connectionString.startsWith("postgresql://") && - !config.connectionString.startsWith("postgres://") + !config.connectionString.startsWith('postgresql://') && + !config.connectionString.startsWith('postgres://') ) { throw new Error( 'PostgreSQL connection string must start with "postgresql://" or "postgres://"', diff --git a/src/config/database.constants.ts b/src/config/database.constants.ts index 42064ac..d09417d 100644 --- 
a/src/config/database.constants.ts +++ b/src/config/database.constants.ts @@ -4,28 +4,28 @@ * Injection token for the main DatabaseService instance. * Use with @Inject(DATABASE_TOKEN) or @InjectDatabase() decorator. */ -export const DATABASE_TOKEN = "DATABASE_KIT_DEFAULT"; +export const DATABASE_TOKEN = 'DATABASE_KIT_DEFAULT'; /** * Injection token for DatabaseKit module options. * Used internally for async configuration. */ -export const DATABASE_OPTIONS_TOKEN = "DATABASE_KIT_OPTIONS"; +export const DATABASE_OPTIONS_TOKEN = 'DATABASE_KIT_OPTIONS'; /** * Environment variable names used by DatabaseKit. */ export const ENV_KEYS = { /** MongoDB connection string */ - MONGO_URI: "MONGO_URI", + MONGO_URI: 'MONGO_URI', /** PostgreSQL connection string */ - POSTGRES_URI: "DATABASE_URL", + POSTGRES_URI: 'DATABASE_URL', /** Database type ('mongo' or 'postgres') */ - DATABASE_TYPE: "DATABASE_TYPE", + DATABASE_TYPE: 'DATABASE_TYPE', /** Connection pool size */ - POOL_SIZE: "DATABASE_POOL_SIZE", + POOL_SIZE: 'DATABASE_POOL_SIZE', /** Connection timeout in milliseconds */ - CONNECTION_TIMEOUT: "DATABASE_CONNECTION_TIMEOUT", + CONNECTION_TIMEOUT: 'DATABASE_CONNECTION_TIMEOUT', } as const; /** diff --git a/src/contracts/database.contracts.ts b/src/contracts/database.contracts.ts index 4c0f8c3..12eb3ac 100644 --- a/src/contracts/database.contracts.ts +++ b/src/contracts/database.contracts.ts @@ -10,7 +10,7 @@ /** * Supported database types. */ -export type DatabaseType = "mongo" | "postgres"; +export type DatabaseType = 'mongo' | 'postgres'; /** * Connection pool configuration options. @@ -42,7 +42,7 @@ export interface DatabaseConfigBase { * MongoDB-specific configuration. 
*/ export interface MongoDatabaseConfig extends DatabaseConfigBase { - type: "mongo"; + type: 'mongo'; /** Server selection timeout in milliseconds (default: 5000) */ serverSelectionTimeoutMS?: number; /** Socket timeout in milliseconds (default: 45000) */ @@ -53,7 +53,7 @@ export interface MongoDatabaseConfig extends DatabaseConfigBase { * PostgreSQL-specific configuration. */ export interface PostgresDatabaseConfig extends DatabaseConfigBase { - type: "postgres"; + type: 'postgres'; /** Statement timeout in milliseconds (default: none) */ statementTimeout?: number; /** Query timeout in milliseconds (default: none) */ @@ -77,7 +77,7 @@ export interface HookContext { /** The entity data being operated on */ data: T; /** The operation being performed */ - operation: "create" | "update" | "delete" | "upsert"; + operation: 'create' | 'update' | 'delete' | 'upsert'; /** Whether this is a bulk operation */ isBulk: boolean; } @@ -162,7 +162,7 @@ export interface PageOptions> { /** Items per page (default: 10) */ limit?: number; /** Sort order (string or object) */ - sort?: string | Record; + sort?: string | Record; } // ----------------------------- @@ -484,10 +484,10 @@ export interface DatabaseKitModuleAsyncOptions { * MongoDB doesn't support isolation levels in the same way. */ export type TransactionIsolationLevel = - | "read uncommitted" - | "read committed" - | "repeatable read" - | "serializable"; + | 'read uncommitted' + | 'read committed' + | 'repeatable read' + | 'serializable'; /** * Options for transaction execution. 
diff --git a/src/database-kit.module.spec.ts b/src/database-kit.module.spec.ts index 8fc76fc..a095c3b 100644 --- a/src/database-kit.module.spec.ts +++ b/src/database-kit.module.spec.ts @@ -1,22 +1,22 @@ -import { Logger } from "@nestjs/common"; +import { Logger } from '@nestjs/common'; -import { DATABASE_TOKEN } from "./config/database.constants"; -import { DatabaseKitModule } from "./database-kit.module"; -import { DatabaseService } from "./services/database.service"; +import { DATABASE_TOKEN } from './config/database.constants'; +import { DatabaseKitModule } from './database-kit.module'; +import { DatabaseService } from './services/database.service'; -describe("DatabaseKitModule", () => { - it("should create providers with autoConnect enabled", async () => { +describe('DatabaseKitModule', () => { + it('should create providers with autoConnect enabled', async () => { const connectSpy = jest - .spyOn(DatabaseService.prototype, "connect") + .spyOn(DatabaseService.prototype, 'connect') .mockResolvedValue(undefined); const logSpy = jest - .spyOn(Logger.prototype, "log") + .spyOn(Logger.prototype, 'log') .mockImplementation(() => undefined); const module = DatabaseKitModule.forRoot({ config: { - type: "mongo", - connectionString: "mongodb://localhost:27017/testdb", + type: 'mongo', + connectionString: 'mongodb://localhost:27017/testdb', }, }); @@ -31,15 +31,15 @@ describe("DatabaseKitModule", () => { expect(logSpy).toHaveBeenCalled(); }); - it("should skip autoConnect when disabled", async () => { + it('should skip autoConnect when disabled', async () => { const connectSpy = jest - .spyOn(DatabaseService.prototype, "connect") + .spyOn(DatabaseService.prototype, 'connect') .mockResolvedValue(undefined); const module = DatabaseKitModule.forRoot({ config: { - type: "mongo", - connectionString: "mongodb://localhost:27017/testdb", + type: 'mongo', + connectionString: 'mongodb://localhost:27017/testdb', }, autoConnect: false, }); @@ -53,16 +53,16 @@ 
describe("DatabaseKitModule", () => { expect(connectSpy).not.toHaveBeenCalled(); }); - it("should build async module with provided factory", async () => { + it('should build async module with provided factory', async () => { const connectSpy = jest - .spyOn(DatabaseService.prototype, "connect") + .spyOn(DatabaseService.prototype, 'connect') .mockResolvedValue(undefined); const module = DatabaseKitModule.forRootAsync({ useFactory: () => ({ config: { - type: "postgres", - connectionString: "postgresql://localhost:5432/testdb", + type: 'postgres', + connectionString: 'postgresql://localhost:5432/testdb', }, autoConnect: false, }), @@ -74,8 +74,8 @@ describe("DatabaseKitModule", () => { await provider.useFactory({ config: { - type: "postgres", - connectionString: "postgresql://localhost:5432/testdb", + type: 'postgres', + connectionString: 'postgresql://localhost:5432/testdb', }, autoConnect: false, }); @@ -83,18 +83,18 @@ describe("DatabaseKitModule", () => { expect(connectSpy).not.toHaveBeenCalled(); }); - it("should create feature module and connect", async () => { + it('should create feature module and connect', async () => { const connectSpy = jest - .spyOn(DatabaseService.prototype, "connect") + .spyOn(DatabaseService.prototype, 'connect') .mockResolvedValue(undefined); - const module = DatabaseKitModule.forFeature("FEATURE_DB", { - type: "mongo", - connectionString: "mongodb://localhost:27017/testdb", + const module = DatabaseKitModule.forFeature('FEATURE_DB', { + type: 'mongo', + connectionString: 'mongodb://localhost:27017/testdb', }); const provider = (module.providers || []).find( - (entry) => (entry as { provide: string }).provide === "FEATURE_DB", + (entry) => (entry as { provide: string }).provide === 'FEATURE_DB', ) as { useFactory: () => Promise }; await provider.useFactory(); diff --git a/src/database-kit.module.ts b/src/database-kit.module.ts index 63c97c9..d4a1c83 100644 --- a/src/database-kit.module.ts +++ b/src/database-kit.module.ts @@ -6,19 +6,19 
@@ import { Module, Provider, Logger, -} from "@nestjs/common"; +} from '@nestjs/common'; import { DATABASE_TOKEN, DATABASE_OPTIONS_TOKEN, -} from "./config/database.constants"; +} from './config/database.constants'; import { DatabaseConfig, DatabaseKitModuleOptions, DatabaseKitModuleAsyncOptions, -} from "./contracts/database.contracts"; -import { DatabaseService } from "./services/database.service"; -import { LoggerService } from "./services/logger.service"; +} from './contracts/database.contracts'; +import { DatabaseService } from './services/database.service'; +import { LoggerService } from './services/logger.service'; /** * DatabaseKitModule - Main NestJS module for DatabaseKit. @@ -135,7 +135,7 @@ export class DatabaseKitModule { return { module: DatabaseKitModule, - imports: (options.imports || []) as DynamicModule["imports"], + imports: (options.imports || []) as DynamicModule['imports'], providers, exports: [DATABASE_TOKEN, LoggerService], }; diff --git a/src/filters/database-exception.filter.spec.ts b/src/filters/database-exception.filter.spec.ts index 6e7efee..7a89aea 100644 --- a/src/filters/database-exception.filter.spec.ts +++ b/src/filters/database-exception.filter.spec.ts @@ -2,37 +2,22 @@ import { BadRequestException, HttpStatus, InternalServerErrorException, -} from "@nestjs/common"; -import type { ArgumentsHost } from "@nestjs/common"; - -import { DatabaseExceptionFilter } from "./database-exception.filter"; - -const createHost = () => { - const response = { - status: jest.fn().mockReturnThis(), - json: jest.fn(), - }; - const request = { url: "/test" }; - const host = { - switchToHttp: () => ({ - getResponse: () => response, - getRequest: () => request, - }), - } as unknown as ArgumentsHost; - - return { host, response }; -}; - -describe("DatabaseExceptionFilter", () => { +} from '@nestjs/common'; + +import { createMockHost, testExceptionMapping } from '../test/test.utils'; + +import { DatabaseExceptionFilter } from 
'./database-exception.filter'; + +describe('DatabaseExceptionFilter', () => { let filter: DatabaseExceptionFilter; beforeEach(() => { filter = new DatabaseExceptionFilter(); }); - it("should handle HttpException", () => { - const { host, response } = createHost(); - const exception = new BadRequestException("Bad request"); + it('should handle HttpException', () => { + const { host, response } = createMockHost(); + const exception = new BadRequestException('Bad request'); filter.catch(exception, host); @@ -40,98 +25,60 @@ describe("DatabaseExceptionFilter", () => { expect(response.json).toHaveBeenCalledWith( expect.objectContaining({ statusCode: HttpStatus.BAD_REQUEST, - error: "BadRequestException", - path: "/test", + error: 'BadRequestException', + path: '/test', }), ); }); - it("should handle MongoDB duplicate key error", () => { - const { host, response } = createHost(); - const exception = { - name: "MongoServerError", - code: 11000, - message: "duplicate key", - }; - - filter.catch(exception, host); - - expect(response.status).toHaveBeenCalledWith(HttpStatus.CONFLICT); - expect(response.json).toHaveBeenCalledWith( - expect.objectContaining({ - statusCode: HttpStatus.CONFLICT, - error: "DuplicateKeyError", - }), + it('should handle MongoDB duplicate key error', () => { + testExceptionMapping( + filter, + { name: 'MongoServerError', code: 11000, message: 'duplicate key' }, + HttpStatus.CONFLICT, + 'DuplicateKeyError', ); }); - it("should handle MongoDB cast error", () => { - const { host, response } = createHost(); - const exception = { name: "CastError", message: "invalid id" }; - - filter.catch(exception, host); - - expect(response.status).toHaveBeenCalledWith(HttpStatus.BAD_REQUEST); - expect(response.json).toHaveBeenCalledWith( - expect.objectContaining({ - statusCode: HttpStatus.BAD_REQUEST, - error: "CastError", - }), + it('should handle MongoDB cast error', () => { + testExceptionMapping( + filter, + { name: 'CastError', message: 'invalid id' }, + 
HttpStatus.BAD_REQUEST, + 'CastError', ); }); - it("should handle MongoDB validation error", () => { - const { host, response } = createHost(); - const exception = { name: "ValidationError", message: "invalid" }; - - filter.catch(exception, host); - - expect(response.status).toHaveBeenCalledWith(HttpStatus.BAD_REQUEST); - expect(response.json).toHaveBeenCalledWith( - expect.objectContaining({ - statusCode: HttpStatus.BAD_REQUEST, - error: "ValidationError", - }), + it('should handle MongoDB validation error', () => { + testExceptionMapping( + filter, + { name: 'ValidationError', message: 'invalid' }, + HttpStatus.BAD_REQUEST, + 'ValidationError', ); }); - it("should handle postgres unique constraint", () => { - const { host, response } = createHost(); - const exception = { - code: "23505", - message: "unique", - constraint: "users_email_key", - }; - - filter.catch(exception, host); - - expect(response.status).toHaveBeenCalledWith(HttpStatus.CONFLICT); - expect(response.json).toHaveBeenCalledWith( - expect.objectContaining({ - statusCode: HttpStatus.CONFLICT, - error: "UniqueConstraintViolation", - }), + it('should handle postgres unique constraint', () => { + testExceptionMapping( + filter, + { code: '23505', message: 'unique', constraint: 'users_email_key' }, + HttpStatus.CONFLICT, + 'UniqueConstraintViolation', ); }); - it("should handle postgres foreign key error", () => { - const { host, response } = createHost(); - const exception = { code: "23503", message: "fk" }; - - filter.catch(exception, host); - - expect(response.status).toHaveBeenCalledWith(HttpStatus.BAD_REQUEST); - expect(response.json).toHaveBeenCalledWith( - expect.objectContaining({ - statusCode: HttpStatus.BAD_REQUEST, - error: "ForeignKeyViolation", - }), + it('should handle postgres foreign key error', () => { + testExceptionMapping( + filter, + { code: '23503', message: 'fk' }, + HttpStatus.BAD_REQUEST, + 'ForeignKeyViolation', ); }); - it("should handle generic errors", () => { - const { 
host, response } = createHost(); - const exception = new InternalServerErrorException("boom"); + it('should handle generic errors', () => { + const { host, response } = createMockHost(); + const exception = new InternalServerErrorException('boom'); filter.catch(exception, host); @@ -141,15 +88,15 @@ describe("DatabaseExceptionFilter", () => { expect(response.json).toHaveBeenCalledWith( expect.objectContaining({ statusCode: HttpStatus.INTERNAL_SERVER_ERROR, - error: "InternalServerErrorException", + error: 'InternalServerErrorException', }), ); }); - it("should handle unknown errors", () => { - const { host, response } = createHost(); + it('should handle unknown errors', () => { + const { host, response } = createMockHost(); - filter.catch("unknown", host); + filter.catch('unknown', host); expect(response.status).toHaveBeenCalledWith( HttpStatus.INTERNAL_SERVER_ERROR, @@ -157,7 +104,7 @@ describe("DatabaseExceptionFilter", () => { expect(response.json).toHaveBeenCalledWith( expect.objectContaining({ statusCode: HttpStatus.INTERNAL_SERVER_ERROR, - error: "InternalServerError", + error: 'InternalServerError', }), ); }); diff --git a/src/filters/database-exception.filter.ts b/src/filters/database-exception.filter.ts index 3923e01..cde9041 100644 --- a/src/filters/database-exception.filter.ts +++ b/src/filters/database-exception.filter.ts @@ -7,7 +7,7 @@ import { HttpException, HttpStatus, Logger, -} from "@nestjs/common"; +} from '@nestjs/common'; /** * Standard error response format. @@ -53,7 +53,7 @@ export class DatabaseExceptionFilter implements ExceptionFilter { message, error, timestamp: new Date().toISOString(), - path: request?.url || "/", + path: request?.url || '/', }; // Log the error @@ -74,7 +74,7 @@ export class DatabaseExceptionFilter implements ExceptionFilter { if (exception instanceof HttpException) { const response = exception.getResponse(); const message = - typeof response === "string" + typeof response === 'string' ? 
response : (response as { message?: string }).message || exception.message; @@ -100,7 +100,7 @@ export class DatabaseExceptionFilter implements ExceptionFilter { return { statusCode: HttpStatus.BAD_REQUEST, message: (exception as { message: string }).message, - error: "ValidationError", + error: 'ValidationError', }; } @@ -108,16 +108,16 @@ export class DatabaseExceptionFilter implements ExceptionFilter { if (exception instanceof Error) { return { statusCode: HttpStatus.INTERNAL_SERVER_ERROR, - message: exception.message || "An unexpected error occurred", - error: exception.name || "InternalServerError", + message: exception.message || 'An unexpected error occurred', + error: exception.name || 'InternalServerError', }; } // Fallback for unknown errors return { statusCode: HttpStatus.INTERNAL_SERVER_ERROR, - message: "An unexpected error occurred", - error: "InternalServerError", + message: 'An unexpected error occurred', + error: 'InternalServerError', }; } @@ -125,14 +125,14 @@ export class DatabaseExceptionFilter implements ExceptionFilter { * Checks if the exception is a MongoDB error. 
*/ private isMongoError(exception: unknown): boolean { - if (!exception || typeof exception !== "object") return false; + if (!exception || typeof exception !== 'object') return false; const err = exception as { name?: string }; return ( - err.name === "MongoError" || - err.name === "MongoServerError" || - err.name === "MongooseError" || - err.name === "CastError" || - err.name === "ValidationError" + err.name === 'MongoError' || + err.name === 'MongoServerError' || + err.name === 'MongooseError' || + err.name === 'CastError' || + err.name === 'ValidationError' ); } @@ -150,33 +150,33 @@ export class DatabaseExceptionFilter implements ExceptionFilter { if (err.code === 11000) { return { statusCode: HttpStatus.CONFLICT, - message: "A record with this value already exists", - error: "DuplicateKeyError", + message: 'A record with this value already exists', + error: 'DuplicateKeyError', }; } // Cast error (invalid ObjectId, etc.) - if (err.name === "CastError") { + if (err.name === 'CastError') { return { statusCode: HttpStatus.BAD_REQUEST, - message: "Invalid ID format", - error: "CastError", + message: 'Invalid ID format', + error: 'CastError', }; } // Mongoose validation error - if (err.name === "ValidationError") { + if (err.name === 'ValidationError') { return { statusCode: HttpStatus.BAD_REQUEST, message: err.message, - error: "ValidationError", + error: 'ValidationError', }; } return { statusCode: HttpStatus.INTERNAL_SERVER_ERROR, - message: "Database operation failed", - error: "DatabaseError", + message: 'Database operation failed', + error: 'DatabaseError', }; } @@ -184,10 +184,10 @@ export class DatabaseExceptionFilter implements ExceptionFilter { * Checks if the exception is a Knex/PostgreSQL error. 
*/ private isKnexError(exception: unknown): boolean { - if (!exception || typeof exception !== "object") return false; + if (!exception || typeof exception !== 'object') return false; const err = exception as { code?: string }; // PostgreSQL error codes start with numbers - return typeof err.code === "string" && /^[0-9A-Z]{5}$/.test(err.code); + return typeof err.code === 'string' && /^[0-9A-Z]{5}$/.test(err.code); } /** @@ -205,54 +205,54 @@ export class DatabaseExceptionFilter implements ExceptionFilter { }; // Unique constraint violation - if (err.code === "23505") { + if (err.code === '23505') { return { statusCode: HttpStatus.CONFLICT, - message: `A record with this value already exists${err.constraint ? ` (${err.constraint})` : ""}`, - error: "UniqueConstraintViolation", + message: `A record with this value already exists${err.constraint ? ` (${err.constraint})` : ''}`, + error: 'UniqueConstraintViolation', }; } // Foreign key violation - if (err.code === "23503") { + if (err.code === '23503') { return { statusCode: HttpStatus.BAD_REQUEST, - message: "Referenced record does not exist", - error: "ForeignKeyViolation", + message: 'Referenced record does not exist', + error: 'ForeignKeyViolation', }; } // Not null violation - if (err.code === "23502") { + if (err.code === '23502') { return { statusCode: HttpStatus.BAD_REQUEST, - message: "Required field is missing", - error: "NotNullViolation", + message: 'Required field is missing', + error: 'NotNullViolation', }; } // Check constraint violation - if (err.code === "23514") { + if (err.code === '23514') { return { statusCode: HttpStatus.BAD_REQUEST, - message: "Value does not meet constraint requirements", - error: "CheckConstraintViolation", + message: 'Value does not meet constraint requirements', + error: 'CheckConstraintViolation', }; } // Connection errors - if (err.code === "08006" || err.code === "08001" || err.code === "08004") { + if (err.code === '08006' || err.code === '08001' || err.code === '08004') 
{ return { statusCode: HttpStatus.SERVICE_UNAVAILABLE, - message: "Database connection error", - error: "ConnectionError", + message: 'Database connection error', + error: 'ConnectionError', }; } return { statusCode: HttpStatus.INTERNAL_SERVER_ERROR, - message: "Database operation failed", - error: "DatabaseError", + message: 'Database operation failed', + error: 'DatabaseError', }; } @@ -260,9 +260,9 @@ export class DatabaseExceptionFilter implements ExceptionFilter { * Checks if the exception is a validation error. */ private isValidationError(exception: unknown): boolean { - if (!exception || typeof exception !== "object") return false; + if (!exception || typeof exception !== 'object') return false; const err = exception as { name?: string }; - return err.name === "ValidationError"; + return err.name === 'ValidationError'; } /** diff --git a/src/index.ts b/src/index.ts index 36ce78f..64dce80 100644 --- a/src/index.ts +++ b/src/index.ts @@ -17,14 +17,14 @@ // Module (Primary export) // ----------------------------------------------------------------------------- -export { DatabaseKitModule } from "./database-kit.module"; +export { DatabaseKitModule } from './database-kit.module'; // ----------------------------------------------------------------------------- // Services (For direct injection if needed) // ----------------------------------------------------------------------------- -export { DatabaseService } from "./services/database.service"; -export { LoggerService } from "./services/logger.service"; +export { DatabaseService } from './services/database.service'; +export { LoggerService } from './services/logger.service'; // ----------------------------------------------------------------------------- // Decorators (For dependency injection) @@ -33,25 +33,25 @@ export { LoggerService } from "./services/logger.service"; export { InjectDatabase, InjectDatabaseByToken, -} from "./middleware/database.decorators"; +} from './middleware/database.decorators'; // 
----------------------------------------------------------------------------- // Filters (For global exception handling) // ----------------------------------------------------------------------------- -export { DatabaseExceptionFilter } from "./filters/database-exception.filter"; +export { DatabaseExceptionFilter } from './filters/database-exception.filter'; // ----------------------------------------------------------------------------- // Configuration Helpers (For advanced configuration) // ----------------------------------------------------------------------------- -export { DatabaseConfigHelper } from "./config/database.config"; +export { DatabaseConfigHelper } from './config/database.config'; export { DATABASE_TOKEN, DATABASE_OPTIONS_TOKEN, ENV_KEYS, DEFAULTS, -} from "./config/database.constants"; +} from './config/database.constants'; // ----------------------------------------------------------------------------- // Contracts (Types and Interfaces for consumers) @@ -101,7 +101,7 @@ export { // Constants DATABASE_KIT_CONSTANTS, -} from "./contracts/database.contracts"; +} from './contracts/database.contracts'; // ----------------------------------------------------------------------------- // Utilities (For common operations) @@ -113,7 +113,7 @@ export { createPageResult, parseSortString, calculateOffset, -} from "./utils/pagination.utils"; +} from './utils/pagination.utils'; export { isValidMongoId, @@ -123,7 +123,7 @@ export { validateRequiredFields, pickFields, omitFields, -} from "./utils/validation.utils"; +} from './utils/validation.utils'; // ============================================================================= // NOT EXPORTED (Internal implementation details) diff --git a/src/middleware/database.decorators.spec.ts b/src/middleware/database.decorators.spec.ts index ea1a1dc..fd6e281 100644 --- a/src/middleware/database.decorators.spec.ts +++ b/src/middleware/database.decorators.spec.ts @@ -1,27 +1,27 @@ -import { Inject } from 
"@nestjs/common"; +import { Inject } from '@nestjs/common'; -import { DATABASE_TOKEN } from "../config/database.constants"; +import { DATABASE_TOKEN } from '../config/database.constants'; -import { InjectDatabase, InjectDatabaseByToken } from "./database.decorators"; +import { InjectDatabase, InjectDatabaseByToken } from './database.decorators'; -jest.mock("@nestjs/common", () => { +jest.mock('@nestjs/common', () => { return { - Inject: jest.fn(() => "decorator"), + Inject: jest.fn(() => 'decorator'), }; }); -describe("database.decorators", () => { - it("should create InjectDatabase decorator with DATABASE_TOKEN", () => { +describe('database.decorators', () => { + it('should create InjectDatabase decorator with DATABASE_TOKEN', () => { const decorator = InjectDatabase(); expect(Inject).toHaveBeenCalledWith(DATABASE_TOKEN); - expect(decorator).toBe("decorator"); + expect(decorator).toBe('decorator'); }); - it("should create InjectDatabaseByToken decorator with custom token", () => { - const decorator = InjectDatabaseByToken("ANALYTICS_DB"); + it('should create InjectDatabaseByToken decorator with custom token', () => { + const decorator = InjectDatabaseByToken('ANALYTICS_DB'); - expect(Inject).toHaveBeenCalledWith("ANALYTICS_DB"); - expect(decorator).toBe("decorator"); + expect(Inject).toHaveBeenCalledWith('ANALYTICS_DB'); + expect(decorator).toBe('decorator'); }); }); diff --git a/src/middleware/database.decorators.ts b/src/middleware/database.decorators.ts index 20b985b..3756efd 100644 --- a/src/middleware/database.decorators.ts +++ b/src/middleware/database.decorators.ts @@ -1,8 +1,8 @@ // src/middleware/database.decorators.ts -import { Inject } from "@nestjs/common"; +import { Inject } from '@nestjs/common'; -import { DATABASE_TOKEN } from "../config/database.constants"; +import { DATABASE_TOKEN } from '../config/database.constants'; /** * Decorator to inject the DatabaseService instance. 
diff --git a/src/services/database.service.spec.ts b/src/services/database.service.spec.ts index 12aaf15..8d8ee1d 100644 --- a/src/services/database.service.spec.ts +++ b/src/services/database.service.spec.ts @@ -1,54 +1,42 @@ // src/services/database.service.spec.ts -import { Logger } from "@nestjs/common"; +import { Logger } from '@nestjs/common'; -import { MongoAdapter } from "../adapters/mongo.adapter"; -import { PostgresAdapter } from "../adapters/postgres.adapter"; +import { MongoAdapter } from '../adapters/mongo.adapter'; +import { PostgresAdapter } from '../adapters/postgres.adapter'; import type { MongoDatabaseConfig, PostgresDatabaseConfig, -} from "../contracts/database.contracts"; +} from '../contracts/database.contracts'; +import { + createMockAdapter, + testDatabaseServiceBasics, +} from '../test/test.utils'; -import { DatabaseService } from "./database.service"; +import { DatabaseService } from './database.service'; -jest.mock("../adapters/mongo.adapter", () => { +jest.mock('../adapters/mongo.adapter', () => { return { - MongoAdapter: jest.fn().mockImplementation(() => ({ - connect: jest.fn().mockResolvedValue(undefined), - disconnect: jest.fn().mockResolvedValue(undefined), - isConnected: jest.fn().mockReturnValue(true), - createRepository: jest.fn().mockReturnValue({ create: jest.fn() }), - withTransaction: jest.fn(async (cb: (ctx: unknown) => unknown) => cb({})), - healthCheck: jest - .fn() - .mockResolvedValue({ healthy: true, responseTimeMs: 1, type: "mongo" }), - })), + MongoAdapter: jest + .fn() + .mockImplementation(() => createMockAdapter('mongo')), }; }); -jest.mock("../adapters/postgres.adapter", () => { +jest.mock('../adapters/postgres.adapter', () => { return { - PostgresAdapter: jest.fn().mockImplementation(() => ({ - connect: jest.fn().mockReturnValue(undefined), - disconnect: jest.fn().mockResolvedValue(undefined), - isConnected: jest.fn().mockReturnValue(true), - createRepository: jest.fn().mockReturnValue({ create: jest.fn() }), - 
withTransaction: jest.fn(async (cb: (ctx: unknown) => unknown) => cb({})), - healthCheck: jest.fn().mockResolvedValue({ - healthy: true, - responseTimeMs: 2, - type: "postgres", - }), - })), + PostgresAdapter: jest + .fn() + .mockImplementation(() => createMockAdapter('postgres')), }; }); -describe("DatabaseService", () => { - describe("MongoDB", () => { +describe('DatabaseService', () => { + describe('MongoDB', () => { let service: DatabaseService; const mockConfig: MongoDatabaseConfig = { - type: "mongo", - connectionString: "mongodb://localhost:27017/testdb", + type: 'mongo', + connectionString: 'mongodb://localhost:27017/testdb', }; beforeEach(() => { @@ -60,85 +48,17 @@ describe("DatabaseService", () => { jest.clearAllMocks(); }); - it("should be defined", () => { - expect(service).toBeDefined(); - }); - - it("should return correct database type", () => { - expect(service.type).toBe("mongo"); - }); - - it("should not be connected initially", () => { - expect(service.isConnected()).toBe(false); - }); - - it("should throw when creating postgres repository with mongo config", () => { - expect(() => - service.createPostgresRepository({ - table: "users", - }), - ).toThrow('Database type is "mongo"'); - }); - - it("should throw when using withPostgresTransaction with mongo config", async () => { - await expect( - service.withPostgresTransaction(async () => { - return "test"; - }), - ).rejects.toThrow('Database type is "mongo"'); - }); - - it("should have withMongoTransaction method", () => { - expect(typeof service.withMongoTransaction).toBe("function"); - }); - - it("should have withTransaction method", () => { - expect(typeof service.withTransaction).toBe("function"); - }); - - it("should connect and initialize mongo adapter", async () => { - await service.connect(); - - expect(MongoAdapter).toHaveBeenCalledTimes(1); - const adapterInstance = (MongoAdapter as jest.Mock).mock.results[0] - ?.value as { connect: jest.Mock }; - 
expect(adapterInstance.connect).toHaveBeenCalled(); - expect(service.isConnected()).toBe(true); - }); - - it("should create mongo repository through adapter", () => { - const repo = service.createMongoRepository({ model: {} }); - - expect(repo).toBeDefined(); - const adapterInstance = (MongoAdapter as jest.Mock).mock.results[0] - ?.value as { createRepository: jest.Mock }; - expect(adapterInstance.createRepository).toHaveBeenCalledWith({ - model: {}, - }); - }); - - it("should run mongo transaction via adapter", async () => { - const result = await service.withMongoTransaction(async () => "ok"); - - expect(result).toBe("ok"); - const adapterInstance = (MongoAdapter as jest.Mock).mock.results[0] - ?.value as { withTransaction: jest.Mock }; - expect(adapterInstance.withTransaction).toHaveBeenCalled(); - }); - - it("should return health check from mongo adapter", async () => { - const result = await service.healthCheck(); - - expect(result.healthy).toBe(true); - expect(result.type).toBe("mongo"); + testDatabaseServiceBasics('mongo', () => service, MongoAdapter, { + repo: 'postgres', + transaction: 'withPostgresTransaction', }); }); - describe("PostgreSQL", () => { + describe('PostgreSQL', () => { let service: DatabaseService; const mockConfig: PostgresDatabaseConfig = { - type: "postgres", - connectionString: "postgresql://localhost:5432/testdb", + type: 'postgres', + connectionString: 'postgresql://localhost:5432/testdb', }; beforeEach(() => { @@ -150,89 +70,25 @@ describe("DatabaseService", () => { jest.clearAllMocks(); }); - it("should be defined", () => { - expect(service).toBeDefined(); - }); - - it("should return correct database type", () => { - expect(service.type).toBe("postgres"); - }); - - it("should throw when creating mongo repository with postgres config", () => { - expect(() => - service.createMongoRepository({ - model: {}, - }), - ).toThrow('Database type is "postgres"'); - }); - - it("should throw when using withMongoTransaction with postgres config", 
async () => { - await expect( - service.withMongoTransaction(async () => { - return "test"; - }), - ).rejects.toThrow('Database type is "postgres"'); - }); - - it("should have withPostgresTransaction method", () => { - expect(typeof service.withPostgresTransaction).toBe("function"); - }); - - it("should have withTransaction method", () => { - expect(typeof service.withTransaction).toBe("function"); - }); - - it("should have healthCheck method", () => { - expect(typeof service.healthCheck).toBe("function"); + testDatabaseServiceBasics('postgres', () => service, PostgresAdapter, { + repo: 'mongo', + transaction: 'withMongoTransaction', }); - it("should connect and initialize postgres adapter", async () => { - await service.connect(); - - expect(PostgresAdapter).toHaveBeenCalledTimes(1); - const adapterInstance = (PostgresAdapter as jest.Mock).mock.results[0] - ?.value as { connect: jest.Mock }; - expect(adapterInstance.connect).toHaveBeenCalled(); - expect(service.isConnected()).toBe(true); - }); - - it("should create postgres repository through adapter", () => { - const repo = service.createPostgresRepository({ table: "users" }); - - expect(repo).toBeDefined(); - const adapterInstance = (PostgresAdapter as jest.Mock).mock.results[0] - ?.value as { createRepository: jest.Mock }; - expect(adapterInstance.createRepository).toHaveBeenCalledWith({ - table: "users", - }); - }); - - it("should run postgres transaction via adapter", async () => { - const result = await service.withPostgresTransaction(async () => "ok"); - - expect(result).toBe("ok"); - const adapterInstance = (PostgresAdapter as jest.Mock).mock.results[0] - ?.value as { withTransaction: jest.Mock }; - expect(adapterInstance.withTransaction).toHaveBeenCalled(); - }); - - it("should return health check from postgres adapter", async () => { - const result = await service.healthCheck(); - - expect(result.healthy).toBe(true); - expect(result.type).toBe("postgres"); + it('should have healthCheck method', () => { + 
expect(typeof service.healthCheck).toBe('function'); }); }); - describe("disconnect", () => { - it("should log and rethrow disconnect errors", async () => { + describe('disconnect', () => { + it('should log and rethrow disconnect errors', async () => { const service = new DatabaseService({ - type: "mongo", - connectionString: "mongodb://localhost:27017/testdb", + type: 'mongo', + connectionString: 'mongodb://localhost:27017/testdb', }); - const error = new Error("disconnect failed"); + const error = new Error('disconnect failed'); const loggerSpy = jest - .spyOn(Logger.prototype, "error") + .spyOn(Logger.prototype, 'error') .mockImplementation(() => undefined); ( @@ -241,46 +97,46 @@ describe("DatabaseService", () => { disconnect: jest.fn().mockRejectedValue(error), }; - await expect(service.disconnect()).rejects.toThrow("disconnect failed"); + await expect(service.disconnect()).rejects.toThrow('disconnect failed'); expect(loggerSpy).toHaveBeenCalled(); }); }); - describe("adapter accessors", () => { - it("should throw when getMongoAdapter is called for postgres", () => { + describe('adapter accessors', () => { + it('should throw when getMongoAdapter is called for postgres', () => { const service = new DatabaseService({ - type: "postgres", - connectionString: "postgresql://localhost:5432/testdb", + type: 'postgres', + connectionString: 'postgresql://localhost:5432/testdb', }); expect(() => service.getMongoAdapter()).toThrow( - "getMongoAdapter() is only available for MongoDB connections", + 'getMongoAdapter() is only available for MongoDB connections', ); }); - it("should throw when getPostgresAdapter is called for mongo", () => { + it('should throw when getPostgresAdapter is called for mongo', () => { const service = new DatabaseService({ - type: "mongo", - connectionString: "mongodb://localhost:27017/testdb", + type: 'mongo', + connectionString: 'mongodb://localhost:27017/testdb', }); expect(() => service.getPostgresAdapter()).toThrow( - "getPostgresAdapter() is 
only available for PostgreSQL connections", + 'getPostgresAdapter() is only available for PostgreSQL connections', ); }); }); - describe("healthCheck", () => { - it("should return unhealthy result for unsupported types", async () => { + describe('healthCheck', () => { + it('should return unhealthy result for unsupported types', async () => { const service = new DatabaseService({ - type: "sqlite" as unknown as "mongo", - connectionString: "file::memory:", + type: 'sqlite' as unknown as 'mongo', + connectionString: 'file::memory:', }); const result = await service.healthCheck(); expect(result.healthy).toBe(false); - expect(result.error).toContain("Unsupported database type"); + expect(result.error).toContain('Unsupported database type'); }); }); }); diff --git a/src/services/database.service.ts b/src/services/database.service.ts index b6ac126..bc45563 100644 --- a/src/services/database.service.ts +++ b/src/services/database.service.ts @@ -1,7 +1,7 @@ -import { Injectable, Logger, OnModuleDestroy } from "@nestjs/common"; +import { Injectable, Logger, OnModuleDestroy } from '@nestjs/common'; -import { MongoAdapter } from "../adapters/mongo.adapter"; -import { PostgresAdapter } from "../adapters/postgres.adapter"; +import { MongoAdapter } from '../adapters/mongo.adapter'; +import { PostgresAdapter } from '../adapters/postgres.adapter'; import { DatabaseConfig, MongoDatabaseConfig, @@ -14,7 +14,7 @@ import { TransactionOptions, TransactionCallback, HealthCheckResult, -} from "../contracts/database.contracts"; +} from '../contracts/database.contracts'; /** * Main database service that provides a unified interface @@ -54,14 +54,14 @@ export class DatabaseService implements OnModuleDestroy { * Gracefully closes all database connections. */ async onModuleDestroy(): Promise { - this.logger.log("Cleaning up database connections..."); + this.logger.log('Cleaning up database connections...'); await this.disconnect(); } /** * Returns the current database type. 
*/ - get type(): "mongo" | "postgres" { + get type(): 'mongo' | 'postgres' { return this.config.type; } @@ -70,9 +70,9 @@ export class DatabaseService implements OnModuleDestroy { */ isConnected(): boolean { switch (this.config.type) { - case "mongo": + case 'mongo': return this.mongoAdapter?.isConnected() ?? false; - case "postgres": + case 'postgres': return this.postgresAdapter?.isConnected() ?? false; default: return false; @@ -85,25 +85,25 @@ export class DatabaseService implements OnModuleDestroy { */ async connect(): Promise { switch (this.config.type) { - case "mongo": { + case 'mongo': { if (!this.mongoAdapter) { this.mongoAdapter = new MongoAdapter( this.config as MongoDatabaseConfig, ); } await this.mongoAdapter.connect(); - this.logger.log("MongoDB connection established"); + this.logger.log('MongoDB connection established'); break; } - case "postgres": { + case 'postgres': { if (!this.postgresAdapter) { this.postgresAdapter = new PostgresAdapter( this.config as PostgresDatabaseConfig, ); } this.postgresAdapter.connect(); - this.logger.log("PostgreSQL connection pool established"); + this.logger.log('PostgreSQL connection pool established'); break; } @@ -132,9 +132,9 @@ export class DatabaseService implements OnModuleDestroy { this.postgresAdapter = undefined; } - this.logger.log("All database connections closed"); + this.logger.log('All database connections closed'); } catch (error) { - this.logger.error("Error during database disconnect", error); + this.logger.error('Error during database disconnect', error); throw error; } } @@ -155,7 +155,7 @@ export class DatabaseService implements OnModuleDestroy { createMongoRepository( options: MongoRepositoryOptions, ): Repository { - if (this.config.type !== "mongo") { + if (this.config.type !== 'mongo') { throw new Error( `Database type is "${this.config.type}". 
createMongoRepository can only be used when type === "mongo".`, ); @@ -187,7 +187,7 @@ export class DatabaseService implements OnModuleDestroy { createPostgresRepository( cfg: PostgresEntityConfig, ): Repository { - if (this.config.type !== "postgres") { + if (this.config.type !== 'postgres') { throw new Error( `Database type is "${this.config.type}". createPostgresRepository can only be used when type === "postgres".`, ); @@ -210,9 +210,9 @@ export class DatabaseService implements OnModuleDestroy { * @throws Error if database type is not 'mongo' */ getMongoAdapter(): MongoAdapter { - if (this.config.type !== "mongo") { + if (this.config.type !== 'mongo') { throw new Error( - "getMongoAdapter() is only available for MongoDB connections", + 'getMongoAdapter() is only available for MongoDB connections', ); } @@ -230,9 +230,9 @@ export class DatabaseService implements OnModuleDestroy { * @throws Error if database type is not 'postgres' */ getPostgresAdapter(): PostgresAdapter { - if (this.config.type !== "postgres") { + if (this.config.type !== 'postgres') { throw new Error( - "getPostgresAdapter() is only available for PostgreSQL connections", + 'getPostgresAdapter() is only available for PostgreSQL connections', ); } @@ -269,7 +269,7 @@ export class DatabaseService implements OnModuleDestroy { callback: TransactionCallback, options?: TransactionOptions, ): Promise { - if (this.config.type !== "mongo") { + if (this.config.type !== 'mongo') { throw new Error( `Database type is "${this.config.type}". withMongoTransaction can only be used when type === "mongo".`, ); @@ -301,7 +301,7 @@ export class DatabaseService implements OnModuleDestroy { callback: TransactionCallback, options?: TransactionOptions, ): Promise { - if (this.config.type !== "postgres") { + if (this.config.type !== 'postgres') { throw new Error( `Database type is "${this.config.type}". 
withPostgresTransaction can only be used when type === "postgres".`, ); @@ -336,12 +336,12 @@ export class DatabaseService implements OnModuleDestroy { options?: TransactionOptions, ): Promise { switch (this.config.type) { - case "mongo": + case 'mongo': return this.withMongoTransaction( callback as TransactionCallback, options, ); - case "postgres": + case 'postgres': return this.withPostgresTransaction( callback as TransactionCallback, options, @@ -376,11 +376,11 @@ export class DatabaseService implements OnModuleDestroy { */ async healthCheck(): Promise { switch (this.config.type) { - case "mongo": { + case 'mongo': { const adapter = this.getMongoAdapter(); return adapter.healthCheck(); } - case "postgres": { + case 'postgres': { const adapter = this.getPostgresAdapter(); return adapter.healthCheck(); } diff --git a/src/services/logger.service.spec.ts b/src/services/logger.service.spec.ts index 22038a0..17cd142 100644 --- a/src/services/logger.service.spec.ts +++ b/src/services/logger.service.spec.ts @@ -1,71 +1,71 @@ -import { Logger } from "@nestjs/common"; +import { Logger } from '@nestjs/common'; -import { LoggerService } from "./logger.service"; +import { LoggerService } from './logger.service'; -describe("LoggerService", () => { +describe('LoggerService', () => { let service: LoggerService; beforeEach(() => { service = new LoggerService(); }); - it("should log messages", () => { + it('should log messages', () => { const logSpy = jest - .spyOn(Logger.prototype, "log") + .spyOn(Logger.prototype, 'log') .mockImplementation(() => undefined); - service.log("message", "context"); + service.log('message', 'context'); - expect(logSpy).toHaveBeenCalledWith("message", "context"); + expect(logSpy).toHaveBeenCalledWith('message', 'context'); }); - it("should log errors", () => { + it('should log errors', () => { const errorSpy = jest - .spyOn(Logger.prototype, "error") + .spyOn(Logger.prototype, 'error') .mockImplementation(() => undefined); - service.error("error", 
"trace", "context"); + service.error('error', 'trace', 'context'); - expect(errorSpy).toHaveBeenCalledWith("error", "trace", "context"); + expect(errorSpy).toHaveBeenCalledWith('error', 'trace', 'context'); }); - it("should log warnings", () => { + it('should log warnings', () => { const warnSpy = jest - .spyOn(Logger.prototype, "warn") + .spyOn(Logger.prototype, 'warn') .mockImplementation(() => undefined); - service.warn("warning", "context"); + service.warn('warning', 'context'); - expect(warnSpy).toHaveBeenCalledWith("warning", "context"); + expect(warnSpy).toHaveBeenCalledWith('warning', 'context'); }); - it("should log debug", () => { + it('should log debug', () => { const debugSpy = jest - .spyOn(Logger.prototype, "debug") + .spyOn(Logger.prototype, 'debug') .mockImplementation(() => undefined); - service.debug("debug", "context"); + service.debug('debug', 'context'); - expect(debugSpy).toHaveBeenCalledWith("debug", "context"); + expect(debugSpy).toHaveBeenCalledWith('debug', 'context'); }); - it("should log verbose", () => { + it('should log verbose', () => { const verboseSpy = jest - .spyOn(Logger.prototype, "verbose") + .spyOn(Logger.prototype, 'verbose') .mockImplementation(() => undefined); - service.verbose("verbose", "context"); + service.verbose('verbose', 'context'); - expect(verboseSpy).toHaveBeenCalledWith("verbose", "context"); + expect(verboseSpy).toHaveBeenCalledWith('verbose', 'context'); }); - it("should set log levels", () => { + it('should set log levels', () => { const overrideSpy = jest - .spyOn(Logger, "overrideLogger") + .spyOn(Logger, 'overrideLogger') .mockImplementation(() => undefined); - service.setLogLevels(["log", "error"]); + service.setLogLevels(['log', 'error']); - expect(overrideSpy).toHaveBeenCalledWith(["log", "error"]); + expect(overrideSpy).toHaveBeenCalledWith(['log', 'error']); }); }); diff --git a/src/services/logger.service.ts b/src/services/logger.service.ts index d0ee7a1..a568912 100644 --- 
a/src/services/logger.service.ts +++ b/src/services/logger.service.ts @@ -1,6 +1,6 @@ // src/services/logger.service.ts -import { Injectable, Logger, LogLevel } from "@nestjs/common"; +import { Injectable, Logger, LogLevel } from '@nestjs/common'; /** * Centralized logging service for DatabaseKit. @@ -20,7 +20,7 @@ import { Injectable, Logger, LogLevel } from "@nestjs/common"; */ @Injectable() export class LoggerService { - private readonly logger = new Logger("DatabaseKit"); + private readonly logger = new Logger('DatabaseKit'); /** * Logs a message at the 'log' level. diff --git a/src/test/test.utils.ts b/src/test/test.utils.ts new file mode 100644 index 0000000..e09faab --- /dev/null +++ b/src/test/test.utils.ts @@ -0,0 +1,531 @@ +/** + * Shared test utilities and mock factories for DatabaseKit tests. + * Reduces code duplication across test files. + */ + +import { NotFoundException, BadRequestException } from '@nestjs/common'; + +import type { + Repository, + PageResult, + HealthCheckResult, +} from '../contracts/database.contracts'; + +/** + * Creates a mock repository with default implementations. + * Override methods as needed in tests. 
+ */ +export function createMockRepository( + overrides?: Partial>, +): Repository { + return { + async create(data: Partial): Promise { + return { id: 'test-id', ...data } as T; + }, + async findById(_id: string | number): Promise { + return null; + }, + async findAll(): Promise { + return []; + }, + async findOne(): Promise { + return null; + }, + async updateById( + _id: string | number, + _update: Partial, + ): Promise { + return null; + }, + async deleteById(_id: string | number): Promise { + return true; + }, + async count(): Promise { + return 0; + }, + async exists(): Promise { + return false; + }, + async findPage(): Promise> { + return { data: [], page: 1, limit: 10, total: 0, pages: 0 }; + }, + async insertMany(): Promise { + return []; + }, + async updateMany(): Promise { + return 0; + }, + async deleteMany(): Promise { + return 0; + }, + async upsert( + _filter: Record, + data: Partial, + ): Promise { + return { id: 'test-id', ...data } as T; + }, + async distinct() { + return [] as any; + }, + async select() { + return [] as any; + }, + async transaction(callback: () => Promise): Promise { + return callback(); + }, + ...overrides, + } as any; +} + +/** + * Creates a mock health check result (success). + */ +export function createMockHealthCheckSuccess( + type: 'mongo' | 'postgres', +): HealthCheckResult { + return { + healthy: true, + responseTimeMs: 10, + type, + details: { version: '1.0.0' }, + }; +} + +/** + * Creates a mock health check result (failure). + */ +export function createMockHealthCheckError( + type: 'mongo' | 'postgres', + error = 'Connection failed', +): HealthCheckResult { + return { + healthy: false, + responseTimeMs: 50, + type, + error, + }; +} + +/** + * Creates a mock paginated result. + */ +export function createMockPageResult( + data: T[], + page = 1, + limit = 10, + total?: number, +): PageResult { + const totalItems = total ?? 
data.length; + const pages = Math.max(1, Math.ceil(totalItems / limit)); + return { data, page, limit, total: totalItems, pages }; +} + +/** + * Test data factories. + */ +export const testData = { + user: (overrides?: Partial) => ({ + id: 'user-1', + name: 'Test User', + email: 'test@example.com', + role: 'user', + createdAt: new Date(), + ...overrides, + }), + + users: (count = 3, overrides?: Partial) => + Array.from({ length: count }, (_, i) => ({ + id: `user-${i + 1}`, + name: `User ${i + 1}`, + email: `user${i + 1}@example.com`, + role: i === 0 ? 'admin' : 'user', + createdAt: new Date(), + ...overrides, + })), + + filter: (overrides?: Partial) => ({ + status: 'active', + ...overrides, + }), + + pageOptions: (overrides?: Partial) => ({ + page: 1, + limit: 10, + ...overrides, + }), +}; + +/** + * Common error test cases. + */ +export const errorTestCases = [ + { + name: 'should throw NotFoundException when record not found', + error: new NotFoundException('Record not found'), + expectedStatus: 404, + }, + { + name: 'should throw BadRequestException on invalid input', + error: new BadRequestException('Invalid input'), + expectedStatus: 400, + }, +]; + +/** + * Creates a mock ArgumentsHost for testing exception filters. + */ +export function createMockHost() { + const response = { + status: jest.fn().mockReturnThis(), + json: jest.fn(), + }; + const request = { url: '/test' }; + const host = { + switchToHttp: () => ({ + getResponse: () => response, + getRequest: () => request, + }), + } as any; + + return { host, response }; +} + +/** + * Creates a mock adapter with default health check and connection methods. 
+ */ +export function createMockAdapter(type: 'mongo' | 'postgres') { + return { + connect: jest.fn().mockResolvedValue(undefined), + disconnect: jest.fn().mockResolvedValue(undefined), + isConnected: jest.fn().mockReturnValue(true), + createRepository: jest.fn().mockReturnValue({ create: jest.fn() }), + withTransaction: jest.fn(async (cb: (ctx: unknown) => unknown) => cb({})), + healthCheck: jest.fn().mockResolvedValue({ + healthy: true, + responseTimeMs: type === 'mongo' ? 1 : 2, + type, + }), + }; +} + +/** + * Creates a mock Mongoose model for testing MongoDB adapter. + * Returns a chainable mock with all common query methods. + */ +export function createMockMongoModel(overrides?: Partial) { + const mockModel = { + create: jest.fn(), + findById: jest.fn().mockReturnThis(), + find: jest.fn().mockReturnThis(), + findOne: jest.fn().mockReturnThis(), + findByIdAndUpdate: jest.fn().mockReturnThis(), + findByIdAndDelete: jest.fn().mockReturnThis(), + findOneAndUpdate: jest.fn().mockReturnThis(), + distinct: jest.fn().mockReturnThis(), + countDocuments: jest.fn().mockReturnThis(), + exists: jest.fn(), + insertMany: jest.fn(), + updateMany: jest.fn().mockReturnThis(), + updateOne: jest.fn().mockReturnThis(), + deleteMany: jest.fn().mockReturnThis(), + lean: jest.fn().mockReturnThis(), + exec: jest.fn(), + skip: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + sort: jest.fn().mockReturnThis(), + ...overrides, + }; + return mockModel; +} + +/** + * Creates mock MongoDB documents with toObject() method. + * Useful for testing insertMany and find operations. + */ +export function createMockMongoDocs>( + data: T[], +): Array T }> { + return data.map((item) => ({ + ...item, + toObject: () => item, + })); +} + +/** + * Creates a mock Knex query builder for testing PostgreSQL adapter. + * Returns a chainable mock with all common query methods. 
+ */ +export function createMockQueryBuilder(overrides?: Partial) { + return { + select: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + whereIn: jest.fn().mockReturnThis(), + whereNull: jest.fn().mockReturnThis(), + first: jest.fn(), + returning: jest.fn().mockReturnThis(), + insert: jest.fn().mockReturnThis(), + update: jest.fn().mockReturnThis(), + delete: jest.fn().mockReturnThis(), + del: jest.fn().mockReturnThis(), + count: jest.fn().mockReturnThis(), + distinct: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + offset: jest.fn().mockReturnThis(), + modify: jest.fn().mockReturnThis(), + transacting: jest.fn().mockReturnThis(), + ...overrides, + }; +} + +/** + * Creates a mock Knex instance for testing PostgreSQL adapter. + * Returns a function that creates query builders when called with a table name. + */ +export function createMockKnex(queryBuilderOverrides?: Partial) { + const mockQb = createMockQueryBuilder(queryBuilderOverrides); + const mockKnex = jest.fn(() => mockQb) as any; + mockKnex.raw = jest.fn(); + mockKnex.transaction = jest.fn(); + return { mockKnex, mockQb }; +} + +/** + * Assertion helpers for common patterns. + */ +export const assertions = { + /** + * Assert a repository method throws the expected error. + */ + async throwsError( + fn: () => Promise, + expectedErrorClass: any, + expectedMessage?: string, + ): Promise { + try { + await fn(); + throw new Error('Expected error was not thrown'); + } catch (err) { + const error = err instanceof Error ? err : new Error(String(err)); + if (!(error instanceof expectedErrorClass)) { + throw error; + } + if (expectedMessage && !error.message.includes(expectedMessage)) { + throw new Error( + `Expected message "${expectedMessage}" not found in "${error.message}"`, + ); + } + } + }, + + /** + * Assert paginated result has correct structure. 
+ */ + isValidPageResult(_result: any): boolean { + const result = _result as PageResult; + return ( + typeof result === 'object' && + Array.isArray(result.data) && + typeof result.page === 'number' && + typeof result.limit === 'number' && + typeof result.total === 'number' && + typeof result.pages === 'number' + ); + }, + + /** + * Assert health check result has correct structure. + */ + isValidHealthCheck(result: any): boolean { + return ( + typeof result === 'object' && + typeof result.healthy === 'boolean' && + typeof result.responseTimeMs === 'number' && + ['mongo', 'postgres'].includes(result.type) + ); + }, +}; + +/** + * Test exception filter mappings. + * Reduces duplication in database-exception.filter.spec.ts. + */ +export function testExceptionMapping( + filter: any, + exception: any, + expectedStatus: number, + expectedError: string, +) { + const { host, response } = createMockHost(); + filter.catch(exception, host); + expect(response.status).toHaveBeenCalledWith(expectedStatus); + expect(response.json).toHaveBeenCalledWith( + expect.objectContaining({ + statusCode: expectedStatus, + error: expectedError, + }), + ); +} + +/** + * Shared test suite for DatabaseService basic functionality. + * Tests common behaviors across both MongoDB and PostgreSQL adapters. + * Call this from within a describe block where 'service' is in scope. + */ +export function testDatabaseServiceBasics( + type: 'mongo' | 'postgres', + getService: () => any, + AdapterClass: any, + oppositeMethods: { repo: string; transaction: string }, +) { + it('should be defined', () => { + expect(getService()).toBeDefined(); + }); + + it('should return correct database type', () => { + expect(getService().type).toBe(type); + }); + + it('should not be connected initially', () => { + expect(getService().isConnected()).toBe(false); + }); + + it(`should throw when creating ${oppositeMethods.repo} repository with ${type} config`, () => { + const methodName = + type === 'mongo' ? 
'createPostgresRepository' : 'createMongoRepository'; + const arg = type === 'mongo' ? { table: 'users' } : { model: {} }; + + expect(() => getService()[methodName](arg)).toThrow( + `Database type is "${type}"`, + ); + }); + + it(`should throw when using ${oppositeMethods.transaction} with ${type} config`, async () => { + const methodName = + type === 'mongo' ? 'withPostgresTransaction' : 'withMongoTransaction'; + + await expect(getService()[methodName](async () => 'test')).rejects.toThrow( + `Database type is "${type}"`, + ); + }); + + it(`should have ${type === 'mongo' ? 'withMongoTransaction' : 'withPostgresTransaction'} method`, () => { + const methodName = + type === 'mongo' ? 'withMongoTransaction' : 'withPostgresTransaction'; + expect(typeof getService()[methodName]).toBe('function'); + }); + + it('should have withTransaction method', () => { + expect(typeof getService().withTransaction).toBe('function'); + }); + + it(`should connect and initialize ${type} adapter`, async () => { + await getService().connect(); + + expect(AdapterClass).toHaveBeenCalledTimes(1); + const adapterInstance = (AdapterClass as jest.Mock).mock.results[0] + ?.value as { connect: jest.Mock }; + expect(adapterInstance.connect).toHaveBeenCalled(); + expect(getService().isConnected()).toBe(true); + }); + + it(`should create ${type} repository through adapter`, () => { + const methodName = + type === 'mongo' ? 'createMongoRepository' : 'createPostgresRepository'; + const arg = type === 'mongo' ? { model: {} } : { table: 'users' }; + + const repo = getService()[methodName](arg); + + expect(repo).toBeDefined(); + const adapterInstance = (AdapterClass as jest.Mock).mock.results[0] + ?.value as { createRepository: jest.Mock }; + expect(adapterInstance.createRepository).toHaveBeenCalledWith(arg); + }); + + it(`should run ${type} transaction via adapter`, async () => { + const methodName = + type === 'mongo' ? 
'withMongoTransaction' : 'withPostgresTransaction'; + + const result = await getService()[methodName](async () => 'ok'); + + expect(result).toBe('ok'); + const adapterInstance = (AdapterClass as jest.Mock).mock.results[0] + ?.value as { withTransaction: jest.Mock }; + expect(adapterInstance.withTransaction).toHaveBeenCalled(); + }); + + it(`should return health check from ${type} adapter`, async () => { + const result = await getService().healthCheck(); + + expect(result.healthy).toBe(true); + expect(result.type).toBe(type); + }); +} + +/** + * Test soft delete method availability given configuration. + */ +export function testSoftDeleteMethods(repo: any, shouldExist: boolean) { + const methods = [ + 'softDelete', + 'softDeleteMany', + 'restore', + 'restoreMany', + 'findAllWithDeleted', + 'findDeleted', + ]; + + methods.forEach((method) => { + if (shouldExist) { + expect(typeof repo[method]).toBe('function'); + } else { + expect(repo[method]).toBeUndefined(); + } + }); +} + +/** + * Assert timestamp field was added to the method call. + */ +export function expectTimestampAdded( + mockMethod: jest.Mock, + field: 'createdAt' | 'updatedAt', +) { + expect(mockMethod).toHaveBeenCalledWith( + expect.objectContaining({ + [field]: expect.any(Date), + }), + ); +} + +/** + * Assert timestamp field was NOT added to the method call. + */ +export function expectTimestampOmitted( + mockMethod: jest.Mock, + field: 'createdAt' | 'updatedAt', +) { + const calls = mockMethod.mock.calls; + calls.forEach((call) => { + expect(call[0]).not.toHaveProperty(field); + }); +} + +/** + * Test that a repository has all expected CRUD and bulk operation methods. 
+ */ +export function testRepositoryMethods(repo: any) { + expect(repo).toBeDefined(); + expect(typeof repo.create).toBe('function'); + expect(typeof repo.findById).toBe('function'); + expect(typeof repo.findAll).toBe('function'); + expect(typeof repo.findPage).toBe('function'); + expect(typeof repo.updateById).toBe('function'); + expect(typeof repo.deleteById).toBe('function'); + expect(typeof repo.count).toBe('function'); + expect(typeof repo.exists).toBe('function'); + // Bulk operations + expect(typeof repo.insertMany).toBe('function'); + expect(typeof repo.updateMany).toBe('function'); + expect(typeof repo.deleteMany).toBe('function'); +} diff --git a/src/utils/adapter.utils.ts b/src/utils/adapter.utils.ts new file mode 100644 index 0000000..643699c --- /dev/null +++ b/src/utils/adapter.utils.ts @@ -0,0 +1,124 @@ +import type { + PageResult, + HealthCheckResult, +} from '../contracts/database.contracts'; + +/** + * Shared adapter utilities to reduce code duplication. + * These functions are used by both MongoAdapter and PostgresAdapter. + */ + +/** + * Shapes paginated data into a consistent PageResult format. + * Used by both MongoDB and PostgreSQL adapters. + * + * @param data - Array of data items + * @param page - Current page number (1-indexed) + * @param limit - Items per page + * @param total - Total number of items + * @returns Formatted page result + */ +export function shapePage( + data: T[], + page: number, + limit: number, + total: number, +): PageResult { + const pages = Math.max(1, Math.ceil((total || 0) / (limit || 1))); + return { data, page, limit, total, pages }; +} + +/** + * Adds createdAt timestamp to data if timestamps are enabled. 
+ * + * @param data - Data object to add timestamp to + * @param enabled - Whether timestamps are enabled + * @param field - Name of the createdAt field + * @returns Data with timestamp added if enabled + */ +export function addCreatedAtTimestamp>( + data: T, + enabled: boolean, + field: string, +): T { + return addTimestamp(data, enabled, field); +} + +/** + * Adds updatedAt timestamp to data if timestamps are enabled. + * + * @param data - Data object to add timestamp to + * @param enabled - Whether timestamps are enabled + * @param field - Name of the updatedAt field + * @returns Data with timestamp added if enabled + */ +export function addUpdatedAtTimestamp>( + data: T, + enabled: boolean, + field: string, +): T { + return addTimestamp(data, enabled, field); +} + +/** + * Adds a timestamp to a specific field when enabled. + */ +function addTimestamp>( + data: T, + enabled: boolean, + field: string, +): T { + if (!enabled) { + return data; + } + + return { ...data, [field]: new Date() }; +} + +/** + * Creates a consistent error health check result. + * Used when health check fails or connection is not established. + * + * @param type - Database type ('mongo' | 'postgres') + * @param error - Error message + * @param startTime - Start time to calculate response time + * @returns Health check result indicating failure + */ +export function createErrorHealthResult( + type: 'mongo' | 'postgres', + error: string, + startTime: number, +): HealthCheckResult { + return { + healthy: false, + responseTimeMs: Date.now() - startTime, + type, + error, + }; +} + +/** + * Creates a successful health check result. 
+ * + * @param type - Database type ('mongo' | 'postgres') + * @param startTime - Start time to calculate response time + * @param details - Optional additional details + * @returns Health check result indicating success + */ +export function createSuccessHealthResult( + type: 'mongo' | 'postgres', + startTime: number, + details?: Record, +): HealthCheckResult { + const result: HealthCheckResult = { + healthy: true, + responseTimeMs: Date.now() - startTime, + type, + }; + + if (details) { + result.details = details; + } + + return result; +} diff --git a/src/utils/pagination.utils.spec.ts b/src/utils/pagination.utils.spec.ts index 00f682a..071d621 100644 --- a/src/utils/pagination.utils.spec.ts +++ b/src/utils/pagination.utils.spec.ts @@ -6,11 +6,11 @@ import { createPageResult, parseSortString, calculateOffset, -} from "./pagination.utils"; +} from './pagination.utils'; -describe("Pagination Utils", () => { - describe("normalizePaginationOptions", () => { - it("should return defaults when no options provided", () => { +describe('Pagination Utils', () => { + describe('normalizePaginationOptions', () => { + it('should return defaults when no options provided', () => { const result = normalizePaginationOptions(); expect(result.page).toBe(1); expect(result.limit).toBe(10); @@ -18,65 +18,65 @@ describe("Pagination Utils", () => { expect(result.sort).toEqual({}); }); - it("should normalize negative page to 1", () => { + it('should normalize negative page to 1', () => { const result = normalizePaginationOptions({ page: -5 }); expect(result.page).toBe(1); }); - it("should cap limit at max", () => { + it('should cap limit at max', () => { const result = normalizePaginationOptions({ limit: 1000 }); expect(result.limit).toBe(100); }); - it("should normalize zero limit to 1", () => { + it('should normalize zero limit to 1', () => { const result = normalizePaginationOptions({ limit: 0 }); expect(result.limit).toBe(1); }); - it("should preserve valid options", () => { + 
it('should preserve valid options', () => { const result = normalizePaginationOptions({ page: 5, limit: 25, - filter: { status: "active" }, + filter: { status: 'active' }, sort: { createdAt: -1 }, }); expect(result.page).toBe(5); expect(result.limit).toBe(25); - expect(result.filter).toEqual({ status: "active" }); + expect(result.filter).toEqual({ status: 'active' }); expect(result.sort).toEqual({ createdAt: -1 }); }); }); - describe("calculatePagination", () => { - it("should calculate correct pagination metadata", () => { + describe('calculatePagination', () => { + it('should calculate correct pagination metadata', () => { const result = calculatePagination(100, 3, 10); expect(result.pages).toBe(10); expect(result.hasNext).toBe(true); expect(result.hasPrev).toBe(true); }); - it("should handle first page", () => { + it('should handle first page', () => { const result = calculatePagination(50, 1, 10); expect(result.pages).toBe(5); expect(result.hasNext).toBe(true); expect(result.hasPrev).toBe(false); }); - it("should handle last page", () => { + it('should handle last page', () => { const result = calculatePagination(50, 5, 10); expect(result.pages).toBe(5); expect(result.hasNext).toBe(false); expect(result.hasPrev).toBe(true); }); - it("should handle single page", () => { + it('should handle single page', () => { const result = calculatePagination(5, 1, 10); expect(result.pages).toBe(1); expect(result.hasNext).toBe(false); expect(result.hasPrev).toBe(false); }); - it("should handle empty results", () => { + it('should handle empty results', () => { const result = calculatePagination(0, 1, 10); expect(result.pages).toBe(1); expect(result.hasNext).toBe(false); @@ -84,8 +84,8 @@ describe("Pagination Utils", () => { }); }); - describe("createPageResult", () => { - it("should create correct page result", () => { + describe('createPageResult', () => { + it('should create correct page result', () => { const data = [{ id: 1 }, { id: 2 }]; const result = 
createPageResult(data, 2, 10, 25); @@ -97,45 +97,45 @@ describe("Pagination Utils", () => { }); }); - describe("parseSortString", () => { - it("should parse descending fields with minus", () => { - const result = parseSortString("-createdAt"); - expect(result).toEqual({ createdAt: "desc" }); + describe('parseSortString', () => { + it('should parse descending fields with minus', () => { + const result = parseSortString('-createdAt'); + expect(result).toEqual({ createdAt: 'desc' }); }); - it("should parse ascending fields with plus", () => { - const result = parseSortString("+name"); - expect(result).toEqual({ name: "asc" }); + it('should parse ascending fields with plus', () => { + const result = parseSortString('+name'); + expect(result).toEqual({ name: 'asc' }); }); - it("should default to ascending without prefix", () => { - const result = parseSortString("email"); - expect(result).toEqual({ email: "asc" }); + it('should default to ascending without prefix', () => { + const result = parseSortString('email'); + expect(result).toEqual({ email: 'asc' }); }); - it("should parse multiple fields", () => { - const result = parseSortString("-createdAt,name,+updatedAt"); + it('should parse multiple fields', () => { + const result = parseSortString('-createdAt,name,+updatedAt'); expect(result).toEqual({ - createdAt: "desc", - name: "asc", - updatedAt: "asc", + createdAt: 'desc', + name: 'asc', + updatedAt: 'asc', }); }); - it("should handle empty string", () => { - const result = parseSortString(""); + it('should handle empty string', () => { + const result = parseSortString(''); expect(result).toEqual({}); }); }); - describe("calculateOffset", () => { - it("should calculate correct offset", () => { + describe('calculateOffset', () => { + it('should calculate correct offset', () => { expect(calculateOffset(1, 10)).toBe(0); expect(calculateOffset(2, 10)).toBe(10); expect(calculateOffset(3, 20)).toBe(40); }); - it("should handle page 0 or negative", () => { + it('should 
handle page 0 or negative', () => { expect(calculateOffset(0, 10)).toBe(0); expect(calculateOffset(-1, 10)).toBe(0); }); diff --git a/src/utils/pagination.utils.ts b/src/utils/pagination.utils.ts index be72df2..ef0652d 100644 --- a/src/utils/pagination.utils.ts +++ b/src/utils/pagination.utils.ts @@ -1,7 +1,7 @@ // src/utils/pagination.utils.ts -import type { PageOptions, PageResult } from "../contracts/database.contracts"; -import { DATABASE_KIT_CONSTANTS } from "../contracts/database.contracts"; +import type { PageOptions, PageResult } from '../contracts/database.contracts'; +import { DATABASE_KIT_CONSTANTS } from '../contracts/database.contracts'; /** * Utility functions for pagination operations. @@ -97,23 +97,23 @@ export function createPageResult( */ export function parseSortString( sortString: string, -): Record { - const result: Record = {}; +): Record { + const result: Record = {}; if (!sortString) return result; const fields = sortString - .split(",") + .split(',') .map((f) => f.trim()) .filter(Boolean); for (const field of fields) { - if (field.startsWith("-")) { - result[field.slice(1)] = "desc"; - } else if (field.startsWith("+")) { - result[field.slice(1)] = "asc"; + if (field.startsWith('-')) { + result[field.slice(1)] = 'desc'; + } else if (field.startsWith('+')) { + result[field.slice(1)] = 'asc'; } else { - result[field] = "asc"; + result[field] = 'asc'; } } diff --git a/src/utils/validation.utils.spec.ts b/src/utils/validation.utils.spec.ts index 92c6a2c..045ae9e 100644 --- a/src/utils/validation.utils.spec.ts +++ b/src/utils/validation.utils.spec.ts @@ -8,118 +8,118 @@ import { validateRequiredFields, pickFields, omitFields, -} from "./validation.utils"; +} from './validation.utils'; -describe("Validation Utils", () => { - describe("isValidMongoId", () => { - it("should return true for valid MongoDB ObjectId", () => { - expect(isValidMongoId("507f1f77bcf86cd799439011")).toBe(true); - expect(isValidMongoId("000000000000000000000000")).toBe(true); 
- expect(isValidMongoId("ffffffffffffffffffffffff")).toBe(true); +describe('Validation Utils', () => { + describe('isValidMongoId', () => { + it('should return true for valid MongoDB ObjectId', () => { + expect(isValidMongoId('507f1f77bcf86cd799439011')).toBe(true); + expect(isValidMongoId('000000000000000000000000')).toBe(true); + expect(isValidMongoId('ffffffffffffffffffffffff')).toBe(true); }); - it("should return false for invalid MongoDB ObjectId", () => { - expect(isValidMongoId("")).toBe(false); - expect(isValidMongoId("invalid")).toBe(false); - expect(isValidMongoId("507f1f77bcf86cd79943901")).toBe(false); // 23 chars - expect(isValidMongoId("507f1f77bcf86cd7994390111")).toBe(false); // 25 chars - expect(isValidMongoId("507f1f77bcf86cd79943901g")).toBe(false); // invalid char + it('should return false for invalid MongoDB ObjectId', () => { + expect(isValidMongoId('')).toBe(false); + expect(isValidMongoId('invalid')).toBe(false); + expect(isValidMongoId('507f1f77bcf86cd79943901')).toBe(false); // 23 chars + expect(isValidMongoId('507f1f77bcf86cd7994390111')).toBe(false); // 25 chars + expect(isValidMongoId('507f1f77bcf86cd79943901g')).toBe(false); // invalid char }); - it("should return false for null/undefined", () => { + it('should return false for null/undefined', () => { expect(isValidMongoId(null as unknown as string)).toBe(false); expect(isValidMongoId(undefined as unknown as string)).toBe(false); }); }); - describe("isValidUuid", () => { - it("should return true for valid UUID v4", () => { - expect(isValidUuid("550e8400-e29b-41d4-a716-446655440000")).toBe(true); - expect(isValidUuid("6ba7b810-9dad-41d4-80b4-00c04fd430c8")).toBe(true); + describe('isValidUuid', () => { + it('should return true for valid UUID v4', () => { + expect(isValidUuid('550e8400-e29b-41d4-a716-446655440000')).toBe(true); + expect(isValidUuid('6ba7b810-9dad-41d4-80b4-00c04fd430c8')).toBe(true); }); - it("should return false for invalid UUID", () => { - 
expect(isValidUuid("")).toBe(false); - expect(isValidUuid("invalid")).toBe(false); - expect(isValidUuid("550e8400-e29b-11d4-a716-446655440000")).toBe(false); // v1 + it('should return false for invalid UUID', () => { + expect(isValidUuid('')).toBe(false); + expect(isValidUuid('invalid')).toBe(false); + expect(isValidUuid('550e8400-e29b-11d4-a716-446655440000')).toBe(false); // v1 }); }); - describe("isPositiveInteger", () => { - it("should return true for positive integers", () => { + describe('isPositiveInteger', () => { + it('should return true for positive integers', () => { expect(isPositiveInteger(1)).toBe(true); expect(isPositiveInteger(100)).toBe(true); - expect(isPositiveInteger("5")).toBe(true); + expect(isPositiveInteger('5')).toBe(true); }); - it("should return false for non-positive integers", () => { + it('should return false for non-positive integers', () => { expect(isPositiveInteger(0)).toBe(false); expect(isPositiveInteger(-1)).toBe(false); expect(isPositiveInteger(1.5)).toBe(false); - expect(isPositiveInteger("1.5")).toBe(false); - expect(isPositiveInteger("abc")).toBe(false); + expect(isPositiveInteger('1.5')).toBe(false); + expect(isPositiveInteger('abc')).toBe(false); }); }); - describe("sanitizeFilter", () => { - it("should remove undefined and null values", () => { - const filter = { a: 1, b: undefined, c: null, d: "test" }; + describe('sanitizeFilter', () => { + it('should remove undefined and null values', () => { + const filter = { a: 1, b: undefined, c: null, d: 'test' }; const result = sanitizeFilter(filter); - expect(result).toEqual({ a: 1, d: "test" }); + expect(result).toEqual({ a: 1, d: 'test' }); }); - it("should keep falsy values that are not null/undefined", () => { - const filter = { a: 0, b: "", c: false }; + it('should keep falsy values that are not null/undefined', () => { + const filter = { a: 0, b: '', c: false }; const result = sanitizeFilter(filter); - expect(result).toEqual({ a: 0, b: "", c: false }); + 
expect(result).toEqual({ a: 0, b: '', c: false }); }); }); - describe("validateRequiredFields", () => { - it("should return valid when all required fields are present", () => { - const obj = { name: "John", email: "john@example.com" }; - const result = validateRequiredFields(obj, ["name", "email"]); + describe('validateRequiredFields', () => { + it('should return valid when all required fields are present', () => { + const obj = { name: 'John', email: 'john@example.com' }; + const result = validateRequiredFields(obj, ['name', 'email']); expect(result.isValid).toBe(true); expect(result.missing).toEqual([]); }); - it("should return invalid with missing fields", () => { - const obj = { name: "John" }; - const result = validateRequiredFields(obj, ["name", "email", "age"]); + it('should return invalid with missing fields', () => { + const obj = { name: 'John' }; + const result = validateRequiredFields(obj, ['name', 'email', 'age']); expect(result.isValid).toBe(false); - expect(result.missing).toEqual(["email", "age"]); + expect(result.missing).toEqual(['email', 'age']); }); - it("should treat empty strings as missing", () => { - const obj = { name: "" }; - const result = validateRequiredFields(obj, ["name"]); + it('should treat empty strings as missing', () => { + const obj = { name: '' }; + const result = validateRequiredFields(obj, ['name']); expect(result.isValid).toBe(false); - expect(result.missing).toEqual(["name"]); + expect(result.missing).toEqual(['name']); }); }); - describe("pickFields", () => { - it("should pick only allowed fields", () => { + describe('pickFields', () => { + it('should pick only allowed fields', () => { const obj = { a: 1, b: 2, c: 3 }; - const result = pickFields(obj, ["a", "c"]); + const result = pickFields(obj, ['a', 'c']); expect(result).toEqual({ a: 1, c: 3 }); }); - it("should ignore non-existent fields", () => { + it('should ignore non-existent fields', () => { const obj = { a: 1 }; - const result = pickFields(obj, ["a", "b"]); + 
const result = pickFields(obj, ['a', 'b']); expect(result).toEqual({ a: 1 }); }); }); - describe("omitFields", () => { - it("should omit specified fields", () => { + describe('omitFields', () => { + it('should omit specified fields', () => { const obj = { a: 1, b: 2, c: 3 }; - const result = omitFields(obj, ["b"]); + const result = omitFields(obj, ['b']); expect(result).toEqual({ a: 1, c: 3 }); }); - it("should return same object if no fields to omit", () => { + it('should return same object if no fields to omit', () => { const obj = { a: 1, b: 2 }; const result = omitFields(obj, []); expect(result).toEqual({ a: 1, b: 2 }); diff --git a/src/utils/validation.utils.ts b/src/utils/validation.utils.ts index aceae93..60e4fe2 100644 --- a/src/utils/validation.utils.ts +++ b/src/utils/validation.utils.ts @@ -11,7 +11,7 @@ * @returns True if valid ObjectId format */ export function isValidMongoId(id: string): boolean { - if (!id || typeof id !== "string") return false; + if (!id || typeof id !== 'string') return false; return /^[a-f\d]{24}$/i.test(id); } @@ -22,7 +22,7 @@ export function isValidMongoId(id: string): boolean { * @returns True if valid UUID format */ export function isValidUuid(id: string): boolean { - if (!id || typeof id !== "string") return false; + if (!id || typeof id !== 'string') return false; return /^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test( id, ); @@ -35,10 +35,10 @@ export function isValidUuid(id: string): boolean { * @returns True if positive integer */ export function isPositiveInteger(value: unknown): boolean { - if (typeof value === "number") { + if (typeof value === 'number') { return Number.isInteger(value) && value > 0; } - if (typeof value === "string") { + if (typeof value === 'string') { const parsed = parseInt(value, 10); return !isNaN(parsed) && parsed > 0 && String(parsed) === value; } @@ -79,7 +79,7 @@ export function validateRequiredFields( const missing: string[] = []; for (const field of 
requiredFields) { - if (obj[field] === undefined || obj[field] === null || obj[field] === "") { + if (obj[field] === undefined || obj[field] === null || obj[field] === '') { missing.push(field); } }