Testing

Learn about testing practices and guidelines for ao-forge.

Overview

Testing is a critical part of the ao-forge development process. This guide covers testing strategies, best practices, and tools used in the project.

Testing Philosophy

Testing Principles

  1. Test Early and Often - Write tests as you develop
  2. Test Behavior, Not Implementation - Focus on what the code does
  3. Keep Tests Simple - Tests should be easy to understand
  4. Make Tests Reliable - Tests should be deterministic
  5. Test Edge Cases - Cover error conditions and boundary cases

Testing Pyramid

        ┌─────────────────┐
        │   E2E Tests     │  ← Few, slow, expensive
        │  (Full system)  │
        ├─────────────────┤
        │  Integration    │  ← Some, medium speed
        │     Tests       │
        ├─────────────────┤
        │   Unit Tests    │  ← Many, fast, cheap
        │                 │
        └─────────────────┘

Testing Tools

Test Framework

  • Jest - JavaScript testing framework
  • Supertest - HTTP assertion library
  • @testing-library/jest-dom - Custom Jest matchers

Test Utilities

  • ts-jest - TypeScript preprocessor for Jest
  • jest-environment-node - Node.js environment for Jest
  • jest-mock-extended - Extended mocking utilities

Configuration

// jest.config.js
module.exports = {
  preset: 'ts-jest',            // compile TypeScript tests via ts-jest
  testEnvironment: 'node',      // CLI code runs under Node, not a browser DOM
  roots: ['<rootDir>/src', '<rootDir>/tests'],
  // Pick up __tests__ directories plus any *.spec.ts / *.test.ts file
  testMatch: ['**/__tests__/**/*.ts', '**/?(*.)+(spec|test).ts'],
  collectCoverageFrom: [
    'src/**/*.ts',
    '!src/**/*.d.ts',       // type declarations carry no executable code
    '!src/**/*.test.ts',    // exclude the tests themselves from coverage
    '!src/**/*.spec.ts'
  ],
  coverageDirectory: 'coverage',
  coverageReporters: ['text', 'lcov', 'html']   // terminal, CI upload, and browsable report
}

Unit Testing

Test Structure

// tests/managers/project-manager.test.ts
import { ProjectManager } from '../../src/managers/project-manager'
import { Logger } from '../../src/utils/logger'
import { FileSystem } from '../../src/utils/file-system'

describe('ProjectManager', () => {
  let projectManager: ProjectManager
  let mockLogger: jest.Mocked<Logger>
  let mockFileSystem: jest.Mocked<FileSystem>

  // Build one options object per test; only the name varies between cases.
  const makeOptions = (name: string) => ({
    name,
    framework: 'nextjs' as const,
    packageManager: 'pnpm' as const
  })

  beforeEach(() => {
    // Fresh mocks for every test keep the cases fully independent.
    mockLogger = createMockLogger()
    mockFileSystem = createMockFileSystem()
    projectManager = new ProjectManager(mockLogger, mockFileSystem)
  })

  describe('createProject', () => {
    it('should create a new project successfully', async () => {
      // No pre-existing directory; every file-system write succeeds.
      mockFileSystem.exists.mockResolvedValue(false)
      mockFileSystem.mkdir.mockResolvedValue()
      mockFileSystem.writeFile.mockResolvedValue()

      await projectManager.createProject(makeOptions('test-project'))

      // Both the project directory and its package.json must be written.
      expect(mockFileSystem.mkdir).toHaveBeenCalledWith('test-project')
      expect(mockFileSystem.writeFile).toHaveBeenCalledWith(
        'test-project/package.json',
        expect.stringContaining('"name": "test-project"')
      )
    })

    it('should throw error if project already exists', async () => {
      // Simulate a name collision on disk.
      mockFileSystem.exists.mockResolvedValue(true)

      await expect(
        projectManager.createProject(makeOptions('existing-project'))
      ).rejects.toThrow('Project already exists')
    })
  })
})

Mocking

// tests/mocks/logger.mock.ts
export const createMockLogger = (): jest.Mocked<Logger> => ({
  info: jest.fn(),
  warn: jest.fn(),
  error: jest.fn(),
  debug: jest.fn()
})

// tests/mocks/file-system.mock.ts
export const createMockFileSystem = (): jest.Mocked<FileSystem> => ({
  readFile: jest.fn(),
  writeFile: jest.fn(),
  exists: jest.fn(),
  mkdir: jest.fn(),
  rm: jest.fn()
})

Test Utilities

// tests/utils/test-helpers.ts
import { mkdtemp, rm } from 'fs/promises'
import { tmpdir } from 'os'
import { join } from 'path'

/** Create a unique scratch directory under the OS temp root and return its path. */
export async function createTempDir(): Promise<string> {
  return mkdtemp(join(tmpdir(), 'ao-forge-test-'))
}

/** Recursively delete a scratch directory; `force` makes a missing dir a no-op. */
export async function cleanupTempDir(dir: string): Promise<void> {
  await rm(dir, { force: true, recursive: true })
}

// NOTE(review): `ProjectConfig` is never imported in this snippet — the real
// tests/utils/test-helpers.ts needs an import for it; confirm the module path.
/** Return a minimal but realistic project config for use as a test fixture. */
export function createTestConfig(): ProjectConfig {
  return {
    name: 'test-project',
    framework: 'nextjs',
    packageManager: 'pnpm',
    luaFiles: ['counter.lua'],
    autoStart: false,   // keep tests from spawning background processes
    ports: {
      dev: 3000,
      ao: 8080,
      monitoring: 9090
    }
  }
}

Integration Testing

CLI Testing

// tests/cli/init.test.ts
import { execSync } from 'child_process'
import { createTempDir, cleanupTempDir } from '../utils/test-helpers'

describe('Init Command', () => {
  let tempDir: string

  // Every test gets its own scratch directory, removed afterwards.
  beforeEach(async () => {
    tempDir = await createTempDir()
  })

  afterEach(async () => {
    await cleanupTempDir(tempDir)
  })

  it('should create a new project', () => {
    // Run the built CLI inside the scratch directory and inspect its stdout.
    const output = execSync('node dist/cli.js init test-project', {
      cwd: tempDir,
      encoding: 'utf8'
    })

    expect(output).toContain('Project created successfully')
  })

  it('should handle invalid framework', () => {
    // execSync throws on a non-zero exit code, which is what we expect here.
    const run = () =>
      execSync('node dist/cli.js init test-project --framework invalid', {
        cwd: tempDir,
        encoding: 'utf8'
      })

    expect(run).toThrow()
  })
})

Manager Integration

// tests/integration/project-manager.test.ts
import { ProjectManager } from '../../src/managers/project-manager'
import { Logger } from '../../src/utils/logger'
import { FileSystem } from '../../src/utils/file-system'
import { createTempDir, cleanupTempDir } from '../utils/test-helpers'

describe('ProjectManager Integration', () => {
  let projectManager: ProjectManager
  let tempDir: string
  
  beforeEach(async () => {
    tempDir = await createTempDir()
    const logger = new Logger()
    const fileSystem = new FileSystem()
    projectManager = new ProjectManager(logger, fileSystem)
  })
  
  afterEach(async () => {
    await cleanupTempDir(tempDir)
  })
  
  it('should create complete project structure', async () => {
    // Arrange
    const options = {
      name: 'integration-test',
      framework: 'nextjs' as const,
      packageManager: 'pnpm' as const
    }
    
    // Act
    await projectManager.createProject(options)
    
    // Assert
    const projectPath = join(tempDir, 'integration-test')
    expect(await fileSystem.exists(join(projectPath, 'package.json'))).toBe(true)
    expect(await fileSystem.exists(join(projectPath, 'ao.config.yml'))).toBe(true)
    expect(await fileSystem.exists(join(projectPath, 'src'))).toBe(true)
  })
})

End-to-End Testing

E2E Test Structure

// tests/e2e/init.test.ts
import { execSync } from 'child_process'
import { createTempDir, cleanupTempDir } from '../utils/test-helpers'

describe('Init Command E2E', () => {
  let tempDir: string
  
  beforeEach(async () => {
    tempDir = await createTempDir()
  })
  
  afterEach(async () => {
    await cleanupTempDir(tempDir)
  })
  
  it('should create and build a complete project', () => {
    // Create project
    execSync('node dist/cli.js init e2e-test --framework nextjs', {
      cwd: tempDir,
      encoding: 'utf8'
    })
    
    // Build project
    const buildResult = execSync('node dist/cli.js build', {
      cwd: join(tempDir, 'e2e-test'),
      encoding: 'utf8'
    })
    
    expect(buildResult).toContain('Build completed successfully')
  })
})

Testing Best Practices

Test Organization

// Group related tests: one outer describe per unit, one nested describe per method
describe('ProjectManager', () => {
  describe('createProject', () => {
    it('should create project successfully', () => {})
    it('should handle existing project', () => {})
    it('should validate options', () => {})
  })
  
  describe('validateProject', () => {
    it('should validate valid project', () => {})
    it('should reject invalid project', () => {})
  })
})

Test Naming

// Use descriptive names of the form "should <expected behavior> when/with <condition>"
it('should create a new project with valid options', () => {})
it('should throw error when project already exists', () => {})
it('should validate framework option', () => {})
it('should handle network errors gracefully', () => {})

Test Data

// Use test data builders: fluent setters over sensible defaults
class ProjectOptionsBuilder {
  // Only the fields a test explicitly sets; everything else falls back to defaults.
  private readonly overrides: Partial<CreateProjectOptions> = {}

  withName(name: string): this {
    this.overrides.name = name
    return this
  }

  withFramework(framework: Framework): this {
    this.overrides.framework = framework
    return this
  }

  build(): CreateProjectOptions {
    // Defaults first; explicit overrides win.
    return {
      name: 'default-project',
      framework: 'nextjs',
      packageManager: 'pnpm',
      ...this.overrides
    }
  }
}

// Use in tests — override only the fields the test actually cares about
const options = new ProjectOptionsBuilder()
  .withName('test-project')
  .withFramework('nuxtjs')
  .build()

Assertions

// Use specific assertions — prefer exact matchers over bare truthiness checks
expect(result).toBeDefined()
expect(result.name).toBe('test-project')
expect(result.framework).toBe('nextjs')
expect(mockLogger.info).toHaveBeenCalledWith('Project created', { name: 'test-project' })

// Use toMatchObject for partial structural checks (built-in Jest matcher)
expect(result).toMatchObject({
  name: 'test-project',
  framework: 'nextjs'
})

Coverage

Coverage Goals

  • Statements: 90%+
  • Branches: 85%+
  • Functions: 90%+
  • Lines: 90%+

Coverage Configuration

// jest.config.js
module.exports = {
  // Measure coverage over all source files, not just the ones tests import.
  collectCoverageFrom: [
    'src/**/*.ts',
    '!src/**/*.d.ts',       // type declarations carry no executable code
    '!src/**/*.test.ts',    // exclude the tests themselves
    '!src/**/*.spec.ts'
  ],
  // Fail the run if global coverage drops below these percentages.
  coverageThreshold: {
    global: {
      branches: 85,
      functions: 90,
      lines: 90,
      statements: 90
    }
  }
}

Coverage Reports

# Generate coverage report
npm run test:coverage

# View coverage report (macOS; use xdg-open on Linux or start on Windows)
open coverage/lcov-report/index.html

Continuous Integration

GitHub Actions

# .github/workflows/test.yml
name: Test

# Run on pushes to the main branches and on PRs targeting main.
on:
  push:
    branches: [main, develop]
  pull_request:
    branches: [main]

jobs:
  test:
    runs-on: ubuntu-latest
    
    steps:
      - uses: actions/checkout@v3
      
      - uses: actions/setup-node@v3
        with:
          node-version: '18'
          cache: 'npm'   # cache the npm download cache between runs
      
      - run: npm ci      # clean, lockfile-exact install
      
      - run: npm run test:coverage
      
      # Upload the lcov report produced by the coverage run above.
      - uses: codecov/codecov-action@v3
        with:
          file: ./coverage/lcov.info

Debugging Tests

Debug Configuration

{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Debug Tests",
      "type": "node",
      "request": "launch",
      "program": "${workspaceFolder}/node_modules/.bin/jest",
      "args": ["--runInBand", "--no-cache"],
      "console": "integratedTerminal",
      "internalConsoleOptions": "neverOpen"
    }
  ]
}

Test Debugging

# Run specific test with debug output
npm test -- --testNamePattern="should create project" --verbose

# Run tests in watch mode
npm run test:watch

# Run tests with coverage
npm run test:coverage

Performance Testing

Performance Tests

// tests/performance/project-creation.test.ts
// NOTE(review): `projectManager` is never set up in this snippet — assumes a
// beforeEach like the unit-test example constructs it; confirm before copying.
describe('Project Creation Performance', () => {
  it('should create project within acceptable time', async () => {
    const startTime = Date.now()
    
    await projectManager.createProject({
      name: 'perf-test',
      framework: 'nextjs',
      packageManager: 'pnpm'
    })
    
    // Wall-clock budget; generous enough to avoid flakiness on slow CI runners.
    const duration = Date.now() - startTime
    expect(duration).toBeLessThan(5000) // 5 seconds
  })
})

Best Practices

General Practices

  1. Write tests first - Use TDD when possible
  2. Keep tests simple - One logical behavior per test
  3. Use descriptive names - Test names should explain what they test
  4. Test edge cases - Cover error conditions and boundary cases
  5. Keep tests independent - Tests should not depend on each other

Mocking Practices

  1. Mock external dependencies - Don't test third-party code
  2. Use realistic mocks - Mocks should behave like real objects
  3. Verify interactions - Check that mocks are called correctly
  4. Reset mocks - Clear mocks between tests
  5. Use partial mocks - Mock only what you need

Test Data

  1. Use builders - Create test data with builders
  2. Use factories - Generate test data with factories
  3. Keep data minimal - Only include necessary data
  4. Use realistic data - Test data should be realistic
  5. Avoid magic numbers - Use named constants

Next Steps