From 83e0c3e45ac31e39df88e7c8fefad971bd98050b Mon Sep 17 00:00:00 2001 From: William Phetsinorath Date: Fri, 13 Mar 2026 17:22:32 +0100 Subject: [PATCH] refactor(argocd): migrate ArgoCD to NestJS Signed-off-by: William Phetsinorath --- apps/server-nestjs/package.json | 1 + .../configuration/configuration.service.ts | 50 +- .../argocd/argocd-controller.service.spec.ts | 196 +++++++ .../argocd/argocd-controller.service.ts | 146 +++++ .../argocd/argocd-datastore.service.ts | 63 ++ .../src/modules/argocd/argocd.module.ts | 14 + .../src/modules/argocd/argocd.utils.ts | 236 ++++++++ .../src/modules/gitlab/files/.gitlab-ci.yml | 22 + .../src/modules/gitlab/files/mirror.sh | 83 +++ .../modules/gitlab/gitlab-client.service.ts | 15 + .../gitlab/gitlab-controller.service.spec.ts | 279 +++++++++ .../gitlab/gitlab-controller.service.ts | 344 +++++++++++ .../gitlab/gitlab-datastore.service.spec.ts | 36 ++ .../gitlab/gitlab-datastore.service.ts | 103 ++++ .../modules/gitlab/gitlab-testing.utils.ts | 335 +++++++++++ .../src/modules/gitlab/gitlab.constants.ts | 10 + .../src/modules/gitlab/gitlab.module.ts | 15 + .../src/modules/gitlab/gitlab.service.spec.ts | 538 ++++++++++++++++++ .../src/modules/gitlab/gitlab.service.ts | 314 ++++++++++ .../src/modules/gitlab/gitlab.utils.ts | 144 +++++ .../src/modules/vault/vault-client.service.ts | 305 ++++++++++ .../vault/vault-controller.service.spec.ts | 72 +++ .../modules/vault/vault-controller.service.ts | 222 ++++++++ .../modules/vault/vault-datastore.service.ts | 58 ++ .../src/modules/vault/vault.module.ts | 14 + .../src/modules/vault/vault.service.spec.ts | 121 ++++ .../src/modules/vault/vault.service.ts | 476 ++++++++++++++++ .../src/modules/vault/vault.utils.ts | 17 + .../src/prisma/schema/project.prisma | 1 + apps/server-nestjs/test/gitlab.e2e-spec.ts | 231 ++++++++ apps/server-nestjs/test/vault.e2e-spec.ts | 105 ++++ pnpm-lock.yaml | 3 + 32 files changed, 4568 insertions(+), 1 deletion(-) create mode 100644 
apps/server-nestjs/src/modules/argocd/argocd-controller.service.spec.ts create mode 100644 apps/server-nestjs/src/modules/argocd/argocd-controller.service.ts create mode 100644 apps/server-nestjs/src/modules/argocd/argocd-datastore.service.ts create mode 100644 apps/server-nestjs/src/modules/argocd/argocd.module.ts create mode 100644 apps/server-nestjs/src/modules/argocd/argocd.utils.ts create mode 100644 apps/server-nestjs/src/modules/gitlab/files/.gitlab-ci.yml create mode 100644 apps/server-nestjs/src/modules/gitlab/files/mirror.sh create mode 100644 apps/server-nestjs/src/modules/gitlab/gitlab-client.service.ts create mode 100644 apps/server-nestjs/src/modules/gitlab/gitlab-controller.service.spec.ts create mode 100644 apps/server-nestjs/src/modules/gitlab/gitlab-controller.service.ts create mode 100644 apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.spec.ts create mode 100644 apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.ts create mode 100644 apps/server-nestjs/src/modules/gitlab/gitlab-testing.utils.ts create mode 100644 apps/server-nestjs/src/modules/gitlab/gitlab.constants.ts create mode 100644 apps/server-nestjs/src/modules/gitlab/gitlab.module.ts create mode 100644 apps/server-nestjs/src/modules/gitlab/gitlab.service.spec.ts create mode 100644 apps/server-nestjs/src/modules/gitlab/gitlab.service.ts create mode 100644 apps/server-nestjs/src/modules/gitlab/gitlab.utils.ts create mode 100644 apps/server-nestjs/src/modules/vault/vault-client.service.ts create mode 100644 apps/server-nestjs/src/modules/vault/vault-controller.service.spec.ts create mode 100644 apps/server-nestjs/src/modules/vault/vault-controller.service.ts create mode 100644 apps/server-nestjs/src/modules/vault/vault-datastore.service.ts create mode 100644 apps/server-nestjs/src/modules/vault/vault.module.ts create mode 100644 apps/server-nestjs/src/modules/vault/vault.service.spec.ts create mode 100644 apps/server-nestjs/src/modules/vault/vault.service.ts 
create mode 100644 apps/server-nestjs/src/modules/vault/vault.utils.ts create mode 100644 apps/server-nestjs/test/gitlab.e2e-spec.ts create mode 100644 apps/server-nestjs/test/vault.e2e-spec.ts diff --git a/apps/server-nestjs/package.json b/apps/server-nestjs/package.json index 1f84babde..b30254b95 100644 --- a/apps/server-nestjs/package.json +++ b/apps/server-nestjs/package.json @@ -44,6 +44,7 @@ "@fastify/swagger": "^8.15.0", "@fastify/swagger-ui": "^4.2.0", "@gitbeaker/core": "^40.6.0", + "@gitbeaker/requester-utils": "^40.6.0", "@gitbeaker/rest": "^40.6.0", "@keycloak/keycloak-admin-client": "^24.0.0", "@kubernetes-models/argo-cd": "^2.7.2", diff --git a/apps/server-nestjs/src/cpin-module/infrastructure/configuration/configuration.service.ts b/apps/server-nestjs/src/cpin-module/infrastructure/configuration/configuration.service.ts index f049b3764..750f450d7 100644 --- a/apps/server-nestjs/src/cpin-module/infrastructure/configuration/configuration.service.ts +++ b/apps/server-nestjs/src/cpin-module/infrastructure/configuration/configuration.service.ts @@ -36,10 +36,58 @@ export class ConfigurationService { = process.env.CONTACT_EMAIL ?? 'cloudpinative-relations@interieur.gouv.fr' + // argocd + argoNamespace = process.env.ARGO_NAMESPACE ?? 'argocd' + argocdUrl = process.env.ARGOCD_URL + argocdExtraRepositories = process.env.ARGOCD_EXTRA_REPOSITORIES + + // dso + dsoEnvChartVersion = process.env.DSO_ENV_CHART_VERSION ?? 'dso-env-1.6.0' + dsoNsChartVersion = process.env.DSO_NS_CHART_VERSION ?? 'dso-ns-1.1.5' + // plugins mockPlugins = process.env.MOCK_PLUGINS === 'true' - projectRootDir = process.env.PROJECTS_ROOT_DIR + projectRootPath = process.env.PROJECTS_ROOT_DIR pluginsDir = process.env.PLUGINS_DIR ?? '/plugins' + + // gitlab + gitlabToken = process.env.GITLAB_TOKEN + gitlabUrl = process.env.GITLAB_URL + gitlabInternalUrl = process.env.GITLAB_INTERNAL_URL + ? 
process.env.GITLAB_INTERNAL_URL + : process.env.GITLAB_URL + + gitlabMirrorTokenExpirationDays = Number(process.env.GITLAB_MIRROR_TOKEN_EXPIRATION_DAYS ?? 180) + gitlabMirrorTokenRotationThresholdDays = Number(process.env.GITLAB_MIRROR_TOKEN_ROTATION_THRESHOLD_DAYS ?? 90) + + // vault + vaultToken = process.env.VAULT_TOKEN + vaultUrl = process.env.VAULT_URL + vaultInternalUrl = process.env.VAULT_INTERNAL_URL + ? process.env.VAULT_INTERNAL_URL + : process.env.VAULT_URL + + vaultKvName = process.env.VAULT_KV_NAME ?? 'forge-dso' + + // registry (harbor) + harborUrl = process.env.HARBOR_URL + harborInternalUrl = process.env.HARBOR_INTERNAL_URL ?? process.env.HARBOR_URL + harborAdmin = process.env.HARBOR_ADMIN + harborAdminPassword = process.env.HARBOR_ADMIN_PASSWORD + harborRuleTemplate = process.env.HARBOR_RULE_TEMPLATE + harborRuleCount = process.env.HARBOR_RULE_COUNT + harborRetentionCron = process.env.HARBOR_RETENTION_CRON + + // nexus + nexusUrl = process.env.NEXUS_URL + nexusInternalUrl = process.env.NEXUS_INTERNAL_URL ?? process.env.NEXUS_URL + nexusAdmin = process.env.NEXUS_ADMIN + nexusAdminPassword = process.env.NEXUS_ADMIN_PASSWORD + nexusSecretExposedUrl + = process.env.NEXUS__SECRET_EXPOSE_INTERNAL_URL === 'true' + ? (process.env.NEXUS_INTERNAL_URL ?? process.env.NEXUS_URL) + : process.env.NEXUS_URL + NODE_ENV = process.env.NODE_ENV === 'test' ? 
'test' diff --git a/apps/server-nestjs/src/modules/argocd/argocd-controller.service.spec.ts b/apps/server-nestjs/src/modules/argocd/argocd-controller.service.spec.ts new file mode 100644 index 000000000..09301cbc4 --- /dev/null +++ b/apps/server-nestjs/src/modules/argocd/argocd-controller.service.spec.ts @@ -0,0 +1,196 @@ +import { Test } from '@nestjs/testing' +import type { TestingModule } from '@nestjs/testing' +import { describe, it, expect, beforeEach, vi } from 'vitest' +import type { Mocked } from 'vitest' +import { stringify } from 'yaml' +import { ArgoCDControllerService } from './argocd-controller.service' +import { ArgoCDDatastoreService } from './argocd-datastore.service' +import type { ProjectWithDetails } from './argocd-datastore.service' +import { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import { GitlabService } from '../gitlab/gitlab.service' +import { VaultService } from '../vault/vault.service' +import { generateNamespaceName } from '@cpn-console/shared' +import { makeProjectSchema } from '../gitlab/gitlab-testing.utils' + +function createArgoCDControllerServiceTestingModule() { + return Test.createTestingModule({ + providers: [ + ArgoCDControllerService, + { + provide: ArgoCDDatastoreService, + useValue: { + getAllProjects: vi.fn(), + } satisfies Partial, + }, + { + provide: ConfigurationService, + useValue: { + argoNamespace: 'argocd', + argocdUrl: 'https://argocd.internal', + argocdExtraRepositories: 'repo3', + dsoEnvChartVersion: 'dso-env-1.6.0', + dsoNsChartVersion: 'dso-ns-1.1.5', + } satisfies Partial, + }, + { + provide: GitlabService, + useValue: { + getOrCreateInfraGroupRepo: vi.fn(), + getProjectGroupPublicUrl: vi.fn(), + getInfraGroupRepoPublicUrl: vi.fn(), + maybeCommitUpdate: vi.fn(), + maybeCommitDelete: vi.fn(), + listFiles: vi.fn(), + } satisfies Partial, + }, + { + provide: VaultService, + useValue: { + readProjectValues: vi.fn(), + } satisfies Partial, + }, + ], + }) +} + 
+describe('argoCDControllerService', () => { + let service: ArgoCDControllerService + let datastore: Mocked + let gitlab: Mocked + let vault: Mocked + + beforeEach(async () => { + vi.clearAllMocks() + const module: TestingModule = await createArgoCDControllerServiceTestingModule().compile() + service = module.get(ArgoCDControllerService) + datastore = module.get(ArgoCDDatastoreService) + gitlab = module.get(GitlabService) + vault = module.get(VaultService) + }) + + it('should be defined', () => { + expect(service).toBeDefined() + }) + + describe('reconcile', () => { + it('should sync project environments', async () => { + const mockProject = { + id: '123e4567-e89b-12d3-a456-426614174000', + slug: 'project-1', + name: 'Project 1', + environments: [ + { id: '123e4567-e89b-12d3-a456-426614174001', name: 'dev', clusterId: 'c1', cpu: 1, gpu: 0, memory: 1, autosync: true }, + { id: '123e4567-e89b-12d3-a456-426614174002', name: 'prod', clusterId: 'c1', cpu: 1, gpu: 0, memory: 1, autosync: true }, + ], + clusters: [ + { id: 'c1', label: 'cluster-1', zone: { slug: 'zone-1' } }, + ], + repositories: [ + { + id: 'repo-1', + internalRepoName: 'infra-repo', + isInfra: true, + deployRevision: 'HEAD', + deployPath: '.', + helmValuesFiles: '', + }, + ], + plugins: [{ pluginName: 'argocd', key: 'extraRepositories', value: 'repo2' }], + } satisfies ProjectWithDetails + + datastore.getAllProjects.mockResolvedValue([mockProject]) + gitlab.getOrCreateInfraGroupRepo.mockResolvedValue(makeProjectSchema({ id: 100, http_url_to_repo: 'https://gitlab.internal/infra' })) + gitlab.getProjectGroupPublicUrl.mockResolvedValue('https://gitlab.internal/group') + gitlab.getInfraGroupRepoPublicUrl.mockResolvedValue('https://gitlab.internal/infra-repo') + gitlab.listFiles.mockResolvedValue([]) + vault.readProjectValues.mockResolvedValue({ secret: 'value' }) + + const results = await service.reconcile() + + expect(results).toHaveLength(3) // 2 envs + 1 cleanup (1 zone) + + // Verify Gitlab calls + 
expect(gitlab.maybeCommitUpdate).toHaveBeenCalledTimes(2) + expect(gitlab.maybeCommitUpdate).toHaveBeenCalledWith( + 100, + [ + { + content: stringify({ + common: { + 'dso/project': 'Project 1', + 'dso/project.id': '123e4567-e89b-12d3-a456-426614174000', + 'dso/project.slug': 'project-1', + 'dso/environment': 'dev', + 'dso/environment.id': '123e4567-e89b-12d3-a456-426614174001', + }, + argocd: { + cluster: 'in-cluster', + namespace: 'argocd', + project: 'project-1-dev-6293', + envChartVersion: 'dso-env-1.6.0', + nsChartVersion: 'dso-ns-1.1.5', + }, + environment: { + valueFileRepository: 'https://gitlab.internal/infra', + valueFileRevision: 'HEAD', + valueFilePath: 'Project 1/cluster-1/dev/values.yaml', + roGroup: '/project-project-1/console/dev/RO', + rwGroup: '/project-project-1/console/dev/RW', + }, + application: { + quota: { + cpu: 1, + gpu: 0, + memory: '1Gi', + }, + sourceRepositories: [ + 'https://gitlab.internal/group/**', + 'repo3', + 'repo2', + ], + destination: { + namespace: generateNamespaceName(mockProject.id, mockProject.environments[0].id), + name: 'cluster-1', + }, + autosync: true, + vault: { secret: 'value' }, + repositories: [ + { + repoURL: 'https://gitlab.internal/infra-repo', + targetRevision: 'HEAD', + path: '.', + valueFiles: [], + }, + ], + }, + }), + filePath: 'Project 1/cluster-1/dev/values.yaml', + }, + ], + 'ci: :robot_face: Update Project 1/cluster-1/dev/values.yaml', + ) + }) + + it('should handle errors gracefully', async () => { + const mockProject = { + id: '123e4567-e89b-12d3-a456-426614174000', + slug: 'project-1', + name: 'Project 1', + environments: [{ id: '123e4567-e89b-12d3-a456-426614174001', name: 'dev', clusterId: 'c1', cpu: 1, gpu: 0, memory: 1, autosync: true }], + clusters: [ + { id: 'c1', label: 'cluster-1', zone: { slug: 'zone-1' } }, + ], + } as unknown as ProjectWithDetails + + datastore.getAllProjects.mockResolvedValue([mockProject]) + gitlab.getOrCreateInfraGroupRepo.mockRejectedValue(new Error('Sync failed')) + 
+ const results = await service.reconcile() + + // 1 env (fails) + 1 cleanup (fails because getOrCreateInfraProject fails) + expect(results).toHaveLength(2) + const failed = results.filter((r: any) => r.status === 'rejected') + expect(failed).toHaveLength(2) + }) + }) +}) diff --git a/apps/server-nestjs/src/modules/argocd/argocd-controller.service.ts b/apps/server-nestjs/src/modules/argocd/argocd-controller.service.ts new file mode 100644 index 000000000..d879af78f --- /dev/null +++ b/apps/server-nestjs/src/modules/argocd/argocd-controller.service.ts @@ -0,0 +1,146 @@ +import { Inject, Injectable, Logger } from '@nestjs/common' +import { OnEvent } from '@nestjs/event-emitter' +import { Cron, CronExpression } from '@nestjs/schedule' +import { stringify } from 'yaml' + +import { ArgoCDDatastoreService } from './argocd-datastore.service' +import type { ProjectWithDetails } from './argocd-datastore.service' +import { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import { GitlabService } from '../gitlab/gitlab.service' +import { VaultService } from '../vault/vault.service' +import { + formatEnvironmentValuesFilePath, + formatValues, + getDistinctZones, +} from './argocd.utils' + +@Injectable() +export class ArgoCDControllerService { + private readonly logger = new Logger(ArgoCDControllerService.name) + + constructor( + @Inject(ArgoCDDatastoreService) private readonly argoCDDatastore: ArgoCDDatastoreService, + @Inject(ConfigurationService) private readonly config: ConfigurationService, + @Inject(GitlabService) private readonly gitlab: GitlabService, + @Inject(VaultService) private readonly vault: VaultService, + ) { + this.logger.log('ArgoCDControllerService initialized') + } + + @OnEvent('project.upsert') + async handleUpsert(project: ProjectWithDetails) { + this.logger.log(`Handling project upsert for ${project.slug}`) + return this.reconcile() + } + + @OnEvent('project.delete') + async handleDelete(project: 
ProjectWithDetails) { + this.logger.log(`Handling project delete for ${project.slug}`) + return this.reconcile() + } + + @Cron(CronExpression.EVERY_HOUR) + async handleCron() { + this.logger.log('Starting ArgoCD reconciliation') + await this.reconcile() + } + + async reconcile() { + const projects = await this.argoCDDatastore.getAllProjects() + const results: PromiseSettledResult[] = [] + + const projectResults = await Promise.all(projects.map(async (project) => { + const pResults: PromiseSettledResult[] = [] + + const ensureResults = await Promise.allSettled( + project.environments.map(env => this.generateValues(project, env)), + ) + pResults.push(...ensureResults) + + const cleanupResults = await this.cleanupStaleValues(project) + pResults.push(...cleanupResults) + + return pResults + })) + + results.push(...projectResults.flat()) + + results.forEach((result) => { + if (result.status === 'rejected') { + this.logger.error(`Reconciliation failed: ${result.reason}`) + } + }) + + return results + } + + private async cleanupStaleValues(project: ProjectWithDetails) { + const zones = getDistinctZones(project) + return Promise.allSettled(zones.map(async (zoneSlug) => { + const infraProject = await this.gitlab.getOrCreateInfraGroupRepo(zoneSlug) + const existingFiles = await this.gitlab.listFiles(infraProject.id, { + path: `${project.name}/`, + recursive: true, + }) + + const neededFiles = new Set( + project.environments + .filter((env) => { + const cluster = project.clusters.find(c => c.id === env.clusterId) + return cluster?.zone.slug === zoneSlug + }) + .map((env) => { + const cluster = project.clusters.find(c => c.id === env.clusterId)! 
+ return formatEnvironmentValuesFilePath(project, cluster, env) + }), + ) + + const filesToDelete = existingFiles + .filter((existingFile) => { + return ( + existingFile.name === 'values.yaml' + && !neededFiles.has(existingFile.path) + ) + }) + .map(existingFile => existingFile.path) + + await this.gitlab.maybeCommitDelete(infraProject.id, filesToDelete) + })) + } + + async generateValues( + project: ProjectWithDetails, + environment: ProjectWithDetails['environments'][number], + ) { + const vaultValues = await this.vault.readProjectValues(project.id) + const cluster = project.clusters.find(c => c.id === environment.clusterId) + if (!cluster) throw new Error(`Cluster not found for environment ${environment.id}`) + + const infraProject = await this.gitlab.getOrCreateInfraGroupRepo(cluster.zone.slug) + const valueFilePath = formatEnvironmentValuesFilePath(project, cluster, environment) + + const repo = project.repositories.find(r => r.isInfra) + if (!repo) throw new Error(`Infra repository not found for project ${project.id}`) + const repoUrl = await this.gitlab.getInfraGroupRepoPublicUrl(repo.internalRepoName) + + const values = formatValues({ + project, + environment, + cluster, + gitlabPublicGroupUrl: await this.gitlab.getProjectGroupPublicUrl(), + argocdExtraRepositories: this.config.argocdExtraRepositories, + infraProject, + valueFilePath, + repoUrl, + vaultValues, + argoNamespace: this.config.argoNamespace, + envChartVersion: this.config.dsoEnvChartVersion, + nsChartVersion: this.config.dsoNsChartVersion, + }) + + await this.gitlab.maybeCommitUpdate(infraProject.id, [{ + content: stringify(values), + filePath: valueFilePath, + }], `ci: :robot_face: Update ${valueFilePath}`) + } +} diff --git a/apps/server-nestjs/src/modules/argocd/argocd-datastore.service.ts b/apps/server-nestjs/src/modules/argocd/argocd-datastore.service.ts new file mode 100644 index 000000000..1eccc780e --- /dev/null +++ b/apps/server-nestjs/src/modules/argocd/argocd-datastore.service.ts @@ 
-0,0 +1,63 @@ +import { Inject, Injectable } from '@nestjs/common' +import type { Prisma } from '@prisma/client' +import { PrismaService } from '@/cpin-module/infrastructure/database/prisma.service' + +export const projectSelect = { + id: true, + name: true, + slug: true, + plugins: { + select: { + pluginName: true, + key: true, + value: true, + }, + }, + repositories: { + select: { + id: true, + internalRepoName: true, + isInfra: true, + helmValuesFiles: true, + deployRevision: true, + deployPath: true, + }, + }, + environments: { + select: { + id: true, + name: true, + clusterId: true, + cpu: true, + gpu: true, + memory: true, + autosync: true, + }, + }, + clusters: { + select: { + id: true, + label: true, + zone: { + select: { + slug: true, + }, + }, + }, + }, +} satisfies Prisma.ProjectSelect + +export type ProjectWithDetails = Prisma.ProjectGetPayload<{ + select: typeof projectSelect +}> + +@Injectable() +export class ArgoCDDatastoreService { + constructor(@Inject(PrismaService) private readonly prisma: PrismaService) {} + + async getAllProjects(): Promise { + return this.prisma.project.findMany({ + select: projectSelect, + }) + } +} diff --git a/apps/server-nestjs/src/modules/argocd/argocd.module.ts b/apps/server-nestjs/src/modules/argocd/argocd.module.ts new file mode 100644 index 000000000..ee8080467 --- /dev/null +++ b/apps/server-nestjs/src/modules/argocd/argocd.module.ts @@ -0,0 +1,14 @@ +import { Module } from '@nestjs/common' +import { ArgoCDControllerService } from './argocd-controller.service' +import { ArgoCDDatastoreService } from './argocd-datastore.service' +import { ConfigurationModule } from '@/cpin-module/infrastructure/configuration/configuration.module' +import { InfrastructureModule } from '@/cpin-module/infrastructure/infrastructure.module' +import { GitlabModule } from '../gitlab/gitlab.module' +import { VaultModule } from '../vault/vault.module' + +@Module({ + imports: [ConfigurationModule, InfrastructureModule, GitlabModule, 
VaultModule], + providers: [ArgoCDControllerService, ArgoCDDatastoreService], + exports: [], +}) +export class ArgoCDModule {} diff --git a/apps/server-nestjs/src/modules/argocd/argocd.utils.ts b/apps/server-nestjs/src/modules/argocd/argocd.utils.ts new file mode 100644 index 000000000..fbb480baa --- /dev/null +++ b/apps/server-nestjs/src/modules/argocd/argocd.utils.ts @@ -0,0 +1,236 @@ +import { createHmac } from 'node:crypto' +import { generateNamespaceName, inClusterLabel } from '@cpn-console/shared' +import type { ProjectWithDetails } from './argocd-datastore.service.js' +import z from 'zod' + +export const valuesSchema = z.object({ + common: z.object({ + 'dso/project': z.string(), + 'dso/project.id': z.string(), + 'dso/project.slug': z.string(), + 'dso/environment': z.string(), + 'dso/environment.id': z.string(), + }), + argocd: z.object({ + cluster: z.string(), + namespace: z.string(), + project: z.string(), + envChartVersion: z.string(), + nsChartVersion: z.string(), + }), + environment: z.object({ + valueFileRepository: z.string(), + valueFileRevision: z.string(), + valueFilePath: z.string(), + roGroup: z.string(), + rwGroup: z.string(), + }), + application: z.object({ + quota: z.object({ + cpu: z.number(), + gpu: z.number(), + memory: z.string(), + }), + sourceRepositories: z.array(z.string()), + destination: z.object({ + namespace: z.string(), + name: z.string(), + }), + autosync: z.boolean(), + vault: z.record(z.any()), + repositories: z.array(z.object({ + repoURL: z.string(), + targetRevision: z.string(), + path: z.string(), + valueFiles: z.array(z.string()), + })), + }), +}) + +export function formatReadOnlyGroupName(projectSlug: string, environmentName: string) { + return `/project-${projectSlug}/console/${environmentName}/RO` +} + +export function formatReadWriteGroupName(projectSlug: string, environmentName: string) { + return `/project-${projectSlug}/console/${environmentName}/RW` +} + +export function formatAppProjectName(projectSlug: string, env: 
string) { + const envHash = createHmac('sha256', '') + .update(env) + .digest('hex') + .slice(0, 4) + return `${projectSlug}-${env}-${envHash}` +} + +export function formatEnvironmentValuesFilePath(project: { name: string }, cluster: { label: string }, env: { name: string }): string { + return `${project.name}/${cluster.label}/${env.name}/values.yaml` +} + +export function getDistinctZones(project: ProjectWithDetails) { + const zones = new Set() + project.clusters.forEach(c => zones.add(c.zone.slug)) + return [...zones] +} + +export function splitExtraRepositories(extraRepositories: string | undefined): string[] { + if (!extraRepositories) return [] + return extraRepositories.split(',').map(r => r.trim()).filter(r => r.length > 0) +} + +export function formatRepositoriesValues( + repositories: ProjectWithDetails['repositories'], + repoUrl: string, + envName: string, +) { + return repositories + .filter(repo => repo.isInfra) + .map((repository) => { + const valueFiles = splitExtraRepositories(repository.helmValuesFiles?.replaceAll('', envName)) + return { + repoURL: repoUrl, + targetRevision: repository.deployRevision || 'HEAD', + path: repository.deployPath || '.', + valueFiles, + } satisfies z.infer[number] + }) +} + +export function formatEnvironmentValues( + infraProject: { http_url_to_repo: string }, + valueFilePath: string, + roGroup: string, + rwGroup: string, +) { + return { + valueFileRepository: infraProject.http_url_to_repo, + valueFileRevision: 'HEAD', + valueFilePath, + roGroup, + rwGroup, + } satisfies z.infer +} + +export interface FormatSourceRepositoriesValuesOptions { + gitlabPublicGroupUrl: string + argocdExtraRepositories?: string + projectPlugins?: ProjectWithDetails['plugins'] +} + +export function formatSourceRepositoriesValues( + { gitlabPublicGroupUrl, argocdExtraRepositories, projectPlugins }: FormatSourceRepositoriesValuesOptions, +): string[] { + let projectExtraRepositories = '' + if (projectPlugins) { + const argocdPlugin = 
projectPlugins.find(p => p.pluginName === 'argocd' && p.key === 'extraRepositories') + if (argocdPlugin) projectExtraRepositories = argocdPlugin.value + } + + return [ + `${gitlabPublicGroupUrl}/**`, + ...splitExtraRepositories(argocdExtraRepositories), + ...splitExtraRepositories(projectExtraRepositories), + ] +} + +export interface FormatCommonOptions { + project: ProjectWithDetails + environment: ProjectWithDetails['environments'][number] +} + +export function formatCommon({ project, environment }: FormatCommonOptions) { + return { + 'dso/project': project.name, + 'dso/project.id': project.id, + 'dso/project.slug': project.slug, + 'dso/environment': environment.name, + 'dso/environment.id': environment.id, + } satisfies z.infer +} + +export interface FormatArgoCDValuesOptions { + namespace: string + project: string + envChartVersion: string + nsChartVersion: string +} + +export function formatArgoCDValues(options: FormatArgoCDValuesOptions) { + const { namespace, project, envChartVersion, nsChartVersion } = options + return { + cluster: inClusterLabel, + namespace, + project, + envChartVersion, + nsChartVersion, + } satisfies z.infer +} + +export interface FormatValuesOptions { + project: ProjectWithDetails + environment: ProjectWithDetails['environments'][number] + cluster: ProjectWithDetails['clusters'][number] + gitlabPublicGroupUrl: string + argocdExtraRepositories?: string + vaultValues: Record + infraProject: { http_url_to_repo: string } + valueFilePath: string + repoUrl: string + argoNamespace: string + envChartVersion: string + nsChartVersion: string +} + +export function formatValues({ + project, + environment, + cluster, + gitlabPublicGroupUrl, + argocdExtraRepositories, + vaultValues, + infraProject, + valueFilePath, + repoUrl, + argoNamespace, + envChartVersion, + nsChartVersion, +}: FormatValuesOptions) { + return { + common: formatCommon({ project, environment }), + argocd: formatArgoCDValues({ + namespace: argoNamespace, + project: 
formatAppProjectName(project.slug, environment.name), + envChartVersion, + nsChartVersion, + }), + environment: formatEnvironmentValues( + infraProject, + valueFilePath, + formatReadOnlyGroupName(project.slug, environment.name), + formatReadWriteGroupName(project.slug, environment.name), + ), + application: { + quota: { + cpu: environment.cpu, + gpu: environment.gpu, + memory: `${environment.memory}Gi`, + }, + sourceRepositories: formatSourceRepositoriesValues({ + gitlabPublicGroupUrl, + argocdExtraRepositories, + projectPlugins: project.plugins, + }), + destination: { + namespace: generateNamespaceName(project.id, environment.id), + name: cluster.label, + }, + autosync: environment.autosync, + vault: vaultValues, + repositories: formatRepositoriesValues( + project.repositories, + repoUrl, + environment.name, + ), + }, + } satisfies z.infer +} diff --git a/apps/server-nestjs/src/modules/gitlab/files/.gitlab-ci.yml b/apps/server-nestjs/src/modules/gitlab/files/.gitlab-ci.yml new file mode 100644 index 000000000..ca9be2984 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/files/.gitlab-ci.yml @@ -0,0 +1,22 @@ +variables: + PROJECT_NAME: + description: Nom du dépôt (dans ce Gitlab) à synchroniser. + GIT_BRANCH_DEPLOY: + description: Nom de la branche à synchroniser. + value: main + SYNC_ALL: + description: Synchroniser toutes les branches. 
+ value: "false" + +include: + - project: $CATALOG_PATH + file: mirror.yml + ref: main + +repo_pull_sync: + extends: .repo_pull_sync + only: + - api + - triggers + - web + - schedules diff --git a/apps/server-nestjs/src/modules/gitlab/files/mirror.sh b/apps/server-nestjs/src/modules/gitlab/files/mirror.sh new file mode 100644 index 000000000..c50c923f8 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/files/mirror.sh @@ -0,0 +1,83 @@ +#!/bin/bash + +set -e + +# Colorize terminal +red='\\e[0;31m' +no_color='\\033[0m' + +# Console step increment +i=1 + +# Default values +BRANCH_TO_SYNC=main + +print_help() { + TEXT_HELPER="\\nThis script aims to send a synchronization request to DSO.\\nFollowing flags are available: + -a Api url to send the synchronization request + -b Branch which is wanted to be synchronize for the given repository (default '$BRANCH_TO_SYNC') + -g GitLab token to trigger the pipeline on the gitlab mirror project + -i Gitlab mirror project id + -r Gitlab repository name to mirror + -h Print script help\\n" + printf "$TEXT_HELPER" +} + +print_args() { + printf "\\nArguments received: + -a API_URL: $API_URL + -b BRANCH_TO_SYNC: $BRANCH_TO_SYNC + -g GITLAB_TRIGGER_TOKEN length: \${#GITLAB_TRIGGER_TOKEN} + -i GITLAB_MIRROR_PROJECT_ID: $GITLAB_MIRROR_PROJECT_ID + -r REPOSITORY_NAME: $REPOSITORY_NAME\\n" +} + +# Parse options +while getopts :ha:b:g:i:r: flag +do + case "\${flag}" in + a) + API_URL=\${OPTARG};; + b) + BRANCH_TO_SYNC=\${OPTARG};; + g) + GITLAB_TRIGGER_TOKEN=\${OPTARG};; + i) + GITLAB_MIRROR_PROJECT_ID=\${OPTARG};; + r) + REPOSITORY_NAME=\${OPTARG};; + h) + printf "\\nHelp requested.\\n" + print_help + printf "\\nExiting.\\n" + exit 0;; + *) + printf "\\nInvalid argument \${OPTARG} (\${flag}).\\n" + print_help + print_args + exit 1;; + esac +done + +# Test if arguments are missing +if [ -z \${API_URL} ] || [ -z \${BRANCH_TO_SYNC} ] || [ -z \${GITLAB_TRIGGER_TOKEN} ] || [ -z \${GITLAB_MIRROR_PROJECT_ID} ] || [ -z \${REPOSITORY_NAME} 
]; then + printf "\\nArgument(s) missing!\\n" + print_help + print_args + exit 2 +fi + +# Print arguments +print_args + +# Send synchronization request +printf "\\n\${red}\${i}.\${no_color} Send request to DSO api.\\n\\n" + +curl \\ + -X POST \\ + --fail \\ + -F token=\${GITLAB_TRIGGER_TOKEN} \\ + -F ref=main \\ + -F variables[GIT_BRANCH_DEPLOY]=\${BRANCH_TO_SYNC} \\ + -F variables[PROJECT_NAME]=\${REPOSITORY_NAME} \\ + "\${API_URL}/api/v4/projects/\${GITLAB_MIRROR_PROJECT_ID}/trigger/pipeline" diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab-client.service.ts b/apps/server-nestjs/src/modules/gitlab/gitlab-client.service.ts new file mode 100644 index 000000000..1e5e67d38 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab-client.service.ts @@ -0,0 +1,15 @@ +import { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import { Gitlab } from '@gitbeaker/rest' +import { Injectable, Inject } from '@nestjs/common' + +@Injectable() +export class GitlabClientService extends Gitlab { + constructor( + @Inject(ConfigurationService) readonly config: ConfigurationService, + ) { + super({ + token: config.gitlabToken, + host: config.gitlabInternalUrl, + }) + } +} diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab-controller.service.spec.ts b/apps/server-nestjs/src/modules/gitlab/gitlab-controller.service.spec.ts new file mode 100644 index 000000000..b359cfa9b --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab-controller.service.spec.ts @@ -0,0 +1,279 @@ +import { Test } from '@nestjs/testing' +import { ENABLED } from '@cpn-console/shared' +import { GitlabControllerService } from './gitlab-controller.service' +import { GitlabService } from './gitlab.service' +import { GitlabDatastoreService } from './gitlab-datastore.service' +import { VaultService } from '../vault/vault.service' +import { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import { 
vi, describe, beforeEach, it, expect } from 'vitest' +import type { Mocked } from 'vitest' +import type { AccessTokenExposedSchema } from '@gitbeaker/core' +import { AccessLevel } from '@gitbeaker/core' +import { faker } from '@faker-js/faker' +import { makeProjectWithDetails, makeGroupSchema, makeProjectSchema, makePipelineTriggerToken, makeSimpleUserSchema, makeMemberSchema, makeExpandedUserSchema } from './gitlab-testing.utils' + +function createGitlabControllerServiceTestingModule() { + return Test.createTestingModule({ + providers: [ + GitlabControllerService, + { + provide: GitlabService, + useValue: { + getOrCreateProjectSubGroup: vi.fn(), + getGroupMembers: vi.fn(), + addGroupMember: vi.fn(), + editGroupMember: vi.fn(), + removeGroupMember: vi.fn(), + getUserByEmail: vi.fn(), + createUser: vi.fn(), + getRepos: vi.fn(), + getProjectToken: vi.fn(), + getInfraGroupRepoPublicUrl: vi.fn(), + maybeCommitUpdate: vi.fn(), + deleteGroup: vi.fn(), + commitMirror: vi.fn(), + getOrCreateMirrorPipelineTriggerToken: vi.fn(), + createProjectToken: vi.fn(), + createMirrorAccessToken: vi.fn(), + upsertProjectGroupRepo: vi.fn(), + upsertProjectMirrorRepo: vi.fn(), + getProjectGroupInternalRepoUrl: vi.fn(), + } satisfies Partial, + }, + { + provide: GitlabDatastoreService, + useValue: { + getAllProjects: vi.fn(), + } satisfies Partial, + }, + { + provide: VaultService, + useValue: { + read: vi.fn(), + write: vi.fn(), + destroy: vi.fn(), + readGitlabMirrorCreds: vi.fn(), + writeGitlabMirrorCreds: vi.fn(), + deleteGitlabMirrorCreds: vi.fn(), + readTechnReadOnlyCreds: vi.fn(), + writeTechReadOnlyCreds: vi.fn(), + writeMirrorTriggerToken: vi.fn(), + } satisfies Partial, + }, + { + provide: ConfigurationService, + useValue: { + projectRootDir: 'forge/console', + projectRootPath: 'forge', + }, + }, + ], + }) +} + +describe('gitlabControllerService', () => { + let service: GitlabControllerService + let gitlab: Mocked + let vault: Mocked + let gitlabDatastore: Mocked + + 
beforeEach(async () => { + const moduleRef = await createGitlabControllerServiceTestingModule().compile() + service = moduleRef.get(GitlabControllerService) + gitlab = moduleRef.get(GitlabService) + vault = moduleRef.get(VaultService) + gitlabDatastore = moduleRef.get(GitlabDatastoreService) + + vault.writeGitlabMirrorCreds.mockResolvedValue(undefined) + vault.deleteGitlabMirrorCreds.mockResolvedValue(undefined) + vault.writeTechReadOnlyCreds.mockResolvedValue(undefined) + vault.writeMirrorTriggerToken.mockResolvedValue(undefined) + vault.readTechnReadOnlyCreds.mockResolvedValue(null) + vault.readGitlabMirrorCreds.mockResolvedValue(null) + }) + + it('should be defined', () => { + expect(service).toBeDefined() + }) + + describe('handleUpsert', () => { + it('should reconcile project members and repositories', async () => { + const project = makeProjectWithDetails() + const group = makeGroupSchema({ + id: 123, + full_path: 'forge/console/project-1', + full_name: 'forge/console/project-1', + name: 'project-1', + path: 'project-1', + parent_id: 1, + }) + + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([]) + gitlab.getRepos.mockReturnValue((async function* () { })()) + gitlab.upsertProjectGroupRepo.mockResolvedValue(makeProjectSchema({ id: 1 })) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + gitlab.getInfraGroupRepoPublicUrl.mockResolvedValue('https://gitlab.internal/repo') + gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + gitlab.getUserByEmail.mockResolvedValue(makeSimpleUserSchema({ id: 123, username: 'user' })) + + await service.handleUpsert(project) + + expect(gitlab.getOrCreateProjectSubGroup).toHaveBeenCalledWith(project.slug) + expect(gitlab.getGroupMembers).toHaveBeenCalledWith(group.id) + 
expect(gitlab.getRepos).toHaveBeenCalledWith(project.slug) + }) + + it('should remove orphan member if purge enabled', async () => { + const project = makeProjectWithDetails({ + plugins: [{ key: 'purge', value: ENABLED }], + }) + const group = makeGroupSchema({ id: 123, name: 'project-1', path: 'project-1', full_path: 'forge/console/project-1', full_name: 'forge/console/project-1', parent_id: 1 }) + + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([makeMemberSchema({ id: 999, username: 'orphan' })]) + gitlab.getRepos.mockReturnValue((async function* () { })()) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + + await service.handleUpsert(project) + + expect(gitlab.removeGroupMember).toHaveBeenCalledWith(group.id, 999) + }) + + it('should not remove managed user (bot) even if purge enabled', async () => { + const project = makeProjectWithDetails({ + plugins: [{ key: 'purge', value: ENABLED }], + }) + const group = makeGroupSchema({ id: 123, name: 'project-1', path: 'project-1', full_path: 'forge/console/project-1', full_name: 'forge/console/project-1', parent_id: 1 }) + + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([makeMemberSchema({ id: 888, username: 'group_123_bot' })]) + gitlab.getRepos.mockReturnValue((async function* () { })()) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + + await service.handleUpsert(project) + + expect(gitlab.removeGroupMember).not.toHaveBeenCalled() + }) + + it('should 
not remove orphan member if purge disabled', async () => { + const project = makeProjectWithDetails() + const group = makeGroupSchema({ id: 123, name: 'project-1', path: 'project-1', full_path: 'forge/console/project-1', full_name: 'forge/console/project-1', parent_id: 1 }) + + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([makeMemberSchema({ id: 999, username: 'orphan' })]) + gitlab.getRepos.mockReturnValue((async function* () { })()) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + + await service.handleUpsert(project) + + expect(gitlab.removeGroupMember).not.toHaveBeenCalled() + }) + + it('should create gitlab user if not exists', async () => { + const project = makeProjectWithDetails({ + members: [{ user: { id: 'u1', email: 'new@example.com', firstName: 'New', lastName: 'User', adminRoleIds: [] }, roleIds: [] }], + }) + const group = makeGroupSchema({ id: 123, name: 'project-1', path: 'project-1', full_path: 'forge/console/project-1', full_name: 'forge/console/project-1', parent_id: 1 }) + + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([]) + gitlab.getUserByEmail.mockResolvedValue(null) + gitlab.createUser.mockImplementation(async (email, username, name) => { + return makeExpandedUserSchema({ + id: email === 'new@example.com' ? 
999 : 998, + email, + username, + name, + }) + }) + gitlab.getRepos.mockReturnValue((async function* () { })()) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + + await service.handleUpsert(project) + + expect(gitlab.createUser).toHaveBeenCalledWith('new@example.com', 'new.example.com', 'New User') + expect(gitlab.createUser).toHaveBeenCalledWith('owner@example.com', 'owner.example.com', 'Owner User') + expect(gitlab.addGroupMember).toHaveBeenCalledWith(group.id, 999, AccessLevel.GUEST) + expect(gitlab.addGroupMember).toHaveBeenCalledWith(group.id, 998, AccessLevel.OWNER) + }) + + it('should configure repository mirroring if external url is present', async () => { + const project = makeProjectWithDetails({ + repositories: [{ + id: 'r1', + internalRepoName: 'repo-1', + externalRepoUrl: 'https://github.com/org/repo.git', + isPrivate: true, + externalUserName: 'user', + isInfra: false, + }], + }) + const group = makeGroupSchema({ id: 123, name: 'project-1', path: 'project-1', full_path: 'forge/console/project-1', full_name: 'forge/console/project-1', parent_id: 1 }) + const gitlabRepo = makeProjectSchema({ id: 101, name: 'repo-1', path: 'repo-1', path_with_namespace: 'forge/console/project-1/repo-1' }) + const accessToken = { + id: 1, + user_id: 1, + scopes: ['read_api'], + name: 'bot', + expires_at: faker.date.future().toISOString(), + active: true, + created_at: faker.date.past().toISOString(), + revoked: false, + access_level: 40, + token: faker.internet.password(), + } satisfies AccessTokenExposedSchema + + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([]) + gitlab.getRepos.mockReturnValue((async function* () { yield gitlabRepo })()) + 
gitlab.getProjectGroupInternalRepoUrl.mockResolvedValue('https://gitlab.internal/group/repo-1.git') + gitlab.createMirrorAccessToken.mockResolvedValue(accessToken) + vault.readTechnReadOnlyCreds.mockResolvedValue(null) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + + await service.handleUpsert(project) + + expect(gitlab.createMirrorAccessToken).toHaveBeenCalledWith('project-1') + expect(gitlab.upsertProjectMirrorRepo).toHaveBeenCalledWith('project-1') + + expect(vault.writeGitlabMirrorCreds).toHaveBeenCalledWith( + 'project-1', + 'repo-1', + expect.objectContaining({ + GIT_INPUT_URL: 'github.com/org/repo.git', + GIT_OUTPUT_USER: 'bot', + GIT_OUTPUT_PASSWORD: accessToken.token, + }), + ) + expect(vault.writeTechReadOnlyCreds).toHaveBeenCalledWith('project-1', { + MIRROR_USER: 'bot', + MIRROR_TOKEN: accessToken.token, + }) + }) + }) + + describe('handleCron', () => { + it('should reconcile all projects', async () => { + const projects = [makeProjectWithDetails({ id: 'p1', slug: 'project-1' })] + gitlabDatastore.getAllProjects.mockResolvedValue(projects) + + const group = makeGroupSchema({ id: 123, name: 'project-1', path: 'project-1', full_path: 'forge/console/project-1', full_name: 'forge/console/project-1', parent_id: 1 }) + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([]) + gitlab.getRepos.mockReturnValue((async function* () { })()) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + + await service.handleCron() + + 
expect(gitlabDatastore.getAllProjects).toHaveBeenCalled() + expect(gitlab.getOrCreateProjectSubGroup).toHaveBeenCalledWith('project-1') + }) + }) +}) diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab-controller.service.ts b/apps/server-nestjs/src/modules/gitlab/gitlab-controller.service.ts new file mode 100644 index 000000000..142180215 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab-controller.service.ts @@ -0,0 +1,344 @@ +import { Inject, Injectable, Logger } from '@nestjs/common' +import { OnEvent } from '@nestjs/event-emitter' +import { Cron, CronExpression } from '@nestjs/schedule' +import { AccessLevel } from '@gitbeaker/core' +import type { MemberSchema, ProjectSchema } from '@gitbeaker/core' +import { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import { GitlabDatastoreService } from './gitlab-datastore.service' +import type { ProjectWithDetails } from './gitlab-datastore.service' +import { GitlabService } from './gitlab.service' +import { VaultService } from '../vault/vault.service' +import { INFRA_APPS_REPO_NAME } from './gitlab.constants' +import { daysAgoFromNow, generateAccessLevelMapping, generateUsername, getAll, getPluginConfig } from './gitlab.utils' +import type { VaultSecret } from '../vault/vault-client.service' +import { specificallyEnabled } from '@cpn-console/hooks' +import { trace } from '@opentelemetry/api' + +const ownedUserRegex = /group_\d+_bot/u +const tracer = trace.getTracer('gitlab-controller') + +@Injectable() +export class GitlabControllerService { + private readonly logger = new Logger(GitlabControllerService.name) + + constructor( + @Inject(GitlabDatastoreService) private readonly gitlabDatastore: GitlabDatastoreService, + @Inject(GitlabService) private readonly gitlab: GitlabService, + @Inject(VaultService) private readonly vault: VaultService, + @Inject(ConfigurationService) private readonly config: ConfigurationService, + ) { + 
this.logger.log('GitlabControllerService initialized') + } + + @OnEvent('project.upsert') + async handleUpsert(project: ProjectWithDetails) { + return tracer.startActiveSpan('handleUpsert', async (span) => { + try { + span.setAttribute('project.slug', project.slug) + this.logger.log(`Handling project upsert for ${project.slug}`) + await this.ensureProjectGroup(project) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + @OnEvent('project.delete') + async handleDelete(project: ProjectWithDetails) { + return tracer.startActiveSpan('handleDelete', async (span) => { + try { + span.setAttribute('project.slug', project.slug) + this.logger.log(`Handling project delete for ${project.slug}`) + await this.ensureProjectGroup(project) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + @Cron(CronExpression.EVERY_HOUR) + async handleCron() { + return tracer.startActiveSpan('handleCron', async (span) => { + try { + this.logger.log('Starting Gitlab reconciliation') + const projects = await this.gitlabDatastore.getAllProjects() + span.setAttribute('projects.count', projects.length) + await this.ensureProjectGroups(projects) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + private async ensureProjectGroups(projects: ProjectWithDetails[]) { + tracer.startActiveSpan('ensureProjectGroups', async (span) => { + try { + span.setAttribute('projects.count', projects.length) + await Promise.all(projects.map(p => this.ensureProjectGroup(p))) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + private async ensureProjectGroup(project: ProjectWithDetails) { + return tracer.startActiveSpan('ensureProject', async (span) => { + try 
{ + const group = await this.gitlab.getOrCreateProjectSubGroup(project.slug) + const members = await this.gitlab.getGroupMembers(group.id) + span.setAttribute('project.slug', project.slug) + await this.ensureProjectGroupMembers(project, group.id, members) + await this.ensureProjectRepos(project) + await this.ensureSystemRepos(project) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + private async ensureProjectGroupMembers( + project: ProjectWithDetails, + groupId: number, + members: MemberSchema[], + ) { + await this.addMissingMembers(project, groupId, members) + await this.addMissingOwnerMember(project, groupId, members) + await this.purgeOrphanMembers(project, groupId, members) + } + + private async addMissingMembers( + project: ProjectWithDetails, + groupId: number, + members: MemberSchema[], + ) { + const membersById = new Map(members.map(m => [m.id, m])) + const accessLevelByUserId = generateAccessLevelMapping(project) + + await Promise.all(project.members.map(async ({ user }) => { + const gitlabUser = await this.upsertUser(user) + if (!gitlabUser) return + const accessLevel = accessLevelByUserId.get(user.id) ?? AccessLevel.NO_ACCESS + await this.ensureGroupMemberAccessLevel(groupId, gitlabUser.id, accessLevel, membersById) + })) + } + + private async upsertUser(user: ProjectWithDetails['members'][number]['user']) { + return await this.gitlab.getUserByEmail(user.email) + ?? 
await this.gitlab.createUser( + user.email, + generateUsername(user.email), + `${user.firstName} ${user.lastName}`, + ) + } + + private async ensureGroupMemberAccessLevel( + groupId: number, + gitlabUserId: number, + accessLevel: AccessLevel, + membersById: Map, + ) { + const existingMember = membersById.get(gitlabUserId) + + if (accessLevel === AccessLevel.NO_ACCESS) { + if (existingMember) { + await this.gitlab.removeGroupMember(groupId, gitlabUserId) + } + return + } + + if (!existingMember) { + await this.gitlab.addGroupMember(groupId, gitlabUserId, accessLevel) + return + } + + if (existingMember.access_level !== accessLevel) { + await this.gitlab.editGroupMember(groupId, gitlabUserId, accessLevel) + } + } + + private async addMissingOwnerMember( + project: ProjectWithDetails, + groupId: number, + members: MemberSchema[], + ) { + const gitlabUser = await this.upsertUser(project.owner) + if (!gitlabUser) return + const membersById = new Map(members.map(m => [m.id, m])) + await this.ensureGroupMemberAccessLevel(groupId, gitlabUser.id, AccessLevel.OWNER, membersById) + } + + private async purgeOrphanMembers( + project: ProjectWithDetails, + groupId: number, + members: MemberSchema[], + ) { + const purgeConfig = getPluginConfig(project, 'purge') + const usernames = new Set([ + generateUsername(project.owner.email), + ...project.members.map(m => generateUsername(m.user.email)), + ]) + const emails = new Set([ + project.owner.email.toLowerCase(), + ...project.members.map(m => m.user.email.toLowerCase()), + ]) + + const orphans = members.filter((member) => { + if (this.isOwnedUser(member)) return false + if (usernames.has(member.username)) return false + if (member.email && emails.has(member.email.toLowerCase())) return false + return true + }) + + if (specificallyEnabled(purgeConfig)) { + await Promise.all(orphans.map(async (orphan) => { + await this.gitlab.removeGroupMember(groupId, orphan.id) + this.logger.log(`Removed ${orphan.username} from gitlab group 
${groupId}`) + })) + } else { + for (const orphan of orphans) { + this.logger.warn(`User ${orphan.username} is in Gitlab group but not in project (purge disabled)`) + } + } + } + + private isOwnedUser(member: MemberSchema) { + return ownedUserRegex.test(member.username) + } + + private async ensureProjectRepos(project: ProjectWithDetails) { + const gitlabRepositories = await getAll(this.gitlab.getRepos(project.slug)) + for (const repo of project.repositories) { + await this.ensureRepository(project, repo, gitlabRepositories) + + if (repo.externalRepoUrl) { + await this.configureRepositoryMirroring(project, repo) + } else { + await this.vault.deleteGitlabMirrorCreds(project.slug, repo.internalRepoName) + } + } + } + + private async ensureRepository( + project: ProjectWithDetails, + repo: ProjectWithDetails['repositories'][number], + gitlabRepositories: ProjectSchema[], + ) { + return gitlabRepositories.find(r => r.name === repo.internalRepoName) + ?? await this.gitlab.upsertProjectGroupRepo( + project.slug, + repo.internalRepoName, + ) + } + + private async configureRepositoryMirroring( + project: ProjectWithDetails, + repo: ProjectWithDetails['repositories'][number], + ) { + const currentVaultSecret = await this.vault.readGitlabMirrorCreds(project.slug, repo.internalRepoName) + if (!currentVaultSecret) { + this.logger.warn('No existing mirror credentials found in Vault, rotating new ones', { + projectSlug: project.slug, + repoName: repo.internalRepoName, + }) + } + + const internalRepoUrl = await this.gitlab.getProjectGroupInternalRepoUrl(project.slug, repo.internalRepoName) + const externalRepoUrn = repo.externalRepoUrl.split('://')[1] + const internalRepoUrn = internalRepoUrl.split('://')[1] + + const projectMirrorCreds = await this.getOrRotateMirrorCreds(project.slug) + + const mirrorSecretData = { + GIT_INPUT_URL: externalRepoUrn, + GIT_INPUT_USER: repo.isPrivate ? 
repo.externalUserName : undefined, + GIT_INPUT_PASSWORD: currentVaultSecret?.data?.GIT_INPUT_PASSWORD, // Preserve existing password as it's not in DB + GIT_OUTPUT_URL: internalRepoUrn, + GIT_OUTPUT_USER: projectMirrorCreds.MIRROR_USER, + GIT_OUTPUT_PASSWORD: projectMirrorCreds.MIRROR_TOKEN, + } + + // Write to vault if changed + // Using simplified check + await this.vault.writeGitlabMirrorCreds(project.slug, repo.internalRepoName, mirrorSecretData) + } + + private async ensureSystemRepos(project: ProjectWithDetails) { + await Promise.all([ + this.ensureInfraAppsRepo(project.slug), + this.ensureMirrorRepo(project.slug), + ]) + } + + private async ensureInfraAppsRepo(projectSlug: string) { + await this.gitlab.upsertProjectGroupRepo(projectSlug, INFRA_APPS_REPO_NAME) + } + + private async ensureMirrorRepo(projectSlug: string) { + const mirrorRepo = await this.gitlab.upsertProjectMirrorRepo(projectSlug) + if (mirrorRepo.empty_repo) { + await this.gitlab.commitMirror(mirrorRepo.id) + } + await this.ensureMirrorRepoTriggerToken(projectSlug) + } + + private async ensureMirrorRepoTriggerToken(projectSlug: string) { + const triggerToken = await this.gitlab.getOrCreateMirrorPipelineTriggerToken(projectSlug) + const gitlabSecret = { + PROJECT_SLUG: projectSlug, + GIT_MIRROR_PROJECT_ID: triggerToken.repoId, + GIT_MIRROR_TOKEN: triggerToken.token, + } + await this.vault.writeMirrorTriggerToken(gitlabSecret) + } + + private async getOrRotateMirrorCreds(projectSlug: string) { + const vaultSecret = await this.vault.readTechnReadOnlyCreds(projectSlug) + if (!vaultSecret) return this.createMirrorAccessToken(projectSlug) + + if (!this.isMirrorCredsExpiring(vaultSecret)) { + return vaultSecret.data as { MIRROR_USER: string, MIRROR_TOKEN: string } + } + return this.createMirrorAccessToken(projectSlug) + } + + private async createMirrorAccessToken(projectSlug: string) { + const token = await this.gitlab.createMirrorAccessToken(projectSlug) + const creds = { + MIRROR_USER: token.name, 
+ MIRROR_TOKEN: token.token, + } + await this.vault.writeTechReadOnlyCreds(projectSlug, creds) + return creds + } + + private isMirrorCredsExpiring(vaultSecret: VaultSecret): boolean { + if (!vaultSecret?.metadata?.created_time) return false + const createdTime = new Date(vaultSecret.metadata.created_time) + return daysAgoFromNow(createdTime) > this.config.gitlabMirrorTokenRotationThresholdDays + } +} diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.spec.ts b/apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.spec.ts new file mode 100644 index 000000000..50432e5e9 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.spec.ts @@ -0,0 +1,36 @@ +import { Test } from '@nestjs/testing' +import type { TestingModule } from '@nestjs/testing' +import { GitlabDatastoreService } from './gitlab-datastore.service' +import { PrismaService } from '@/cpin-module/infrastructure/database/prisma.service' +import { mockDeep } from 'vitest-mock-extended' +import { describe, beforeEach, it, expect } from 'vitest' + +const prismaMock = mockDeep() + +function createGitlabDatastoreServiceTestingModule() { + return Test.createTestingModule({ + providers: [ + GitlabDatastoreService, + { provide: PrismaService, useValue: prismaMock }, + ], + }) +} + +describe('gitlabDatastoreService', () => { + let service: GitlabDatastoreService + + beforeEach(async () => { + const module: TestingModule = await createGitlabDatastoreServiceTestingModule().compile() + service = module.get(GitlabDatastoreService) + }) + + it('should be defined', () => { + expect(service).toBeDefined() + }) + + it('should get user', async () => { + const user = { id: 'user-id' } + prismaMock.user.findUnique.mockResolvedValue(user as any) + await expect(service.getUser('user-id')).resolves.toEqual(user) + }) +}) diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.ts b/apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.ts new 
file mode 100644 index 000000000..8c62b3ec9 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.ts @@ -0,0 +1,103 @@ +import { Inject, Injectable } from '@nestjs/common' +import type { Prisma } from '@prisma/client' +import { PrismaService } from '@/cpin-module/infrastructure/database/prisma.service' + +export const projectSelect = { + id: true, + name: true, + slug: true, + description: true, + owner: { + select: { + id: true, + email: true, + firstName: true, + lastName: true, + adminRoleIds: true, + }, + }, + plugins: { + select: { + key: true, + value: true, + }, + }, + roles: { + select: { + id: true, + oidcGroup: true, + }, + }, + members: { + select: { + user: { + select: { + id: true, + email: true, + firstName: true, + lastName: true, + adminRoleIds: true, + }, + }, + roleIds: true, + }, + }, + repositories: { + select: { + id: true, + internalRepoName: true, + isInfra: true, + isPrivate: true, + externalRepoUrl: true, + externalUserName: true, + }, + }, + clusters: { + select: { + id: true, + label: true, + zone: { + select: { + slug: true, + }, + }, + }, + }, +} satisfies Prisma.ProjectSelect + +export type ProjectWithDetails = Prisma.ProjectGetPayload<{ + select: typeof projectSelect +}> + +@Injectable() +export class GitlabDatastoreService { + constructor(@Inject(PrismaService) private readonly prisma: PrismaService) {} + + async getAllProjects(): Promise { + return this.prisma.project.findMany({ + select: projectSelect, + where: { + plugins: { + some: { + pluginName: 'gitlab', + }, + }, + }, + }) + } + + async getProject(id: string): Promise { + return this.prisma.project.findUnique({ + where: { id }, + select: projectSelect, + }) + } + + async getUser(id: string) { + return this.prisma.user.findUnique({ + where: { + id, + }, + }) + } +} diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab-testing.utils.ts b/apps/server-nestjs/src/modules/gitlab/gitlab-testing.utils.ts new file mode 100644 index 000000000..b6ed2d041 
--- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab-testing.utils.ts @@ -0,0 +1,335 @@ +import { faker } from '@faker-js/faker' +import type { SimpleUserSchema, ExpandedUserSchema, MemberSchema, GroupSchema, ProjectSchema, PipelineTriggerTokenSchema, OffsetPagination, AccessTokenSchema, AccessTokenExposedSchema, RepositoryFileExpandedSchema, RepositoryTreeSchema } from '@gitbeaker/core' +import { AccessLevel } from '@gitbeaker/core' +import { GitbeakerRequestError } from '@gitbeaker/requester-utils' +import { VaultError } from '../vault/vault-client.service' +import type { ProjectWithDetails } from './gitlab-datastore.service' + +export function notFoundError(method: string, path: string) { + return new VaultError('NotFound', 'Not Found', { status: 404, method, path }) +} + +export function makeSimpleUserSchema(overrides: Partial = {}) { + return { + id: 1, + name: 'User', + username: 'user', + state: 'active', + avatar_url: '', + web_url: 'https://gitlab.example/users/user', + created_at: faker.date.past().toISOString(), + ...overrides, + } satisfies SimpleUserSchema +} + +export function makeExpandedUserSchema(overrides: Partial = {}): ExpandedUserSchema { + const isoDate = faker.date.past().toISOString() + return { + id: 1, + name: 'User', + username: 'user', + state: 'active', + avatar_url: '', + web_url: 'https://gitlab.example/users/user', + created_at: isoDate, + locked: null, + bio: null, + bot: false, + location: null, + public_email: null, + skype: null, + linkedin: null, + twitter: null, + discord: null, + website_url: null, + pronouns: null, + organization: null, + job_title: null, + work_information: null, + followers: null, + following: null, + local_time: null, + is_followed: null, + is_admin: null, + last_sign_in_at: isoDate, + confirmed_at: isoDate, + last_activity_on: isoDate, + email: 'user@example.com', + theme_id: 1, + color_scheme_id: 1, + projects_limit: 0, + current_sign_in_at: null, + note: null, + identities: null, + 
can_create_group: false, + can_create_project: false, + two_factor_enabled: false, + external: false, + private_profile: null, + namespace_id: null, + created_by: null, + ...overrides, + } satisfies ExpandedUserSchema +} + +export function makeMemberSchema(overrides: Partial = {}) { + return { + id: 1, + username: 'user', + name: 'User', + state: 'active', + avatar_url: '', + web_url: 'https://gitlab.example/users/user', + expires_at: faker.date.future().toISOString(), + access_level: 30, + email: 'user@example.com', + group_saml_identity: { + extern_uid: '', + provider: '', + saml_provider_id: 1, + }, + ...overrides, + } satisfies MemberSchema +} + +export function makeGroupSchema(overrides: Partial = {}) { + return { + id: 123, + web_url: 'https://gitlab.example/groups/forge', + name: 'forge', + avatar_url: '', + full_name: 'forge', + full_path: 'forge', + path: 'forge', + description: '', + visibility: 'private', + share_with_group_lock: false, + require_two_factor_authentication: false, + two_factor_grace_period: 0, + project_creation_level: 'maintainer', + subgroup_creation_level: 'maintainer', + lfs_enabled: true, + default_branch_protection: 0, + request_access_enabled: false, + created_at: faker.date.past().toISOString(), + parent_id: 0, + ...overrides, + } satisfies GroupSchema +} + +export function makeProjectSchema(overrides: Partial = {}) { + return { + id: 1, + web_url: 'https://gitlab.example/projects/1', + name: 'repo', + path: 'repo', + description: '', + name_with_namespace: 'forge / repo', + path_with_namespace: 'forge/repo', + created_at: faker.date.past().toISOString(), + default_branch: 'main', + topics: [], + ssh_url_to_repo: 'ssh://gitlab.example/forge/repo.git', + http_url_to_repo: 'https://gitlab.example/forge/repo.git', + readme_url: '', + forks_count: 0, + avatar_url: null, + star_count: 0, + last_activity_at: faker.date.future().toISOString(), + namespace: { id: 1, name: 'forge', path: 'forge', kind: 'group', full_path: 'forge', 
avatar_url: '', web_url: 'https://gitlab.example/groups/forge' }, + description_html: '', + visibility: 'private', + empty_repo: false, + owner: { id: 1, name: 'Owner', created_at: faker.date.past().toISOString() }, + issues_enabled: true, + open_issues_count: 0, + merge_requests_enabled: true, + jobs_enabled: true, + wiki_enabled: true, + snippets_enabled: true, + can_create_merge_request_in: true, + resolve_outdated_diff_discussions: false, + container_registry_access_level: 'enabled', + security_and_compliance_access_level: 'enabled', + container_expiration_policy: { + cadence: '1d', + enabled: false, + keep_n: null, + older_than: null, + name_regex_delete: null, + name_regex_keep: null, + next_run_at: faker.date.future().toISOString(), + }, + updated_at: faker.date.past().toISOString(), + creator_id: 1, + import_url: null, + import_type: null, + import_status: 'none', + import_error: null, + permissions: { + project_access: { access_level: 0, notification_level: 0 }, + group_access: { access_level: 0, notification_level: 0 }, + }, + archived: false, + license_url: '', + license: { key: 'mit', name: 'MIT', nickname: 'MIT', html_url: '', source_url: '' }, + shared_runners_enabled: true, + group_runners_enabled: true, + runners_token: '', + ci_default_git_depth: 0, + ci_forward_deployment_enabled: false, + ci_forward_deployment_rollback_allowed: false, + ci_allow_fork_pipelines_to_run_in_parent_project: false, + ci_separated_caches: false, + ci_restrict_pipeline_cancellation_role: '', + public_jobs: false, + shared_with_groups: null, + repository_storage: '', + only_allow_merge_if_pipeline_succeeds: false, + allow_merge_on_skipped_pipeline: false, + restrict_user_defined_variables: false, + only_allow_merge_if_all_discussions_are_resolved: false, + remove_source_branch_after_merge: false, + printing_merge_requests_link_enabled: false, + request_access_enabled: false, + merge_method: '', + squash_option: '', + auto_devops_enabled: false, + 
auto_devops_deploy_strategy: '', + mirror: false, + mirror_user_id: 1, + mirror_trigger_builds: false, + only_mirror_protected_branches: false, + mirror_overwrites_diverged_branches: false, + external_authorization_classification_label: '', + packages_enabled: false, + service_desk_enabled: false, + service_desk_address: 'service-desk@example.com', + service_desk_reply_to: 'service-desk@example.com', + autoclose_referenced_issues: false, + suggestion_commit_message: 'Add suggestion commit message', + enforce_auth_checks_on_uploads: false, + merge_commit_template: 'Add suggestion commit message', + squash_commit_template: 'Add suggestion commit message', + issue_branch_template: 'Add suggestion commit message', + marked_for_deletion_on: faker.date.future().toISOString(), + compliance_frameworks: [], + warn_about_potentially_unwanted_characters: false, + container_registry_image_prefix: 'registry.gitlab.example/forge/repo', + _links: { + self: 'https://gitlab.example/projects/1', + issues: 'https://gitlab.example/projects/1/issues', + merge_requests: 'https://gitlab.example/projects/1/merge_requests', + repo_branches: 'https://gitlab.example/projects/1/repository/branches', + labels: 'https://gitlab.example/projects/1/labels', + events: 'https://gitlab.example/projects/1/events', + members: 'https://gitlab.example/projects/1/members', + cluster_agents: 'https://gitlab.example/projects/1/cluster_agents', + }, + ...overrides, + } satisfies ProjectSchema +} + +export function makeProjectWithDetails(overrides: Partial = {}) { + return { + id: 'p1', + slug: 'project-1', + name: 'Project 1', + description: 'Test project', + owner: { id: 'o1', email: 'owner@example.com', firstName: 'Owner', lastName: 'User', adminRoleIds: [] }, + plugins: [], + roles: [], + members: [], + repositories: [], + clusters: [], + ...overrides, + } satisfies ProjectWithDetails +} + +export function makePipelineTriggerToken(overrides: Partial = {}) { + return { + id: 1, + description: 
'mirroring-from-external-repo', + created_at: faker.date.past().toISOString(), + last_used: null, + token: 'trigger-token', + updated_at: faker.date.past().toISOString(), + owner: null, + repoId: 1, + ...overrides, + } satisfies PipelineTriggerTokenSchema +} + +export function makeOffsetPagination(overrides: Partial = {}) { + return { + total: 1, + next: null, + current: 1, + previous: null, + perPage: 20, + totalPages: 1, + ...overrides, + } satisfies OffsetPagination +} + +export function makeAccessTokenSchema(overrides: Partial = {}) { + const isoDate = faker.date.past().toISOString() + return { + id: 1, + user_id: 1, + name: 'token', + expires_at: isoDate, + active: true, + created_at: isoDate, + revoked: false, + access_level: AccessLevel.DEVELOPER, + ...overrides, + } satisfies AccessTokenSchema +} + +export function makeAccessTokenExposedSchema(overrides: Partial = {}) { + return { + ...makeAccessTokenSchema(), + token: 'secret-token', + ...overrides, + } satisfies AccessTokenExposedSchema +} + +export function makeRepositoryFileExpandedSchema(overrides: Partial = {}) { + return { + file_name: 'file.txt', + file_path: 'file.txt', + size: 7, + encoding: 'base64', + content: 'content', + content_sha256: 'sha256', + ref: 'main', + blob_id: 'blob', + commit_id: 'commit', + last_commit_id: 'last-commit', + ...overrides, + } satisfies RepositoryFileExpandedSchema +} + +export function makeRepositoryTreeSchema(overrides: Partial = {}) { + return { + id: 'id', + name: 'file.txt', + type: 'blob', + path: 'file.txt', + mode: '100644', + ...overrides, + } satisfies RepositoryTreeSchema +} + +export function makeGitbeakerRequestError(params: { message?: string, status?: number, statusText?: string, description: string }) { + const request = new Request('https://gitlab.internal.example/api') + const response = new Response(null, { status: params.status ?? 404, statusText: params.statusText ?? 'Not Found' }) + return new GitbeakerRequestError(params.message ?? 
params.statusText ?? 'Error', { + cause: { + description: params.description, + request, + response, + }, + }) +} diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab.constants.ts b/apps/server-nestjs/src/modules/gitlab/gitlab.constants.ts new file mode 100644 index 000000000..7a9763772 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab.constants.ts @@ -0,0 +1,10 @@ +export const INFRA_GROUP_NAME = 'Infra' +export const INFRA_GROUP_PATH = 'infra' +export const INFRA_APPS_REPO_NAME = 'infra-apps' +export const MIRROR_REPO_NAME = 'mirror' + +export const DEFAULT_ADMIN_GROUP_PATH = '/console/admin' +export const DEFAULT_AUDITOR_GROUP_PATH = '/console/readonly' +export const DEFAULT_PROJECT_MAINTAINER_GROUP_PATH_SUFFIX = '/console/admin' +export const DEFAULT_PROJECT_DEVELOPER_GROUP_PATH_SUFFIX = '/console/developer,/console/devops' +export const DEFAULT_PROJECT_REPORTER_GROUP_PATH_SUFFIX = '/console/readonly' diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab.module.ts b/apps/server-nestjs/src/modules/gitlab/gitlab.module.ts new file mode 100644 index 000000000..f3f9a3a00 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab.module.ts @@ -0,0 +1,15 @@ +import { Module } from '@nestjs/common' +import { GitlabService } from './gitlab.service' +import { GitlabControllerService } from './gitlab-controller.service' +import { GitlabDatastoreService } from './gitlab-datastore.service' +import { GitlabClientService } from './gitlab-client.service' +import { ConfigurationModule } from '@/cpin-module/infrastructure/configuration/configuration.module' +import { InfrastructureModule } from '@/cpin-module/infrastructure/infrastructure.module' +import { VaultModule } from '../vault/vault.module' + +@Module({ + imports: [ConfigurationModule, InfrastructureModule, VaultModule], + providers: [GitlabService, GitlabControllerService, GitlabDatastoreService, GitlabClientService], + exports: [GitlabService], +}) +export class GitlabModule {} diff --git 
a/apps/server-nestjs/src/modules/gitlab/gitlab.service.spec.ts b/apps/server-nestjs/src/modules/gitlab/gitlab.service.spec.ts new file mode 100644 index 000000000..acddb16d5 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab.service.spec.ts @@ -0,0 +1,538 @@ +import { Test } from '@nestjs/testing' +import type { TestingModule } from '@nestjs/testing' +import { GitlabService } from './gitlab.service' +import { GitlabClientService } from './gitlab-client.service' +import { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import type { MockedFunction } from 'vitest' +import { describe, it, expect, beforeEach } from 'vitest' +import { mockDeep, mockReset } from 'vitest-mock-extended' +import type { ExpandedGroupSchema, MemberSchema, ProjectSchema, RepositoryFileExpandedSchema } from '@gitbeaker/core' +import { GitbeakerRequestError } from '@gitbeaker/requester-utils' +import { makeExpandedUserSchema } from './gitlab-testing.utils' + +const gitlabMock = mockDeep() + +function createGitlabServiceTestingModule() { + return Test.createTestingModule({ + providers: [ + GitlabService, + { + provide: GitlabClientService, + useValue: gitlabMock, + }, + { + provide: ConfigurationService, + useValue: { + gitlabUrl: 'https://gitlab.internal', + gitlabToken: 'token', + gitlabInternalUrl: 'https://gitlab.internal', + projectRootPath: 'forge', + } satisfies Partial, + }, + ], + }) +} + +describe('gitlab', () => { + let service: GitlabService + + beforeEach(async () => { + mockReset(gitlabMock) + const module: TestingModule = await createGitlabServiceTestingModule().compile() + service = module.get(GitlabService) + }) + + it('should be defined', () => { + expect(service).toBeDefined() + }) + + describe('getOrCreateInfraProject', () => { + it('should create infra project if not exists', async () => { + const zoneSlug = 'zone-1' + const rootId = 123 + const infraGroupId = 456 + const projectId = 789 + + // Mock getGroupRootId 
logic + const gitlabGroupsAllMock = gitlabMock.Groups.all as MockedFunction + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: rootId, full_path: 'forge' }], + paginationInfo: { next: null }, + }) + + // Mock Groups.show (root) + gitlabMock.Groups.show.mockResolvedValueOnce({ id: rootId, full_path: 'forge' } as ExpandedGroupSchema) + + // Mock find infra group (not found first) + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [], + paginationInfo: { next: null }, + }) + + // Mock create infra group + gitlabMock.Groups.create.mockResolvedValue({ id: infraGroupId, full_path: 'forge/infra' } as ExpandedGroupSchema) + + // Mock find project (not found) + const gitlabProjectsAllMock = gitlabMock.Projects.all as MockedFunction + gitlabProjectsAllMock.mockResolvedValueOnce({ + data: [], + paginationInfo: { next: null }, + }) + + // Mock create project + gitlabMock.Projects.create.mockResolvedValue({ + id: projectId, + path_with_namespace: 'forge/infra/zone-1', + http_url_to_repo: 'https://gitlab.internal/infra/zone-1.git', + } as ProjectSchema) + + const result = await service.getOrCreateInfraGroupRepo(zoneSlug) + + expect(result).toEqual({ + id: projectId, + http_url_to_repo: 'https://gitlab.internal/infra/zone-1.git', + path_with_namespace: 'forge/infra/zone-1', + }) + expect(gitlabMock.Groups.create).toHaveBeenCalledWith('infra', 'infra', expect.any(Object)) + expect(gitlabMock.Projects.create).toHaveBeenCalledWith(expect.objectContaining({ + name: zoneSlug, + path: zoneSlug, + namespaceId: infraGroupId, + })) + }) + }) + + describe('commitCreateOrUpdate', () => { + it('should create commit if file not exists', async () => { + const repoId = 1 + const content = 'content' + const filePath = 'file.txt' + + const gitlabRepositoryFilesShowMock = gitlabMock.RepositoryFiles.show as MockedFunction + const notFoundError = new GitbeakerRequestError('Not Found', { cause: { description: '404 File Not Found' } } as any) + 
gitlabRepositoryFilesShowMock.mockRejectedValue(notFoundError) + + await service.maybeCommitUpdate(repoId, [{ content, filePath }]) + + expect(gitlabMock.Commits.create).toHaveBeenCalledWith( + repoId, + 'main', + expect.any(String), + [{ action: 'create', filePath, content }], + ) + }) + + it('should update commit if content differs', async () => { + const repoId = 1 + const content = 'new content' + const filePath = 'file.txt' + const oldHash = 'oldhash' + + const gitlabRepositoryFilesShowMock = gitlabMock.RepositoryFiles.show as MockedFunction + gitlabRepositoryFilesShowMock.mockResolvedValue({ + content_sha256: oldHash, + } as RepositoryFileExpandedSchema) + + await service.maybeCommitUpdate(repoId, [{ content, filePath }]) + + expect(gitlabMock.Commits.create).toHaveBeenCalledWith( + repoId, + 'main', + expect.any(String), + [{ action: 'update', filePath, content }], + ) + }) + + it('should do nothing if content matches', async () => { + const repoId = 1 + const content = 'content' + const filePath = 'file.txt' + const hash = 'ed7002b439e9ac845f22357d822bac1444730fbdb6016d3ec9432297b9ec9f73' // sha256 of 'content' + + const gitlabRepositoryFilesShowMock = gitlabMock.RepositoryFiles.show as MockedFunction + gitlabRepositoryFilesShowMock.mockResolvedValue({ + content_sha256: hash, + } as RepositoryFileExpandedSchema) + + await service.maybeCommitUpdate(repoId, [{ content, filePath }]) + + expect(gitlabMock.Commits.create).not.toHaveBeenCalled() + }) + }) + + describe('getOrCreateProjectGroup', () => { + it('should create project group if not exists', async () => { + const projectSlug = 'project-1' + const rootId = 123 + const groupId = 456 + + const gitlabGroupsAllMock = gitlabMock.Groups.all as MockedFunction + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: rootId, full_path: 'forge' }], + paginationInfo: { next: null }, + }) + gitlabMock.Groups.show.mockResolvedValueOnce({ id: rootId, full_path: 'forge' } as ExpandedGroupSchema) + 
gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [], + paginationInfo: { next: null }, + }) + gitlabMock.Groups.create.mockResolvedValue({ id: groupId, name: projectSlug } as ExpandedGroupSchema) + + const result = await service.getOrCreateProjectSubGroup(projectSlug) + + expect(result).toEqual({ id: groupId, name: projectSlug }) + expect(gitlabMock.Groups.create).toHaveBeenCalledWith(projectSlug, projectSlug, expect.objectContaining({ + parentId: rootId, + })) + }) + + it('should return existing group', async () => { + const projectSlug = 'project-1' + const rootId = 123 + const groupId = 456 + + const gitlabGroupsAllMock = gitlabMock.Groups.all as MockedFunction + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: rootId, full_path: 'forge' }], + paginationInfo: { next: null }, + }) + gitlabMock.Groups.show.mockResolvedValueOnce({ id: rootId, full_path: 'forge' } as ExpandedGroupSchema) + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: groupId, name: projectSlug, parent_id: rootId, full_path: 'forge/project-1' }], + paginationInfo: { next: null }, + }) + + const result = await service.getOrCreateProjectSubGroup(projectSlug) + + expect(result).toEqual({ id: groupId, name: projectSlug, parent_id: rootId, full_path: 'forge/project-1' }) + expect(gitlabMock.Groups.create).not.toHaveBeenCalled() + }) + }) + + describe('repositories', () => { + it('should return internal repo url', async () => { + const projectSlug = 'project-1' + const repoName = 'repo-1' + const rootId = 123 + const groupId = 1 + + const gitlabGroupsAllMock = gitlabMock.Groups.all as MockedFunction + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: rootId, full_path: 'forge' }], + paginationInfo: { next: null }, + }) + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: groupId, full_path: 'forge/project-1' }], + paginationInfo: { next: null }, + }) + + const result = await service.getProjectGroupInternalRepoUrl(projectSlug, repoName) + 
expect(result).toBe('https://gitlab.internal/forge/project-1/repo-1.git') + }) + + it('should upsert mirror repo', async () => { + const projectSlug = 'project-1' + const repoId = 1 + + const gitlabProjectsAllMock = gitlabMock.Projects.all as MockedFunction + gitlabProjectsAllMock.mockResolvedValue({ + data: [{ id: repoId, path_with_namespace: 'forge/project-1/mirror' }], + paginationInfo: { next: null }, + }) + + gitlabMock.Projects.edit.mockResolvedValue({ id: repoId, name: 'mirror' } as ProjectSchema) + + const result = await service.upsertProjectMirrorRepo(projectSlug) + + expect(result).toEqual({ id: repoId, name: 'mirror' }) + expect(gitlabMock.Projects.edit).toHaveBeenCalledWith(repoId, expect.objectContaining({ + name: 'mirror', + path: 'mirror', + })) + }) + + it('should create pipeline trigger token if not exists', async () => { + const projectSlug = 'project-1' + const repoId = 1 + const tokenDescription = 'mirroring-from-external-repo' + + const gitlabProjectsAllMock = gitlabMock.Projects.all as MockedFunction + gitlabProjectsAllMock.mockResolvedValue({ + data: [{ id: repoId, path_with_namespace: 'forge/project-1/mirror' }], + paginationInfo: { next: null }, + }) + gitlabMock.Projects.edit.mockResolvedValue({ id: repoId, name: 'mirror' } as ProjectSchema) + + const gitlabPipelineTriggerTokensAllMock = gitlabMock.PipelineTriggerTokens.all as MockedFunction + gitlabPipelineTriggerTokensAllMock.mockResolvedValue({ + data: [], + paginationInfo: { next: null } as any, + }) + + gitlabMock.PipelineTriggerTokens.create.mockResolvedValue({ id: 2, description: tokenDescription } as any) + + const result = await service.getOrCreateMirrorPipelineTriggerToken(projectSlug) + + expect(result).toEqual({ id: 2, description: tokenDescription }) + expect(gitlabMock.PipelineTriggerTokens.create).toHaveBeenCalledWith(repoId, tokenDescription) + }) + }) + + describe('group Members', () => { + it('should get group members', async () => { + const groupId = 1 + const members = 
[{ id: 1, name: 'user' } as MemberSchema] + const gitlabGroupMembersAllMock = gitlabMock.GroupMembers.all as MockedFunction + gitlabGroupMembersAllMock.mockResolvedValue(members) + + const result = await service.getGroupMembers(groupId) + expect(result).toEqual(members) + expect(gitlabMock.GroupMembers.all).toHaveBeenCalledWith(groupId) + }) + + it('should add group member', async () => { + const groupId = 1 + const userId = 2 + const accessLevel = 30 + gitlabMock.GroupMembers.add.mockResolvedValue({ id: userId } as MemberSchema) + + await service.addGroupMember(groupId, userId, accessLevel) + expect(gitlabMock.GroupMembers.add).toHaveBeenCalledWith(groupId, userId, accessLevel) + }) + + it('should remove group member', async () => { + const groupId = 1 + const userId = 2 + gitlabMock.GroupMembers.remove.mockResolvedValue(undefined) + + await service.removeGroupMember(groupId, userId) + expect(gitlabMock.GroupMembers.remove).toHaveBeenCalledWith(groupId, userId) + }) + }) + + describe('createProjectMirrorAccessToken', () => { + it('should create project access token with correct scopes', async () => { + const projectSlug = 'project-1' + const groupId = 456 + const tokenName = `${projectSlug}-bot` + const token = { id: 1, name: tokenName, token: 'secret-token' } + + // Mock getProjectGroup + const gitlabGroupsAllMock = gitlabMock.Groups.all as MockedFunction + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: 123, full_path: 'forge' }], + paginationInfo: { next: null }, + }) // root + gitlabMock.Groups.show.mockResolvedValueOnce({ id: 123, full_path: 'forge' } as ExpandedGroupSchema) + + const gitlabGroupsAllSubgroupsMock = gitlabMock.Groups.allSubgroups as MockedFunction + gitlabGroupsAllSubgroupsMock.mockResolvedValueOnce({ + data: [{ id: groupId, name: projectSlug, parent_id: 123, full_path: 'forge/project-1' }], + paginationInfo: { next: null }, + }) + + gitlabMock.GroupAccessTokens.create.mockResolvedValue(token as any) + + const result = await 
service.createMirrorAccessToken(projectSlug) + + expect(result).toEqual(token) + expect(gitlabMock.GroupAccessTokens.create).toHaveBeenCalledWith( + groupId, + tokenName, + ['write_repository', 'read_repository', 'read_api'], + expect.any(String), + ) + }) + }) + + describe('getOrCreateProjectGroupRepo', () => { + it('should return existing repo', async () => { + const subGroupPath = 'project-1' + const repoName = 'repo-1' + const fullPath = `${subGroupPath}/${repoName}` + const projectId = 789 + + const gitlabProjectsAllMock = gitlabMock.Projects.all as MockedFunction + gitlabProjectsAllMock.mockResolvedValueOnce({ + data: [{ id: projectId, path_with_namespace: `forge/${fullPath}` }], + paginationInfo: { next: null }, + }) + + const result = await service.getOrCreateProjectGroupRepo(fullPath) + + expect(result).toEqual(expect.objectContaining({ id: projectId })) + }) + + it('should create repo if not exists', async () => { + const subGroupPath = 'project-1' + const repoName = 'repo-1' + const fullPath = `${subGroupPath}/${repoName}` + const projectId = 789 + const groupId = 456 + const rootId = 123 + + // Mock repo search (not found) + const gitlabProjectsAllMock = gitlabMock.Projects.all as MockedFunction + gitlabProjectsAllMock.mockResolvedValueOnce({ + data: [], + paginationInfo: { next: null }, + }) + + // Mock parent group retrieval (recursive) + const gitlabGroupsAllMock = gitlabMock.Groups.all as MockedFunction + + // 1. Find root 'forge' + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: rootId, full_path: 'forge' }], + paginationInfo: { next: null }, + }) + + // 2. 
Find subgroup 'forge/project-1' + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: groupId, name: subGroupPath, parent_id: rootId, full_path: `forge/${subGroupPath}` }], + paginationInfo: { next: null }, + }) + + // Mock repo creation + gitlabMock.Projects.create.mockResolvedValue({ id: projectId, name: repoName } as ProjectSchema) + + const result = await service.getOrCreateProjectGroupRepo(fullPath) + + expect(result).toEqual(expect.objectContaining({ id: projectId })) + expect(gitlabMock.Projects.create).toHaveBeenCalledWith(expect.objectContaining({ + name: repoName, + path: repoName, + namespaceId: groupId, + })) + }) + }) + + describe('getFile', () => { + it('should return file content', async () => { + const repoId = 1 + const filePath = 'file.txt' + const ref = 'main' + const file = { content: 'content' } + + gitlabMock.RepositoryFiles.show.mockResolvedValue(file as any) + + const result = await service.getFile(repoId, filePath, ref) + expect(result).toEqual(file) + }) + + it('should return undefined on 404', async () => { + const repoId = 1 + const filePath = 'file.txt' + const ref = 'main' + const error = new GitbeakerRequestError('Not Found', { cause: { description: '404 File Not Found' } } as any) + + gitlabMock.RepositoryFiles.show.mockRejectedValue(error) + + const result = await service.getFile(repoId, filePath, ref) + expect(result).toBeUndefined() + }) + + it('should throw on other errors', async () => { + const repoId = 1 + const filePath = 'file.txt' + const ref = 'main' + const error = new Error('Some other error') + + gitlabMock.RepositoryFiles.show.mockRejectedValue(error) + + await expect(service.getFile(repoId, filePath, ref)).rejects.toThrow(error) + }) + }) + + describe('listFiles', () => { + it('should return files', async () => { + const repoId = 1 + const files = [{ path: 'file.txt' }] + + gitlabMock.Repositories.allRepositoryTrees.mockResolvedValue(files as any) + + const result = await service.listFiles(repoId) + 
expect(result).toEqual(files) + }) + + it('should return empty array on 404', async () => { + const repoId = 1 + const error = new GitbeakerRequestError('Not Found', { cause: { description: '404 Tree Not Found' } } as any) + + gitlabMock.Repositories.allRepositoryTrees.mockRejectedValue(error) + + const result = await service.listFiles(repoId) + expect(result).toEqual([]) + }) + }) + + describe('getProjectToken', () => { + it('should return specific token', async () => { + const projectSlug = 'project-1' + const groupId = 456 + const tokenName = `${projectSlug}-bot` + const token = { id: 1, name: tokenName } + + // Mock getProjectGroup + const gitlabGroupsAllMock = gitlabMock.Groups.all as MockedFunction + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: 123, full_path: 'forge' }], + paginationInfo: { next: null }, + }) + gitlabMock.Groups.show.mockResolvedValueOnce({ id: 123, full_path: 'forge' } as ExpandedGroupSchema) + const gitlabGroupsAllSubgroupsMock = gitlabMock.Groups.allSubgroups as MockedFunction + gitlabGroupsAllSubgroupsMock.mockResolvedValueOnce({ + data: [{ id: groupId, name: projectSlug, parent_id: 123, full_path: `forge/${projectSlug}` }], + paginationInfo: { next: null }, + }) + + const gitlabGroupAccessTokensAllMock = gitlabMock.GroupAccessTokens.all as MockedFunction + gitlabGroupAccessTokensAllMock.mockResolvedValue({ + data: [token] as any, + paginationInfo: { next: null } as any, + }) + + const result = await service.getProjectToken(projectSlug) + expect(result).toEqual(token) + }) + }) + + describe('createUser', () => { + it('should create user', async () => { + const email = 'user@example.com' + const username = 'user' + const name = 'User Name' + const user = makeExpandedUserSchema({ id: 1, username }) + + gitlabMock.Users.create.mockResolvedValue(user) + + const result = await service.createUser(email, username, name) + + expect(result).toEqual(user) + 
expect(gitlabMock.Users.create).toHaveBeenCalledWith(expect.objectContaining({ + email, + username, + name, + skipConfirmation: true, + })) + }) + }) + + describe('commitMirror', () => { + it('should create mirror commit', async () => { + const repoId = 1 + + await service.commitMirror(repoId) + + expect(gitlabMock.Commits.create).toHaveBeenCalledWith( + repoId, + 'main', + expect.any(String), + expect.arrayContaining([ + expect.objectContaining({ filePath: '.gitlab-ci.yml', action: 'create' }), + expect.objectContaining({ filePath: 'mirror.sh', action: 'create' }), + ]), + ) + }) + }) +}) diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab.service.ts b/apps/server-nestjs/src/modules/gitlab/gitlab.service.ts new file mode 100644 index 000000000..b27017822 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab.service.ts @@ -0,0 +1,314 @@ +import { Inject, Injectable, Logger } from '@nestjs/common' +import type { AccessTokenScopes, CommitAction, GroupSchema, PipelineTriggerTokenSchema } from '@gitbeaker/core' +import { GitbeakerRequestError } from '@gitbeaker/requester-utils' +import { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import { readGitlabCIConfigContent, readMirrorScriptContent, find, offsetPaginate, hasFileContentChanged } from './gitlab.utils' +import { GitlabClientService } from './gitlab-client.service' +import { INFRA_GROUP_PATH, MIRROR_REPO_NAME } from './gitlab.constants' +import { join } from 'node:path' + +@Injectable() +export class GitlabService { + private readonly logger = new Logger(GitlabService.name) + + constructor( + @Inject(GitlabClientService) private readonly client: GitlabClientService, + @Inject(ConfigurationService) private readonly config: ConfigurationService, + ) { + } + + async getGroupByPath(path: string) { + return find( + offsetPaginate(opts => this.client.Groups.all({ search: path, orderBy: 'path', ...opts })), + g => g.full_path === path, + ) + } + + async 
createGroup(path: string) { + return this.client.Groups.create(path, path) + } + + async createSubGroup(parentGroup: GroupSchema, name: string) { + return this.client.Groups.create(name, name, { parentId: parentGroup.id }) + } + + async getOrCreateGroup(path: string) { + const parts = path.split('/') + const rootGroupPath = parts.shift() + if (!rootGroupPath) throw new Error('Invalid projects root dir') + + // Find or create root + let parentGroup = await this.getGroupByPath(rootGroupPath) ?? await this.createGroup(rootGroupPath) + + // Recursively create subgroups + for (const part of parts) { + const fullPath = `${parentGroup.full_path}/${part}` + parentGroup = await this.getGroupByPath(fullPath) ?? await this.createSubGroup(parentGroup, part) + } + + return parentGroup + } + + async getOrCreateProjectGroup() { + if (!this.config.projectRootPath) throw new Error('projectRootPath not configured') + return this.getOrCreateGroup(this.config.projectRootPath) + } + + async getOrCreateProjectSubGroup(subGroupPath: string) { + if (!this.config.projectRootPath) throw new Error('projectRootPath not configured') + return this.getOrCreateGroup(`${this.config.projectRootPath}/${subGroupPath}`) + } + + async getProjectGroupPublicUrl(): Promise { + const projectGroup = await this.getOrCreateProjectGroup() + return `${this.config.gitlabUrl}/${projectGroup.full_path}` + } + + async getInfraGroupRepoPublicUrl(repoName: string): Promise { + const projectGroup = await this.getOrCreateProjectGroup() + return `${this.config.gitlabUrl}/${projectGroup.full_path}/${INFRA_GROUP_PATH}/${repoName}.git` + } + + async getProjectGroupInternalRepoUrl(subGroupPath: string, repoName: string): Promise { + const projectGroup = await this.getOrCreateProjectSubGroup(subGroupPath) + return `${this.config.gitlabInternalUrl}/${projectGroup.full_path}/${repoName}.git` + } + + async getOrCreateProjectGroupRepo(subGroupPath: string) { + if (!this.config.projectRootPath) throw new Error('projectRootPath 
not configured') + const fullPath = `${this.config.projectRootPath}/${subGroupPath}` + try { + const existingRepo = await this.client.Projects.show(fullPath) + if (existingRepo) return existingRepo + } catch (error) { + if (!(error instanceof GitbeakerRequestError) || !error.cause?.description?.includes('404')) { + throw error + } + } + const repo = await find( + offsetPaginate(opts => this.client.Projects.all({ + search: fullPath, + orderBy: 'path', + ...opts, + })), + p => p.path_with_namespace === fullPath, + ) + if (repo) return repo + const parts = subGroupPath.split('/') + const repoName = parts.pop() + if (!repoName) throw new Error('Invalid repo path') + const parentGroup = await this.getOrCreateProjectSubGroup(parts.join('/')) + try { + return await this.client.Projects.create({ + name: repoName, + path: repoName, + namespaceId: parentGroup.id, + }) + } catch (error) { + if (error instanceof GitbeakerRequestError && error.cause?.description?.includes('has already been taken')) { + return this.client.Projects.show(fullPath) + } + throw error + } + } + + async getOrCreateInfraGroupRepo(path: string) { + return this.getOrCreateProjectGroupRepo(join(INFRA_GROUP_PATH, path)) + } + + async getFile(repoId: number, filePath: string, ref: string = 'main') { + try { + return await this.client.RepositoryFiles.show(repoId, filePath, ref) + } catch (error) { + if (error instanceof GitbeakerRequestError && error.cause?.description?.includes('Not Found')) { + this.logger.debug(`File not found: ${filePath}`) + } else { + throw error + } + } + } + + async maybeCommitUpdate( + repoId: number, + files: { content: string, filePath: string }[], + message: string = 'ci: :robot_face: Update file content', + ref: string = 'main', + ): Promise { + const promises = await Promise.all(files.map(async ({ content, filePath }) => + this.generateCreateOrUpdateAction(repoId, ref, filePath, content), + )) + const actions = promises.filter(action => !!action) + if (actions.length === 0) { + 
this.logger.debug('No files to update') + return + } + await this.client.Commits.create(repoId, ref, message, actions) + } + + async generateCreateOrUpdateAction(repoId: number, ref, filePath, content: string) { + const file = await this.getFile(repoId, filePath, ref) + if (file && !hasFileContentChanged(file, content)) { + this.logger.debug(`File content is up to date, no need to commit: ${filePath}`) + return null + } + return { + action: file ? 'update' : 'create', + filePath, + content, + } satisfies CommitAction + } + + async maybeCommitDelete(repoId: number, paths: string[], ref: string = 'main'): Promise { + const actions = paths.map(path => ({ + action: 'delete', + filePath: path, + } satisfies CommitAction)) + if (actions.length === 0) { + this.logger.debug('No files to delete') + return + } + await this.client.Commits.create(repoId, ref, 'ci: :robot_face: Delete files', actions) + } + + async listFiles(repoId: number, options: { path?: string, recursive?: boolean, ref?: string } = {}) { + try { + return await this.client.Repositories.allRepositoryTrees(repoId, { + path: options.path ?? '/', + recursive: options.recursive ?? false, + ref: options.ref ?? 
'main', + }) + } catch (error) { + if (error instanceof GitbeakerRequestError && error.cause?.description?.includes('Not Found')) { + return [] + } + if (error instanceof GitbeakerRequestError && error.cause?.description?.includes('404 Tree Not Found')) { + return [] + } + throw error + } + } + + async getProjectGroup(projectSlug: string): Promise { + const parentGroup = await this.getOrCreateProjectGroup() + return find( + offsetPaginate(opts => this.client.Groups.allSubgroups(parentGroup.id, opts)), + g => g.name === projectSlug, + ) + } + + async deleteGroup(groupId: number): Promise { + await this.client.Groups.remove(groupId) + } + + // --- Members --- + + async getGroupMembers(groupId: number) { + return this.client.GroupMembers.all(groupId) + } + + async addGroupMember(groupId: number, userId: number, accessLevel: number) { + return this.client.GroupMembers.add(groupId, userId, accessLevel) + } + + async editGroupMember(groupId: number, userId: number, accessLevel: number) { + return this.client.GroupMembers.edit(groupId, userId, accessLevel) + } + + async removeGroupMember(groupId: number, userId: number) { + return this.client.GroupMembers.remove(groupId, userId) + } + + async getUserByEmail(email: string) { + const users = await this.client.Users.all({ search: email, orderBy: 'username' }) + if (users.length === 0) return null + return users[0] + } + + async createUser(email: string, username: string, name: string) { + // Note: This requires admin token + return this.client.Users.create({ + email, + username, + name, + skipConfirmation: true, + }) + } + + async* getRepos(projectSlug: string) { + const group = await this.getOrCreateProjectSubGroup(projectSlug) + const repos = offsetPaginate(opts => this.client.Groups.allProjects(group.id, { simple: false, ...opts })) + for await (const repo of repos) { + yield repo + } + } + + async upsertProjectGroupRepo(projectSlug: string, repoName: string, description?: string) { + const repo = await 
this.getOrCreateProjectGroupRepo(`${projectSlug}/${repoName}`) + return this.client.Projects.edit(repo.id, { + name: repoName, + path: repoName, + description, + }) + } + + async commitMirror(repoId: number) { + const actions: CommitAction[] = [ + { + action: 'create', + filePath: '.gitlab-ci.yml', + content: await readGitlabCIConfigContent(), + execute_filemode: false, + }, + { + action: 'create', + filePath: 'mirror.sh', + content: await readMirrorScriptContent(), + execute_filemode: true, + }, + ] + + await this.client.Commits.create( + repoId, + 'main', + 'ci: :construction_worker: first mirror', + actions, + ) + } + + async upsertProjectMirrorRepo(projectSlug: string) { + return this.upsertProjectGroupRepo(projectSlug, MIRROR_REPO_NAME) + } + + async getProjectToken(projectSlug: string) { + const group = await this.getProjectGroup(projectSlug) + if (!group) throw new Error('Unable to retrieve gitlab project group') + return find( + offsetPaginate(opts => this.client.GroupAccessTokens.all(group.id, opts)), + token => token.name === `${projectSlug}-bot`, + ) + } + + async createProjectToken(projectSlug: string, tokenName: string, scopes: AccessTokenScopes[]) { + const group = await this.getProjectGroup(projectSlug) + if (!group) throw new Error('Unable to retrieve gitlab project group') + const expirationDays = Number(this.config.gitlabMirrorTokenExpirationDays) + const effectiveExpirationDays = Number.isFinite(expirationDays) && expirationDays > 0 ? 
expirationDays : 30 + const expiryDate = new Date(Date.now() + effectiveExpirationDays * 24 * 60 * 60 * 1000) + return this.client.GroupAccessTokens.create(group.id, tokenName, scopes, expiryDate.toISOString().slice(0, 10)) + } + + async createMirrorAccessToken(projectSlug: string) { + const tokenName = `${projectSlug}-bot` + return this.createProjectToken(projectSlug, tokenName, ['write_repository', 'read_repository', 'read_api']) + } + + async getOrCreateMirrorPipelineTriggerToken(projectSlug: string): Promise { + const tokenDescription = 'mirroring-from-external-repo' + const mirrorRepo = await this.upsertProjectMirrorRepo(projectSlug) + const currentTriggerToken = await find( + offsetPaginate(opts => this.client.PipelineTriggerTokens.all(mirrorRepo.id, opts)), + token => token.description === tokenDescription, + ) + return currentTriggerToken ?? await this.client.PipelineTriggerTokens.create(mirrorRepo.id, tokenDescription) + } +} diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab.utils.ts b/apps/server-nestjs/src/modules/gitlab/gitlab.utils.ts new file mode 100644 index 000000000..4174b6153 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab.utils.ts @@ -0,0 +1,144 @@ +import { AccessLevel } from '@gitbeaker/core' +import type { PaginationRequestOptions, BaseRequestOptions, OffsetPagination, RepositoryFileExpandedSchema } from '@gitbeaker/core' +import { createHash } from 'node:crypto' +import { readFile } from 'node:fs/promises' +import { join } from 'node:path' +import type { ProjectWithDetails } from './gitlab-datastore.service.js' +import { DEFAULT_PROJECT_DEVELOPER_GROUP_PATH_SUFFIX, DEFAULT_PROJECT_MAINTAINER_GROUP_PATH_SUFFIX, DEFAULT_PROJECT_REPORTER_GROUP_PATH_SUFFIX } from './gitlab.constants.js' + +export function generateUsername(email: string) { + return email.replace('@', '.') +} + +export function getPluginConfig(project: ProjectWithDetails, key: string) { + return project.plugins?.find(p => p.key === key)?.value +} + +export 
function getGroupPathSuffixes(project: ProjectWithDetails, key: string) { + const value = getPluginConfig(project, key) + if (!value) return null + return value.split(',').map(path => `/${project.slug}${path}`) +} + +export function getProjectMaintainerGroupPaths(project: ProjectWithDetails) { + return getGroupPathSuffixes(project, 'projectMaintainerGroupPathSuffix') ?? DEFAULT_PROJECT_MAINTAINER_GROUP_PATH_SUFFIX.split(',') +} + +export function getProjectDeveloperGroupPaths(project: ProjectWithDetails) { + return getGroupPathSuffixes(project, 'projectDeveloperGroupPathSuffix') ?? DEFAULT_PROJECT_DEVELOPER_GROUP_PATH_SUFFIX.split(',') +} + +export function getProjectReporterGroupPaths(project: ProjectWithDetails) { + return getGroupPathSuffixes(project, 'projectReporterGroupPathSuffix') ?? DEFAULT_PROJECT_REPORTER_GROUP_PATH_SUFFIX.split(',') +} + +export function getGroupAccessLevelFromProjectRole(project: ProjectWithDetails, user: ProjectWithDetails['members'][number]['user']) { + const projectReporterGroupPathSuffixes = getProjectReporterGroupPaths(project) + const projectDeveloperGroupPathSuffixes = getProjectDeveloperGroupPaths(project) + const projectMaintainerGroupPathSuffixes = getProjectMaintainerGroupPaths(project) + + const getAccessLevel = (role: { oidcGroup: string | null }): number | null => { + if (!role.oidcGroup) return null + if (projectReporterGroupPathSuffixes.includes(role.oidcGroup)) return AccessLevel.REPORTER + if (projectDeveloperGroupPathSuffixes.includes(role.oidcGroup)) return AccessLevel.DEVELOPER + if (projectMaintainerGroupPathSuffixes.includes(role.oidcGroup)) return AccessLevel.MAINTAINER + return null + } + + const membership = project.members.find(member => member.user.id === user.id) + if (!membership) return null + + const rolesById = new Map(project.roles.map(role => [role.id, role])) + + const highestMappedAccessLevel = membership.roleIds.reduce((highestAccessLevel, roleId) => { + const role = rolesById.get(roleId) + if 
(!role) return highestAccessLevel + const level = getAccessLevel(role) + if (level && level > (highestAccessLevel ?? 0)) return level + return highestAccessLevel + }, null) + + return highestMappedAccessLevel +} + +export function getGroupAccessLevel(project: ProjectWithDetails, user: ProjectWithDetails['members'][number]['user']): number | null { + if (project.owner.id === user.id) return AccessLevel.OWNER + return getGroupAccessLevelFromProjectRole(project, user) +} + +export function generateAccessLevelMapping(project: ProjectWithDetails) { + const projectReporterGroupPathSuffixes = getProjectReporterGroupPaths(project) + const projectDeveloperGroupPathSuffixes = getProjectDeveloperGroupPaths(project) + const projectMaintainerGroupPathSuffixes = getProjectMaintainerGroupPaths(project) + + const getAccessLevelFromOidcGroup = (oidcGroup: string | null): number | null => { + if (!oidcGroup) return null + if (projectReporterGroupPathSuffixes.includes(oidcGroup)) return AccessLevel.REPORTER + if (projectDeveloperGroupPathSuffixes.includes(oidcGroup)) return AccessLevel.DEVELOPER + if (projectMaintainerGroupPathSuffixes.includes(oidcGroup)) return AccessLevel.MAINTAINER + return null + } + + const roleAccessLevelById = new Map( + project.roles.map(role => [role.id, getAccessLevelFromOidcGroup(role.oidcGroup)] as const), + ) + + return new Map(project.members.map((membership) => { + let highest = AccessLevel.GUEST + for (const roleId of membership.roleIds) { + const level = roleAccessLevelById.get(roleId) + if (level && level > highest) highest = level + } + return [membership.user.id, highest] as const + })) +} + +export function digestContent(content: string) { + return createHash('sha256').update(content).digest('hex') +} + +export function hasFileContentChanged(file: RepositoryFileExpandedSchema, content: string) { + return file?.content_sha256 !== digestContent(content) +} + +export function readGitlabCIConfigContent() { + return readFile(join(__dirname, 
'./files/.gitlab-ci.yml'), 'utf-8') +} + +export async function readMirrorScriptContent() { + return await readFile(join(__dirname, './files/mirror.sh'), 'utf-8') +} + +export async function getAll( + iterable: AsyncIterable, +): Promise { + const items: T[] = [] + for await (const item of iterable) { + items.push(item) + } + return items +} + +export async function find(generator: AsyncGenerator, predicate: (item: T) => boolean): Promise { + for await (const item of generator) { + if (predicate(item)) return item + } + return undefined +} + +export async function* offsetPaginate( + request: (options: PaginationRequestOptions<'offset'> & BaseRequestOptions) => Promise<{ data: T[], paginationInfo: OffsetPagination }>, +): AsyncGenerator { + let page: number | null = 1 + while (page !== null) { + const { data, paginationInfo } = await request({ page, showExpanded: true, pagination: 'offset' }) + for (const item of data) { + yield item + } + page = paginationInfo.next ? paginationInfo.next : null + } +} + +export function daysAgoFromNow(date: Date) { + return Math.floor((Date.now() - date.getTime()) / (1000 * 60 * 60 * 24)) +} diff --git a/apps/server-nestjs/src/modules/vault/vault-client.service.ts b/apps/server-nestjs/src/modules/vault/vault-client.service.ts new file mode 100644 index 000000000..e9e767041 --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault-client.service.ts @@ -0,0 +1,305 @@ +import { Inject, Injectable, Logger } from '@nestjs/common' +import { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import { trace } from '@opentelemetry/api' +import z from 'zod' + +interface VaultAuthMethod { + accessor: string + type: string + description?: string +} + +interface VaultSysAuthResponse { + data: Record +} + +interface VaultIdentityGroupResponse { + data: { + id: string + name: string + alias?: { + id?: string + name?: string + } + } +} + +export interface VaultMetadata { + created_time: string + 
custom_metadata: Record | null + deletion_time: string + destroyed: boolean + version: number +} + +export interface VaultSecret { + data: T + metadata: VaultMetadata +} + +export interface VaultResponse { + data: VaultSecret +} + +export type VaultErrorKind + = | 'NotConfigured' + | 'NotFound' + | 'HttpError' + | 'InvalidResponse' + | 'ParseError' + | 'Unexpected' + +export class VaultError extends Error { + readonly kind: VaultErrorKind + readonly status?: number + readonly method?: string + readonly path?: string + readonly statusText?: string + readonly reasons?: string[] + + constructor( + kind: VaultErrorKind, + message: string, + details: { status?: number, method?: string, path?: string, statusText?: string, reasons?: string[] } = {}, + ) { + super(message) + this.name = 'VaultError' + this.kind = kind + this.status = details.status + this.method = details.method + this.path = details.path + this.statusText = details.statusText + this.reasons = details.reasons + } +} + +const tracer = trace.getTracer('vault-client-service') + +interface VaultListResponse { + data: { + keys: string[] + } +} + +interface VaultRoleIdResponse { + data: { + role_id: string + } +} + +interface VaultSecretIdResponse { + data: { + secret_id: string + } +} + +@Injectable() +export class VaultClientService { + private readonly logger = new Logger(VaultClientService.name) + + constructor( + @Inject(ConfigurationService) private readonly config: ConfigurationService, + ) { + } + + private async fetch( + path: string, + options: { method?: string, body?: any } = {}, + ): Promise { + const method = options.method ?? 'GET' + return tracer.startActiveSpan('fetch', async (span) => { + try { + span.setAttribute('vault.method', method) + span.setAttribute('vault.path', path) + + const request = this.createRequest(path, method, options.body) + const response = await fetch(request).catch((error) => { + throw new VaultError( + 'Unexpected', + error instanceof Error ? 
error.message : String(error), + { method, path }, + ) + }) + span.setAttribute('vault.http.status', response.status) + + return await this.handleResponse(response, method, path) + } catch (error) { + if (error instanceof Error) span.recordException(error) + throw error + } finally { + span.end() + } + }) + } + + private createRequest(path: string, method: string, body?: any): Request { + if (!this.config.vaultInternalUrl) { + throw new VaultError('NotConfigured', 'VAULT_INTERNAL_URL is required') + } + if (!this.config.vaultToken) { + throw new VaultError('NotConfigured', 'VAULT_TOKEN is required') + } + + const url = new URL(path, this.config.vaultInternalUrl).toString() + const headers: Record = { + 'Content-Type': 'application/json', + 'X-Vault-Token': this.config.vaultToken, + } + + return new Request(url, { method, headers, body: body ? JSON.stringify(body) : undefined }) + } + + private async handleResponse(response: Response, method: string, path: string): Promise { + if (response.status === 204) return null + + if (!response.ok) { + await this.throwForStatus(response, method, path) + } + + return await response.json() + } + + private async throwForStatus(response: Response, method: string, path: string): Promise { + const responseBody = await response.json() + const vaultErrorBody = z.object({ errors: z.array(z.string()) }).safeParse(responseBody) + const reasons = vaultErrorBody.success ? 
vaultErrorBody.data.errors : undefined + + if (response.status === 404) { + throw new VaultError('NotFound', 'Not Found', { + status: 404, + method, + path, + statusText: response.statusText, + reasons, + }) + } + + throw new VaultError('HttpError', 'Request failed', { + status: response.status, + method, + path, + statusText: response.statusText, + reasons, + }) + } + + async getKvData(kvName: string, path: string): Promise> { + if (path.startsWith('/')) path = path.slice(1) + const response = await this.fetch>(`/v1/${kvName}/data/${path}`, { method: 'GET' }) + if (!response?.data) { + throw new VaultError('InvalidResponse', 'Missing "data" field', { method: 'GET', path: `/v1/${kvName}/data/${path}` }) + } + return response.data + } + + async upsertKvData(kvName: string, path: string, body: { data: T }): Promise { + if (path.startsWith('/')) path = path.slice(1) + await this.fetch(`/v1/${kvName}/data/${path}`, { method: 'POST', body }) + } + + async destroy(path: string): Promise { + return await this.deleteKvMetadata(this.config.vaultKvName, path) + } + + async deleteKvMetadata(kvName: string, path: string): Promise { + if (path.startsWith('/')) path = path.slice(1) + try { + await this.fetch(`/v1/${kvName}/metadata/${path}`, { method: 'DELETE' }) + } catch (error) { + if (error instanceof VaultError && error.kind === 'NotFound') return + throw error + } + } + + async listKvMetadata(kvName: string, path: string): Promise { + try { + const response = await this.fetch(`/v1/${kvName}/metadata/${path}`, { method: 'LIST' }) + if (!response?.data?.keys) { + throw new VaultError('InvalidResponse', 'Missing "data.keys" field', { method: 'LIST', path: `/v1/${kvName}/metadata/${path}` }) + } + return response.data.keys + } catch (error) { + if (error instanceof VaultError && error.kind === 'NotFound') return [] + throw error + } + } + + async upsertSysPoliciesAcl(policyName: string, body: any): Promise { + await this.fetch(`/v1/sys/policies/acl/${policyName}`, { method: 
'POST', body }) + } + + async deleteSysPoliciesAcl(policyName: string): Promise { + await this.fetch(`/v1/sys/policies/acl/${policyName}`, { method: 'DELETE' }) + } + + async createSysMount(name: string, body: any): Promise { + await this.fetch(`/v1/sys/mounts/${name}`, { method: 'POST', body }) + } + + async tuneSysMount(name: string, body: any): Promise { + await this.fetch(`/v1/sys/mounts/${name}/tune`, { method: 'POST', body }) + } + + async deleteSysMounts(name: string): Promise { + await this.fetch(`/v1/sys/mounts/${name}`, { method: 'DELETE' }) + } + + async upsertAuthApproleRole(roleName: string, body: any): Promise { + await this.fetch(`/v1/auth/approle/role/${roleName}`, { + method: 'POST', + body, + }) + } + + async deleteAuthApproleRole(roleName: string): Promise { + await this.fetch(`/v1/auth/approle/role/${roleName}`, { method: 'DELETE' }) + } + + async getAuthApproleRoleRoleId(roleName: string): Promise { + const path = `/v1/auth/approle/role/${roleName}/role-id` + const response = await this.fetch(path, { method: 'GET' }) + const roleId = response?.data?.role_id + if (!roleId) { + throw new VaultError('InvalidResponse', `Vault role-id not found for role ${roleName}`, { method: 'GET', path }) + } + return roleId + } + + async createAuthApproleRoleSecretId(roleName: string): Promise { + const path = `/v1/auth/approle/role/${roleName}/secret-id` + const response = await this.fetch(path, { method: 'POST' }) + const secretId = response?.data?.secret_id + if (!secretId) { + throw new VaultError('InvalidResponse', `Vault secret-id not generated for role ${roleName}`, { method: 'POST', path }) + } + return secretId + } + + async getSysAuth(): Promise> { + const path = '/v1/sys/auth' + const response = await this.fetch(path, { method: 'GET' }) + return response?.data ?? 
{} + } + + async upsertIdentityGroupName(groupName: string, body: any): Promise { + await this.fetch(`/v1/identity/group/name/${groupName}`, { + method: 'POST', + body, + }) + } + + async getIdentityGroupName(groupName: string): Promise { + const path = `/v1/identity/group/name/${groupName}` + const response = await this.fetch(path, { method: 'GET' }) + if (!response) throw new VaultError('InvalidResponse', 'Empty response', { method: 'GET', path }) + return response + } + + async deleteIdentityGroupName(groupName: string): Promise { + await this.fetch(`/v1/identity/group/name/${groupName}`, { method: 'DELETE' }) + } + + async createIdentityGroupAlias(body: any): Promise { + await this.fetch('/v1/identity/group-alias', { method: 'POST', body }) + } +} diff --git a/apps/server-nestjs/src/modules/vault/vault-controller.service.spec.ts b/apps/server-nestjs/src/modules/vault/vault-controller.service.spec.ts new file mode 100644 index 000000000..6e294789b --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault-controller.service.spec.ts @@ -0,0 +1,72 @@ +import { Test } from '@nestjs/testing' +import type { TestingModule } from '@nestjs/testing' +import { describe, it, expect, beforeEach, vi } from 'vitest' +import type { Mocked } from 'vitest' +import { VaultControllerService } from './vault-controller.service' +import { VaultDatastoreService } from './vault-datastore.service' +import { VaultService } from './vault.service' + +function createVaultControllerServiceTestingModule() { + return Test.createTestingModule({ + providers: [ + VaultControllerService, + { + provide: VaultDatastoreService, + useValue: { + getAllProjects: vi.fn(), + getAllZones: vi.fn(), + } satisfies Partial, + }, + { + provide: VaultService, + useValue: { + upsertProject: vi.fn().mockResolvedValue(undefined), + upsertZone: vi.fn().mockResolvedValue(undefined), + deleteProject: vi.fn().mockResolvedValue(undefined), + destroyProjectSecrets: vi.fn().mockResolvedValue(undefined), + } satisfies 
Partial, + }, + ], + }) +} + +describe('vaultControllerService', () => { + let service: VaultControllerService + let datastore: Mocked + let vault: Mocked + + beforeEach(async () => { + const module: TestingModule = await createVaultControllerServiceTestingModule().compile() + service = module.get(VaultControllerService) + datastore = module.get(VaultDatastoreService) + vault = module.get(VaultService) + }) + + it('should be defined', () => { + expect(service).toBeDefined() + }) + + it('should reconcile on cron', async () => { + datastore.getAllProjects.mockResolvedValue([{ slug: 'p1' }, { slug: 'p2' }] as any) + datastore.getAllZones.mockResolvedValue([{ slug: 'z1' }] as any) + + await service.handleCron() + + expect(datastore.getAllProjects).toHaveBeenCalled() + expect(datastore.getAllZones).toHaveBeenCalled() + expect(vault.upsertProject).toHaveBeenCalledTimes(2) + expect(vault.upsertZone).toHaveBeenCalledTimes(1) + expect(vault.upsertZone).toHaveBeenCalledWith('z1') + }) + + it('should upsert project on event', async () => { + await service.handleUpsert({ slug: 'p1' } as any) + expect(vault.upsertProject).toHaveBeenCalledWith({ slug: 'p1' }) + }) + + it('should delete project and destroy secrets on event', async () => { + await service.handleDelete({ slug: 'p1' } as any) + expect(vault.deleteProject).toHaveBeenCalledWith('p1') + expect(vault.destroyProjectSecrets).toHaveBeenCalledWith('p1') + }) +}) diff --git a/apps/server-nestjs/src/modules/vault/vault-controller.service.ts b/apps/server-nestjs/src/modules/vault/vault-controller.service.ts new file mode 100644 index 000000000..5f543505a --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault-controller.service.ts @@ -0,0 +1,222 @@ +import { Inject, Injectable, Logger } from '@nestjs/common' +import { OnEvent } from '@nestjs/event-emitter' +import { Cron, CronExpression } from '@nestjs/schedule' +import { VaultDatastoreService } from './vault-datastore.service' +import type { ProjectWithDetails, 
ZoneWithDetails } from './vault-datastore.service' +import { VaultService } from './vault.service' +import { trace } from '@opentelemetry/api' + +const tracer = trace.getTracer('vault-controller-service') + +@Injectable() +export class VaultControllerService { + private readonly logger = new Logger(VaultControllerService.name) + + constructor( + @Inject(VaultDatastoreService) private readonly vaultDatastore: VaultDatastoreService, + @Inject(VaultService) private readonly vault: VaultService, + ) { + this.logger.log('VaultControllerService initialized') + } + + @OnEvent('project.upsert') + async handleUpsert(project: ProjectWithDetails) { + return tracer.startActiveSpan('handleUpsert', async (span) => { + try { + span.setAttribute('project.slug', project.slug) + this.logger.log(`Handling project upsert for ${project.slug}`) + await this.ensureProject(project, 'event') + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + @OnEvent('project.delete') + async handleDelete(project: ProjectWithDetails) { + return tracer.startActiveSpan('handleDelete', async (span) => { + try { + span.setAttribute('project.slug', project.slug) + this.logger.log(`Handling project delete for ${project.slug}`) + try { + await this.vault.deleteProject(project.slug) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } + + try { + await this.vault.destroyProjectSecrets(project.slug) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + @OnEvent('zone.upsert') + async handleUpsertZone(zone: ZoneWithDetails) { + return tracer.startActiveSpan('handleUpsertZone', async (span) => { + try { + span.setAttribute('zone.slug', zone.slug) + this.logger.log(`Handling zone upsert 
for ${zone.slug}`) + await this.ensureZone(zone) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + @OnEvent('zone.delete') + async handleDeleteZone(zone: ZoneWithDetails) { + return tracer.startActiveSpan('handleDeleteZone', async (span) => { + try { + span.setAttribute('zone.slug', zone.slug) + this.logger.log(`Handling zone delete for ${zone.slug}`) + try { + await this.vault.deleteZone(zone.slug) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + @Cron(CronExpression.EVERY_HOUR) + async handleCron() { + return tracer.startActiveSpan('handleCron', async (span) => { + try { + this.logger.log('Starting Vault reconciliation') + const [projects, zones] = await Promise.all([ + this.vaultDatastore.getAllProjects(), + this.vaultDatastore.getAllZones(), + ]) + + span.setAttribute('vault.projects.count', projects.length) + span.setAttribute('vault.zones.count', zones.length) + await Promise.all([ + this.ensureProjects(projects), + this.ensureZones(zones), + ]) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + private async ensureProjects(projects: ProjectWithDetails[]) { + return tracer.startActiveSpan('ensureProjects', async (span) => { + try { + span.setAttribute('vault.projects.count', projects.length) + await Promise.all(projects.map(p => this.ensureProject(p, 'cron'))) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + private async ensureProject(project: ProjectWithDetails, source: 'event' | 'cron') { + return tracer.startActiveSpan('ensureProject', async (span) => { + try { + 
span.setAttribute('project.slug', project.slug) + span.setAttribute('reconcile.source', source) + try { + await this.vault.upsertProject(project) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + private async ensureZones(zones: ZoneWithDetails[]) { + return tracer.startActiveSpan('ensureZones', async (span) => { + try { + span.setAttribute('vault.zones.count', zones.length) + await Promise.all(zones.map(z => this.ensureZone(z))) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + private async ensureZone(zone: ZoneWithDetails) { + return tracer.startActiveSpan('ensureZone', async (span) => { + try { + span.setAttribute('zone.slug', zone.slug) + try { + await this.vault.upsertZone(zone.slug) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } +} diff --git a/apps/server-nestjs/src/modules/vault/vault-datastore.service.ts b/apps/server-nestjs/src/modules/vault/vault-datastore.service.ts new file mode 100644 index 000000000..09db3010e --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault-datastore.service.ts @@ -0,0 +1,58 @@ +import { Inject, Injectable } from '@nestjs/common' +import type { Prisma } from '@prisma/client' +import { PrismaService } from '@/cpin-module/infrastructure/database/prisma.service' + +export const projectSelect = { + id: true, + name: true, + slug: true, + description: true, + environments: { + select: { + id: true, + name: true, + clusterId: true, + cpu: true, + gpu: true, + memory: true, + autosync: true, + }, + }, +} satisfies Prisma.ProjectSelect + +export 
type ProjectWithDetails = Prisma.ProjectGetPayload<{ + select: typeof projectSelect +}> + +export const zoneSelect = { + id: true, + slug: true, +} satisfies Prisma.ZoneSelect + +export type ZoneWithDetails = Prisma.ZoneGetPayload<{ + select: typeof zoneSelect +}> + +@Injectable() +export class VaultDatastoreService { + constructor(@Inject(PrismaService) private readonly prisma: PrismaService) {} + + async getAllProjects(): Promise { + return this.prisma.project.findMany({ + select: projectSelect, + }) + } + + async getProject(id: string): Promise { + return this.prisma.project.findUnique({ + where: { id }, + select: projectSelect, + }) + } + + async getAllZones(): Promise { + return this.prisma.zone.findMany({ + select: zoneSelect, + }) + } +} diff --git a/apps/server-nestjs/src/modules/vault/vault.module.ts b/apps/server-nestjs/src/modules/vault/vault.module.ts new file mode 100644 index 000000000..105b06285 --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault.module.ts @@ -0,0 +1,14 @@ +import { Module } from '@nestjs/common' +import { ConfigurationModule } from '@/cpin-module/infrastructure/configuration/configuration.module' +import { InfrastructureModule } from '@/cpin-module/infrastructure/infrastructure.module' +import { VaultClientService } from './vault-client.service' +import { VaultControllerService } from './vault-controller.service' +import { VaultDatastoreService } from './vault-datastore.service' +import { VaultService } from './vault.service' + +@Module({ + imports: [ConfigurationModule, InfrastructureModule], + providers: [VaultService, VaultClientService, VaultControllerService, VaultDatastoreService], + exports: [VaultService], +}) +export class VaultModule {} diff --git a/apps/server-nestjs/src/modules/vault/vault.service.spec.ts b/apps/server-nestjs/src/modules/vault/vault.service.spec.ts new file mode 100644 index 000000000..f73bc6e1a --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault.service.spec.ts @@ -0,0 +1,121 @@ 
+import { Test } from '@nestjs/testing' +import { VaultService } from './vault.service' +import { VaultClientService, VaultError } from './vault-client.service' +import { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import { describe, beforeEach, it, expect, beforeAll, afterAll, afterEach } from 'vitest' +import { setupServer } from 'msw/node' +import { http, HttpResponse } from 'msw' + +const vaultUrl = 'https://vault.internal' + +const server = setupServer( + http.post(`${vaultUrl}/v1/auth/token/create`, () => { + return HttpResponse.json({ auth: { client_token: 'token' } }) + }), + http.get(`${vaultUrl}/v1/kv/data/:path`, () => { + return HttpResponse.json({ data: { data: { secret: 'value' }, metadata: { created_time: '2023-01-01T00:00:00.000Z', version: 1 } } }) + }), + http.post(`${vaultUrl}/v1/kv/data/:path`, () => { + return HttpResponse.json({}) + }), + http.delete(`${vaultUrl}/v1/kv/metadata/:path`, () => { + return new HttpResponse(null, { status: 204 }) + }), +) + +function createVaultServiceTestingModule() { + return Test.createTestingModule({ + providers: [ + VaultService, + VaultClientService, + { + provide: ConfigurationService, + useValue: { + vaultToken: 'token', + vaultUrl, + vaultInternalUrl: vaultUrl, + vaultKvName: 'kv', + } satisfies Partial, + }, + ], + }) +} + +describe('vault', () => { + let service: VaultService + + beforeAll(() => server.listen()) + beforeEach(async () => { + const module = await createVaultServiceTestingModule().compile() + service = module.get(VaultService) + }) + afterEach(() => server.resetHandlers()) + afterAll(() => server.close()) + + describe('getProjectValues', () => { + it('should get project values', async () => { + const result = await service.readProjectValues('project-id') + expect(result).toEqual({ secret: 'value' }) + }) + + it('should return empty object if undefined', async () => { + server.use( + http.get(`${vaultUrl}/v1/kv/data/:path`, () => { + return 
HttpResponse.json({}, { status: 404 }) + }), + ) + + const result = await service.readProjectValues('project-id') + expect(result).toEqual({}) + }) + }) + + describe('read', () => { + it('should read secret', async () => { + const result = await service.read('path') + expect(result).toEqual({ + data: { secret: 'value' }, + metadata: { created_time: '2023-01-01T00:00:00.000Z', version: 1 }, + }) + }) + + it('should throw if 404', async () => { + server.use( + http.get(`${vaultUrl}/v1/kv/data/:path`, () => { + return HttpResponse.json({}, { status: 404 }) + }), + ) + + await expect(service.read('path')).rejects.toBeInstanceOf(VaultError) + await expect(service.read('path')).rejects.toMatchObject({ kind: 'NotFound', status: 404 }) + }) + }) + + describe('write', () => { + it('should write secret', async () => { + await expect(service.write({ secret: 'value' }, 'path')).resolves.toBeUndefined() + }) + + it('should expose reasons on error', async () => { + server.use( + http.post(`${vaultUrl}/v1/kv/data/:path`, () => { + return HttpResponse.json({ errors: ['No secret engine mount at test-project/'] }, { status: 400 }) + }), + ) + + await expect(service.write({ secret: 'value' }, 'path')).rejects.toBeInstanceOf(VaultError) + await expect(service.write({ secret: 'value' }, 'path')).rejects.toMatchObject({ + kind: 'HttpError', + status: 400, + reasons: ['No secret engine mount at test-project/'], + }) + await expect(service.write({ secret: 'value' }, 'path')).rejects.toThrow('Request failed') + }) + }) + + describe('destroy', () => { + it('should destroy secret', async () => { + await expect(service.destroy('path')).resolves.toBeUndefined() + }) + }) +}) diff --git a/apps/server-nestjs/src/modules/vault/vault.service.ts b/apps/server-nestjs/src/modules/vault/vault.service.ts new file mode 100644 index 000000000..c029d0ba4 --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault.service.ts @@ -0,0 +1,476 @@ +import { Inject, Injectable } from '@nestjs/common' +import 
{ ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import { VaultClientService, VaultError } from './vault-client.service' +import type { VaultSecret } from './vault-client.service' +import { trace } from '@opentelemetry/api' +import { + generateAppAdminPolicyName, + generateTechReadOnlyPolicyName, + generateZoneName, + generateZoneTechReadOnlyPolicyName, +} from './vault.utils' +import type { ProjectWithDetails } from './vault-datastore.service' + +const tracer = trace.getTracer('vault-service') + +@Injectable() +export class VaultService { + constructor( + @Inject(VaultClientService) private readonly vaultClientService: VaultClientService, + @Inject(ConfigurationService) private readonly config: ConfigurationService, + ) { + } + + private getApproleRoleBody(policies: string[]) { + return { + secret_id_num_uses: '0', + secret_id_ttl: '0', + token_max_ttl: '0', + token_num_uses: '0', + token_ttl: '0', + token_type: 'batch', + token_policies: policies, + } + } + + async read(path: string): Promise { + return tracer.startActiveSpan('read', async (span) => { + try { + span.setAttribute('vault.path', path) + return await this.vaultClientService.getKvData(this.config.vaultKvName, path) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + async write(data: any, path: string): Promise { + return tracer.startActiveSpan('write', async (span) => { + try { + span.setAttribute('vault.path', path) + await this.vaultClientService.upsertKvData(this.config.vaultKvName, path, { data }) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + async destroy(path: string): Promise { + return tracer.startActiveSpan('destroy', async (span) => { + try { + span.setAttribute('vault.path', path) + await this.vaultClientService.destroy(path) + } catch (error) { + if 
(error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + async readProjectValues(projectId: string): Promise> { + return tracer.startActiveSpan('readProjectValues', async (span) => { + const path = this.config.projectRootPath + ? `${this.config.projectRootPath}/${projectId}` + : projectId + try { + span.setAttribute('vault.path', path) + const secret = await this.vaultClientService.getKvData(this.config.vaultKvName, path) + return secret.data || {} + } catch (error) { + if (error instanceof VaultError && error.kind === 'NotFound') return {} + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + async readGitlabMirrorCreds(projectSlug: string, repoName: string) { + return tracer.startActiveSpan('readGitlabMirrorCreds', async (span) => { + const vaultCredsPath = `${this.config.projectRootPath}/${projectSlug}/${repoName}-mirror` + try { + span.setAttribute('vault.path', vaultCredsPath) + span.setAttribute('project.slug', projectSlug) + span.setAttribute('repo.name', repoName) + return await this.read(vaultCredsPath) + } catch (error) { + if (error instanceof VaultError && error.kind === 'NotFound') return null + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + async writeGitlabMirrorCreds(projectSlug: string, repoName: string, data: Record) { + return tracer.startActiveSpan('writeGitlabMirrorCreds', async (span) => { + const vaultCredsPath = `${this.config.projectRootPath}/${projectSlug}/${repoName}-mirror` + try { + span.setAttribute('vault.path', vaultCredsPath) + span.setAttribute('project.slug', projectSlug) + span.setAttribute('repo.name', repoName) + await this.write(data, vaultCredsPath) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + async 
deleteGitlabMirrorCreds(projectSlug: string, repoName: string) { + return tracer.startActiveSpan('deleteGitlabMirrorCreds', async (span) => { + const vaultCredsPath = `${this.config.projectRootPath}/${projectSlug}/${repoName}-mirror` + try { + span.setAttribute('vault.path', vaultCredsPath) + span.setAttribute('project.slug', projectSlug) + span.setAttribute('repo.name', repoName) + try { + await this.destroy(vaultCredsPath) + } catch (error) { + if (error instanceof VaultError && error.kind === 'NotFound') return + throw error + } + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + async readTechnReadOnlyCreds(projectSlug: string) { + return tracer.startActiveSpan('readMirrorCreds', async (span) => { + const vaultPath = `${this.config.projectRootPath}/${projectSlug}/tech/GITLAB_MIRROR` + try { + span.setAttribute('vault.path', vaultPath) + span.setAttribute('project.slug', projectSlug) + return await this.read(vaultPath) + } catch (error) { + if (error instanceof VaultError && error.kind === 'NotFound') return null + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + async writeTechReadOnlyCreds(projectSlug: string, creds: Record) { + return tracer.startActiveSpan('writeMirrorCreds', async (span) => { + const vaultPath = `${this.config.projectRootPath}/${projectSlug}/tech/GITLAB_MIRROR` + try { + span.setAttribute('vault.path', vaultPath) + span.setAttribute('project.slug', projectSlug) + await this.write(creds, vaultPath) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + async writeMirrorTriggerToken(secret: Record) { + return tracer.startActiveSpan('writeMirrorTriggerToken', async (span) => { + try { + span.setAttribute('vault.path', 'GITLAB') + await this.write(secret, 'GITLAB') + } catch (error) { + if 
(error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + private async upsertMount(kvName: string): Promise { + const createBody = { + type: 'kv', + config: { + force_no_cache: true, + }, + options: { + version: 2, + }, + } + const tuneBody = { + options: { + version: 2, + }, + } + try { + await this.vaultClientService.createSysMount(kvName, createBody) + } catch (error) { + if (error instanceof VaultError && error.kind === 'HttpError' && error.status === 400) { + await this.vaultClientService.tuneSysMount(kvName, tuneBody) + return + } + throw error + } + } + + private async deleteMount(kvName: string): Promise { + try { + await this.vaultClientService.deleteSysMounts(kvName) + } catch (error) { + if (error instanceof VaultError && error.kind === 'NotFound') return + throw error + } + } + + async upsertZone(zoneName: string): Promise { + return tracer.startActiveSpan('upsertZone', async (span) => { + const kvName = generateZoneName(zoneName) + const policyName = generateZoneTechReadOnlyPolicyName(zoneName) + const roleName = kvName + + try { + span.setAttribute('zone.name', zoneName) + span.setAttribute('vault.kvName', kvName) + + await this.upsertMount(kvName) + await this.vaultClientService.upsertSysPoliciesAcl(policyName, { + policy: `path "${kvName}/*" { capabilities = ["read"] }`, + }) + await this.vaultClientService.upsertAuthApproleRole(roleName, this.getApproleRoleBody([policyName])) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + async deleteZone(zoneName: string): Promise { + return tracer.startActiveSpan('deleteZone', async (span) => { + const kvName = generateZoneName(zoneName) + const policyName = generateZoneTechReadOnlyPolicyName(zoneName) + const roleName = kvName + + try { + span.setAttribute('zone.name', zoneName) + span.setAttribute('vault.kvName', kvName) + await this.deleteMount(kvName) + 
const settled = await Promise.allSettled([ + this.vaultClientService.deleteSysPoliciesAcl(policyName), + this.vaultClientService.deleteAuthApproleRole(roleName), + ]) + + for (const result of settled) { + if (result.status !== 'rejected') continue + const error = result.reason + if (error instanceof VaultError && error.kind === 'NotFound') continue + throw error + } + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + async upsertProject(project: ProjectWithDetails): Promise { + return tracer.startActiveSpan('upsertProject', async (span) => { + const appPolicyName = generateAppAdminPolicyName(project) + const techPolicyName = generateTechReadOnlyPolicyName(project) + + try { + span.setAttribute('project.slug', project.slug) + await this.upsertMount(project.slug) + await Promise.all([ + this.createAppAdminPolicy(appPolicyName, project.slug), + this.createTechReadOnlyPolicy(techPolicyName, project.slug), + this.ensureProjectGroup(project.slug, appPolicyName), + this.vaultClientService.upsertAuthApproleRole(project.slug, this.getApproleRoleBody([techPolicyName, appPolicyName])), + ]) + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + async deleteProject(projectSlug: string): Promise { + return tracer.startActiveSpan('deleteProject', async (span) => { + const kvName = projectSlug + const appPolicyName = generateAppAdminPolicyName({ slug: projectSlug } as ProjectWithDetails) + const techPolicyName = generateTechReadOnlyPolicyName({ slug: projectSlug } as ProjectWithDetails) + const roleName = projectSlug + const groupName = projectSlug + + try { + span.setAttribute('project.slug', projectSlug) + span.setAttribute('vault.kvName', kvName) + + await this.deleteMount(kvName) + + const settled = await Promise.allSettled([ + this.vaultClientService.deleteSysPoliciesAcl(appPolicyName), + 
this.vaultClientService.deleteSysPoliciesAcl(techPolicyName), + this.vaultClientService.deleteAuthApproleRole(roleName), + this.vaultClientService.deleteIdentityGroupName(groupName), + ]) + for (const result of settled) { + if (result.status !== 'rejected') continue + const error = result.reason + if (error instanceof VaultError && error.kind === 'NotFound') continue + throw error + } + } catch (error) { + if (error instanceof Error) { + span.recordException(error) + } + throw error + } finally { + span.end() + } + }) + } + + private async ensureProjectGroup(groupName: string, policyName: string): Promise { + await this.vaultClientService.upsertIdentityGroupName(groupName, { + name: groupName, + type: 'external', + policies: [policyName], + }) + + const groupResult = await this.vaultClientService.getIdentityGroupName(groupName) + if (!groupResult.data?.id) { + throw new VaultError('InvalidResponse', `Vault group not found after upsert: ${groupName}`, { method: 'GET', path: `/v1/identity/group/name/${groupName}` }) + } + + const groupAliasName = `/${groupName}` + if (groupResult.data.alias?.name === groupAliasName) return + + const methods = await this.vaultClientService.getSysAuth() + const oidc = methods['oidc/'] + if (!oidc?.accessor) { + throw new VaultError('InvalidResponse', 'Vault OIDC auth method not found (expected "oidc/")', { method: 'GET', path: '/v1/sys/auth' }) + } + try { + await this.vaultClientService.createIdentityGroupAlias({ + name: groupAliasName, + mount_accessor: oidc.accessor, + canonical_id: groupResult.data.id, + }) + } catch (error) { + if (error instanceof VaultError && error.kind === 'HttpError' && error.status === 400) return + throw error + } + } + + async createAppAdminPolicy(name: string, projectSlug: string): Promise { + await this.vaultClientService.upsertSysPoliciesAcl(name, { + policy: `path "${projectSlug}/*" { capabilities = ["create", "read", "update", "delete", "list"] }`, + }) + } + + async createTechReadOnlyPolicy(name: 
/**
 * Destroys every secret beneath a project's root path in parallel.
 * Per-secret NotFound is tolerated (already gone).
 *
 * Fix: the original awaited Promise.allSettled but never inspected the
 * results, so any non-NotFound destroy failure was silently swallowed and
 * the span ended "successfully". Rejections are now rethrown (first one
 * wins), matching the deleteZone/deleteProject handling, while still
 * attempting every deletion before failing.
 * Also restores the `<void>` type argument stripped from the return
 * annotation.
 */
async destroyProjectSecrets(projectSlug: string): Promise<void> {
  return tracer.startActiveSpan('destroyProjectSecrets', async (span) => {
    try {
      span.setAttribute('project.slug', projectSlug)
      const secrets = await this.listProjectSecrets(projectSlug)

      const settled = await Promise.allSettled(secrets.map(async (relativePath) => {
        const fullPath = this.config.projectRootPath
          ? `${this.config.projectRootPath}/${projectSlug}/${relativePath}`
          : `${projectSlug}/${relativePath}`
        try {
          await this.destroy(fullPath)
        } catch (error) {
          if (error instanceof VaultError && error.kind === 'NotFound') return
          throw error
        }
      }))

      for (const result of settled) {
        if (result.status === 'rejected') throw result.reason
      }
    } catch (error) {
      if (error instanceof Error) {
        span.recordException(error)
      }
      throw error
    } finally {
      span.end()
    }
  })
}
key : `${relativePath}/${key}`) + } + } + return results + } +} diff --git a/apps/server-nestjs/src/modules/vault/vault.utils.ts b/apps/server-nestjs/src/modules/vault/vault.utils.ts new file mode 100644 index 000000000..96f4a1866 --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault.utils.ts @@ -0,0 +1,17 @@ +import type { ProjectWithDetails } from './vault-datastore.service' + +export function generateTechReadOnlyPolicyName(project: ProjectWithDetails) { + return `tech--${project.slug}--ro` +} + +export function generateAppAdminPolicyName(project: ProjectWithDetails) { + return `app--${project.slug}--admin` +} + +export function generateZoneName(name: string) { + return `zone-${name}` +} + +export function generateZoneTechReadOnlyPolicyName(zoneName: string) { + return `tech--${generateZoneName(zoneName)}--ro` +} diff --git a/apps/server-nestjs/src/prisma/schema/project.prisma b/apps/server-nestjs/src/prisma/schema/project.prisma index 833845eee..d45ccf451 100644 --- a/apps/server-nestjs/src/prisma/schema/project.prisma +++ b/apps/server-nestjs/src/prisma/schema/project.prisma @@ -5,6 +5,7 @@ model Environment { memory Float @db.Real cpu Float @db.Real gpu Float @db.Real + autosync Boolean @default(true) createdAt DateTime @default(now()) updatedAt DateTime @updatedAt clusterId String @db.Uuid diff --git a/apps/server-nestjs/test/gitlab.e2e-spec.ts b/apps/server-nestjs/test/gitlab.e2e-spec.ts new file mode 100644 index 000000000..0c28536c2 --- /dev/null +++ b/apps/server-nestjs/test/gitlab.e2e-spec.ts @@ -0,0 +1,231 @@ +import type { TestingModule } from '@nestjs/testing' +import { Test } from '@nestjs/testing' +import { GitlabModule } from '@/modules/gitlab/gitlab.module' +import { GitlabControllerService } from '@/modules/gitlab/gitlab-controller.service' +import { GitlabClientService } from '@/modules/gitlab/gitlab-client.service' +import { GitlabService } from '@/modules/gitlab/gitlab.service' +import { PrismaService } from 
'@/cpin-module/infrastructure/database/prisma.service' +import { ConfigurationModule } from '../src/cpin-module/infrastructure/configuration/configuration.module' +import { InfrastructureModule } from '@/cpin-module/infrastructure/infrastructure.module' +import { VaultService } from '@/modules/vault/vault.service' +import { afterAll, beforeAll, describe, expect, it, vi } from 'vitest' +import { faker } from '@faker-js/faker' +import { projectSelect } from '@/modules/gitlab/gitlab-datastore.service' +import { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import z from 'zod' +import type { ExpandedUserSchema } from '@gitbeaker/core' + +const canRunGitlabE2E + = Boolean(process.env.E2E) + && Boolean(process.env.GITLAB_URL) + && Boolean(process.env.GITLAB_TOKEN) + && Boolean(process.env.VAULT_URL) + && Boolean(process.env.VAULT_TOKEN) + && Boolean(process.env.PROJECTS_ROOT_DIR) + && Boolean(process.env.DB_URL) + +const describeWithGitLab = describe.runIf(canRunGitlabE2E) + +describeWithGitLab('GitlabController (e2e)', {}, () => { + let moduleRef: TestingModule + let gitlabController: GitlabControllerService + let gitlabService: GitlabService + let gitlabClient: GitlabClientService + let vaultService: VaultService + let prisma: PrismaService + let config: ConfigurationService + + let vaultProbePath: string + + let testProjectId: string + let testProjectSlug: string + let ownerId: string + let ownerUser: ExpandedUserSchema + + beforeAll(async () => { + moduleRef = await Test.createTestingModule({ + imports: [GitlabModule, ConfigurationModule, InfrastructureModule], + }).compile() + + await moduleRef.init() + + gitlabController = moduleRef.get(GitlabControllerService) + gitlabService = moduleRef.get(GitlabService) + gitlabClient = moduleRef.get(GitlabClientService) + vaultService = moduleRef.get(VaultService) + prisma = moduleRef.get(PrismaService) + config = moduleRef.get(ConfigurationService) + + ownerId = 
afterAll(async () => {
  // Best-effort teardown: every external cleanup swallows its own error
  // (.catch(() => {})) so one failure cannot prevent the later steps from
  // running or fail the suite itself.
  if (vaultProbePath) {
    await vaultService.destroy(vaultProbePath).catch(() => {})
  }

  // Clean GitLab group
  if (testProjectSlug && config.projectRootPath) {
    const fullPath = `${config.projectRootPath}/${testProjectSlug}`
    const group = await gitlabService.getGroupByPath(fullPath)
    if (group) {
      await gitlabService.deleteGroup(group.id).catch(() => {})
    }
  }

  // Clean Vault
  if (testProjectSlug && config.projectRootPath) {
    const vaultPath = `${config.projectRootPath}/${testProjectSlug}`
    await vaultService.destroy(`${vaultPath}/tech/GITLAB_MIRROR`).catch(() => {})
    await vaultService.destroy(`${vaultPath}/app-mirror`).catch(() => {})
  }

  // Clean DB — member rows first so the project/user FK deletes succeed.
  if (prisma) {
    await prisma.projectMembers.deleteMany({ where: { projectId: testProjectId } }).catch(() => {})
    await prisma.project.deleteMany({ where: { id: testProjectId } }).catch(() => {})
    await prisma.user.deleteMany({ where: { id: ownerId } }).catch(() => {})
  }

  await moduleRef.close()

  vi.restoreAllMocks()
  vi.unstubAllEnvs()
})
DB + await prisma.project.create({ + data: { + id: testProjectId, + slug: testProjectSlug, + name: testProjectSlug, + ownerId, + description: 'E2E Test Project', + hprodCpu: 0, + hprodGpu: 0, + hprodMemory: 0, + prodCpu: 0, + prodGpu: 0, + prodMemory: 0, + }, + }) + + await prisma.repository.create({ + data: { + projectId: testProjectId, + internalRepoName: 'app', + externalRepoUrl: 'https://example.com/example.git', + isPrivate: false, + }, + }) + + const project = await prisma.project.findUniqueOrThrow({ + where: { id: testProjectId }, + select: projectSelect, + }) + + // Act + await gitlabController.handleUpsert(project) + + // Assert + const groupPath = `${config.projectRootPath}/${testProjectSlug}` + const group = z.object({ + id: z.number(), + name: z.string(), + full_path: z.string(), + }).parse(await gitlabService.getGroupByPath(groupPath)) + expect(group.full_path).toBe(groupPath) + + // Check membership + const members = await gitlabService.getGroupMembers(group.id) + const isMember = members.some(m => m.id === ownerUser.id) + expect(isMember).toBe(true) + + const repoVaultPath = `${config.projectRootPath}/${testProjectSlug}/app-mirror` + const repoSecret = await vaultService.read(repoVaultPath) + expect(repoSecret?.data?.GIT_OUTPUT_USER).toBeTruthy() + expect(repoSecret?.data?.GIT_OUTPUT_PASSWORD).toBeTruthy() + }, 72000) + + it('should add member to GitLab group when added in DB', async () => { + // Create user in GitLab + const newUserId = faker.string.uuid() + const newUser = await gitlabClient.Users.create({ + email: faker.internet.email().toLowerCase(), + username: faker.internet.username(), + name: `${faker.person.firstName()} ${faker.person.lastName()}`, + password: faker.internet.password({ length: 24 }), + skipConfirmation: true, + }) + + // Create user in DB + await prisma.user.create({ + data: { + id: newUserId, + email: newUser.email, + firstName: 'Test', + lastName: 'User', + type: 'human', + }, + }) + + // Add member to project in DB + 
await prisma.projectMembers.create({ + data: { + projectId: testProjectId, + userId: newUserId, + roleIds: [], // No roles for now + }, + }) + + const project = await prisma.project.findUniqueOrThrow({ + where: { id: testProjectId }, + select: projectSelect, + }) + + // Act + await gitlabController.handleUpsert(project) + + // Assert + const groupPath = `${config.projectRootPath}/${testProjectSlug}` + const group = z.object({ + id: z.number(), + }).parse(await gitlabService.getGroupByPath(groupPath)) + + const members = await gitlabService.getGroupMembers(group.id) + const isNewMemberPresent = members.some(m => m.id === newUser.id) + expect(isNewMemberPresent).toBe(true) + + await prisma.projectMembers.deleteMany({ where: { userId: newUserId } }).catch(() => {}) + await prisma.user.delete({ where: { id: newUserId } }).catch(() => {}) + }, 72000) +}) diff --git a/apps/server-nestjs/test/vault.e2e-spec.ts b/apps/server-nestjs/test/vault.e2e-spec.ts new file mode 100644 index 000000000..93e443d2b --- /dev/null +++ b/apps/server-nestjs/test/vault.e2e-spec.ts @@ -0,0 +1,105 @@ +import type { TestingModule } from '@nestjs/testing' +import { Test } from '@nestjs/testing' +import { afterAll, beforeAll, describe, expect, it, vi } from 'vitest' +import { faker } from '@faker-js/faker' +import { VaultModule } from '../src/modules/vault/vault.module' +import { VaultControllerService } from '../src/modules/vault/vault-controller.service' +import { PrismaService } from '../src/cpin-module/infrastructure/database/prisma.service' +import { projectSelect } from '../src/modules/vault/vault-datastore.service' +import { ConfigurationModule } from '../src/cpin-module/infrastructure/configuration/configuration.module' +import { InfrastructureModule } from '../src/cpin-module/infrastructure/infrastructure.module' +import { VaultClientService } from '../src/modules/vault/vault-client.service' +import { VaultService } from '../src/modules/vault/vault.service' + +const canRunVaultE2E + = 
afterAll(async () => {
  // Best-effort teardown: each cleanup swallows its own error so later
  // steps still run and a cleanup failure never fails the suite.
  if (testProjectSlug) {
    // Removes the mount, policies, approle and identity group created by
    // the reconcile under test.
    await vaultService.deleteProject(testProjectSlug).catch(() => {})
  }

  if (prisma) {
    // Project first, then owner, to respect the ownerId foreign key.
    await prisma.project.deleteMany({ where: { id: testProjectId } }).catch(() => {})
    await prisma.user.deleteMany({ where: { id: ownerId } }).catch(() => {})
  }

  await moduleRef.close()

  vi.restoreAllMocks()
  vi.unstubAllEnvs()
})
prisma.project.findUniqueOrThrow({ + where: { id: testProjectId }, + select: projectSelect, + }) + + await vaultController.handleUpsert(project) + + const group = await vaultClient.getIdentityGroupName(testProjectSlug) + expect(group.data?.id).toBeTruthy() + expect(group.data?.name).toBe(testProjectSlug) + expect(group.data?.alias?.name).toBe(`/${testProjectSlug}`) + }, 180000) +}) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 1999f405a..494d690c2 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -398,6 +398,9 @@ importers: '@gitbeaker/core': specifier: ^40.6.0 version: 40.6.0 + '@gitbeaker/requester-utils': + specifier: ^40.6.0 + version: 40.6.0 '@gitbeaker/rest': specifier: ^40.6.0 version: 40.6.0