diff --git a/apps/server-nestjs/package.json b/apps/server-nestjs/package.json index b2950759b..44500a55b 100644 --- a/apps/server-nestjs/package.json +++ b/apps/server-nestjs/package.json @@ -1,9 +1,9 @@ { "name": "server-nestjs", "version": "9.13.2", + "private": true, "description": "", "author": "", - "private": true, "license": "UNLICENSED", "scripts": { "build": "nest build", @@ -12,9 +12,15 @@ "start:dev": "nest start --watch", "start:debug": "nest start --debug --watch", "start:prod": "node dist/main", - "lint": "eslint \"{src,apps,libs,test}/**/*.ts\" --fix" + "lint": "eslint \"{src,apps,libs,test}/**/*.ts\" --fix", + "test": "vitest run", + "test:watch": "vitest", + "test:cov": "vitest run --coverage", + "test:debug": "vitest --inspect" }, "dependencies": { + "@casl/ability": "^6.7.1", + "@casl/prisma": "^1.5.0", "@cpn-console/argocd-plugin": "workspace:^", "@cpn-console/gitlab-plugin": "workspace:^", "@cpn-console/harbor-plugin": "workspace:^", @@ -30,23 +36,29 @@ "@fastify/swagger": "^8.15.0", "@fastify/swagger-ui": "^4.2.0", "@gitbeaker/core": "^40.6.0", + "@gitbeaker/requester-utils": "^40.6.0", "@gitbeaker/rest": "^40.6.0", + "@keycloak/keycloak-admin-client": "^24.0.0", "@kubernetes-models/argo-cd": "^2.6.2", "@nestjs/common": "^11.0.1", "@nestjs/config": "^4.0.2", "@nestjs/core": "^11.0.1", + "@nestjs/event-emitter": "^3.0.1", "@nestjs/platform-express": "^11.0.1", + "@nestjs/schedule": "^5.0.1", "@prisma/client": "^6.0.1", "@ts-rest/core": "^3.52.1", "@ts-rest/fastify": "^3.52.1", "@ts-rest/open-api": "^3.52.1", - "axios": "1.12.2", "date-fns": "^4.1.0", "dotenv": "^16.4.7", "fastify": "^4.29.1", "fastify-keycloak-adapter": "2.3.2", + "js-yaml": "^4.1.1", "json-2-csv": "^5.5.7", + "keycloak-connect": "^25.0.0", "mustache": "^4.2.0", + "nest-keycloak-connect": "^1.10.1", "nestjs-pino": "^4.5.0", "pino-http": "^11.0.0", "prisma": "^6.0.1", @@ -67,6 +79,7 @@ "@nestjs/testing": "^11.0.1", "@types/express": "^5.0.0", "@types/jest": "^30.0.0", + 
"@types/js-yaml": "4.0.9", "@types/node": "^22.10.7", "@types/supertest": "^6.0.2", "@vitest/coverage-v8": "^2.1.8", diff --git a/apps/server-nestjs/src/cpin-module/infrastructure/configuration/configuration.service.ts b/apps/server-nestjs/src/cpin-module/infrastructure/configuration/configuration.service.ts index b3af13bcb..10d058a49 100644 --- a/apps/server-nestjs/src/cpin-module/infrastructure/configuration/configuration.service.ts +++ b/apps/server-nestjs/src/cpin-module/infrastructure/configuration/configuration.service.ts @@ -25,6 +25,7 @@ export class ConfigurationService { keycloakClientId = process.env.KEYCLOAK_CLIENT_ID keycloakClientSecret = process.env.KEYCLOAK_CLIENT_SECRET keycloakRedirectUri = process.env.KEYCLOAK_REDIRECT_URI + keycloakControllerPurgeOrphans = Boolean(process.env.KEYCLOAK_RECONCILER_PURGE_ORPHANS) adminsUserId = process.env.ADMIN_KC_USER_ID ? process.env.ADMIN_KC_USER_ID.split(',') : [] @@ -33,10 +34,35 @@ export class ConfigurationService { = process.env.CONTACT_EMAIL ?? 'cloudpinative-relations@interieur.gouv.fr' + // argocd + argoNamespace = process.env.ARGO_NAMESPACE ?? 'argocd' + argocdUrl = process.env.ARGOCD_URL + argocdExtraRepositories = process.env.ARGOCD_EXTRA_REPOSITORIES + + // dso + dsoEnvChartVersion = process.env.DSO_ENV_CHART_VERSION ?? 'dso-env-1.6.0' + dsoNsChartVersion = process.env.DSO_NS_CHART_VERSION ?? 'dso-ns-1.1.5' + // plugins mockPlugins = process.env.MOCK_PLUGINS === 'true' projectRootDir = process.env.PROJECTS_ROOT_DIR pluginsDir = process.env.PLUGINS_DIR ?? '/plugins' + + // gitlab + gitlabToken = process.env.GITLAB_TOKEN + gitlabUrl = process.env.GITLAB_URL + gitlabInternalUrl = process.env.GITLAB_INTERNAL_URL + ? process.env.GITLAB_INTERNAL_URL + : process.env.GITLAB_URL + + // vault + vaultToken = process.env.VAULT_TOKEN + vaultUrl = process.env.VAULT_URL + vaultInternalUrl = process.env.VAULT_INTERNAL_URL + ? 
process.env.VAULT_INTERNAL_URL + : process.env.VAULT_URL + vaultKvName = process.env.VAULT_KV_NAME ?? 'forge-dso' + NODE_ENV = process.env.NODE_ENV === 'test' ? 'test' diff --git a/apps/server-nestjs/src/cpin-module/infrastructure/database/prisma.service.ts b/apps/server-nestjs/src/cpin-module/infrastructure/database/prisma.service.ts new file mode 100644 index 000000000..410e662ea --- /dev/null +++ b/apps/server-nestjs/src/cpin-module/infrastructure/database/prisma.service.ts @@ -0,0 +1,14 @@ +import type { OnModuleInit, OnModuleDestroy } from '@nestjs/common' +import { Injectable } from '@nestjs/common' +import { PrismaClient } from '@prisma/client' + +@Injectable() +export class PrismaService extends PrismaClient implements OnModuleInit, OnModuleDestroy { + async onModuleInit() { + await this.$connect() + } + + async onModuleDestroy() { + await this.$disconnect() + } +} diff --git a/apps/server-nestjs/src/cpin-module/infrastructure/infrastructure.module.ts b/apps/server-nestjs/src/cpin-module/infrastructure/infrastructure.module.ts index 8bd7fac8a..35f17db52 100644 --- a/apps/server-nestjs/src/cpin-module/infrastructure/infrastructure.module.ts +++ b/apps/server-nestjs/src/cpin-module/infrastructure/infrastructure.module.ts @@ -2,13 +2,14 @@ import { Module } from '@nestjs/common' import { ConfigurationModule } from './configuration/configuration.module' import { DatabaseService } from './database/database.service' +import { PrismaService } from './database/prisma.service' import { HttpClientService } from './http-client/http-client.service' import { LoggerModule } from './logger/logger.module' import { ServerService } from './server/server.service' @Module({ - providers: [DatabaseService, HttpClientService, ServerService], + providers: [DatabaseService, PrismaService, HttpClientService, ServerService], imports: [LoggerModule, ConfigurationModule], - exports: [DatabaseService, HttpClientService, ServerService], + exports: [DatabaseService, PrismaService, 
HttpClientService, ServerService], }) export class InfrastructureModule {} diff --git a/apps/server-nestjs/src/main.module.ts b/apps/server-nestjs/src/main.module.ts index a8c7b3fd0..8a7d48ab7 100644 --- a/apps/server-nestjs/src/main.module.ts +++ b/apps/server-nestjs/src/main.module.ts @@ -1,11 +1,27 @@ import { Module } from '@nestjs/common' +import { EventEmitterModule } from '@nestjs/event-emitter' +import { ScheduleModule } from '@nestjs/schedule' import { CpinModule } from './cpin-module/cpin.module' +import { IamModule } from './modules/iam/iam.module' +import { KeycloakModule } from './modules/keycloak/keycloak.module' +import { ArgoCDModule } from './modules/argocd/argocd.module' +import { GitlabModule } from './modules/gitlab/gitlab.module' +import { VaultModule } from './modules/vault/vault.module' // This module only exists to import other module. // « One module to rule them all, and in NestJs bind them » @Module({ - imports: [CpinModule], + imports: [ + CpinModule, + IamModule, + KeycloakModule, + ArgoCDModule, + GitlabModule, + VaultModule, + EventEmitterModule.forRoot(), + ScheduleModule.forRoot(), + ], controllers: [], providers: [], }) diff --git a/apps/server-nestjs/src/modules/argocd/argocd-controller.service.spec.ts b/apps/server-nestjs/src/modules/argocd/argocd-controller.service.spec.ts new file mode 100644 index 000000000..ab35567c3 --- /dev/null +++ b/apps/server-nestjs/src/modules/argocd/argocd-controller.service.spec.ts @@ -0,0 +1,169 @@ +import { describe, it, expect, beforeEach, vi } from 'vitest' +import type { Mocked } from 'vitest' +import { load } from 'js-yaml' +import { ArgoCDControllerService } from './argocd-controller.service' +import type { ArgoCDDatastoreService, ProjectWithDetails } from './argocd-datastore.service' +import type { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import type { GitlabService } from '../gitlab/gitlab.service' +import type { VaultService } from 
'../vault/vault.service' + +const mockArgoCDDatastore = { + getAllProjects: vi.fn(), +} as unknown as Mocked<ArgoCDDatastoreService> + +const mockConfigService = { + keycloakControllerPurgeOrphans: true, + argoNamespace: 'argocd', + argocdUrl: 'http://argocd', + argocdExtraRepositories: 'repo3', + dsoEnvChartVersion: 'dso-env-1.6.0', + dsoNsChartVersion: 'dso-ns-1.1.5', +} as unknown as Mocked<ConfigurationService> + +const mockGitlabService = { + getOrCreateInfraProject: vi.fn(), + getPublicGroupUrl: vi.fn(), + getPublicRepoUrl: vi.fn(), + commitCreateOrUpdate: vi.fn(), + commitDelete: vi.fn(), + listFiles: vi.fn(), +} as unknown as Mocked<GitlabService> + +const mockVaultService = { + getProjectValues: vi.fn(), +} as unknown as Mocked<VaultService> + +describe('argoCDControllerService', () => { + let service: ArgoCDControllerService + let datastore: Mocked<ArgoCDDatastoreService> + let gitlabService: Mocked<GitlabService> + let vaultService: Mocked<VaultService> + + beforeEach(() => { + service = new ArgoCDControllerService( + mockArgoCDDatastore, + mockConfigService, + mockGitlabService, + mockVaultService, + ) + datastore = mockArgoCDDatastore + gitlabService = mockGitlabService + vaultService = mockVaultService + vi.clearAllMocks() + }) + + it('should be defined', () => { + expect(service).toBeDefined() + }) + + describe('reconcile', () => { + it('should sync project environments', async () => { + const mockProject = { + id: '123e4567-e89b-12d3-a456-426614174000', + slug: 'project-1', + name: 'Project 1', + environments: [ + { id: '123e4567-e89b-12d3-a456-426614174001', name: 'dev', clusterId: 'c1', cpu: 1, gpu: 0, memory: 1, autosync: true }, + { id: '123e4567-e89b-12d3-a456-426614174002', name: 'prod', clusterId: 'c1', cpu: 1, gpu: 0, memory: 1, autosync: true }, + ], + clusters: [ + { id: 'c1', label: 'cluster-1', zone: { slug: 'zone-1' } }, + ], + repositories: [ + { + id: 'repo-1', + internalRepoName: 'infra-repo', + url: 'http://gitlab/infra-repo', + isInfra: true, + }, + ], + plugins: [{ pluginName: 'argocd', key: 'extraRepositories', value: 'repo2' }], + } as unknown as 
ProjectWithDetails + + datastore.getAllProjects.mockResolvedValue([mockProject]) + gitlabService.getOrCreateInfraProject.mockResolvedValue({ id: 100, http_url_to_repo: 'http://gitlab/infra' }) + gitlabService.getPublicGroupUrl.mockResolvedValue('http://gitlab/group') + gitlabService.getPublicRepoUrl.mockResolvedValue('http://gitlab/infra-repo') + gitlabService.listFiles.mockResolvedValue([]) + vaultService.getProjectValues.mockResolvedValue({ secret: 'value' }) + + const results = await (service as any).reconcile() + + expect(results).toHaveLength(3) // 2 envs + 1 cleanup (1 zone) + + // Verify Gitlab calls + expect(gitlabService.commitCreateOrUpdate).toHaveBeenCalledTimes(2) + + const calls = gitlabService.commitCreateOrUpdate.mock.calls + const devCall = calls.find(c => c[2] === 'Project 1/cluster-1/dev/values.yaml') + expect(devCall).toBeDefined() + + const content = load(devCall![1]) as any + expect(content).toMatchObject({ + common: { + 'dso/project': 'Project 1', + 'dso/project.slug': 'project-1', + 'dso/environment': 'dev', + }, + argocd: { + namespace: 'argocd', + project: expect.stringMatching(/^project-1-dev-[a-f0-9]{4}$/), + }, + environment: { + valueFileRepository: 'http://gitlab/infra', + valueFilePath: 'Project 1/cluster-1/dev/values.yaml', + roGroup: '/project-project-1/console/dev/RO', + rwGroup: '/project-project-1/console/dev/RW', + }, + application: { + quota: { + cpu: 1, + gpu: 0, + memory: '1Gi', + }, + sourceRepositories: expect.arrayContaining([ + expect.stringContaining('repo3'), + expect.stringContaining('repo2'), + expect.stringContaining('http://gitlab/group'), + ]), + destination: { + namespace: expect.any(String), + name: 'cluster-1', + }, + autosync: true, + vault: { secret: 'value' }, + repositories: [ + { + repoURL: 'http://gitlab/infra-repo', + targetRevision: 'HEAD', + path: '.', + valueFiles: [], + }, + ], + }, + }) + }) + + it('should handle errors gracefully', async () => { + const mockProject = { + id: 
'123e4567-e89b-12d3-a456-426614174000', + slug: 'project-1', + name: 'Project 1', + environments: [{ id: '123e4567-e89b-12d3-a456-426614174001', name: 'dev', clusterId: 'c1', cpu: 1, gpu: 0, memory: 1, autosync: true }], + clusters: [ + { id: 'c1', label: 'cluster-1', zone: { slug: 'zone-1' } }, + ], + } as unknown as ProjectWithDetails + + datastore.getAllProjects.mockResolvedValue([mockProject]) + gitlabService.getOrCreateInfraProject.mockRejectedValue(new Error('Sync failed')) + + const results = await (service as any).reconcile() + + // 1 env (fails) + 1 cleanup (fails because getOrCreateInfraProject fails) + expect(results).toHaveLength(2) + const failed = results.filter((r: any) => r.status === 'rejected') + expect(failed).toHaveLength(2) + }) + }) +}) diff --git a/apps/server-nestjs/src/modules/argocd/argocd-controller.service.ts b/apps/server-nestjs/src/modules/argocd/argocd-controller.service.ts new file mode 100644 index 000000000..9063765e8 --- /dev/null +++ b/apps/server-nestjs/src/modules/argocd/argocd-controller.service.ts @@ -0,0 +1,148 @@ +import type { OnModuleInit } from '@nestjs/common' +import { Injectable, Logger } from '@nestjs/common' +import { OnEvent } from '@nestjs/event-emitter' +import { Cron, CronExpression } from '@nestjs/schedule' +import { dump } from 'js-yaml' + +import type { ArgoCDDatastoreService, ProjectWithDetails } from './argocd-datastore.service' +import type { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import type { GitlabService } from '../gitlab/gitlab.service' +import type { VaultService } from '../vault/vault.service' +import { + formatEnvironmentValuesFilePath, + formatValues, + getDistinctZones, +} from './argocd.utils' + +@Injectable() +export class ArgoCDControllerService implements OnModuleInit { + private readonly logger = new Logger(ArgoCDControllerService.name) + + constructor( + private readonly argoCDDatastore: ArgoCDDatastoreService, + private readonly 
configService: ConfigurationService, + private readonly gitlabService: GitlabService, + private readonly vaultService: VaultService, + ) { + this.logger.log('ArgoCDControllerService initialized') + } + + onModuleInit() { + this.handleCron() + } + + @OnEvent('project.upsert') + async handleUpsert(project: ProjectWithDetails) { + this.logger.log(`Handling project upsert for ${project.slug}`) + return this.reconcile() + } + + @OnEvent('project.delete') + async handleDelete(project: ProjectWithDetails) { + this.logger.log(`Handling project delete for ${project.slug}`) + return this.reconcile() + } + + @Cron(CronExpression.EVERY_HOUR) + async handleCron() { + this.logger.log('Starting ArgoCD reconciliation') + await this.reconcile() + } + + private async reconcile() { + const projects = await this.argoCDDatastore.getAllProjects() + const results: PromiseSettledResult[] = [] + + const projectResults = await Promise.all(projects.map(async (project) => { + const pResults: PromiseSettledResult[] = [] + + const ensureResults = await Promise.allSettled( + project.environments.map(env => this.ensureValues(project, env)), + ) + pResults.push(...ensureResults) + + const cleanupResults = await this.cleanupStaleValues(project) + pResults.push(...cleanupResults) + + return pResults + })) + + results.push(...projectResults.flat()) + + results.forEach((result) => { + if (result.status === 'rejected') { + this.logger.error(`Reconciliation failed: ${result.reason}`) + } + }) + + return results + } + + private async cleanupStaleValues(project: ProjectWithDetails) { + const zones = getDistinctZones(project) + return Promise.allSettled(zones.map(async (zoneSlug) => { + const infraProject = await this.gitlabService.getOrCreateInfraProject(zoneSlug) + const existingFiles = await this.gitlabService.listFiles(infraProject.id, { + path: `${project.name}/`, + recursive: true, + }) + + const neededFiles = project.environments + .filter((env) => { + const cluster = project.clusters.find(c => c.id 
=== env.clusterId) + return cluster?.zone.slug === zoneSlug + }) + .map((env) => { + const cluster = project.clusters.find(c => c.id === env.clusterId)! + return formatEnvironmentValuesFilePath(project, cluster, env) + }) + + const filesToDelete: string[] = [] + for (const existingFile of existingFiles) { + if ( + existingFile.name === 'values.yaml' + && !neededFiles.includes(existingFile.path) + ) { + filesToDelete.push(existingFile.path) + } + } + + if (filesToDelete.length > 0) { + await this.gitlabService.commitDelete(infraProject.id, filesToDelete) + } + })) + } + + async ensureValues( + project: ProjectWithDetails, + environment: ProjectWithDetails['environments'][number], + ) { + const vaultValues = await this.vaultService.getProjectValues(project.id) + const cluster = project.clusters.find(c => c.id === environment.clusterId) + if (!cluster) throw new Error(`Cluster not found for environment ${environment.id}`) + + const infraProject = await this.gitlabService.getOrCreateInfraProject(cluster.zone.slug) + const valueFilePath = formatEnvironmentValuesFilePath(project, cluster, environment) + + const repo = project.repositories.find(r => r.isInfra) + if (!repo) throw new Error(`Infra repository not found for project ${project.id}`) + const repoUrl = await this.gitlabService.getPublicRepoUrl(repo.internalRepoName) + + const values = formatValues({ + project, + environment, + cluster, + gitlabPublicGroupUrl: await this.gitlabService.getPublicGroupUrl(), + argocdExtraRepositories: this.configService.argocdExtraRepositories, + infraProject, + valueFilePath, + repoUrl, + vaultValues, + argoNamespace: this.configService.argoNamespace, + envChartVersion: this.configService.dsoEnvChartVersion, + nsChartVersion: this.configService.dsoNsChartVersion, + }) + + await this.gitlabService.commitCreateOrUpdate(infraProject.id, dump(values), valueFilePath) + } +} diff --git a/apps/server-nestjs/src/modules/argocd/argocd-datastore.service.ts 
b/apps/server-nestjs/src/modules/argocd/argocd-datastore.service.ts new file mode 100644 index 000000000..c07b9aed5 --- /dev/null +++ b/apps/server-nestjs/src/modules/argocd/argocd-datastore.service.ts @@ -0,0 +1,65 @@ +import { Injectable, Logger } from '@nestjs/common' +import type { Prisma } from '@prisma/client' +import type { PrismaService } from '@/cpin-module/infrastructure/database/prisma.service' + +export const projectSelect = { + id: true, + name: true, + slug: true, + plugins: { + select: { + pluginName: true, + key: true, + value: true, + }, + }, + repositories: { + select: { + id: true, + internalRepoName: true, + isInfra: true, + helmValuesFiles: true, + deployRevision: true, + deployPath: true, + }, + }, + environments: { + select: { + id: true, + name: true, + clusterId: true, + cpu: true, + gpu: true, + memory: true, + autosync: true, + }, + }, + clusters: { + select: { + id: true, + label: true, + zone: { + select: { + slug: true, + }, + }, + }, + }, +} satisfies Prisma.ProjectSelect + +export type ProjectWithDetails = Prisma.ProjectGetPayload<{ + select: typeof projectSelect +}> + +@Injectable() +export class ArgoCDDatastoreService { + private readonly logger = new Logger(ArgoCDDatastoreService.name) + + constructor(private readonly prisma: PrismaService) {} + + async getAllProjects(): Promise<ProjectWithDetails[]> { + return this.prisma.project.findMany({ + select: projectSelect, + }) + } +} diff --git a/apps/server-nestjs/src/modules/argocd/argocd.module.ts b/apps/server-nestjs/src/modules/argocd/argocd.module.ts new file mode 100644 index 000000000..ee8080467 --- /dev/null +++ b/apps/server-nestjs/src/modules/argocd/argocd.module.ts @@ -0,0 +1,14 @@ +import { Module } from '@nestjs/common' +import { ArgoCDControllerService } from './argocd-controller.service' +import { ArgoCDDatastoreService } from './argocd-datastore.service' +import { ConfigurationModule } from '@/cpin-module/infrastructure/configuration/configuration.module' +import { InfrastructureModule } 
from '@/cpin-module/infrastructure/infrastructure.module' +import { GitlabModule } from '../gitlab/gitlab.module' +import { VaultModule } from '../vault/vault.module' + +@Module({ + imports: [ConfigurationModule, InfrastructureModule, GitlabModule, VaultModule], + providers: [ArgoCDControllerService, ArgoCDDatastoreService], + exports: [], +}) +export class ArgoCDModule {} diff --git a/apps/server-nestjs/src/modules/argocd/argocd.utils.ts b/apps/server-nestjs/src/modules/argocd/argocd.utils.ts new file mode 100644 index 000000000..d0086afb0 --- /dev/null +++ b/apps/server-nestjs/src/modules/argocd/argocd.utils.ts @@ -0,0 +1,237 @@ +import { createHmac } from 'node:crypto' +import { generateNamespaceName, inClusterLabel } from '@cpn-console/shared' +import type { ProjectWithDetails } from './argocd-datastore.service.js' + +export interface RepositoryValue { + repoURL: string + targetRevision: string + path: string + valueFiles: string[] +} + +export interface Values { + common: { + 'dso/project': string + 'dso/project.id': string + 'dso/project.slug': string + 'dso/environment': string + 'dso/environment.id': string + } + argocd: { + cluster: string + namespace: string + project: string + envChartVersion: string + nsChartVersion: string + } + environment: { + valueFileRepository: string + valueFileRevision: string + valueFilePath: string + roGroup: string + rwGroup: string + } + application: { + quota: { + cpu: number + gpu: number + memory: string + } + sourceRepositories: string[] + destination: { + namespace: string + name: string + } + autosync: boolean + vault: Record + repositories: RepositoryValue[] + } +} + +export function formatReadOnlyGroupName(projectSlug: string, environmentName: string) { + return `/project-${projectSlug}/console/${environmentName}/RO` +} + +export function formatReadWriteGroupName(projectSlug: string, environmentName: string) { + return `/project-${projectSlug}/console/${environmentName}/RW` +} + +export function 
formatAppProjectName(projectSlug: string, env: string) { + const envHash = createHmac('sha256', '') + .update(env) + .digest('hex') + .slice(0, 4) + return `${projectSlug}-${env}-${envHash}` +} + +export function formatEnvironmentValuesFilePath(project: { name: string }, cluster: { label: string }, env: { name: string }): string { + return `${project.name}/${cluster.label}/${env.name}/values.yaml` +} + +export function getDistinctZones(project: ProjectWithDetails) { + const zones = new Set() + project.clusters.forEach(c => zones.add(c.zone.slug)) + return Array.from(zones) +} + +export function splitExtraRepositories(extraRepositories: string | undefined): string[] { + if (!extraRepositories) return [] + return extraRepositories.split(',').map(r => r.trim()).filter(r => r.length > 0) +} + +export function formatRepositoriesValues( + repositories: ProjectWithDetails['repositories'], + repoUrl: string, + envName: string, +): Values['application']['repositories'] { + return repositories + .filter(repo => repo.isInfra) + .map((repository) => { + const valueFiles = splitExtraRepositories(repository.helmValuesFiles?.replaceAll('', envName)) + return { + repoURL: repoUrl, + targetRevision: repository.deployRevision || 'HEAD', + path: repository.deployPath || '.', + valueFiles, + } + }) +} + +export function formatEnvironmentValues( + infraProject: { http_url_to_repo: string }, + valueFilePath: string, + roGroup: string, + rwGroup: string, +): Values['environment'] { + return { + valueFileRepository: infraProject.http_url_to_repo, + valueFileRevision: 'HEAD', + valueFilePath, + roGroup, + rwGroup, + } +} + +export interface FormatSourceRepositoriesValuesOptions { + gitlabPublicGroupUrl: string + argocdExtraRepositories?: string + projectPlugins?: ProjectWithDetails['plugins'] +} + +export function formatSourceRepositoriesValues( + { gitlabPublicGroupUrl, argocdExtraRepositories, projectPlugins }: FormatSourceRepositoriesValuesOptions, +): string[] { + let 
projectExtraRepositories = '' + if (projectPlugins) { + const argocdPlugin = projectPlugins.find(p => p.pluginName === 'argocd' && p.key === 'extraRepositories') + if (argocdPlugin) projectExtraRepositories = argocdPlugin.value + } + + return [ + `${gitlabPublicGroupUrl}/**`, + ...splitExtraRepositories(argocdExtraRepositories), + ...splitExtraRepositories(projectExtraRepositories), + ] +} + +export interface FormatCommonOptions { + project: ProjectWithDetails + environment: ProjectWithDetails['environments'][number] +} + +export function formatCommon({ project, environment }: FormatCommonOptions) { + return { + 'dso/project': project.name, + 'dso/project.id': project.id, + 'dso/project.slug': project.slug, + 'dso/environment': environment.name, + 'dso/environment.id': environment.id, + } +} + +export interface FormatArgoCDValuesOptions { + namespace: string + project: string + envChartVersion: string + nsChartVersion: string +} + +export function formatArgoCDValues(options: FormatArgoCDValuesOptions) { + const { namespace, project, envChartVersion, nsChartVersion } = options + return { + cluster: inClusterLabel, + namespace, + project, + envChartVersion, + nsChartVersion, + } +} + +export interface FormatValuesOptions { + project: ProjectWithDetails + environment: ProjectWithDetails['environments'][number] + cluster: ProjectWithDetails['clusters'][number] + gitlabPublicGroupUrl: string + argocdExtraRepositories?: string + vaultValues: Record + infraProject: { http_url_to_repo: string } + valueFilePath: string + repoUrl: string + argoNamespace: string + envChartVersion: string + nsChartVersion: string +} + +export function formatValues({ + project, + environment, + cluster, + gitlabPublicGroupUrl, + argocdExtraRepositories, + vaultValues, + infraProject, + valueFilePath, + repoUrl, + argoNamespace, + envChartVersion, + nsChartVersion, +}: FormatValuesOptions): Values { + return { + common: formatCommon({ project, environment }), + argocd: formatArgoCDValues({ + 
namespace: argoNamespace, + project: formatAppProjectName(project.slug, environment.name), + envChartVersion, + nsChartVersion, + }), + environment: formatEnvironmentValues( + infraProject, + valueFilePath, + formatReadOnlyGroupName(project.slug, environment.name), + formatReadWriteGroupName(project.slug, environment.name), + ), + application: { + quota: { + cpu: environment.cpu, + gpu: environment.gpu, + memory: `${environment.memory}Gi`, + }, + sourceRepositories: formatSourceRepositoriesValues({ + gitlabPublicGroupUrl, + argocdExtraRepositories, + projectPlugins: project.plugins, + }), + destination: { + namespace: generateNamespaceName(project.id, environment.id), + name: cluster.label, + }, + autosync: environment.autosync, + vault: vaultValues, + repositories: formatRepositoriesValues( + project.repositories, + repoUrl, + environment.name, + ), + }, + } +} diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab-controller.service.spec.ts b/apps/server-nestjs/src/modules/gitlab/gitlab-controller.service.spec.ts new file mode 100644 index 000000000..facde2d0d --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab-controller.service.spec.ts @@ -0,0 +1,114 @@ +import { Test, TestingModule } from '@nestjs/testing' +import { GitlabControllerService } from './gitlab-controller.service' +import { GitlabService } from './gitlab.service' +import { GitlabDatastoreService } from './gitlab-datastore.service' +import { VaultService } from '../vault/vault.service' +import { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import { vi, describe, beforeEach, it, expect } from 'vitest' + +describe('GitlabControllerService', () => { + let service: GitlabControllerService + let gitlabService: GitlabService + let vaultService: VaultService + let gitlabDatastore: GitlabDatastoreService + + const mockProject = { + id: 'p1', + slug: 'project-1', + name: 'Project 1', + members: [], + repositories: [], + clusters: [], + } + + 
const mockConfigService = { + projectRootDir: 'forge/console', + } + + beforeEach(async () => { + const module: TestingModule = await Test.createTestingModule({ + providers: [ + GitlabControllerService, + { + provide: GitlabService, + useValue: { + getOrCreateProjectGroup: vi.fn(), + getGroupMembers: vi.fn(), + addGroupMember: vi.fn(), + removeGroupMember: vi.fn(), + findUserByEmail: vi.fn(), + createUser: vi.fn(), + listRepositories: vi.fn(), + createEmptyProjectRepository: vi.fn(), + getProjectToken: vi.fn(), + getPublicRepoUrl: vi.fn(), + commitCreateOrUpdate: vi.fn(), + deleteGroup: vi.fn(), + provisionMirror: vi.fn(), + getMirrorProjectTriggerToken: vi.fn(), + deleteRepository: vi.fn(), + createProjectToken: vi.fn(), + updateProject: vi.fn(), + }, + }, + { + provide: GitlabDatastoreService, + useValue: { + getAllProjects: vi.fn(), + }, + }, + { + provide: VaultService, + useValue: { + read: vi.fn(), + write: vi.fn(), + destroy: vi.fn(), + }, + }, + { + provide: ConfigurationService, + useValue: mockConfigService, + }, + ], + }).compile() + + service = module.get(GitlabControllerService) + gitlabService = module.get(GitlabService) + vaultService = module.get(VaultService) + gitlabDatastore = module.get(GitlabDatastoreService) + }) + + it('should be defined', () => { + expect(service).toBeDefined() + }) + + describe('handleUpsert', () => { + it('should reconcile project members and repositories', async () => { + // Mock data + const project = { ...mockProject } + const group = { id: 123, full_path: 'forge/console/project-1' } + + // Mock implementations + // @ts-ignore + gitlabService.getOrCreateProjectGroup.mockResolvedValue(group) + // @ts-ignore + gitlabService.getGroupMembers.mockResolvedValue([]) + // @ts-ignore + gitlabService.listRepositories.mockResolvedValue([]) + // @ts-ignore + gitlabService.getProjectToken.mockResolvedValue({ token: 'token' }) + // @ts-ignore + gitlabService.createEmptyProjectRepository.mockResolvedValue({ id: 1 }) + // @ts-ignore + 
gitlabService.getMirrorProjectTriggerToken.mockResolvedValue({ repoId: 1, token: 'trigger-token' }) + // @ts-ignore + vaultService.read.mockResolvedValue({ MIRROR_TOKEN: 'token' }) + + await service.handleUpsert(project as any) + + expect(gitlabService.getOrCreateProjectGroup).toHaveBeenCalledWith(project.slug) + expect(gitlabService.getGroupMembers).toHaveBeenCalledWith(group.id) + expect(gitlabService.listRepositories).toHaveBeenCalledWith(project.slug) + }) + }) +}) diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab-controller.service.ts b/apps/server-nestjs/src/modules/gitlab/gitlab-controller.service.ts new file mode 100644 index 000000000..64389b393 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab-controller.service.ts @@ -0,0 +1,215 @@ +import type { OnModuleInit } from '@nestjs/common' +import { Inject, Injectable, Logger } from '@nestjs/common' +import { OnEvent } from '@nestjs/event-emitter' +import { Cron, CronExpression } from '@nestjs/schedule' +import type { ProjectSchema } from '@gitbeaker/core' +import { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import { GitlabDatastoreService } from './gitlab-datastore.service' +import type { ProjectWithDetails } from './gitlab-datastore.service' +import { GitlabService } from './gitlab.service' +import { VaultService } from '../vault/vault.service' +import { infraAppsRepoName, internalMirrorRepoName, pluginManagedTopic } from './gitlab.utils' + +@Injectable() +export class GitlabControllerService implements OnModuleInit { + private readonly logger = new Logger(GitlabControllerService.name) + + constructor( + @Inject(GitlabDatastoreService) private readonly gitlabDatastore: GitlabDatastoreService, + @Inject(GitlabService) private readonly gitlabService: GitlabService, + @Inject(VaultService) private readonly vaultService: VaultService, + @Inject(ConfigurationService) private readonly configService: ConfigurationService, + ) { + 
this.logger.log('GitlabControllerService initialized') + } + + onModuleInit() { + this.handleCron() + } + + @OnEvent('project.upsert') + async handleUpsert(project: ProjectWithDetails) { + this.logger.log(`Handling project upsert for ${project.slug}`) + return this.reconcileProject(project) + } + + @OnEvent('project.delete') + async handleDelete(project: ProjectWithDetails) { + this.logger.log(`Handling project delete for ${project.slug}`) + const group = await this.gitlabService.getProjectGroup(project.slug) + if (group) { + await this.gitlabService.deleteGroup(group.id) + } + } + + @Cron(CronExpression.EVERY_HOUR) + async handleCron() { + this.logger.log('Starting Gitlab reconciliation') + const projects = await this.gitlabDatastore.getAllProjects() + const results = await Promise.allSettled(projects.map(p => this.reconcileProject(p))) + results.forEach((result) => { + if (result.status === 'rejected') { + this.logger.error(`Reconciliation failed: ${result.reason}`) + } + }) + } + + private async reconcileProject(project: ProjectWithDetails) { + try { + await this.gitlabService.getOrCreateProjectGroup(project.slug) + await this.ensureMembers(project) + await this.ensureRepositories(project) + } catch (error) { + this.logger.error(`Failed to reconcile project ${project.slug}: ${error}`) + throw error + } + } + + private async ensureMembers(project: ProjectWithDetails) { + const group = await this.gitlabService.getOrCreateProjectGroup(project.slug) + const currentMembers = await this.gitlabService.getGroupMembers(group.id) + const projectUsers = project.members.map(m => m.user) + + // Upsert users + const gitlabUsers = await Promise.all(projectUsers.map(async (user) => { + let gitlabUser = await this.gitlabService.findUserByEmail(user.email) + const username = user.email.split('@')[0] + + if (!gitlabUser) { + // Create user if not found. Note: In real env, might depend on SSO. + // But plugin does create it. 
+ // Using dummy password as in plugin logic (or service logic I added) + try { + gitlabUser = await this.gitlabService.createUser(user.email, username, `${user.firstName} ${user.lastName}`) + } catch (e) { + this.logger.warn(`Failed to create user ${user.email}: ${e}`) + return null + } + } + return { ...user, gitlabId: gitlabUser.id } + })) + + const validGitlabUsers = gitlabUsers.filter(u => u !== null) + + // Add missing members + for (const user of validGitlabUsers) { + if (!currentMembers.find(m => m.id === user.gitlabId)) { + // Access level 30 = Developer. Plugin uses Developer by default. + // TODO: Check permissions/roles if needed. + await this.gitlabService.addGroupMember(group.id, user.gitlabId, 30) + } + } + + // Remove extra members + for (const member of currentMembers) { + // Ignore bots + if (member.username.match(/group_\d+_bot/)) continue + // Ignore root/admin if needed? Plugin ignores root (id 1) in checkApi but ensureMembers just checks against project users. + + if (!validGitlabUsers.find(u => u.gitlabId === member.id)) { + await this.gitlabService.removeGroupMember(group.id, member.id) + } + } + } + + private async ensureRepositories(project: ProjectWithDetails) { + const gitlabRepositories = await this.gitlabService.listRepositories(project.slug) + const projectMirrorCreds = await this.getProjectMirrorCreds(project.slug) + await this.syncProjectRepositories(project, gitlabRepositories, projectMirrorCreds) + await this.ensureSpecialRepositories(project, gitlabRepositories) + } + + private async syncProjectRepositories(project: ProjectWithDetails, gitlabRepositories: ProjectSchema[], projectMirrorCreds: { MIRROR_USER: string, MIRROR_TOKEN: string }) { + for (const repo of project.repositories) { + let gitlabRepo = gitlabRepositories.find(r => r.name === repo.internalRepoName) + if (!gitlabRepo) { + gitlabRepo = await this.gitlabService.createEmptyProjectRepository( + project.slug, + repo.internalRepoName, + undefined, + 
!!repo.externalRepoUrl, + ) + } + + // Handle Vault secrets for mirroring + if (repo.externalRepoUrl) { + const vaultCredsPath = `${this.configService.projectRootDir}/${project.slug}/${repo.internalRepoName}-mirror` + const currentVaultSecret = await this.vaultService.read(vaultCredsPath) + + const internalRepoUrl = await this.gitlabService.getPublicRepoUrl(repo.internalRepoName) + // Service getPublicRepoUrl returns config.gitlabUrl/... + // Service needs getInternalRepoUrl returning config.gitlabInternalUrl/... + // I should add getInternalRepoUrl to GitlabService or use config directly. + // But wait, GitlabService has getPublicRepoUrl. + // Plugin uses getInternalRepoUrl for mirroring. + + // Let's assume for now we use what we have or add it. + // I'll add getInternalRepoUrl to GitlabService later or now. + // For now, let's construct it or assume public is fine for logic structure. + + const externalRepoUrn = repo.externalRepoUrl.split(/:\/\/(.*)/s)[1] + const internalRepoUrn = internalRepoUrl.split(/:\/\/(.*)/s)[1] // Hacky + + const mirrorSecretData = { + GIT_INPUT_URL: externalRepoUrn, + GIT_INPUT_USER: repo.isPrivate ? 
repo.externalUserName : undefined, + GIT_INPUT_PASSWORD: currentVaultSecret?.GIT_INPUT_PASSWORD, // Preserve existing password as it's not in DB + GIT_OUTPUT_URL: internalRepoUrn, + GIT_OUTPUT_USER: projectMirrorCreds.MIRROR_USER, + GIT_OUTPUT_PASSWORD: projectMirrorCreds.MIRROR_TOKEN, + } + + // Write to vault if changed + // Using simplified check + await this.vaultService.write(mirrorSecretData, vaultCredsPath) + } else { + // If no external URL, destroy secret if exists + const vaultCredsPath = `${this.configService.projectRootDir}/${project.slug}/${repo.internalRepoName}-mirror` + await this.vaultService.destroy(vaultCredsPath) + } + } + } + + private async ensureSpecialRepositories(project: ProjectWithDetails, gitlabRepositories: ProjectSchema[]) { + // Ensure special repos + if (!gitlabRepositories.find(r => r.name === infraAppsRepoName)) { + await this.gitlabService.createEmptyProjectRepository(project.slug, infraAppsRepoName, undefined, false) + } + + const mirrorRepo = gitlabRepositories.find(r => r.name === internalMirrorRepoName) + if (!mirrorRepo) { + const newMirrorRepo = await this.gitlabService.createEmptyProjectRepository(project.slug, internalMirrorRepoName, undefined, false) + await this.gitlabService.provisionMirror(newMirrorRepo.id) + } + + // Setup Trigger Token for mirror repo + const triggerToken = await this.gitlabService.getMirrorProjectTriggerToken(project.slug) + const gitlabSecret = { + PROJECT_SLUG: project.slug, + GIT_MIRROR_PROJECT_ID: triggerToken.repoId, + GIT_MIRROR_TOKEN: triggerToken.token, + } + await this.vaultService.write(gitlabSecret, 'GITLAB') + } + + private async getProjectMirrorCreds(projectSlug: string) { + const tokenName = `${projectSlug}-bot` + const currentToken = await this.gitlabService.getProjectToken(projectSlug, tokenName) + const vaultPath = `${this.configService.projectRootDir}/${projectSlug}/tech/GITLAB_MIRROR` + + if (currentToken) { + const vaultSecret = await this.vaultService.read(vaultPath) + // Verify 
if token works? Plugin does. + // For simplicity, return from vault if exists. + if (vaultSecret) return vaultSecret as unknown as { MIRROR_USER: string, MIRROR_TOKEN: string } + } + + const newToken = await this.gitlabService.createProjectToken(projectSlug, tokenName, ['write_repository', 'read_repository', 'read_api']) + const creds = { + MIRROR_USER: newToken.name, + MIRROR_TOKEN: newToken.token, + } + await this.vaultService.write(creds, vaultPath) + return creds + } +} diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.spec.ts b/apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.spec.ts new file mode 100644 index 000000000..cdf54350f --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.spec.ts @@ -0,0 +1,30 @@ +import { Test, type TestingModule } from '@nestjs/testing' +import { GitlabDatastoreService } from './gitlab-datastore.service' +import { PrismaService } from '@/cpin-module/infrastructure/database/prisma.service' +import { mockDeep } from 'vitest-mock-extended' + +describe('gitlabDatastoreService', () => { + let service: GitlabDatastoreService + const prismaMock = mockDeep() + + beforeEach(async () => { + const module: TestingModule = await Test.createTestingModule({ + providers: [ + GitlabDatastoreService, + { provide: PrismaService, useValue: prismaMock }, + ], + }).compile() + + service = module.get(GitlabDatastoreService) + }) + + it('should be defined', () => { + expect(service).toBeDefined() + }) + + it('should get user', async () => { + const user = { id: 'user-id' } + prismaMock.user.findUnique.mockResolvedValue(user as any) + await expect(service.getUser('user-id')).resolves.toEqual(user) + }) +}) diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.ts b/apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.ts new file mode 100644 index 000000000..0b39e0b73 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.ts @@ 
-0,0 +1,75 @@
import { Injectable, Logger } from '@nestjs/common'
import type { Prisma } from '@prisma/client'
// PrismaService must be a *value* import: Nest resolves constructor injection
// via emitDecoratorMetadata, and `import type` is erased at compile time,
// leaving no runtime token — DI for this provider would fail at bootstrap.
import { PrismaService } from '@/cpin-module/infrastructure/database/prisma.service'

/** Prisma selection shared by every project query of this datastore. */
export const projectSelect = {
  id: true,
  name: true,
  slug: true,
  description: true,
  members: {
    select: {
      user: {
        select: {
          id: true,
          email: true,
          firstName: true,
          lastName: true,
        },
      },
    },
  },
  repositories: {
    select: {
      id: true,
      internalRepoName: true,
      isInfra: true,
      isPrivate: true,
      externalRepoUrl: true,
      externalUserName: true,
    },
  },
  clusters: {
    select: {
      id: true,
      label: true,
      zone: {
        select: {
          slug: true,
        },
      },
    },
  },
} satisfies Prisma.ProjectSelect

/** Project shape produced by projectSelect. */
export type ProjectWithDetails = Prisma.ProjectGetPayload<{
  select: typeof projectSelect
}>

/** Read-only database access used by the gitlab reconciliation controller. */
@Injectable()
export class GitlabDatastoreService {
  private readonly logger = new Logger(GitlabDatastoreService.name)

  constructor(private readonly prisma: PrismaService) {}

  /** All projects with the details needed for gitlab reconciliation. */
  async getAllProjects(): Promise<ProjectWithDetails[]> {
    return this.prisma.project.findMany({
      select: projectSelect,
    })
  }

  /** One project by id, or null when unknown. */
  async getProject(id: string): Promise<ProjectWithDetails | null> {
    return this.prisma.project.findUnique({
      where: { id },
      select: projectSelect,
    })
  }

  /** One user by id, or null when unknown. */
  async getUser(id: string) {
    return this.prisma.user.findUnique({
      where: {
        id,
      },
    })
  }
}
diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab.module.ts b/apps/server-nestjs/src/modules/gitlab/gitlab.module.ts
new file mode 100644
index 000000000..fa084c26c
--- /dev/null
+++ b/apps/server-nestjs/src/modules/gitlab/gitlab.module.ts
@@ -0,0 +1,8 @@
import { Module } from '@nestjs/common'
import { GitlabService } from './gitlab.service'

/** Exposes GitlabService to other feature modules. */
@Module({
  providers: [GitlabService],
  exports: [GitlabService],
})
export class GitlabModule {}
diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab.service.spec.ts b/apps/server-nestjs/src/modules/gitlab/gitlab.service.spec.ts
new file mode 100644
index
000000000..d2c33025e --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab.service.spec.ts @@ -0,0 +1,312 @@ +import { Test, type TestingModule } from '@nestjs/testing' +import { GitlabService } from './gitlab.service' +import { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import { vi, describe, it, expect, beforeEach } from 'vitest' + +// Mock gitbeaker +vi.mock('@gitbeaker/rest', () => { + return { + Gitlab: vi.fn().mockImplementation(() => ({ + Groups: { + all: vi.fn(), + create: vi.fn(), + show: vi.fn(), + allSubgroups: vi.fn(), + remove: vi.fn(), + }, + GroupMembers: { + all: vi.fn(), + add: vi.fn(), + remove: vi.fn(), + }, + Projects: { + all: vi.fn(), + create: vi.fn(), + show: vi.fn(), + remove: vi.fn(), + }, + Commits: { + create: vi.fn(), + }, + RepositoryFiles: { + show: vi.fn(), + }, + Repositories: { + allRepositoryTrees: vi.fn(), + }, + GroupAccessTokens: { + all: vi.fn(), + create: vi.fn(), + revoke: vi.fn(), + }, + PipelineTriggerTokens: { + all: vi.fn(), + create: vi.fn(), + remove: vi.fn(), + }, + Users: { + all: vi.fn(), + create: vi.fn(), + }, + })), + } +}) + +describe('gitlabService', () => { + let service: GitlabService + // let configService: ConfigurationService + let gitlabMock: any + + const mockConfigService = { + gitlabUrl: 'https://gitlab.example.com', + gitlabToken: 'token', + gitlabInternalUrl: 'https://gitlab.internal.example.com', + projectRootDir: 'forge/console', + } + + beforeEach(async () => { + vi.clearAllMocks() + + const module: TestingModule = await Test.createTestingModule({ + providers: [ + GitlabService, + { + provide: ConfigurationService, + useValue: mockConfigService, + }, + ], + }).compile() + + service = module.get(GitlabService) + // configService = module.get(ConfigurationService) + + // Initialize module (calls onModuleInit) + service.onModuleInit() + gitlabMock = (service as any).api + }) + + it('should be defined', () => { + 
expect(service).toBeDefined() + }) + + describe('getOrCreateInfraProject', () => { + it('should create infra project if not exists', async () => { + const zoneSlug = 'zone-1' + const rootId = 123 + const infraGroupId = 456 + const projectId = 789 + + // Mock getGroupRootId logic + gitlabMock.Groups.all.mockResolvedValueOnce({ + data: [{ id: rootId, full_path: 'forge/console' }], + paginationInfo: { next: null }, + }) + + // Mock Groups.show (root) + gitlabMock.Groups.show.mockResolvedValueOnce({ id: rootId, full_path: 'forge/console' }) + + // Mock find infra group (not found first) + gitlabMock.Groups.all.mockResolvedValueOnce({ + data: [], + paginationInfo: { next: null }, + }) + + // Mock create infra group + gitlabMock.Groups.create.mockResolvedValue({ id: infraGroupId, full_path: 'forge/console/infra' }) + + // Mock find project (not found) + gitlabMock.Projects.all.mockResolvedValueOnce({ + data: [], + paginationInfo: { next: null }, + }) + + // Mock create project + gitlabMock.Projects.create.mockResolvedValue({ + id: projectId, + path_with_namespace: 'forge/console/infra/zone-1', + http_url_to_repo: 'http://gitlab/repo.git', + }) + + const result = await service.getOrCreateInfraProject(zoneSlug) + + expect(result).toEqual({ id: projectId, http_url_to_repo: 'http://gitlab/repo.git' }) + expect(gitlabMock.Groups.create).toHaveBeenCalledWith('infra', 'infra', expect.any(Object)) + expect(gitlabMock.Projects.create).toHaveBeenCalledWith(expect.objectContaining({ + name: zoneSlug, + path: zoneSlug, + namespaceId: infraGroupId, + })) + }) + }) + + describe('commitCreateOrUpdate', () => { + it('should stage creation if file not exists', async () => { + const repoId = 1 + const content = 'content' + const filePath = 'file.txt' + + gitlabMock.RepositoryFiles.show.mockRejectedValue(new Error('Not found')) + + await service.commitCreateOrUpdate(repoId, content, filePath) + + expect(gitlabMock.Commits.create).not.toHaveBeenCalled() + }) + + it('should stage update if 
content differs', async () => { + const repoId = 1 + const content = 'new content' + const filePath = 'file.txt' + const oldHash = 'oldhash' + + gitlabMock.RepositoryFiles.show.mockResolvedValue({ + content_sha256: oldHash, + }) + + await service.commitCreateOrUpdate(repoId, content, filePath) + + expect(gitlabMock.Commits.create).not.toHaveBeenCalled() + }) + + it('should do nothing if content matches', async () => { + const repoId = 1 + const content = 'content' + const filePath = 'file.txt' + const hash = 'ed7002b439e9ac845f22357d822bac1444730fbdb6016d3ec9432297b9ec9f73' // sha256 of 'content' + + gitlabMock.RepositoryFiles.show.mockResolvedValue({ + content_sha256: hash, + }) + + await service.commitCreateOrUpdate(repoId, content, filePath) + + expect(gitlabMock.Commits.create).not.toHaveBeenCalled() + }) + }) + + describe('commitFiles', () => { + it('should commit staged files', async () => { + const repoId = 1 + const content = 'content' + const filePath = 'file.txt' + + gitlabMock.RepositoryFiles.show.mockRejectedValue(new Error('Not found')) + await service.commitCreateOrUpdate(repoId, content, filePath) + await service.commitFiles() + + expect(gitlabMock.Commits.create).toHaveBeenCalledWith( + repoId, + 'main', + expect.stringContaining('Update 1 file'), + [{ action: 'create', filePath, content }], + ) + }) + + it('should not commit if nothing staged', async () => { + await service.commitFiles() + expect(gitlabMock.Commits.create).not.toHaveBeenCalled() + }) + }) + + describe('getOrCreateProjectGroup', () => { + it('should create project group if not exists', async () => { + const projectSlug = 'project-1' + const rootId = 123 + const groupId = 456 + + gitlabMock.Groups.all.mockResolvedValueOnce({ + data: [{ id: rootId, full_path: 'forge/console' }], + paginationInfo: { next: null }, + }) + gitlabMock.Groups.show.mockResolvedValueOnce({ id: rootId, full_path: 'forge/console' }) + gitlabMock.Groups.all.mockResolvedValueOnce({ + data: [], + paginationInfo: { 
next: null }, + }) + gitlabMock.Groups.create.mockResolvedValue({ id: groupId, name: projectSlug }) + + const result = await service.getOrCreateProjectGroup(projectSlug) + + expect(result).toEqual({ id: groupId, name: projectSlug }) + expect(gitlabMock.Groups.create).toHaveBeenCalledWith(projectSlug, projectSlug, expect.objectContaining({ + parentId: rootId, + })) + }) + + it('should return existing group', async () => { + const projectSlug = 'project-1' + const rootId = 123 + const groupId = 456 + + gitlabMock.Groups.all.mockResolvedValueOnce({ + data: [{ id: rootId, full_path: 'forge/console' }], + paginationInfo: { next: null }, + }) + gitlabMock.Groups.show.mockResolvedValueOnce({ id: rootId, full_path: 'forge/console' }) + gitlabMock.Groups.all.mockResolvedValueOnce({ + data: [{ id: groupId, name: projectSlug, parent_id: rootId }], + paginationInfo: { next: null }, + }) + + const result = await service.getOrCreateProjectGroup(projectSlug) + + expect(result).toEqual({ id: groupId, name: projectSlug, parent_id: rootId }) + expect(gitlabMock.Groups.create).not.toHaveBeenCalled() + }) + }) + + describe('group Members', () => { + it('should get group members', async () => { + const groupId = 1 + const members = [{ id: 1, name: 'user' }] + gitlabMock.GroupMembers.all.mockResolvedValue(members) + + const result = await service.getGroupMembers(groupId) + expect(result).toEqual(members) + expect(gitlabMock.GroupMembers.all).toHaveBeenCalledWith(groupId) + }) + + it('should add group member', async () => { + const groupId = 1 + const userId = 2 + const accessLevel = 30 + gitlabMock.GroupMembers.add.mockResolvedValue({ id: userId }) + + await service.addGroupMember(groupId, userId, accessLevel) + expect(gitlabMock.GroupMembers.add).toHaveBeenCalledWith(groupId, userId, accessLevel) + }) + + it('should remove group member', async () => { + const groupId = 1 + const userId = 2 + gitlabMock.GroupMembers.remove.mockResolvedValue(true) + + await 
service.removeGroupMember(groupId, userId) + expect(gitlabMock.GroupMembers.remove).toHaveBeenCalledWith(groupId, userId) + }) + }) + + describe('createEmptyProjectRepository', () => { + it('should create repository and first commit', async () => { + const projectSlug = 'project-1' + const repoName = 'repo-1' + const groupId = 456 + const projectId = 789 + + // Mock getOrCreateProjectGroup + gitlabMock.Groups.all.mockResolvedValueOnce({ data: [{ id: 123 }] }) // root + gitlabMock.Groups.show.mockResolvedValueOnce({ id: 123 }) + gitlabMock.Groups.all.mockResolvedValueOnce({ data: [{ id: groupId, name: projectSlug, parent_id: 123 }] }) + + gitlabMock.Projects.create.mockResolvedValue({ id: projectId }) + + await service.createEmptyProjectRepository(projectSlug, repoName) + + expect(gitlabMock.Projects.create).toHaveBeenCalledWith(expect.objectContaining({ + name: repoName, + path: repoName, + namespaceId: groupId, + })) + expect(gitlabMock.Commits.create).toHaveBeenCalledWith(projectId, 'main', expect.any(String), []) + }) + }) +}) diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab.service.ts b/apps/server-nestjs/src/modules/gitlab/gitlab.service.ts new file mode 100644 index 000000000..a9596a582 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab.service.ts @@ -0,0 +1,427 @@ +import type { OnModuleInit } from '@nestjs/common' +import { Inject, Injectable, Logger } from '@nestjs/common' +import { Gitlab } from '@gitbeaker/rest' +import type { AccessTokenScopes, CommitAction, Gitlab as IGitlab, PaginationRequestOptions, BaseRequestOptions, OffsetPagination, GroupSchema, ProjectSchema } from '@gitbeaker/core' +import { GitbeakerRequestError } from '@gitbeaker/requester-utils' +import { createHash } from 'node:crypto' +import { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import { generateGitLabCIConfig, generateMirrorScript, internalMirrorRepoName } from './gitlab.utils' + +@Injectable() +export 
class GitlabService implements OnModuleInit {
  private readonly logger = new Logger(GitlabService.name)
  private api: IGitlab
  // Cached id of the console's root group; resolved lazily by getGroupRootId.
  private groupRootId: number | undefined

  constructor(
    @Inject(ConfigurationService) private readonly configService: ConfigurationService,
  ) {}

  /** Build the gitbeaker client once configuration is available. */
  onModuleInit() {
    this.api = new Gitlab({
      token: this.configService.gitlabToken,
      host: this.configService.gitlabInternalUrl,
    })
  }

  /** Public (user-facing) URL of the console's root group. */
  public async getPublicGroupUrl(): Promise<string> {
    const rootId = await this.getGroupRootId()
    const group = await this.api.Groups.show(rootId)
    return `${this.configService.gitlabUrl}/${group.full_path}`
  }

  /** Public clone URL of a repository under the root group's infra subgroup. */
  // NOTE(review): the hard-coded `/infra/` segment means this only resolves
  // repos under the infra subgroup, not per-project repos — confirm intended.
  public async getPublicRepoUrl(internalRepoName: string): Promise<string> {
    const rootId = await this.getGroupRootId()
    const group = await this.api.Groups.show(rootId)
    return `${this.configService.gitlabUrl}/${group.full_path}/infra/${internalRepoName}.git`
  }

  /** Cluster-internal clone URL of the same repository (used for mirroring). */
  public async getInternalRepoUrl(internalRepoName: string): Promise<string> {
    const rootId = await this.getGroupRootId()
    const group = await this.api.Groups.show(rootId)
    return `${this.configService.gitlabInternalUrl}/${group.full_path}/infra/${internalRepoName}.git`
  }

  /** Get or create the `<root>/infra/<zoneSlug>` project for a zone. */
  public async getOrCreateInfraProject(zoneSlug: string): Promise<{ id: number, http_url_to_repo: string }> {
    const rootId = await this.getGroupRootId()
    const infraGroupName = 'infra'

    const rootGroup = await this.api.Groups.show(rootId)
    const infraGroupPath = `${rootGroup.full_path}/${infraGroupName}`

    // Search is fuzzy: match on the exact full path, not just the name.
    let infraGroup = await this.find(
      this.offsetPaginate(opts => this.api.Groups.all({
        search: infraGroupName,
        ...opts,
      })),
      g => g.full_path === infraGroupPath,
    )

    if (!infraGroup) {
      infraGroup = await this.api.Groups.create(infraGroupName, infraGroupName, {
        parentId: rootId,
        // NOTE(review): confirm 'public' visibility is intended for infra groups.
        visibility: 'public',
      })
    }

    const projectPath = zoneSlug
    const projectFullPath = `${infraGroupPath}/${projectPath}`

    let project = await this.find(
      this.offsetPaginate(opts => this.api.Projects.all({
        search: projectPath,
        ...opts,
      })),
      p => p.path_with_namespace === projectFullPath,
    )

    if (!project) {
      project = await this.api.Projects.create({
        name: projectPath,
        path: projectPath,
        namespaceId: infraGroup.id,
        // NOTE(review): confirm 'public' visibility is intended here too.
        visibility: 'public',
      })

      // The empty commit bootstraps the default branch; ignore failures
      // (e.g. branch already initialized).
      try {
        await this.api.Commits.create(project.id, 'main', 'ci: 🌱 First commit', [])
      } catch (_error) {
        // best-effort: default branch may already exist
      }
    }

    return {
      id: project.id,
      http_url_to_repo: project.http_url_to_repo,
    }
  }

  /**
   * Create or update a single file on main, skipping the commit entirely when
   * the stored content already matches (compared by sha256).
   */
  public async commitCreateOrUpdate(
    repoId: number,
    content: string,
    filePath: string,
    message: string = 'ci: :robot_face: Update file content',
  ): Promise<void> {
    const branch = 'main'
    let action: CommitAction['action'] = 'create'

    try {
      const file = await this.api.RepositoryFiles.show(repoId, filePath, branch)
      const newContentDigest = createHash('sha256').update(content).digest('hex')
      if (file.content_sha256 === newContentDigest) {
        return // Already up to date
      }
      action = 'update'
    } catch (_error) {
      // NOTE(review): any error (including transient network failures) is
      // treated as "file missing" and triggers a create — confirm acceptable.
    }

    await this.api.Commits.create(repoId, branch, message, [{
      action,
      filePath,
      content,
    }])
  }

  /** Delete the given paths from main in a single commit (no-op when empty). */
  public async commitDelete(repoId: number, paths: string[]): Promise<void> {
    if (paths.length === 0) return
    const branch = 'main'
    const actions: CommitAction[] = paths.map(path => ({
      action: 'delete',
      filePath: path,
    }))

    await this.api.Commits.create(repoId, branch, 'ci: :robot_face: Delete files', actions)
  }

  /** List the repository tree on main; a missing repo/path yields []. */
  public async listFiles(repoId: number, options: { path?: string, recursive?: boolean } = {}): Promise<Array<{ name: string, path: string, type: string }>> {
    try {
      const files = await this.api.Repositories.allRepositoryTrees(repoId, {
        path: options.path ?? '/',
        recursive: options.recursive ?? false,
        ref: 'main',
      })
      return files.map(f => ({
        name: f.name,
        path: f.path,
        type: f.type,
      }))
    } catch (error) {
      // A missing repo/branch/path surfaces as a 404. A single 'Not Found'
      // substring check suffices: the previous extra '404 Tree Not Found'
      // branch was unreachable (already matched by this one).
      if (error instanceof GitbeakerRequestError && error.cause?.description?.includes('Not Found')) {
        return []
      }
      throw error
    }
  }

  // --- Project Management ---

  /** Get or create the project's subgroup directly under the root group. */
  public async getOrCreateProjectGroup(projectSlug: string): Promise<GroupSchema> {
    const parentId = await this.getGroupRootId()
    const existingGroup = await this.find(
      this.offsetPaginate(opts => this.api.Groups.all({
        search: projectSlug,
        ...opts,
      })),
      g => g.parent_id === parentId && g.name === projectSlug,
    )

    if (existingGroup) return existingGroup

    return this.api.Groups.create(projectSlug, projectSlug, {
      parentId,
      projectCreationLevel: 'maintainer',
      subgroupCreationLevel: 'owner',
      defaultBranchProtection: 0,
    })
  }

  /** Find the project's subgroup without creating it; undefined when absent. */
  public async getProjectGroup(projectSlug: string): Promise<GroupSchema | undefined> {
    const parentId = await this.getGroupRootId()
    return this.find(
      this.offsetPaginate(opts => this.api.Groups.allSubgroups(parentId, opts)),
      g => g.name === projectSlug,
    )
  }

  /** Delete a group and everything beneath it. */
  public async deleteGroup(groupId: number): Promise<void> {
    await this.api.Groups.remove(groupId)
  }

  // --- Members ---

  public async getGroupMembers(groupId: number) {
    return this.api.GroupMembers.all(groupId)
  }

  public async addGroupMember(groupId: number, userId: number, accessLevel: number) {
    return this.api.GroupMembers.add(groupId, userId, accessLevel)
  }

  public async removeGroupMember(groupId: number, userId: number) {
    return this.api.GroupMembers.remove(groupId, userId)
  }

  /** First user matching a fuzzy search on the email. */
  // NOTE(review): Users.all({ search }) is a fuzzy match and the first hit is
  // assumed correct — consider filtering on exact email equality.
  public async findUserByEmail(email: string) {
    const [user] = await this.api.Users.all({ search: email })
    return user
  }

  public async findUserByUsername(username: string) {
    const [user] = await this.api.Users.all({ username })
    return user
  }

  /** Create a gitlab user (requires an admin token). */
  public async createUser(email: string, username: string, name: string) {
    // SECURITY NOTE(review): Math.random() is not cryptographically secure.
    // This throwaway password should be generated with node:crypto
    // (e.g. randomBytes) if these accounts are ever password-reachable.
    return this.api.Users.create({
      email,
      username,
      name,
      password: Math.random().toString(36).slice(-8) + Math.random().toString(36).slice(-8),
      skipConfirmation: true,
    })
  }

  // --- Repositories ---

  /** All repositories under the project's subgroup. */
  public async listRepositories(projectSlug: string) {
    const group = await this.getOrCreateProjectGroup(projectSlug)
    const generator = this.offsetPaginate(opts => this.api.Groups.allProjects(group.id, { simple: false, ...opts }))
    const repositories: ProjectSchema[] = []
    for await (const repo of generator) {
      repositories.push(repo)
    }
    return repositories
  }

  /**
   * Create a repository under the project's subgroup. When `clone` is false,
   * an empty commit bootstraps the default branch (callers that mirror an
   * external repo pass clone=true and push content themselves).
   */
  public async createEmptyProjectRepository(projectSlug: string, repoName: string, description?: string, clone?: boolean) {
    const group = await this.getOrCreateProjectGroup(projectSlug)
    const project = await this.api.Projects.create({
      name: repoName,
      path: repoName,
      namespaceId: group.id,
      description,
    })

    if (!clone) {
      try {
        await this.api.Commits.create(project.id, 'main', 'ci: 🌱 First commit', [])
      } catch (_e) {
        // best-effort: default branch may already exist
      }
    }
    return project
  }

  public async deleteRepository(repoId: number) {
    await this.api.Projects.remove(repoId)
  }

  public async updateProject(repoId: number, data: Record<string, unknown>) {
    await this.api.Projects.edit(repoId, data)
  }

  /** Seed a freshly-created mirror repo with its CI config and mirror script. */
  public async provisionMirror(repoId: number) {
    const mirrorFirstActions: CommitAction[] = [
      {
        action: 'create',
        filePath: '.gitlab-ci.yml',
        content: generateGitLabCIConfig(),
        execute_filemode: false,
      },
      {
        action: 'create',
        filePath: 'mirror.sh',
        content: generateMirrorScript(),
        execute_filemode: true,
      },
    ]

    await this.api.Commits.create(
      repoId,
      'main',
      'ci: :construction_worker: first mirror',
      mirrorFirstActions,
    )
  }

  // --- Tokens ---

  /** Find an existing group access token by name; undefined when absent. */
  public async getProjectToken(projectSlug: string, tokenName: string) {
    const group = await this.getProjectGroup(projectSlug)
    if (!group) throw new Error('Unable to retrieve gitlab project group')
    return this.find(
      this.offsetPaginate(opts => this.api.GroupAccessTokens.all(group.id, opts)),
      token => token.name === tokenName,
    )
  }

  /** Create a group access token expiring in one year. */
  public async createProjectToken(projectSlug: string, tokenName: string, scopes: AccessTokenScopes[]) {
    const group = await this.getProjectGroup(projectSlug)
    if (!group) throw new Error('Unable to retrieve gitlab project group')
    const expiryDate = new Date()
    expiryDate.setFullYear(expiryDate.getFullYear() + 1)
    // Build the YYYY-MM-DD expiry explicitly rather than relying on the
    // locale-dependent toLocaleDateString('en-CA') formatting.
    return this.api.GroupAccessTokens.create(group.id, tokenName, scopes, expiryDate.toISOString().slice(0, 10))
  }

  public async revokeProjectToken(projectSlug: string, tokenId: number) {
    const group = await this.getProjectGroup(projectSlug)
    if (!group) throw new Error('Unable to retrieve gitlab project group')
    return this.api.GroupAccessTokens.revoke(group.id, tokenId)
  }

  public async
getMirrorProjectTriggerToken(projectSlug: string) { + const tokenDescription = 'mirroring-from-external-repo' + const repositoriesGenerator = await this.listRepositories(projectSlug) + let mirrorRepo: ProjectSchema | undefined + for await (const repo of repositoriesGenerator) { + if (repo.name === internalMirrorRepoName) { + mirrorRepo = repo + break + } + } + + if (!mirrorRepo) throw new Error('Don\'t know how mirror repo could not exist') + + const currentTriggerToken = await this.find( + this.offsetPaginate(opts => this.api.PipelineTriggerTokens.all(mirrorRepo.id, opts)), + token => token.description === tokenDescription, + ) + + // Note: The logic to compare with Vault and recreate if missing is in Controller. + // Here we just get or create. + // Actually, plugin recreates if missing in Vault. + // So maybe we just return current if exists. + + if (currentTriggerToken) { + return { token: currentTriggerToken.token, repoId: mirrorRepo.id, id: currentTriggerToken.id } + } + + const triggerToken = await this.api.PipelineTriggerTokens.create(mirrorRepo.id, tokenDescription) + return { token: triggerToken.token, repoId: mirrorRepo.id, id: triggerToken.id } + } + + public async deleteTriggerToken(repoId: number, tokenId: number) { + await this.api.PipelineTriggerTokens.remove(repoId, tokenId) + } + + // Private helpers + + private async getGroupRootId(): Promise { + if (this.groupRootId) return this.groupRootId + + const projectRootDir = this.configService.projectRootDir + if (!projectRootDir) throw new Error('PROJECTS_ROOT_DIR not configured') + + const group = await this.find( + this.offsetPaginate(opts => this.api.Groups.all({ + search: projectRootDir, + ...opts, + })), + g => g.full_path === projectRootDir, + ) + + if (!group) { + // Create it if not exists? Plugin logic has createGroupRoot. + // For now throw error or implement createGroupRoot logic. + // Let's implement createGroupRoot logic here or assume it exists. 
+ // Given this is migration, better to implement creation. + return this.createGroupRoot(projectRootDir) + } + + this.groupRootId = group.id + return group.id + } + + private async createGroupRoot(projectRootDir: string): Promise { + const parts = projectRootDir.split('/') + const currentPath = parts.shift() + if (!currentPath) throw new Error('Invalid projects root dir') + + // Find or create root + let parentGroup = await this.find( + this.offsetPaginate(opts => this.api.Groups.all({ search: currentPath, ...opts })), + g => g.full_path === currentPath, + ) ?? await this.api.Groups.create(currentPath, currentPath) + + for (const part of parts) { + const fullPath = `${parentGroup.full_path}/${part}` + parentGroup = await this.find( + this.offsetPaginate(opts => this.api.Groups.all({ search: fullPath, ...opts })), + g => g.full_path === fullPath, + ) ?? await this.api.Groups.create(part, part, { parentId: parentGroup.id }) + } + + return parentGroup.id + } + + private async find(generator: AsyncGenerator, predicate: (item: T) => boolean): Promise { + for await (const item of generator) { + if (predicate(item)) return item + } + return undefined + } + + private async *offsetPaginate( + request: (options: PaginationRequestOptions<'offset'> & BaseRequestOptions) => Promise<{ data: T[], paginationInfo: OffsetPagination }>, + ): AsyncGenerator { + let page: number | null = 1 + while (page !== null) { + const { data, paginationInfo } = await request({ page, showExpanded: true, pagination: 'offset' }) + for (const item of data) { + yield item + } + page = paginationInfo.next ? 
}

// --- file: apps/server-nestjs/src/modules/gitlab/gitlab.utils.ts (new file in this patch) ---

export const infraAppsRepoName = 'infra-apps'
export const internalMirrorRepoName = 'mirror'
export const pluginManagedTopic = 'plugin-managed'

/**
 * GitLab CI pipeline committed into the internal mirror repository.
 * It includes the shared `mirror.yml` template from $CATALOG_PATH and exposes
 * trigger variables (PROJECT_NAME, GIT_BRANCH_DEPLOY, SYNC_ALL).
 * NOTE(review): indentation inside this YAML was reconstructed — confirm nesting.
 */
export function generateGitLabCIConfig() {
  return `variables:
  PROJECT_NAME:
    description: Nom du dépôt (dans ce Gitlab) à synchroniser.
  GIT_BRANCH_DEPLOY:
    description: Nom de la branche à synchroniser.
    value: main
  SYNC_ALL:
    description: Synchroniser toutes les branches.
    value: "false"

include:
  - project: $CATALOG_PATH
    file: mirror.yml
    ref: main

repo_pull_sync:
  extends: .repo_pull_sync
  only:
    - api
    - triggers
    - web
    - schedules
`
}

/**
 * Shell script (mirror.sh) committed into the mirror repository; it POSTs a
 * pipeline-trigger request to the GitLab API for the mirror project.
 * `\${...}` sequences are escaped so they survive as shell expansions.
 */
export function generateMirrorScript() {
  return `#!/bin/bash

set -e

# Colorize terminal
red='\\e[0;31m'
no_color='\\033[0m'

# Console step increment
i=1

# Default values
BRANCH_TO_SYNC=main

print_help() {
  TEXT_HELPER="\\nThis script aims to send a synchronization request to DSO.\\nFollowing flags are available:
  -a    Api url to send the synchronization request
  -b    Branch which is wanted to be synchronize for the given repository (default '$BRANCH_TO_SYNC')
  -g    GitLab token to trigger the pipeline on the gitlab mirror project
  -i    Gitlab mirror project id
  -r    Gitlab repository name to mirror
  -h    Print script help\\n"
  printf "$TEXT_HELPER"
}

print_args() {
  printf "\\nArguments received:
  -a API_URL: $API_URL
  -b BRANCH_TO_SYNC: $BRANCH_TO_SYNC
  -g GITLAB_TRIGGER_TOKEN length: \${#GITLAB_TRIGGER_TOKEN}
  -i GITLAB_MIRROR_PROJECT_ID: $GITLAB_MIRROR_PROJECT_ID
  -r REPOSITORY_NAME: $REPOSITORY_NAME\\n"
}

# Parse options
while getopts :ha:b:g:i:r: flag
do
  case "\${flag}" in
    a)
      API_URL=\${OPTARG};;
    b)
      BRANCH_TO_SYNC=\${OPTARG};;
    g)
      GITLAB_TRIGGER_TOKEN=\${OPTARG};;
    i)
      GITLAB_MIRROR_PROJECT_ID=\${OPTARG};;
    r)
      REPOSITORY_NAME=\${OPTARG};;
    h)
      printf "\\nHelp requested.\\n"
      print_help
      printf "\\nExiting.\\n"
      exit 0;;
    *)
      printf "\\nInvalid argument \${OPTARG} (\${flag}).\\n"
      print_help
      print_args
      exit 1;;
  esac
done

# Test if arguments are missing
if [ -z \${API_URL} ] || [ -z \${BRANCH_TO_SYNC} ] || [ -z \${GITLAB_TRIGGER_TOKEN} ] || [ -z \${GITLAB_MIRROR_PROJECT_ID} ] || [ -z \${REPOSITORY_NAME} ]; then
  printf "\\nArgument(s) missing!\\n"
  print_help
  print_args
  exit 2
fi

# Print arguments
print_args

# Send synchronization request
printf "\\n\${red}\${i}.\${no_color} Send request to DSO api.\\n\\n"

curl \\
  -X POST \\
  --fail \\
  -F token=\${GITLAB_TRIGGER_TOKEN} \\
  -F ref=main \\
  -F variables[GIT_BRANCH_DEPLOY]=\${BRANCH_TO_SYNC} \\
  -F variables[PROJECT_NAME]=\${REPOSITORY_NAME} \\
  "\${API_URL}/api/v4/projects/\${GITLAB_MIRROR_PROJECT_ID}/trigger/pipeline"
`
}

// --- file: apps/server-nestjs/src/modules/iam/decorators/check-policies.decorator.ts (new file in this patch) ---

import { SetMetadata } from '@nestjs/common'
import type { AppAbility } from '../factories/casl-ability.factory'

// Object form of a policy check, for handlers that need state.
export interface IPolicyHandler {
  handle: (ability: AppAbility) => boolean
}

// Plain-function form of a policy check.
type PolicyHandlerCallback = (ability: AppAbility) => boolean

export type PolicyHandler = IPolicyHandler | PolicyHandlerCallback

// Metadata key read back by PoliciesGuard via Reflector.
export const CHECK_POLICIES_KEY = 'check_policy'

/** Route decorator attaching CASL policy handlers for PoliciesGuard to evaluate. */
export function CheckPolicies(...handlers: PolicyHandler[]) {
  return SetMetadata(CHECK_POLICIES_KEY, handlers)
}
// --- file: apps/server-nestjs/src/modules/iam/factories/casl-ability.factory.ts (new file in this patch) ---

import type { PureAbility } from '@casl/ability'
import { AbilityBuilder } from '@casl/ability'
import type { PrismaQuery, Subjects } from '@casl/prisma'
import { createPrismaAbility } from '@casl/prisma'
import { Injectable } from '@nestjs/common'
import type { Project, Environment, User, ProjectMembers } from '@prisma/client'

export type AppAbility = PureAbility<
  [string, Subjects<{ Project: Project, Environment: Environment, User: User, ProjectMembers: ProjectMembers }>],
  PrismaQuery
>

@Injectable()
export class CaslAbilityFactory {
  /**
   * Build the CASL ability for a request user.
   * `user` is assumed to be the decoded JWT payload (`sub` = user id) — TODO confirm.
   * Unauthenticated users get an empty (deny-all) ability.
   */
  createForUser(user: any) {
    const { can, build } = new AbilityBuilder<AppAbility>(
      createPrismaAbility,
    )

    if (!user || !user.sub) {
      return build()
    }

    const userId = user.sub

    // A user can read projects they are a member of (via ProjectMembers).
    can('read', 'Project', {
      members: {
        some: {
          userId,
        },
      },
    })

    // A project owner can manage everything on their project.
    can('manage', 'Project', {
      ownerId: userId,
    })

    // A member can update an environment only while the project is unlocked.
    can('update', 'Environment', {
      project: {
        is: {
          locked: false,
          members: {
            some: {
              userId,
            },
          },
        },
      },
    })

    return build()
  }
}

// --- file: apps/server-nestjs/src/modules/iam/guards/policies.guard.ts (new file in this patch) ---

import type { CanActivate, ExecutionContext } from '@nestjs/common'
import { Injectable } from '@nestjs/common'
// FIX: Reflector and CaslAbilityFactory are constructor-injected; `import type`
// is erased at compile time, which breaks the design:paramtypes metadata Nest
// needs for DI — they must be value imports.
import { Reflector } from '@nestjs/core'
import { CaslAbilityFactory } from '../factories/casl-ability.factory'
import type { AppAbility } from '../factories/casl-ability.factory'
import type { PolicyHandler } from '../decorators/check-policies.decorator'
import { CHECK_POLICIES_KEY } from '../decorators/check-policies.decorator'

/** Evaluates the @CheckPolicies handlers of a route against the user's CASL ability. */
@Injectable()
export class PoliciesGuard implements CanActivate {
  constructor(
    private reflector: Reflector,
    private caslAbilityFactory: CaslAbilityFactory,
  ) {}

  async canActivate(context: ExecutionContext): Promise<boolean> {
    // No @CheckPolicies on the handler means no extra restriction.
    const policyHandlers
      = this.reflector.get<PolicyHandler[]>(
        CHECK_POLICIES_KEY,
        context.getHandler(),
      ) || []

    const { user } = context.switchToHttp().getRequest()
    const ability = this.caslAbilityFactory.createForUser(user)

    return policyHandlers.every(handler =>
      this.execPolicyHandler(handler, ability),
    )
  }

  /** Supports both the callback and the object (IPolicyHandler) forms. */
  private execPolicyHandler(handler: PolicyHandler, ability: AppAbility) {
    if (typeof handler === 'function') {
      return handler(ability)
    }
    return handler.handle(ability)
  }
}

// --- file: apps/server-nestjs/src/modules/iam/iam.module.ts (new file in this patch) ---

import { Module } from '@nestjs/common'
import { APP_GUARD } from '@nestjs/core'
import {
  AuthGuard,
  ResourceGuard,
  KeycloakConnectModule,
  PolicyEnforcementMode,
  TokenValidation,
} from 'nest-keycloak-connect'
import { ConfigurationModule } from '../../cpin-module/infrastructure/configuration/configuration.module'
import { ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service'
import { PoliciesGuard } from './guards/policies.guard'
import { CaslAbilityFactory } from './factories/casl-ability.factory'

/**
 * IAM wiring: Keycloak authentication (global AuthGuard + ResourceGuard)
 * followed by CASL policy checks (PoliciesGuard). Guard order matters: APP_GUARD
 * providers run in registration order, so authentication happens before policies.
 */
@Module({
  imports: [
    ConfigurationModule,
    KeycloakConnectModule.registerAsync({
      imports: [ConfigurationModule],
      useFactory: (configService: ConfigurationService) => ({
        authServerUrl: `${configService.keycloakProtocol}://${configService.keycloakDomain}`,
        realm: configService.keycloakRealm!,
        clientId: configService.keycloakClientId!,
        secret: configService.keycloakClientSecret!,
        policyEnforcement: PolicyEnforcementMode.PERMISSIVE,
        tokenValidation: TokenValidation.ONLINE,
      }),
      inject: [ConfigurationService],
    }),
  ],
  providers: [
    CaslAbilityFactory,
    {
      provide: APP_GUARD,
      useClass: AuthGuard,
    },
    {
      provide: APP_GUARD,
      useClass: ResourceGuard,
    },
    {
      provide: APP_GUARD,
      useClass: PoliciesGuard,
    },
  ],
  exports: [CaslAbilityFactory],
})
export class IamModule {}

// --- file: apps/server-nestjs/src/modules/keycloak/keycloak-controller.service.spec.ts (new file in this patch) ---

import { describe, it, expect, beforeEach, vi, type Mocked } from 'vitest'
import { KeycloakControllerService } from './keycloak-controller.service'
import type { KeycloakDatastoreService, ProjectWithDetails } from './keycloak-datastore.service'
import type { KeycloakService } from './keycloak.service'
import type { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service'

const mockKeycloakService = {
  getAllGroups: vi.fn(),
  deleteGroup: vi.fn().mockResolvedValue(undefined),
  getOrCreateGroupByPath: vi.fn().mockResolvedValue({}),
  getGroupMembers: vi.fn().mockResolvedValue([]),
  addUserToGroup: vi.fn().mockResolvedValue(undefined),
  removeUserFromGroup: vi.fn().mockResolvedValue(undefined),
  getOrCreateSubGroupByName: vi.fn().mockResolvedValue({}),
  getSubGroups: vi.fn(),
  getOrCreateConsoleGroup: vi.fn().mockResolvedValue({}),
  getOrCreateEnvironmentGroups: vi.fn().mockResolvedValue({}),
} as unknown as Mocked<KeycloakService>

const mockKeycloakDatastore = {
  getAllProjects:
vi.fn(), +} as unknown as Mocked + +const mockConfigService = { + keycloakControllerPurgeOrphans: false, +} as unknown as Mocked + +describe('keycloakControllerService', () => { + let service: KeycloakControllerService + let keycloakService: Mocked + let keycloakDatastore: Mocked + let configService: Mocked + + beforeEach(async () => { + service = new KeycloakControllerService( + mockKeycloakService, + mockKeycloakDatastore, + mockConfigService, + ) + keycloakService = mockKeycloakService + keycloakDatastore = mockKeycloakDatastore + configService = mockConfigService + vi.clearAllMocks() + }) + + it('should be defined', () => { + expect(service).toBeDefined() + }) + + describe('reconcile', () => { + const mockProject: ProjectWithDetails = { + id: 'project-id', + slug: 'test-project', + ownerId: 'owner-id', + everyonePerms: 0n, + members: [], + roles: [], + environments: [], + } + + it('should purge orphans if enabled', async () => { + // Setup + configService.keycloakControllerPurgeOrphans = true + mockKeycloakDatastore.getAllProjects.mockResolvedValue([mockProject]) + + const projectGroup = { id: 'group-id', name: 'test-project', subGroups: [] } + const orphanGroup = { id: 'orphan-id', name: 'orphan-project', subGroups: [{ name: 'console' }] } + + mockKeycloakService.getAllGroups.mockImplementation(async function* () { + yield projectGroup + yield orphanGroup + }) + mockKeycloakService.getOrCreateGroupByPath.mockResolvedValue(projectGroup) + mockKeycloakService.getGroupMembers.mockResolvedValue([]) + mockKeycloakService.getOrCreateSubGroupByName.mockResolvedValue({ id: 'console-id', name: 'console' }) + mockKeycloakService.getSubGroups.mockImplementation(async function* () { /* empty */ }) + + await service.handleCron() + + expect(keycloakDatastore.getAllProjects).toHaveBeenCalled() + expect(keycloakService.getAllGroups).toHaveBeenCalled() + expect(keycloakService.getOrCreateGroupByPath).toHaveBeenCalledWith('/test-project') + 
expect(keycloakService.deleteGroup).toHaveBeenCalledWith('orphan-id') + }) + + it('should not purge orphans if disabled', async () => { + // Setup + configService.keycloakControllerPurgeOrphans = false + mockKeycloakDatastore.getAllProjects.mockResolvedValue([mockProject]) + + const projectGroup = { id: 'group-id', name: 'test-project', subGroups: [] } + const orphanGroup = { id: 'orphan-id', name: 'orphan-project', subGroups: [{ name: 'console' }] } + + mockKeycloakService.getAllGroups.mockImplementation(async function* () { + yield projectGroup + yield orphanGroup + }) + mockKeycloakService.getOrCreateGroupByPath.mockResolvedValue(projectGroup) + mockKeycloakService.getGroupMembers.mockResolvedValue([]) + mockKeycloakService.getOrCreateSubGroupByName.mockResolvedValue({ id: 'console-id', name: 'console' }) + mockKeycloakService.getSubGroups.mockImplementation(async function* () { /* empty */ }) + + await service.handleCron() + + expect(keycloakService.deleteGroup).not.toHaveBeenCalled() + }) + + it('should sync project members', async () => { + // Setup + configService.keycloakControllerPurgeOrphans = true + const projectWithMembers = { + ...mockProject, + members: [{ user: { id: 'user-1', email: 'user1@example.com' }, roleIds: [] }], + } + mockKeycloakDatastore.getAllProjects.mockResolvedValue([projectWithMembers]) + + const projectGroup = { id: 'group-id', name: 'test-project' } + mockKeycloakService.getOrCreateGroupByPath.mockResolvedValue(projectGroup) + + // Current members: user-2 (extra), missing user-1 + mockKeycloakService.getGroupMembers.mockResolvedValue([ + { id: 'user-2', email: 'user2@example.com' }, + ]) + + mockKeycloakService.getOrCreateSubGroupByName.mockResolvedValue({ id: 'console-id', name: 'console' }) + mockKeycloakService.getSubGroups.mockImplementation(async function* () { /* empty */ }) + + await service.handleCron() + + // Should add missing member + expect(keycloakService.addUserToGroup).toHaveBeenCalledWith('user-1', 'group-id') + // 
Should add owner (missing in group members) + expect(keycloakService.addUserToGroup).toHaveBeenCalledWith('owner-id', 'group-id') + // Should remove extra member (purge enabled) + expect(keycloakService.removeUserFromGroup).toHaveBeenCalledWith('user-2', 'group-id') + }) + + it('should sync OIDC role groups', async () => { + // Setup + configService.keycloakControllerPurgeOrphans = true + const roleWithOidc = { + id: 'role-oidc', + permissions: 0n, + oidcGroup: '/oidc-group', + } + const projectWithRole = { + ...mockProject, + members: [{ user: { id: 'user-1', email: 'user1@example.com' }, roleIds: ['role-oidc'] }], + roles: [roleWithOidc], + } + mockKeycloakDatastore.getAllProjects.mockResolvedValue([projectWithRole]) + + const projectGroup = { id: 'group-id', name: 'test-project' } + const roleGroup = { id: 'role-group-id', name: 'oidc-group', path: '/oidc-group' } + + mockKeycloakService.getOrCreateGroupByPath.mockImplementation((path) => { + if (path === '/test-project') return Promise.resolve(projectGroup) + if (path === '/oidc-group') return Promise.resolve(roleGroup) + return Promise.resolve({}) + }) + + // Project members: owner + mockKeycloakService.getGroupMembers.mockImplementation((groupId) => { + if (groupId === 'group-id') return Promise.resolve([{ id: 'owner-id' }]) + // Role group members: user-2 (extra), missing user-1 + if (groupId === 'role-group-id') return Promise.resolve([{ id: 'user-2', email: 'user2@example.com', groups: ['/oidc-group'] }]) + return Promise.resolve([]) + }) + + mockKeycloakService.getOrCreateSubGroupByName.mockResolvedValue({ id: 'console-id', name: 'console' }) + mockKeycloakService.getSubGroups.mockImplementation(async function* () { /* empty */ }) + + await service.handleCron() + + // Should create/get role group + expect(keycloakService.getOrCreateGroupByPath).toHaveBeenCalledWith('/oidc-group') + // Should add user-1 to role group + expect(keycloakService.addUserToGroup).toHaveBeenCalledWith('user-1', 'role-group-id') + 
// Should remove user-2 from role group (purge enabled) + expect(keycloakService.removeUserFromGroup).toHaveBeenCalledWith('user-2', 'role-group-id') + }) + + it('should sync environment groups', async () => { + // Setup + configService.keycloakControllerPurgeOrphans = true + const projectWithEnv = { + ...mockProject, + environments: [{ id: 'env-1', name: 'dev' }], + } + mockKeycloakDatastore.getAllProjects.mockResolvedValue([projectWithEnv]) + + const projectGroup = { id: 'group-id', name: 'test-project', subGroups: [{ name: 'console', id: 'console-id' }] } + mockKeycloakService.getOrCreateGroupByPath.mockResolvedValue(projectGroup) + mockKeycloakService.getGroupMembers.mockResolvedValue([]) + + // Mock console group retrieval + mockKeycloakService.getOrCreateConsoleGroup.mockResolvedValue({ id: 'console-id', name: 'console' }) + mockKeycloakService.getOrCreateEnvironmentGroups.mockResolvedValue({ + roGroup: { id: 'dev-ro-id', name: 'RO' }, + rwGroup: { id: 'dev-rw-id', name: 'RW' }, + }) + mockKeycloakService.getOrCreateSubGroupByName.mockImplementation((_parentId, name) => { + if (name === 'console') return Promise.resolve({ id: 'console-id', name: 'console' }) + if (name === 'dev') return Promise.resolve({ id: 'dev-id', name: 'dev' }) + if (name === 'RO') return Promise.resolve({ id: 'dev-ro-id', name: 'RO' }) + if (name === 'RW') return Promise.resolve({ id: 'dev-rw-id', name: 'RW' }) + return Promise.resolve({ id: 'new-id', name }) + }) + + // Mock existing environments: 'staging' (extra) + mockKeycloakService.getSubGroups.mockImplementation(async function* (parentId) { + if (parentId === 'console-id') { + yield { id: 'staging-id', name: 'staging' } + } + }) + + await service.handleCron() + + // Should create dev group + expect(keycloakService.getOrCreateConsoleGroup).toHaveBeenCalledWith(projectGroup) + // Should create RO/RW groups + expect(keycloakService.getOrCreateEnvironmentGroups).toHaveBeenCalledWith({ id: 'console-id', name: 'console' }, 
projectWithEnv.environments[0]) + // Should delete staging group (purge enabled) + expect(keycloakService.deleteGroup).toHaveBeenCalledWith('staging-id') + }) + + it('should sync environment permissions', async () => { + // Setup + configService.keycloakControllerPurgeOrphans = true + + const userRo = { id: 'user-ro', email: 'ro@example.com' } + const userRw = { id: 'user-rw', email: 'rw@example.com' } + const userNone = { id: 'user-none', email: 'none@example.com' } + + const projectWithEnvAndMembers = { + id: mockProject.id, + slug: mockProject.slug, + ownerId: mockProject.ownerId, + everyonePerms: mockProject.everyonePerms, + members: [ + { userId: userRo.id, user: userRo, roleIds: ['role-ro'] }, + { userId: userRw.id, user: userRw, roleIds: ['role-rw'] }, + { userId: userNone.id, user: userNone, roleIds: [] }, + ], + roles: [ + { id: 'role-ro', permissions: BigInt(256), oidcGroup: '' }, // ListEnvironments (bit 8) + { id: 'role-rw', permissions: BigInt(8), oidcGroup: '' }, // ManageEnvironments (bit 3) + ], + environments: [{ id: 'env-1', name: 'dev' }], + } + mockKeycloakDatastore.getAllProjects.mockResolvedValue([projectWithEnvAndMembers]) + + const projectGroup = { id: 'group-id', name: 'test-project', subGroups: [{ name: 'console', id: 'console-id' }] } + mockKeycloakService.getOrCreateGroupByPath.mockResolvedValue(projectGroup) + mockKeycloakService.getOrCreateConsoleGroup.mockResolvedValue({ id: 'console-id', name: 'console' }) + mockKeycloakService.getOrCreateEnvironmentGroups.mockResolvedValue({ + roGroup: { id: 'dev-ro-id', name: 'RO' }, + rwGroup: { id: 'dev-rw-id', name: 'RW' }, + }) + + // Project group members (assume all are in project group for simplicity) + mockKeycloakService.getGroupMembers.mockImplementation((groupId) => { + if (groupId === 'group-id') return Promise.resolve([userRo, userRw, userNone]) + // RO group has userNone (extra), missing userRo + if (groupId === 'dev-ro-id') return Promise.resolve([userNone]) + // RW group has 
userNone (extra), missing userRw + if (groupId === 'dev-rw-id') return Promise.resolve([userNone]) + return Promise.resolve([]) + }) + + mockKeycloakService.getOrCreateSubGroupByName.mockImplementation((_parentId, name) => { + if (name === 'console') return Promise.resolve({ id: 'console-id', name: 'console' }) + if (name === 'dev') return Promise.resolve({ id: 'dev-id', name: 'dev' }) + if (name === 'RO') return Promise.resolve({ id: 'dev-ro-id', name: 'RO' }) + if (name === 'RW') return Promise.resolve({ id: 'dev-rw-id', name: 'RW' }) + return Promise.resolve({ id: 'new-id', name }) + }) + + mockKeycloakService.getSubGroups.mockImplementation(async function* () { /* empty */ }) + + await service.handleCron() + + // Sync RO + expect(keycloakService.addUserToGroup).toHaveBeenCalledWith('user-ro', 'dev-ro-id') + expect(keycloakService.removeUserFromGroup).toHaveBeenCalledWith('user-none', 'dev-ro-id') + // Sync RW + expect(keycloakService.addUserToGroup).toHaveBeenCalledWith('user-rw', 'dev-rw-id') + expect(keycloakService.removeUserFromGroup).toHaveBeenCalledWith('user-none', 'dev-rw-id') + }) + }) +}) diff --git a/apps/server-nestjs/src/modules/keycloak/keycloak-controller.service.ts b/apps/server-nestjs/src/modules/keycloak/keycloak-controller.service.ts new file mode 100644 index 000000000..748657dc1 --- /dev/null +++ b/apps/server-nestjs/src/modules/keycloak/keycloak-controller.service.ts @@ -0,0 +1,438 @@ +import type { OnModuleInit } from '@nestjs/common' +import { Injectable, Logger } from '@nestjs/common' +import { OnEvent } from '@nestjs/event-emitter' +import { Cron, CronExpression } from '@nestjs/schedule' +import { ProjectAuthorized, getPermsByUserRoles, resourceListToDict } from '@cpn-console/shared' +import type { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import type { KeycloakService } from './keycloak.service' +import type GroupRepresentation from 
'@keycloak/keycloak-admin-client/lib/defs/groupRepresentation' +import type UserRepresentation from '@keycloak/keycloak-admin-client/lib/defs/userRepresentation.js' +import type { KeycloakDatastoreService, ProjectWithDetails } from './keycloak-datastore.service' +import { CONSOLE_GROUP_NAME } from './keycloak.constant' + +@Injectable() +export class KeycloakControllerService implements OnModuleInit { + private readonly logger = new Logger(KeycloakControllerService.name) + + constructor( + private readonly keycloakService: KeycloakService, + private readonly keycloakDatastore: KeycloakDatastoreService, + private readonly configService: ConfigurationService, + ) { + this.logger.log('KeycloakControllerService initialized') + } + + onModuleInit() { + this.handleCron() + } + + @OnEvent('project.upsert') + async handleUpsert(project: ProjectWithDetails) { + this.logger.log(`Handling project upsert for ${project.slug}`) + return this.reconcile() + } + + @OnEvent('project.delete') + async handleDelete(project: ProjectWithDetails) { + this.logger.log(`Handling project delete for ${project.slug}`) + return this.reconcile() + } + + @Cron(CronExpression.EVERY_HOUR) + async handleCron() { + this.logger.log('Starting periodic Keycloak reconciliation') + await this.reconcile() + this.logger.log('Periodic Keycloak reconciliation completed') + } + + private async reconcile(): Promise[]> { + const results: PromiseSettledResult[] = [] + try { + const projects = await this.keycloakDatastore.getAllProjects() + + const projectGroupResults = await this.ensureProjectGroups(projects) + results.push(...projectGroupResults) + projectGroupResults.forEach((result) => { + if (result.status === 'rejected') { + this.logger.error(`Failed to ensure project group ${result.reason}`) + } + }) + + const orphanResults = await this.purgeOrphanGroups(projects) + results.push(...orphanResults) + orphanResults.forEach((result) => { + if (result.status === 'rejected') { + this.logger.error(`Failed to purge 
orphan group ${result.reason}`) + } + }) + } catch (error) { + this.logger.error('Failed to reconcile Keycloak state', error) + results.push({ status: 'rejected', reason: error }) + } + + return results + } + + private async ensureProjectGroups(projects: ProjectWithDetails[]) { + const results = await Promise.all(projects.map(async (project) => { + try { + const projectGroup = await this.keycloakService.getOrCreateGroupByPath(`/${project.slug}`) + const memberResults = await this.ensureProjectGroup(project, projectGroup) + const subResults = await Promise.all([ + this.ensureProjectRoleGroups(project, projectGroup), + this.ensureEnvironmentGroups(project, projectGroup), + ]) + return [...memberResults, ...subResults.flat()] + } catch (error) { + return [{ status: 'rejected', reason: error }] as PromiseSettledResult[] + } + })) + return results.flat() + } + + private async purgeOrphanGroups(projects: ProjectWithDetails[]) { + const groups = this.keycloakService.getAllGroups() + const projectSlugs = new Set(projects.map(p => p.slug)) + const promises: Promise[] = [] + + for await (const group of groups) { + if (group.name && !projectSlugs.has(group.name)) { + if (this.isOwnedProjectGroup(group)) { + if (this.configService.keycloakControllerPurgeOrphans) { + if (group.id) { + this.logger.log(`Deleting orphan Keycloak group: ${group.name}`) + promises.push( + this.keycloakService.deleteGroup(group.id) + .catch(error => this.logger.error(`Failed to delete orphan group ${group.name}`, error)), + ) + } else { + this.logger.warn(`Orphan Keycloak group detected but ID is missing: ${group.name}`) + } + } else { + this.logger.warn(`Orphan Keycloak group detected but purge is disabled: ${group.name}`) + } + } + } + } + return Promise.allSettled(promises) + } + + private isOwnedProjectGroup(group: GroupRepresentation) { + // Safety check: Only delete if it looks like a project group (has 'console' subgroup) + // or if we can be sure it's not a system group. 
+ // For now, we rely on the 'console' subgroup heuristic as it's created by us. + return !!group.subGroups?.some(sg => sg.name === CONSOLE_GROUP_NAME) + } + + private async ensureProjectGroup(project: ProjectWithDetails, projectGroup: GroupRepresentation) { + if (!projectGroup.id) { + throw new Error(`Failed to create or retrieve project group for ${project.slug}`) + } + const groupMembers = await this.keycloakService.getGroupMembers(projectGroup.id) + + const results = await Promise.all([ + this.addMissingProjectMembers(project, projectGroup, groupMembers), + this.deleteExtraProjectMembers(project, projectGroup, groupMembers), + ]) + return results.flat() + } + + private async addMissingProjectMembers( + project: ProjectWithDetails, + projectGroup: GroupRepresentation, + members: UserRepresentation[], + ) { + const promises = project.members.map(async (member) => { + if (!members.some(m => m.id === member.user.id)) { + if (member.user.id && projectGroup.id) { + await this.keycloakService.addUserToGroup(member.user.id, projectGroup.id) + } + this.logger.log(`Added ${member.user.email} to keycloak project group ${projectGroup.name}`) + } + }) + return Promise.allSettled([ + ...promises, + this.addMissingOwner(project, projectGroup, members), + ]) + } + + private async addMissingOwner( + project: ProjectWithDetails, + projectGroup: GroupRepresentation, + members: UserRepresentation[], + ) { + if (!projectGroup.id) { + throw new Error(`Failed to create or retrieve project group for ${project.slug}`) + } + if (!members.some(m => m.id === project.ownerId)) { + await this.keycloakService.addUserToGroup(project.ownerId, projectGroup.id) + this.logger.log(`Added owner ${project.ownerId} to keycloak project group ${projectGroup.name}`) + } + } + + private async deleteExtraProjectMembers( + project: ProjectWithDetails, + projectGroup: GroupRepresentation, + members: UserRepresentation[], + ) { + if (!projectGroup.id) { + throw new Error(`Failed to create or retrieve project 
group for ${project.slug}`) + } + const promises = members.map(async (member) => { + const isMember = project.members.some(m => m.user.id === member.id) || project.ownerId === member.id + if (!isMember) { + if (this.configService.keycloakControllerPurgeOrphans) { + await this.keycloakService.removeUserFromGroup(member.id!, projectGroup.id!) + this.logger.log(`Removed ${member.email} from keycloak project group ${projectGroup.name}`) + } else { + this.logger.warn(`User ${member.email} is in Keycloak group but not in project ${project.slug} (purge disabled)`) + } + } + }) + return Promise.allSettled(promises) + } + + private async ensureProjectRoleGroups(project: ProjectWithDetails, projectGroup: GroupRepresentation): Promise<PromiseSettledResult<unknown>[]> { + if (!projectGroup.id) { + return [{ status: 'rejected', reason: new Error(`Failed to create or retrieve project group for ${project.slug}`) }] + } + const results = await Promise.all(project.roles.map(async (role) => { + if (role.oidcGroup) { + try { + const roleGroup = await this.keycloakService.getOrCreateGroupByPath(role.oidcGroup) + if (!roleGroup.id) { + throw new Error(`Failed to create or retrieve role group for ${role.oidcGroup}`) + } + const groupMembers = await this.keycloakService.getGroupMembers(roleGroup.id) + const results = await Promise.all([ + this.addMissingRoleMembers(roleGroup, project, role, groupMembers), + this.deleteExtraRoleMembers(roleGroup, project, role, groupMembers), + ]) + return results.flat() + } catch (error) { + return [{ status: 'rejected', reason: error }] as PromiseSettledResult<unknown>[] + } + } + return [] + })) + return results.flat() + } + + private async addMissingRoleMembers( + roleGroup: GroupRepresentation, + project: ProjectWithDetails, + role: ProjectWithDetails['roles'][number], + members: UserRepresentation[], + ) { + if (!roleGroup.id) { + throw new Error(`Failed to create or retrieve role group for ${role.oidcGroup}`) + } + return Promise.allSettled(project.members.map(async (member) => { + if 
(!members.some(m => m.id === member.user.id) && member.roleIds.includes(role.id)) { + await this.keycloakService.addUserToGroup(member.user.id, roleGroup.id!) + this.logger.log(`Added ${member.user.email} to keycloak role group ${roleGroup.name}`) + } + })) + } + + private async deleteExtraRoleMembers( + roleGroup: GroupRepresentation, + project: ProjectWithDetails, + role: ProjectWithDetails['roles'][number], + members: UserRepresentation[], + ) { + if (!roleGroup.id) { + throw new Error(`Failed to create or retrieve role group for ${role.oidcGroup}`) + } + return Promise.allSettled(members.map(async (member) => { + const isMember = project.members.some(m => m.user.id === member.id) || project.ownerId === member.id + if (!isMember && member.groups?.some(g => g === roleGroup.path)) { + if (this.configService.keycloakControllerPurgeOrphans) { + await this.keycloakService.removeUserFromGroup(member.id!, roleGroup.id!) + this.logger.log(`Removed ${member.email} from keycloak role group ${roleGroup.name}`) + } else { + this.logger.warn(`User ${member.email} is in Keycloak group but not in project ${project.slug} (purge disabled)`) + } + } + })) + } + + private async ensureEnvironmentGroups(project: ProjectWithDetails, projectGroup: GroupRepresentation): Promise<PromiseSettledResult<unknown>[]> { + try { + const consoleGroup = await this.keycloakService.getOrCreateConsoleGroup(projectGroup) + const envResults = await Promise.all(project.environments.map(environment => + this.ensureEnvironmentGroup(consoleGroup, environment, project))) + const orphanResults = await this.purgeOrphanEnvironmentGroups(consoleGroup, project) + return [...envResults.flat(), ...orphanResults] + } catch (error) { + return [{ status: 'rejected', reason: error }] satisfies PromiseSettledResult<unknown>[] + } + } + + private async purgeOrphanEnvironmentGroups(consoleGroup: GroupRepresentation, project: ProjectWithDetails) { + if (!consoleGroup.id) { + throw new Error(`Failed to create or retrieve console group for ${project.slug}`) + } 
+ const promises: Promise<void>[] = [] + for await (const envGroup of this.keycloakService.getSubGroups(consoleGroup.id)) { + if (!this.isOwnedEnvironmentGroup(envGroup, project) && envGroup.id) { + if (this.configService.keycloakControllerPurgeOrphans) { + promises.push( + this.keycloakService.deleteGroup(envGroup.id) + .catch(e => this.logger.warn(`Failed to delete environment group ${envGroup.name}`, e)), + ) + } else { + this.logger.warn(`Environment group ${envGroup.name} detected but purge is disabled`) + } + } + } + return Promise.allSettled(promises) + } + + private isOwnedEnvironmentGroup( + envGroup: GroupRepresentation, + project: ProjectWithDetails, + ) { + return project.environments.some(e => e.name === envGroup.name) + } + + private async ensureEnvironmentGroup( + consoleGroup: GroupRepresentation, + environment: ProjectWithDetails['environments'][number], + project: ProjectWithDetails, + ) { + const { roGroup, rwGroup } = await this.keycloakService.getOrCreateEnvironmentGroups(consoleGroup, environment) + if (!roGroup.id || !rwGroup.id) { + throw new Error(`Failed to create or retrieve RO and RW groups for ${environment.name}`) + } + + const rolesById = resourceListToDict(project.roles) + + // Get current members of RO and RW groups to ensure we clean up removed users + const [roMembers, rwMembers] = await Promise.all([ + this.keycloakService.getGroupMembers(roGroup.id), + this.keycloakService.getGroupMembers(rwGroup.id), + ]) + + const results = await Promise.all([ + this.ensureEnvironmentMemberPermissions( + environment, + project, + rolesById, + roGroup, + rwGroup, + roMembers, + rwMembers, + ), + this.purgeOrphanMembersFromEnvironment( + environment, + project, + roGroup, + rwGroup, + roMembers, + rwMembers, + ), + ]) + return results.flat() + } + + private async ensureEnvironmentMemberPermissions( + environment: ProjectWithDetails['environments'][number], + project: ProjectWithDetails, + rolesById: Record<string, ProjectWithDetails['roles'][number]>, + roGroup: GroupRepresentation, + rwGroup: 
GroupRepresentation, + roMembers: UserRepresentation[], + rwMembers: UserRepresentation[], + ) { + if (!roGroup.id || !rwGroup.id) { + throw new Error(`Failed to create or retrieve RO and RW groups for ${environment.name}`) + } + + const projectUserIds = new Set([project.ownerId, ...project.members.map(m => m.user.id)]) + + return Promise.allSettled(Array.from(projectUserIds).map(async (userId) => { + const perms = this.getUserPermissions(userId, project, rolesById) + + // Sync RO + const isInRo = roMembers.some(m => m.id === userId) + if (perms.ro && !isInRo) { + await this.keycloakService.addUserToGroup(userId, roGroup.id!) + this.logger.log(`User ${userId} added to RO group for ${environment.name}`) + } else if (!perms.ro && isInRo) { + if (this.configService.keycloakControllerPurgeOrphans) { + await this.keycloakService.removeUserFromGroup(userId, roGroup.id!) + this.logger.log(`User ${userId} removed from RO group for ${environment.name}`) + } else { + this.logger.warn(`User ${userId} has no RO permission but is in RO group for ${environment.name} (purge disabled)`) + } + } + + // Sync RW + const isInRw = rwMembers.some(m => m.id === userId) + if (perms.rw && !isInRw) { + await this.keycloakService.addUserToGroup(userId, rwGroup.id!) + this.logger.log(`User ${userId} added to RW group for ${environment.name}`) + } else if (!perms.rw && isInRw) { + if (this.configService.keycloakControllerPurgeOrphans) { + await this.keycloakService.removeUserFromGroup(userId, rwGroup.id!) 
+ this.logger.log(`User ${userId} removed from RW group for ${environment.name}`) + } else { + this.logger.warn(`User ${userId} has no RW permission but is in RW group for ${environment.name} (purge disabled)`) + } + } + })) + } + + private async purgeOrphanMembersFromEnvironment( + environment: ProjectWithDetails['environments'][number], + project: ProjectWithDetails, + roGroup: GroupRepresentation, + rwGroup: GroupRepresentation, + roMembers: UserRepresentation[], + rwMembers: UserRepresentation[], + ) { + if (!roGroup.id || !rwGroup.id) { + throw new Error(`Failed to create or retrieve RO and RW groups for ${environment.name}`) + } + + const projectUserIds = new Set([project.ownerId, ...project.members.map(m => m.user.id)]) + + const roPromises = roMembers.map(async (member) => { + if (!projectUserIds.has(member.id!)) { + if (this.configService.keycloakControllerPurgeOrphans) { + await this.keycloakService.removeUserFromGroup(member.id!, roGroup.id!) + this.logger.log(`User ${member.id} removed from RO group for ${environment.name}`) + } else { + this.logger.warn(`User ${member.id} is in RO group for ${environment.name} but not in project (purge disabled)`) + } + } + }) + + const rwPromises = rwMembers.map(async (member) => { + if (!projectUserIds.has(member.id!)) { + if (this.configService.keycloakControllerPurgeOrphans) { + await this.keycloakService.removeUserFromGroup(member.id!, rwGroup.id!) 
+ this.logger.log(`User ${member.id} removed from RW group for ${environment.name}`) + } else { + this.logger.warn(`User ${member.id} is in RW group for ${environment.name} but not in project (purge disabled)`) + } + } + }) + + return Promise.allSettled([...roPromises, ...rwPromises]) + } + + private getUserPermissions(userId: string, project: ProjectWithDetails, rolesById: Record) { + if (userId === project.ownerId) return { ro: true, rw: true } + const member = project.members.find(m => m.user.id === userId) + if (!member) return { ro: false, rw: false } + + const projectPermissions = getPermsByUserRoles(member.roleIds, rolesById, project.everyonePerms) + + return { + ro: ProjectAuthorized.ListEnvironments({ adminPermissions: 0n, projectPermissions }), + rw: ProjectAuthorized.ManageEnvironments({ adminPermissions: 0n, projectPermissions }), + } + } +} diff --git a/apps/server-nestjs/src/modules/keycloak/keycloak-datastore.service.ts b/apps/server-nestjs/src/modules/keycloak/keycloak-datastore.service.ts new file mode 100644 index 000000000..46801787d --- /dev/null +++ b/apps/server-nestjs/src/modules/keycloak/keycloak-datastore.service.ts @@ -0,0 +1,51 @@ +import { Injectable, Logger } from '@nestjs/common' +import type { Prisma } from '@prisma/client' +import type { PrismaService } from '@/cpin-module/infrastructure/database/prisma.service' + +export const projectSelect = { + id: true, + slug: true, + ownerId: true, + everyonePerms: true, + members: { + select: { + roleIds: true, + user: { + select: { + id: true, + email: true, + }, + }, + }, + }, + roles: { + select: { + id: true, + permissions: true, + oidcGroup: true, + }, + }, + environments: { + select: { + id: true, + name: true, + }, + }, +} satisfies Prisma.ProjectSelect + +export type ProjectWithDetails = Prisma.ProjectGetPayload<{ + select: typeof projectSelect +}> + +@Injectable() +export class KeycloakDatastoreService { + private readonly logger = new Logger(KeycloakDatastoreService.name) + + 
constructor(private readonly prisma: PrismaService) {} + + async getAllProjects(): Promise { + return this.prisma.project.findMany({ + select: projectSelect, + }) + } +} diff --git a/apps/server-nestjs/src/modules/keycloak/keycloak.constant.ts b/apps/server-nestjs/src/modules/keycloak/keycloak.constant.ts new file mode 100644 index 000000000..93d81fcf3 --- /dev/null +++ b/apps/server-nestjs/src/modules/keycloak/keycloak.constant.ts @@ -0,0 +1 @@ +export const CONSOLE_GROUP_NAME = 'console' diff --git a/apps/server-nestjs/src/modules/keycloak/keycloak.module.ts b/apps/server-nestjs/src/modules/keycloak/keycloak.module.ts new file mode 100644 index 000000000..284b5ca59 --- /dev/null +++ b/apps/server-nestjs/src/modules/keycloak/keycloak.module.ts @@ -0,0 +1,13 @@ +import { Module } from '@nestjs/common' +import { KeycloakService } from './keycloak.service' +import { KeycloakControllerService } from './keycloak-controller.service' +import { KeycloakDatastoreService } from './keycloak-datastore.service' +import { ConfigurationModule } from '../../cpin-module/infrastructure/configuration/configuration.module' +import { InfrastructureModule } from '../../cpin-module/infrastructure/infrastructure.module' + +@Module({ + imports: [ConfigurationModule, InfrastructureModule], + providers: [KeycloakService, KeycloakControllerService, KeycloakDatastoreService], + exports: [KeycloakService], +}) +export class KeycloakModule {} diff --git a/apps/server-nestjs/src/modules/keycloak/keycloak.service.ts b/apps/server-nestjs/src/modules/keycloak/keycloak.service.ts new file mode 100644 index 000000000..466f0a431 --- /dev/null +++ b/apps/server-nestjs/src/modules/keycloak/keycloak.service.ts @@ -0,0 +1,166 @@ +import type { OnModuleInit } from '@nestjs/common' +import { Injectable, Logger } from '@nestjs/common' +import KcAdminClient from '@keycloak/keycloak-admin-client' +import type GroupRepresentation from '@keycloak/keycloak-admin-client/lib/defs/groupRepresentation' +import type { 
ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service' +import { ProjectWithDetails } from './keycloak-datastore.service' +import { CONSOLE_GROUP_NAME } from './keycloak.constant' + +@Injectable() +export class KeycloakService implements OnModuleInit { + private readonly logger = new Logger(KeycloakService.name) + private _client: KcAdminClient + + constructor(private readonly configService: ConfigurationService) { + this._client = new KcAdminClient({ + baseUrl: `${this.configService.keycloakProtocol}://${this.configService.keycloakDomain}`, + realmName: this.configService.keycloakRealm, + }) + } + + async onModuleInit() { + try { + await this._client.auth({ + grantType: 'client_credentials', + clientId: this.configService.keycloakClientId!, + clientSecret: this.configService.keycloakClientSecret!, + }) + this.logger.log('Keycloak Admin Client authenticated') + } catch (error) { + this.logger.error('Failed to authenticate with Keycloak', error) + } + } + + async *getAllGroups() { + let first = 0 + while (true) { + const fetched = await this._client.groups.find({ first, max: 50, briefRepresentation: false }) + if (fetched.length === 0) break + for (const group of fetched) { + yield group + } + if (fetched.length < 50) break + first += 50 + } + } + + // TODO: May return undefined if group not found in the most recent search + async getGroupByName(name: string): Promise { + const groups = await this._client.groups.find({ search: name }) + return groups.find(g => g.name === name) + } + + async getGroupByPath(path: string): Promise { + const parts = path.split('/').filter(Boolean) + let current: GroupRepresentation | undefined + for (const name of parts) { + if (!current) { + current = await this.getGroupByName(name) + } else { + for await (const subgroup of this.getSubGroups(current.id!)) { + if (subgroup.name === name) { + current = subgroup + break + } + } + if (current?.name !== name) return undefined + } + if (!current) 
return undefined + } + return current + } + + async deleteGroup(id: string): Promise { + await this._client.groups.del({ id }) + } + + async getGroupMembers(groupId: string) { + return this._client.groups.listMembers({ id: groupId }) + } + + async createGroup(name: string) { + return this._client.groups.create({ name }) + } + + async addUserToGroup(userId: string, groupId: string) { + return this._client.users.addToGroup({ id: userId, groupId }) + } + + async removeUserFromGroup(userId: string, groupId: string) { + return this._client.users.delFromGroup({ id: userId, groupId }) + } + + async* getSubGroups(parentId: string) { + let first = 0 + while (true) { + const page = await this._client.groups.listSubGroups({ parentId, briefRepresentation: false, max: 10, first }) + if (page.length === 0) break + for (const subgroup of page) { + yield subgroup + } + if (page.length < 10) break + first += 10 + } + } + + async getOrCreateGroupByPath(path: string) { + const existingGroup = await this.getGroupByPath(path) + if (existingGroup) return existingGroup + + const parts = path.split('/').filter(Boolean) + let parentId: string | undefined + let current: GroupRepresentation | undefined + + for (let i = 0; i < parts.length; i++) { + const name = parts[i] + if (!current) { + current = await this.getGroupByName(name) + if (!current) { + current = await this.createGroup(name) + } + } else { + if (!parentId) parentId = current.id! + current = await this.getOrCreateSubGroupByName(parentId, name) + } + parentId = current.id! 
+ } + + return { id: parentId } satisfies GroupRepresentation + } + + async getOrCreateSubGroupByName(parentId: string, name: string) { + for await (const subgroup of this.getSubGroups(parentId)) { + if (subgroup.name === name) return subgroup + } + const createdGroup = await this._client.groups.createChildGroup({ id: parentId }, { name }) + return { id: createdGroup.id } satisfies GroupRepresentation + } + + async getOrCreateConsoleGroup(projectGroup: GroupRepresentation) { + if (!projectGroup.id) { + throw new Error(`Failed to create or retrieve project group for ${projectGroup.name}`) + } + return this.getOrCreateSubGroupByName(projectGroup.id, CONSOLE_GROUP_NAME) + } + + async getOrCreateEnvironmentGroups(consoleGroup: GroupRepresentation, environment: ProjectWithDetails['environments'][number]) { + if (!consoleGroup.id) { + throw new Error(`Failed to create or retrieve console group for ${consoleGroup.name}`) + } + + const envGroup = await this.getOrCreateSubGroupByName(consoleGroup.id, environment.name) + if (!envGroup.id) { + throw new Error(`Failed to create or retrieve environment group for ${environment.name}`) + } + + const [roGroup, rwGroup] = await Promise.all([ + this.getOrCreateSubGroupByName(envGroup.id, 'RO'), + this.getOrCreateSubGroupByName(envGroup.id, 'RW'), + ]) + if (!roGroup.id || !rwGroup.id) { + throw new Error(`Failed to create or retrieve RO and RW groups for ${environment.name}`) + } + + return { roGroup, rwGroup } + } +} diff --git a/apps/server-nestjs/src/modules/vault/vault.module.ts b/apps/server-nestjs/src/modules/vault/vault.module.ts new file mode 100644 index 000000000..5ead00f2e --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault.module.ts @@ -0,0 +1,10 @@ +import { Module } from '@nestjs/common' +import { ConfigurationModule } from '../../cpin-module/infrastructure/configuration/configuration.module' +import { VaultService } from './vault.service' + +@Module({ + imports: [ConfigurationModule], + providers: 
[VaultService], + exports: [VaultService], +}) +export class VaultModule {} diff --git a/apps/server-nestjs/src/modules/vault/vault.service.spec.ts b/apps/server-nestjs/src/modules/vault/vault.service.spec.ts new file mode 100644 index 000000000..471697e73 --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault.service.spec.ts @@ -0,0 +1,136 @@ +import { Test, type TestingModule } from '@nestjs/testing' +import { VaultService } from './vault.service' +import { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' +import { vi, describe, beforeEach, it, expect, type Mock } from 'vitest' + +describe('VaultService', () => { + let service: VaultService + let fetchMock: Mock + + const mockConfigService = { + vaultToken: 'token', + vaultUrl: 'http://vault', + vaultInternalUrl: 'http://vault-internal', + vaultKvName: 'kv', + } + + beforeEach(async () => { + vi.clearAllMocks() + + const module: TestingModule = await Test.createTestingModule({ + providers: [ + VaultService, + { + provide: ConfigurationService, + useValue: mockConfigService, + }, + ], + }).compile() + + service = module.get(VaultService) + + // Mock global fetch + fetchMock = vi.fn() + global.fetch = fetchMock + }) + + describe('getProjectValues', () => { + it('should get project values', async () => { + fetchMock.mockResolvedValue({ + ok: true, + status: 200, + json: async () => ({ data: { data: { secret: 'value' } } }), + }) + + const result = await service.getProjectValues('project-id') + expect(result).toEqual({ secret: 'value' }) + expect(fetchMock).toHaveBeenCalledWith( + 'http://vault-internal/v1/kv/data/project-id', + expect.objectContaining({ + method: 'GET', + headers: expect.objectContaining({ 'X-Vault-Token': 'token' }), + }) + ) + }) + + it('should return empty object if undefined', async () => { + fetchMock.mockResolvedValue({ + ok: false, + status: 404, + }) + + const result = await service.getProjectValues('project-id') + 
expect(result).toEqual({}) + }) + }) + + it('should be defined', () => { + expect(service).toBeDefined() + }) + + describe('read', () => { + it('should read secret', async () => { + fetchMock.mockResolvedValue({ + ok: true, + status: 200, + json: async () => ({ data: { data: { secret: 'value' } } }), + }) + + const result = await service.read('path') + expect(result).toEqual({ secret: 'value' }) + expect(fetchMock).toHaveBeenCalledWith( + 'http://vault-internal/v1/kv/data/path', + expect.objectContaining({ + method: 'GET', + headers: expect.objectContaining({ 'X-Vault-Token': 'token' }), + }) + ) + }) + + it('should return undefined if 404', async () => { + fetchMock.mockResolvedValue({ + ok: false, + status: 404, + }) + + const result = await service.read('path') + expect(result).toBeUndefined() + }) + }) + + describe('write', () => { + it('should write secret', async () => { + fetchMock.mockResolvedValue({ + ok: true, + status: 200, + json: async () => ({}), + }) + + await service.write({ secret: 'value' }, 'path') + expect(fetchMock).toHaveBeenCalledWith( + 'http://vault-internal/v1/kv/data/path', + expect.objectContaining({ + method: 'POST', + body: JSON.stringify({ data: { secret: 'value' } }), + }) + ) + }) + }) + + describe('destroy', () => { + it('should destroy secret', async () => { + fetchMock.mockResolvedValue({ + ok: true, + status: 204, + }) + + await service.destroy('path') + expect(fetchMock).toHaveBeenCalledWith( + 'http://vault-internal/v1/kv/metadata/path', + expect.objectContaining({ + method: 'DELETE', + }) + ) + }) + }) +}) diff --git a/apps/server-nestjs/src/modules/vault/vault.service.ts b/apps/server-nestjs/src/modules/vault/vault.service.ts new file mode 100644 index 000000000..f0f06644a --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault.service.ts @@ -0,0 +1,108 @@ +import { Injectable, Logger } from '@nestjs/common' +import { ConfigurationService } from '@/cpin-module/infrastructure/configuration/configuration.service' + 
+@Injectable() +export class VaultService { + private readonly logger = new Logger(VaultService.name) + private token: string | undefined + + constructor(private readonly config: ConfigurationService) { + this.logger.log('VaultService initialized with config:', config) + } + + async getProjectValues(projectId: string): Promise> { + const path = this.config.projectRootDir + ? `${this.config.projectRootDir}/${projectId}` + : projectId + const values = await this.read(path) + return values || {} + } + + private async request(method: string, path: string, options: { body?: any, token?: string, allow404?: boolean } = {}) { + const url = `${this.config.vaultInternalUrl}${path}` + const headers: Record = { + 'Content-Type': 'application/json', + } + if (options.token) { + headers['X-Vault-Token'] = options.token + } else if (this.config.vaultToken) { + headers['X-Vault-Token'] = this.config.vaultToken + } + + const response = await fetch(url, { + method, + headers, + body: options.body ? JSON.stringify(options.body) : undefined, + }) + + if (options.allow404 && response.status === 404) { + return undefined + } + + if (!response.ok) { + throw new Error(`Vault request failed: ${response.status} ${response.statusText}`) + } + + if (response.status === 204) return undefined + + return response.json() + } + + private async getToken() { + if (!this.token) { + if (this.config.vaultToken) { + try { + const data = await this.request('POST', '/v1/auth/token/create', { token: this.config.vaultToken }) + this.token = data.auth.client_token + } catch (error) { + this.logger.error('Failed to create vault token, falling back to env token', error) + this.token = this.config.vaultToken + } + } + } + return this.token + } + + async read(path: string): Promise { + if (path.startsWith('/')) path = path.slice(1) + try { + const token = await this.getToken() + const data = await this.request('GET', `/v1/${this.config.vaultKvName}/data/${path}`, { + token, + allow404: true, + }) + if (!data) 
return undefined + return data.data.data + } catch (error) { + this.logger.error(`Failed to read vault path ${path}: ${error}`) + throw error + } + } + + async write(data: any, path: string): Promise { + if (path.startsWith('/')) path = path.slice(1) + try { + const token = await this.getToken() + await this.request('POST', `/v1/${this.config.vaultKvName}/data/${path}`, { + token, + body: { data }, + }) + } catch (error) { + this.logger.error(`Failed to write vault path ${path}: ${error}`) + throw error + } + } + + async destroy(path: string): Promise { + if (path.startsWith('/')) path = path.slice(1) + try { + const token = await this.getToken() + await this.request('DELETE', `/v1/${this.config.vaultKvName}/metadata/${path}`, { + token, + }) + } catch (error) { + this.logger.error(`Failed to destroy vault path ${path}: ${error}`) + throw error + } + } +} diff --git a/apps/server-nestjs/vitest.config.ts b/apps/server-nestjs/vitest.config.ts new file mode 100644 index 000000000..edeacaacb --- /dev/null +++ b/apps/server-nestjs/vitest.config.ts @@ -0,0 +1,17 @@ +import { defineConfig } from 'vitest/config' +import path from 'node:path' + +export default defineConfig({ + test: { + globals: true, + environment: 'node', + include: ['src/**/*.spec.ts', 'test/**/*.e2e-spec.ts'], + alias: { + '@': path.resolve(__dirname, './src'), + }, + coverage: { + provider: 'v8', + reporter: ['text', 'json', 'html'], + }, + }, +}) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 947571d75..c341f75c6 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -359,6 +359,12 @@ importers: apps/server-nestjs: dependencies: + '@casl/ability': + specifier: ^6.7.1 + version: 6.8.0 + '@casl/prisma': + specifier: ^1.5.0 + version: 1.6.1(@casl/ability@6.8.0)(@prisma/client@6.19.0(prisma@6.19.0(magicast@0.3.5)(typescript@5.9.3))(typescript@5.9.3)) '@cpn-console/argocd-plugin': specifier: workspace:^ version: 
file:plugins/argocd(@types/node@22.19.3)(typescript@5.9.3)(vitest@2.1.9(@types/node@22.19.3)(jsdom@25.0.1)(terser@5.44.1)) @@ -404,9 +410,15 @@ importers: '@gitbeaker/core': specifier: ^40.6.0 version: 40.6.0 + '@gitbeaker/requester-utils': + specifier: ^40.6.0 + version: 40.6.0 '@gitbeaker/rest': specifier: ^40.6.0 version: 40.6.0 + '@keycloak/keycloak-admin-client': + specifier: ^24.0.0 + version: 24.0.5 '@kubernetes-models/argo-cd': specifier: ^2.6.2 version: 2.7.2 @@ -419,9 +431,15 @@ importers: '@nestjs/core': specifier: ^11.0.1 version: 11.1.11(@nestjs/common@11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/platform-express@11.1.11)(reflect-metadata@0.2.2)(rxjs@7.8.2) + '@nestjs/event-emitter': + specifier: ^3.0.1 + version: 3.0.1(@nestjs/common@11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/core@11.1.11) '@nestjs/platform-express': specifier: ^11.0.1 version: 11.1.11(@nestjs/common@11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/core@11.1.11) + '@nestjs/schedule': + specifier: ^5.0.1 + version: 5.0.1(@nestjs/common@11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/core@11.1.11) '@prisma/client': specifier: ^6.0.1 version: 6.19.0(prisma@6.19.0(magicast@0.3.5)(typescript@5.9.3))(typescript@5.9.3) @@ -434,9 +452,6 @@ importers: '@ts-rest/open-api': specifier: ^3.52.1 version: 3.52.1(@ts-rest/core@3.52.1(@types/node@22.19.3)(zod@3.25.76))(zod@3.25.76) - axios: - specifier: 1.12.2 - version: 1.12.2 date-fns: specifier: ^4.1.0 version: 4.1.0 @@ -449,12 +464,21 @@ importers: fastify-keycloak-adapter: specifier: 2.3.2 version: 2.3.2(patch_hash=6846b953fc520dd1ca6cb2e790cf190cbc3ed9fa9ff69739100458c520293447) + js-yaml: + specifier: ^4.1.1 + version: 4.1.1 json-2-csv: specifier: ^5.5.7 version: 5.5.10 + keycloak-connect: + specifier: ^25.0.0 + version: 25.0.6 mustache: specifier: ^4.2.0 version: 4.2.0 + nest-keycloak-connect: + specifier: ^1.10.1 + version: 
1.10.1(@nestjs/common@11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/core@11.1.11)(keycloak-connect@25.0.6) nestjs-pino: specifier: ^4.5.0 version: 4.5.0(@nestjs/common@11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2))(pino-http@11.0.0)(pino@10.1.0)(rxjs@7.8.2) @@ -510,6 +534,9 @@ importers: '@types/jest': specifier: ^30.0.0 version: 30.0.0 + '@types/js-yaml': + specifier: 4.0.9 + version: 4.0.9 '@types/node': specifier: ^22.10.7 version: 22.19.3 @@ -1818,6 +1845,15 @@ packages: '@cacheable/utils@2.2.0': resolution: {integrity: sha512-7xaQayO3msdVcxXLYcLU5wDqJBNdQcPPPHr6mdTEIQI7N7TbtSVVTpWOTfjyhg0L6AQwQdq7miKdWtTDBoBldQ==} + '@casl/ability@6.8.0': + resolution: {integrity: sha512-Ipt4mzI4gSgnomFdaPjaLgY2MWuXqAEZLrU6qqWBB7khGiBBuuEp6ytYDnq09bRXqcjaeeHiaCvCGFbBA2SpvA==} + + '@casl/prisma@1.6.1': + resolution: {integrity: sha512-VSAzfTMOZvP3Atj3F0qwJItOm1ixIiumjbBz21PL/gLUIDwoktyAx2dB7dPwjH9AQvzZPE629ee7fVU5K2hpzg==} + peerDependencies: + '@casl/ability': ^5.3.0 || ^6.0.0 + '@prisma/client': ^2.14.0 || ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 + '@clack/core@0.4.1': resolution: {integrity: sha512-Pxhij4UXg8KSr7rPek6Zowm+5M22rbd2g1nfojHJkxp5YkFqiZ2+YLEM/XGVIzvGOcM0nqjIFxrpDwWRZYWYjA==} @@ -2751,6 +2787,10 @@ packages: '@jridgewell/trace-mapping@0.3.9': resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + '@keycloak/keycloak-admin-client@24.0.5': + resolution: {integrity: sha512-SXDVtQ3ov7GQbxXq51Uq8lzhwzQwNg6XiY50ZA9whuUe2t/0zPT4Zd/LcULcjweIjSNWWgfbDyN1E3yRSL8Qqw==} + engines: {node: '>=18'} + '@keycloak/keycloak-admin-client@26.4.2': resolution: {integrity: sha512-BDZuV+s9XoYSElHmG/Ul6r/uHzbExRSC3jybBR9CHZ9JDad1PcVwpSVSBTqiJhF7P1OtMm1gnLLS1TMO/QY+8Q==} engines: {node: '>=18'} @@ -2841,12 +2881,24 @@ packages: '@nestjs/websockets': optional: true + '@nestjs/event-emitter@3.0.1': + resolution: {integrity: 
sha512-0Ln/x+7xkU6AJFOcQI9tIhUMXVF7D5itiaQGOyJbXtlAfAIt8gzDdJm+Im7cFzKoWkiW5nCXCPh6GSvdQd/3Dw==} + peerDependencies: + '@nestjs/common': ^10.0.0 || ^11.0.0 + '@nestjs/core': ^10.0.0 || ^11.0.0 + '@nestjs/platform-express@11.1.11': resolution: {integrity: sha512-kyABSskdMRIAMWL0SlbwtDy4yn59RL4HDdwHDz/fxWuv7/53YP8Y2DtV3/sHqY5Er0msMVTZrM38MjqXhYL7gw==} peerDependencies: '@nestjs/common': ^11.0.0 '@nestjs/core': ^11.0.0 + '@nestjs/schedule@5.0.1': + resolution: {integrity: sha512-kFoel84I4RyS2LNPH6yHYTKxB16tb3auAEciFuc788C3ph6nABkUfzX5IE+unjVaRX+3GuruJwurNepMlHXpQg==} + peerDependencies: + '@nestjs/common': ^10.0.0 || ^11.0.0 + '@nestjs/core': ^10.0.0 || ^11.0.0 + '@nestjs/schematics@11.0.9': resolution: {integrity: sha512-0NfPbPlEaGwIT8/TCThxLzrlz3yzDNkfRNpbL7FiplKq3w4qXpJg0JYwqgMEJnLQZm3L/L/5XjoyfJHUO3qX9g==} peerDependencies: @@ -3127,6 +3179,9 @@ packages: '@swc/helpers@0.5.17': resolution: {integrity: sha512-5IKx/Y13RsYd+sauPb2x+U/xZikHjolzfuDgTAl/Tdf3Q8rslRvC19NKDLgAJQ6wsqADk10ntlv08nPFw/gO/A==} + '@testim/chrome-version@1.1.4': + resolution: {integrity: sha512-kIhULpw9TrGYnHp/8VfdcneIcxKnLixmADtukQRtJUmsVlMg0niMkwV0xZmi8hqa57xqilIHjWFA0GKvEjVU5g==} + '@tokenizer/inflate@0.4.1': resolution: {integrity: sha512-2mAv+8pkG6GIZiF1kNg1jAjh27IDxEPKwdGul3snfztFerfPGI1LjDezZp3i7BElXompqEtPmoPx6c2wgtWsOA==} engines: {node: '>=18'} @@ -3134,6 +3189,9 @@ packages: '@tokenizer/token@0.3.0': resolution: {integrity: sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==} + '@tootallnate/quickjs-emscripten@0.23.0': + resolution: {integrity: sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA==} + '@ts-rest/core@3.52.1': resolution: {integrity: sha512-tAjz7Kxq/grJodcTA1Anop4AVRDlD40fkksEV5Mmal88VoZeRKAG8oMHsDwdwPZz+B/zgnz0q2sF+cm5M7Bc7g==} peerDependencies: @@ -3248,6 +3306,9 @@ packages: '@types/lodash@4.17.20': resolution: {integrity: 
sha512-H3MHACvFUEiujabxhaI/ImO6gUrd8oOurg7LQtS7mbwIXA/cUqWrvBsaeJ23aZEPk1TAYkurjfMbSELfoCXlGA==} + '@types/luxon@3.4.2': + resolution: {integrity: sha512-TifLZlFudklWlMBfhubvgqTXRzLDI5pCbGa4P8a3wPyUQSW+1xQ5eDsreP9DWHX3tjq1ke96uYG/nwundroWcA==} + '@types/mdast@4.0.4': resolution: {integrity: sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==} @@ -3379,6 +3440,18 @@ packages: resolution: {integrity: sha512-uk574k8IU0rOF/AjniX8qbLSGURJVUCeM5e4MIMKBFFi8weeiLrG1fyQejyLXQpRZbU/1BuQasleV/RfHC3hHg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@ucast/core@1.10.2': + resolution: {integrity: sha512-ons5CwXZ/51wrUPfoduC+cO7AS1/wRb0ybpQJ9RrssossDxVy4t49QxWoWgfBDvVKsz9VXzBk9z0wqTdZ+Cq8g==} + + '@ucast/js@3.1.0': + resolution: {integrity: sha512-eJ7yQeYtMK85UZjxoxBEbTWx6UMxEXKbjVyp+NlzrT5oMKV5Gpo/9bjTl3r7msaXTVC8iD9NJacqJ8yp7joX+Q==} + + '@ucast/mongo2js@1.4.1': + resolution: {integrity: sha512-9aeg5cmqwRQnKCXHN6I17wk83Rcm487bHelaG8T4vfpWneAI469wSI3Srnbu+PuZ5znWRbnwtVq9RgPL+bN6CA==} + + '@ucast/mongo@2.4.3': + resolution: {integrity: sha512-XcI8LclrHWP83H+7H2anGCEeDq0n+12FU2mXCTz6/Tva9/9ddK/iacvvhCyW6cijAAOILmt0tWplRyRhVyZLsA==} + '@ungap/structured-clone@1.3.0': resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} @@ -3941,6 +4014,10 @@ packages: resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} engines: {node: '>=12'} + ast-types@0.13.4: + resolution: {integrity: sha512-x1FCFnFifvYDDzTaLII71vG5uvDwgtmDTEVWAxrgeiR8VjMONcCXJx7E+USjDtHlwFmt9MysbqgF9b9Vjr6w+w==} + engines: {node: '>=4'} + astral-regex@2.0.0: resolution: {integrity: sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==} engines: {node: '>=8'} @@ -3984,6 +4061,9 @@ packages: axios@1.12.2: resolution: {integrity: 
sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==} + axios@1.13.5: + resolution: {integrity: sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==} + babel-jest@30.2.0: resolution: {integrity: sha512-0YiBEOxWqKkSQWL9nNGGEgndoeL0ZpWrbLMNL5u/Kaxrli3Eaxlt3ZtIDktEvXt4L/R9r3ODr2zKwGM/2BjxVw==} engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} @@ -4037,6 +4117,10 @@ packages: resolution: {integrity: sha512-Sg0xJUNDU1sJNGdfGWhVHX0kkZ+HWcvmVymJbj6NSgZZmW/8S9Y2HQ5euytnIgakgxN6papOAWiwDo1ctFDcoQ==} hasBin: true + basic-ftp@5.2.0: + resolution: {integrity: sha512-VoMINM2rqJwJgfdHq6RiUudKt2BV+FY5ZFezP/ypmwayk68+NzzAQy4XXLlqsGD4MCzq3DrmNFD/uUmBJuGoXw==} + engines: {node: '>=10.0.0'} + bcrypt-pbkdf@1.0.2: resolution: {integrity: sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==} @@ -4221,6 +4305,11 @@ packages: resolution: {integrity: sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ==} engines: {node: '>=6.0'} + chromedriver@146.0.0: + resolution: {integrity: sha512-fDAbuEy+Dn9F/h8fphiQIUEyUDOTGlfjZHfI9dJZz75+ui/LIHqWzStQt87vpwA9oV3ut4C2W3flfvbn3KELFQ==} + engines: {node: '>=20'} + hasBin: true + ci-info@4.3.1: resolution: {integrity: sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==} engines: {node: '>=8'} @@ -4350,6 +4439,9 @@ packages: compare-func@2.0.0: resolution: {integrity: sha512-zHig5N+tPWARooBnb0Zx1MFcdfpyJrfTJ3Y5L+IFvUm8rM74hHz66z0gw0x4tijh5CorKkKUCnW82R2vmpeCRA==} + compare-versions@6.1.1: + resolution: {integrity: sha512-4hm4VPpIecmlg59CHXnRDnqGplJFrbLG4aFEl5vl6cK1u76ws3LLvX7ikFnTDl5vo39sjWD6AaDPYodJp/NNHg==} + component-emitter@1.3.1: resolution: {integrity: sha512-T0+barUSQRTUQASh8bx02dl+DhF54GtIDY13Y3m9oWTklKbb3Wv974meRpeZ3lp1JpLVECWWNHC4vaG2XHXouQ==} @@ -4457,6 +4549,9 @@ packages: cron-validator@1.4.0: resolution: {integrity: 
sha512-wGcJ9FCy65iaU6egSH8b5dZYJF7GU/3Jh06wzaT9lsa5dbqExjljmu+0cJ8cpKn+vUyZa/EM4WAxeLR6SypJXw==} + cron@3.5.0: + resolution: {integrity: sha512-0eYZqCnapmxYcV06uktql93wNWdlTmmBFP2iYz+JPVcQqlyFYcn1lFuIk4R54pkOmE7mcldTAPZv6X5XA4Q46A==} + cross-spawn@6.0.6: resolution: {integrity: sha512-VqCUuhcd1iB+dsv8gxPttb5iZh/D0iubSP21g36KXdEuf6I5JiioesUVjpCdHV9MZRUfVFlvwtIUyPfxo5trtw==} engines: {node: '>=4.8'} @@ -4507,6 +4602,10 @@ packages: resolution: {integrity: sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==} engines: {node: '>=0.10'} + data-uri-to-buffer@6.0.2: + resolution: {integrity: sha512-7hvf7/GW8e86rW0ptuwS3OcBGDjIi6SZva7hCyWC0yYry2cOPmLIjXAUHI6DK2HsnwJd9ifmt57i8eV2n4YNpw==} + engines: {node: '>= 14'} + data-urls@5.0.0: resolution: {integrity: sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg==} engines: {node: '>=18'} @@ -4543,6 +4642,15 @@ packages: supports-color: optional: true + debug@4.3.1: + resolution: {integrity: sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + debug@4.4.3: resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} engines: {node: '>=6.0'} @@ -4599,6 +4707,10 @@ packages: defu@6.1.4: resolution: {integrity: sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==} + degenerator@5.0.1: + resolution: {integrity: sha512-TllpMR/t0M5sqCXfj85i4XaAzxmS5tVA16dqvdkMwGmzI+dXLXnw3J+3Vdv7VKw+ThlTMboK6i9rnZ6Nntj5CQ==} + engines: {node: '>= 14'} + delayed-stream@1.0.0: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} @@ -4830,6 +4942,11 @@ packages: resolution: {integrity: 
sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} engines: {node: '>=12'} + escodegen@2.1.0: + resolution: {integrity: sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==} + engines: {node: '>=6.0'} + hasBin: true + eslint-compat-utils@0.5.1: resolution: {integrity: sha512-3z3vFexKIEnjHE3zCMRo6fn/e44U7T1khUjg+Hp0ZQMCigh28rALD0nPFBcGZuiLC5rLZa2ubQHDRln09JfU2Q==} engines: {node: '>=12'} @@ -5068,6 +5185,9 @@ packages: eventemitter2@6.4.7: resolution: {integrity: sha512-tYUSVOGeQPKt/eC1ABfhHy5Xd96N3oIijJvN3O9+TsC28T5V9yX9oEfEK5faP0EFSNVOG97qtAS68GBrQB2hDg==} + eventemitter2@6.4.9: + resolution: {integrity: sha512-JEPTiaOt9f04oa6NOkc4aH+nVp5I3wEjpHbIPqfgCdD5v5bUzy7xQqwcVO2aDQgOWhI28da57HksMrzK9HlRxg==} + eventemitter3@5.0.1: resolution: {integrity: sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==} @@ -5275,6 +5395,15 @@ packages: focus-trap@7.6.6: resolution: {integrity: sha512-v/Z8bvMCajtx4mEXmOo7QEsIzlIOqRXTIwgUfsFOF9gEsespdbD0AkPIka1bSXZ8Y8oZ+2IVDQZePkTfEHZl7Q==} + follow-redirects@1.15.11: + resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + follow-redirects@1.15.9: resolution: {integrity: sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==} engines: {node: '>=4.0'} @@ -5413,6 +5542,10 @@ packages: get-tsconfig@4.13.0: resolution: {integrity: sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ==} + get-uri@6.0.5: + resolution: {integrity: sha512-b1O07XYq8eRuVzBNgJLstU6FYc1tS6wnMtF1I1D9lE8LxZSOGZ7LhxN54yPP6mGw5f2CkXY2BQUL9Fx41qvcIg==} + engines: {node: '>= 14'} + getpass@0.1.7: resolution: {integrity: 
sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==} @@ -5441,11 +5574,13 @@ packages: glob@10.4.5: resolution: {integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==} + deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me hasBin: true glob@11.0.3: resolution: {integrity: sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==} engines: {node: 20 || >=22} + deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me hasBin: true glob@13.0.0: @@ -5454,7 +5589,7 @@ packages: glob@7.2.3: resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} - deprecated: Glob versions prior to v9 are no longer supported + deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me global-directory@4.0.1: resolution: {integrity: sha512-wHTUcDUoZ1H5/0iVqEudYW4/kAlN5cZ3j/bXn0Dpbizl9iaUVeWSHqiOjsgk6OW2bkLclbBjzewBz6weQ1zA2Q==} @@ -5727,6 +5862,10 @@ packages: peerDependencies: fp-ts: ^2.5.0 + ip-address@10.1.0: + resolution: {integrity: sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q==} + engines: {node: '>= 12'} + ip-regex@4.3.0: resolution: {integrity: sha512-B9ZWJxHHOHUhUjCPrMpLD4xEq35bUTClHM1S6CBU5ixQnkZmwipwgc96vAd7AAGM9TGHvJR+Uss+/Ak6UphK+Q==} engines: {node: '>=8'} @@ -5914,6 +6053,9 @@ packages: resolution: {integrity: sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==} engines: {node: '>=10'} + is-url@1.2.4: + resolution: {integrity: sha512-ITvGim8FhRiYe4IQ5uHSkj7pVaPDrCTkNd3yq3cV7iZAcJdHTUMPMEHcqSOy9xZ9qFenQCvi+2wjH9a1nXqHww==} + is-weakmap@2.0.2: resolution: {integrity: sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==} engines: {node: '>= 0.4'} @@ -5926,6 +6068,10 @@ packages: resolution: {integrity: sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==} engines: {node: '>= 0.4'} + is2@2.0.9: + resolution: {integrity: sha512-rZkHeBn9Zzq52sd9IUIV3a5mfwBY+o2HePMh0wkGBM4z4qjvy2GwVxQ6nNXSfw6MmVP6gf1QIlWjiOavhM3x5g==} + engines: {node: '>=v0.10.0'} + isarray@1.0.0: resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==} @@ -6135,6 +6281,10 @@ packages: resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} hasBin: true + js-yaml@4.1.1: + resolution: {integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==} + hasBin: true + jsbn@0.1.1: resolution: {integrity: 
sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==} @@ -6238,6 +6388,10 @@ packages: jws@4.0.0: resolution: {integrity: sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==} + keycloak-connect@25.0.6: + resolution: {integrity: sha512-UbOj4ee2u1LYNff5rkcVuWxc/GTaoga6TKg+/ylJd7djaGh20HVI3qmAVxfGme3BZPIa6/pxEIDpK4KQn+xx1w==} + engines: {node: '>=14'} + keycloak-js@26.2.1: resolution: {integrity: sha512-bZt6fQj/TLBAmivXSxSlqAJxBx/knNZDQGJIW4ensGYGN4N6tUKV8Zj3Y7/LOV8eIpvWsvqV70fbACihK8Ze0Q==} @@ -6401,6 +6555,14 @@ packages: lru-cache@5.1.1: resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} + lru-cache@7.18.3: + resolution: {integrity: sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==} + engines: {node: '>=12'} + + luxon@3.5.0: + resolution: {integrity: sha512-rh+Zjr6DNfUYR3bPwJEnuwDdqMbxZW7LOQfUN4B54+Cl+0o5zaU9RJ6bcidfDtC1cWCZXQ+nvX8bf6bAji37QQ==} + engines: {node: '>=12'} + magic-string@0.25.9: resolution: {integrity: sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==} @@ -6689,6 +6851,9 @@ packages: resolution: {integrity: sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==} engines: {node: '>=10'} + ms@2.1.2: + resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} @@ -6745,6 +6910,17 @@ packages: neo-async@2.6.2: resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==} + nest-keycloak-connect@1.10.1: + resolution: {integrity: 
sha512-tvAYOTPFnxDnQI06jrtcJa6UhyqVtah6V/XwRrNCCL2mklPYnfllGMgVJX0sc3Mca5yJiTVDZOoWruSxnM5qtg==} + peerDependencies: + '@nestjs/common': '>=6.0.0 <11.0.0' + '@nestjs/core': '>=6.0.0 <11.0.0' + '@nestjs/graphql': '>=6' + keycloak-connect: '>=10.0.0' + peerDependenciesMeta: + '@nestjs/graphql': + optional: true + nestjs-pino@4.5.0: resolution: {integrity: sha512-e54ChJMACSGF8gPYaHsuD07RW7l/OVoV6aI8Hqhpp0ZQ4WA8QY3eewL42JX7Z1U6rV7byNU7bGBV9l6d9V6PDQ==} engines: {node: '>= 14'} @@ -6754,6 +6930,10 @@ packages: pino-http: ^6.4.0 || ^7.0.0 || ^8.0.0 || ^9.0.0 || ^10.0.0 || ^11.0.0 rxjs: ^7.1.0 + netmask@2.0.2: + resolution: {integrity: sha512-dBpDMdxv9Irdq66304OLfEmQ9tbNRFnFTuZiLo+bD+r332bBmMJ8GBLXklIXXgxd3+v9+KUnZaUR5PJMa75Gsg==} + engines: {node: '>= 0.4.0'} + nice-try@1.0.5: resolution: {integrity: sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==} @@ -6948,6 +7128,14 @@ packages: resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} engines: {node: '>=6'} + pac-proxy-agent@7.2.0: + resolution: {integrity: sha512-TEB8ESquiLMc0lV8vcd5Ql/JAKAoyzHFXaStwjkzpOpC5Yv+pIzLfHvjTSdf3vpa2bMiUQrg9i6276yn8666aA==} + engines: {node: '>= 14'} + + pac-resolver@7.0.1: + resolution: {integrity: sha512-5NPgf87AT2STgwa2ntRMr45jTKrYBGkVU36yT0ig/n/GMAa3oPqhZfIQ2kMEimReg0+t9kZViDVZ83qfVUlckg==} + engines: {node: '>= 14'} + package-json-from-dist@1.0.1: resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} @@ -7233,12 +7421,19 @@ packages: resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} engines: {node: '>= 0.10'} + proxy-agent@6.5.0: + resolution: {integrity: sha512-TmatMXdr2KlRiA2CyDu8GqR8EjahTG3aY3nXjdzFyoZbmB8hrBsTyMezhULIXKnC0jpfjlmiZ3+EaCzoInSu/A==} + engines: {node: '>= 14'} + proxy-from-env@1.0.0: resolution: {integrity: 
sha512-F2JHgJQ1iqwnHDcQjVBsq3n/uoaFL+iPW/eAeL7kVxy/2RrWaN4WroKjjvbsoRtv0ftelNyC01bjRhn/bhcf4A==} proxy-from-env@1.1.0: resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==} + proxy-from-env@2.0.0: + resolution: {integrity: sha512-h2lD3OfRraP3R51rNFKIE8nX+qoLr1mE74X91YhVxtDbt+OD6ntoNZv56+JgI4RCdtwQ5eexsOk1KdOQDfvPCQ==} + pstree.remy@1.1.8: resolution: {integrity: sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==} @@ -7688,6 +7883,10 @@ packages: resolution: {integrity: sha512-iOBWFgUX7caIZiuutICxVgX1SdxwAVFFKwt1EvMYYec/NWO5meOJ6K5uQxhrYBdQJne4KxiqZc+KptFOWFSI9w==} engines: {node: '>=18'} + smart-buffer@4.2.0: + resolution: {integrity: sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==} + engines: {node: '>= 6.0.0', npm: '>= 3.0.0'} + smob@1.5.0: resolution: {integrity: sha512-g6T+p7QO8npa+/hNx9ohv1E5pVCmWrVCUzUXJyLdMmftX6ER0oiWY/w9knEonLpnOp6b6FenKnMfR8gqwWdwig==} @@ -7695,6 +7894,14 @@ packages: resolution: {integrity: sha512-Gz11jbNU0plrReU9Sj7fmshSBxxJ9ShdD2q4ktHIHo/rpTH6lFyQoYHYKINPJtPe8aHFnsbtW46Ls0tCCBsIZg==} engines: {node: '>=0.10'} + socks-proxy-agent@8.0.5: + resolution: {integrity: sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw==} + engines: {node: '>= 14'} + + socks@2.8.7: + resolution: {integrity: sha512-HLpt+uLy/pxB+bum/9DzAgiKS8CX1EvbWxI4zlmgGCExImLdiad2iCwXT5Z4c9c3Eq8rP2318mPW2c+QbtjK8A==} + engines: {node: '>= 10.0.0', npm: '>= 3.0.0'} + sonic-boom@4.2.0: resolution: {integrity: sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww==} @@ -7927,6 +8134,7 @@ packages: supertest@7.2.1: resolution: {integrity: sha512-/OfhUL9WRLfoovZuWJ4l+2GVz3Eoo8Eo2TZVs9QxF2kmxdrmK7rCww4iJBstHevUH/M44aJ9TMN7yB+W+oWxlA==} engines: {node: '>=14.18.0'} + deprecated: Please upgrade to v7.2.2+ as we fixed an issue 
https://github.com/forwardemail/supertest/issues/875 supports-color@5.5.0: resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} @@ -7991,6 +8199,9 @@ packages: resolution: {integrity: sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==} engines: {node: '>=6'} + tcp-port-used@1.0.2: + resolution: {integrity: sha512-l7ar8lLUD3XS1V2lfoJlCBaeoaWo/2xfYt81hM7VlvR4RrMVFqfmzfhLVk40hAb368uitje5gPtBRL1m/DGvLA==} + temp-dir@2.0.0: resolution: {integrity: sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg==} engines: {node: '>=8'} @@ -8716,6 +8927,7 @@ packages: whatwg-encoding@3.1.1: resolution: {integrity: sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==} engines: {node: '>=18'} + deprecated: Use @exodus/bytes instead for a more spec-conformant and faster implementation whatwg-mimetype@4.0.0: resolution: {integrity: sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==} @@ -9854,6 +10066,17 @@ snapshots: dependencies: keyv: 5.5.3 + '@casl/ability@6.8.0': + dependencies: + '@ucast/mongo2js': 1.4.1 + + '@casl/prisma@1.6.1(@casl/ability@6.8.0)(@prisma/client@6.19.0(prisma@6.19.0(magicast@0.3.5)(typescript@5.9.3))(typescript@5.9.3))': + dependencies: + '@casl/ability': 6.8.0 + '@prisma/client': 6.19.0(prisma@6.19.0(magicast@0.3.5)(typescript@5.9.3))(typescript@5.9.3) + '@ucast/core': 1.10.2 + '@ucast/js': 3.1.0 + '@clack/core@0.4.1': dependencies: picocolors: 1.1.1 @@ -10396,7 +10619,7 @@ snapshots: globals: 14.0.0 ignore: 5.3.2 import-fresh: 3.3.1 - js-yaml: 4.1.0 + js-yaml: 4.1.1 minimatch: 3.1.2 strip-json-comments: 3.1.1 transitivePeerDependencies: @@ -10520,7 +10743,7 @@ snapshots: '@gitbeaker/requester-utils@40.6.0': dependencies: picomatch-browser: 2.2.6 - qs: 6.14.0 + qs: 6.14.1 rate-limiter-flexible: 4.0.1 xcase: 2.0.1 @@ -10952,6 
+11175,12 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.5 + '@keycloak/keycloak-admin-client@24.0.5': + dependencies: + camelize-ts: 3.0.0 + url-join: 5.0.0 + url-template: 3.1.1 + '@keycloak/keycloak-admin-client@26.4.2': dependencies: camelize-ts: 3.0.0 @@ -11066,6 +11295,12 @@ snapshots: optionalDependencies: '@nestjs/platform-express': 11.1.11(@nestjs/common@11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/core@11.1.11) + '@nestjs/event-emitter@3.0.1(@nestjs/common@11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/core@11.1.11)': + dependencies: + '@nestjs/common': 11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2) + '@nestjs/core': 11.1.11(@nestjs/common@11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/platform-express@11.1.11)(reflect-metadata@0.2.2)(rxjs@7.8.2) + eventemitter2: 6.4.9 + '@nestjs/platform-express@11.1.11(@nestjs/common@11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/core@11.1.11)': dependencies: '@nestjs/common': 11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2) @@ -11078,6 +11313,12 @@ snapshots: transitivePeerDependencies: - supports-color + '@nestjs/schedule@5.0.1(@nestjs/common@11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/core@11.1.11)': + dependencies: + '@nestjs/common': 11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2) + '@nestjs/core': 11.1.11(@nestjs/common@11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/platform-express@11.1.11)(reflect-metadata@0.2.2)(rxjs@7.8.2) + cron: 3.5.0 + '@nestjs/schematics@11.0.9(chokidar@4.0.3)(typescript@5.9.3)': dependencies: '@angular-devkit/core': 19.2.17(chokidar@4.0.3) @@ -11324,6 +11565,9 @@ snapshots: dependencies: tslib: 2.8.1 + '@testim/chrome-version@1.1.4': + optional: true + '@tokenizer/inflate@0.4.1': dependencies: debug: 4.4.3(supports-color@8.1.1) @@ -11333,6 +11577,9 @@ snapshots: '@tokenizer/token@0.3.0': {} + '@tootallnate/quickjs-emscripten@0.23.0': + optional: true + '@ts-rest/core@3.52.1(@types/node@22.19.3)(zod@3.25.76)': 
optionalDependencies: '@types/node': 22.19.3 @@ -11480,6 +11727,8 @@ snapshots: '@types/lodash@4.17.20': {} + '@types/luxon@3.4.2': {} + '@types/mdast@4.0.4': dependencies: '@types/unist': 3.0.3 @@ -11648,6 +11897,22 @@ snapshots: '@typescript-eslint/types': 8.46.3 eslint-visitor-keys: 4.2.1 + '@ucast/core@1.10.2': {} + + '@ucast/js@3.1.0': + dependencies: + '@ucast/core': 1.10.2 + + '@ucast/mongo2js@1.4.1': + dependencies: + '@ucast/core': 1.10.2 + '@ucast/js': 3.1.0 + '@ucast/mongo': 2.4.3 + + '@ucast/mongo@2.4.3': + dependencies: + '@ucast/core': 1.10.2 + '@ungap/structured-clone@1.3.0': {} '@unocss/astro@66.5.4(vite@7.2.1(@types/node@24.10.0)(jiti@2.6.1)(terser@5.44.1)(tsx@4.19.3)(yaml@2.8.1))': @@ -12325,6 +12590,11 @@ snapshots: assertion-error@2.0.1: {} + ast-types@0.13.4: + dependencies: + tslib: 2.8.1 + optional: true + astral-regex@2.0.0: {} async-function@1.0.0: {} @@ -12365,6 +12635,15 @@ snapshots: transitivePeerDependencies: - debug + axios@1.13.5: + dependencies: + follow-redirects: 1.15.11 + form-data: 4.0.5 + proxy-from-env: 1.1.0 + transitivePeerDependencies: + - debug + optional: true + babel-jest@30.2.0(@babel/core@7.28.5): dependencies: '@babel/core': 7.28.5 @@ -12449,6 +12728,9 @@ snapshots: baseline-browser-mapping@2.9.11: {} + basic-ftp@5.2.0: + optional: true + bcrypt-pbkdf@1.0.2: dependencies: tweetnacl: 0.14.5 @@ -12478,7 +12760,7 @@ snapshots: http-errors: 2.0.0 iconv-lite: 0.7.1 on-finished: 2.4.1 - qs: 6.14.0 + qs: 6.14.1 raw-body: 3.0.2 type-is: 2.0.1 transitivePeerDependencies: @@ -12499,8 +12781,7 @@ snapshots: dependencies: fill-range: 7.1.1 - brorand@1.1.0: - optional: true + brorand@1.1.0: {} browserslist@4.28.1: dependencies: @@ -12668,6 +12949,20 @@ snapshots: chrome-trace-event@1.0.4: {} + chromedriver@146.0.0: + dependencies: + '@testim/chrome-version': 1.1.4 + axios: 1.13.5 + compare-versions: 6.1.1 + extract-zip: 2.0.1(supports-color@8.1.1) + proxy-agent: 6.5.0 + proxy-from-env: 2.0.0 + tcp-port-used: 1.0.2 + 
transitivePeerDependencies: + - debug + - supports-color + optional: true + ci-info@4.3.1: {} cidr-regex@3.1.1: @@ -12788,6 +13083,9 @@ snapshots: array-ify: 1.0.0 dot-prop: 5.3.0 + compare-versions@6.1.1: + optional: true + component-emitter@1.3.1: {} concat-map@0.0.1: {} @@ -12864,7 +13162,7 @@ snapshots: cosmiconfig@8.3.6(typescript@5.9.3): dependencies: import-fresh: 3.3.1 - js-yaml: 4.1.0 + js-yaml: 4.1.1 parse-json: 5.2.0 path-type: 4.0.0 optionalDependencies: @@ -12883,6 +13181,11 @@ snapshots: cron-validator@1.4.0: {} + cron@3.5.0: + dependencies: + '@types/luxon': 3.4.2 + luxon: 3.5.0 + cross-spawn@6.0.6: dependencies: nice-try: 1.0.5 @@ -12978,6 +13281,9 @@ snapshots: assert-plus: 1.0.0 optional: true + data-uri-to-buffer@6.0.2: + optional: true + data-urls@5.0.0: dependencies: whatwg-mimetype: 4.0.0 @@ -13017,6 +13323,11 @@ snapshots: supports-color: 8.1.1 optional: true + debug@4.3.1: + dependencies: + ms: 2.1.2 + optional: true + debug@4.4.3(supports-color@5.5.0): dependencies: ms: 2.1.3 @@ -13065,6 +13376,13 @@ snapshots: defu@6.1.4: {} + degenerator@5.0.1: + dependencies: + ast-types: 0.13.4 + escodegen: 2.1.0 + esprima: 4.0.1 + optional: true + delayed-stream@1.0.0: {} delegate@3.2.0: {} @@ -13172,7 +13490,6 @@ snapshots: inherits: 2.0.4 minimalistic-assert: 1.0.1 minimalistic-crypto-utils: 1.0.1 - optional: true emittery@0.13.1: {} @@ -13419,6 +13736,15 @@ snapshots: escape-string-regexp@5.0.0: {} + escodegen@2.1.0: + dependencies: + esprima: 4.0.1 + estraverse: 5.3.0 + esutils: 2.0.3 + optionalDependencies: + source-map: 0.6.1 + optional: true + eslint-compat-utils@0.5.1(eslint@9.39.1(jiti@2.6.1)): dependencies: eslint: 9.39.1(jiti@2.6.1) @@ -13743,6 +14069,8 @@ snapshots: eventemitter2@6.4.7: optional: true + eventemitter2@6.4.9: {} + eventemitter3@5.0.1: {} events@3.3.0: {} @@ -14065,6 +14393,9 @@ snapshots: dependencies: tabbable: 6.3.0 + follow-redirects@1.15.11: + optional: true + follow-redirects@1.15.9: {} for-each@0.3.5: @@ -14211,6 
+14542,15 @@ snapshots: dependencies: resolve-pkg-maps: 1.0.0 + get-uri@6.0.5: + dependencies: + basic-ftp: 5.2.0 + data-uri-to-buffer: 6.0.2 + debug: 4.4.3(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + optional: true + getpass@0.1.7: dependencies: assert-plus: 1.0.0 @@ -14389,7 +14729,6 @@ snapshots: dependencies: inherits: 2.0.4 minimalistic-assert: 1.0.1 - optional: true hasha@5.2.2: dependencies: @@ -14412,7 +14751,6 @@ snapshots: hash.js: 1.1.7 minimalistic-assert: 1.0.1 minimalistic-crypto-utils: 1.0.1 - optional: true hookified@1.12.2: {} @@ -14543,6 +14881,9 @@ snapshots: dependencies: fp-ts: 2.16.9 + ip-address@10.1.0: + optional: true + ip-regex@4.3.0: {} ipaddr.js@1.9.1: {} @@ -14708,6 +15049,9 @@ snapshots: is-unicode-supported@0.1.0: {} + is-url@1.2.4: + optional: true + is-weakmap@2.0.2: {} is-weakref@1.1.1: @@ -14719,6 +15063,13 @@ snapshots: call-bound: 1.0.4 get-intrinsic: 1.3.0 + is2@2.0.9: + dependencies: + deep-is: 0.1.4 + ip-regex: 4.3.0 + is-url: 1.2.4 + optional: true + isarray@1.0.0: {} isarray@2.0.5: {} @@ -15118,6 +15469,10 @@ snapshots: dependencies: argparse: 2.0.1 + js-yaml@4.1.1: + dependencies: + argparse: 2.0.1 + jsbn@0.1.1: optional: true @@ -15239,7 +15594,6 @@ snapshots: asn1.js: 5.4.1 elliptic: 6.6.1 safe-buffer: 5.2.1 - optional: true jws@4.0.0: dependencies: @@ -15247,6 +15601,15 @@ snapshots: safe-buffer: 5.2.1 optional: true + keycloak-connect@25.0.6: + dependencies: + jwk-to-pem: 2.0.7 + optionalDependencies: + chromedriver: 146.0.0 + transitivePeerDependencies: + - debug + - supports-color + keycloak-js@26.2.1: {} keyv@4.5.4: @@ -15420,6 +15783,11 @@ snapshots: dependencies: yallist: 3.1.1 + lru-cache@7.18.3: + optional: true + + luxon@3.5.0: {} + magic-string@0.25.9: dependencies: sourcemap-codec: 1.4.8 @@ -15820,8 +16188,7 @@ snapshots: minimalistic-assert@1.0.1: {} - minimalistic-crypto-utils@1.0.1: - optional: true + minimalistic-crypto-utils@1.0.1: {} minimatch@10.1.1: dependencies: @@ 
-15862,6 +16229,9 @@ snapshots: mrmime@2.0.1: {} + ms@2.1.2: + optional: true + ms@2.1.3: {} muggle-string@0.4.1: {} @@ -15903,6 +16273,12 @@ snapshots: neo-async@2.6.2: {} + nest-keycloak-connect@1.10.1(@nestjs/common@11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/core@11.1.11)(keycloak-connect@25.0.6): + dependencies: + '@nestjs/common': 11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2) + '@nestjs/core': 11.1.11(@nestjs/common@11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/platform-express@11.1.11)(reflect-metadata@0.2.2)(rxjs@7.8.2) + keycloak-connect: 25.0.6 + nestjs-pino@4.5.0(@nestjs/common@11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2))(pino-http@11.0.0)(pino@10.1.0)(rxjs@7.8.2): dependencies: '@nestjs/common': 11.1.11(reflect-metadata@0.2.2)(rxjs@7.8.2) @@ -15910,6 +16286,9 @@ snapshots: pino-http: 11.0.0 rxjs: 7.8.2 + netmask@2.0.2: + optional: true + nice-try@1.0.5: {} node-abort-controller@3.1.1: {} @@ -16141,6 +16520,26 @@ snapshots: p-try@2.2.0: {} + pac-proxy-agent@7.2.0: + dependencies: + '@tootallnate/quickjs-emscripten': 0.23.0 + agent-base: 7.1.4 + debug: 4.4.3(supports-color@8.1.1) + get-uri: 6.0.5 + http-proxy-agent: 7.0.2 + https-proxy-agent: 7.0.6 + pac-resolver: 7.0.1 + socks-proxy-agent: 8.0.5 + transitivePeerDependencies: + - supports-color + optional: true + + pac-resolver@7.0.1: + dependencies: + degenerator: 5.0.1 + netmask: 2.0.2 + optional: true + package-json-from-dist@1.0.1: {} package-manager-detector@1.5.0: {} @@ -16413,11 +16812,28 @@ snapshots: forwarded: 0.2.0 ipaddr.js: 1.9.1 + proxy-agent@6.5.0: + dependencies: + agent-base: 7.1.4 + debug: 4.4.3(supports-color@8.1.1) + http-proxy-agent: 7.0.2 + https-proxy-agent: 7.0.6 + lru-cache: 7.18.3 + pac-proxy-agent: 7.2.0 + proxy-from-env: 1.1.0 + socks-proxy-agent: 8.0.5 + transitivePeerDependencies: + - supports-color + optional: true + proxy-from-env@1.0.0: optional: true proxy-from-env@1.1.0: {} + proxy-from-env@2.0.0: + optional: true + pstree.remy@1.1.8: {} pump@3.0.3: @@ 
-16591,7 +17007,7 @@ snapshots: request-oauth@1.0.1: dependencies: oauth-sign: 0.9.0 - qs: 6.14.0 + qs: 6.14.1 uuid: 8.3.2 request-progress@3.0.0: @@ -16945,12 +17361,30 @@ snapshots: ansi-styles: 6.2.3 is-fullwidth-code-point: 5.1.0 + smart-buffer@4.2.0: + optional: true + smob@1.5.0: {} smtp-address-parser@1.1.0: dependencies: nearley: 2.20.1 + socks-proxy-agent@8.0.5: + dependencies: + agent-base: 7.1.4 + debug: 4.4.3(supports-color@8.1.1) + socks: 2.8.7 + transitivePeerDependencies: + - supports-color + optional: true + + socks@2.8.7: + dependencies: + ip-address: 10.1.0 + smart-buffer: 4.2.0 + optional: true + sonic-boom@4.2.0: dependencies: atomic-sleep: 1.0.0 @@ -17336,6 +17770,14 @@ snapshots: tapable@2.3.0: {} + tcp-port-used@1.0.2: + dependencies: + debug: 4.3.1 + is2: 2.0.9 + transitivePeerDependencies: + - supports-color + optional: true + temp-dir@2.0.0: {} tempy@0.6.0: