diff --git a/.env.example b/.env.example index 10ee5100..9f8d3d6e 100644 --- a/.env.example +++ b/.env.example @@ -12,7 +12,7 @@ API_TOKEN='my-api-token' UI_AUTH_EXPIRE_HOURS='2' # Storage details -DATA_STORAGE=fs # could be s3 +DATA_STORAGE=fs # could be s3 or azure # S3 related configuration if DATA_STORAGE is "s3" S3_ENDPOINT="s3.endpoint", @@ -21,4 +21,10 @@ S3_SECRET_KEY="some_secret_key" S3_PORT=9000 # optional S3_REGION="us-east-1" S3_BUCKET="bucket_name" # by default "playwright-reports-server" -S3_BATCH_SIZE=10 # by default 10 \ No newline at end of file +S3_BATCH_SIZE=10 # by default 10 + +# Azure Blob Storage configuration if DATA_STORAGE is "azure" +AZURE_ACCOUNT_NAME="your_storage_account_name" +AZURE_ACCOUNT_KEY="your_storage_account_key" +AZURE_CONTAINER="playwright-reports-server" # by default "playwright-reports-server" +AZURE_BATCH_SIZE=10 # by default 10 \ No newline at end of file diff --git a/app/config/env.ts b/app/config/env.ts index 115ad8ea..b2123883 100644 --- a/app/config/env.ts +++ b/app/config/env.ts @@ -13,6 +13,10 @@ export const env = cleanEnv(process.env, { S3_REGION: str({ desc: 'S3 region', default: undefined }), S3_BUCKET: str({ desc: 'S3 bucket', default: 'playwright-reports-server' }), S3_BATCH_SIZE: num({ desc: 'S3 batch size', default: 10 }), + AZURE_ACCOUNT_NAME: str({ desc: 'Azure Storage account name', default: undefined }), + AZURE_ACCOUNT_KEY: str({ desc: 'Azure Storage account key', default: undefined }), + AZURE_CONTAINER: str({ desc: 'Azure Storage container name', default: 'playwright-reports-server' }), + AZURE_BATCH_SIZE: num({ desc: 'Azure batch size', default: 10 }), RESULT_EXPIRE_DAYS: num({ desc: 'How much days to keep results', default: undefined }), RESULT_EXPIRE_CRON_SCHEDULE: str({ desc: 'Cron schedule for results cleanup', default: '33 3 * * *' }), REPORT_EXPIRE_DAYS: num({ desc: 'How much days to keep reports', default: undefined }), diff --git a/app/lib/storage/azure.ts b/app/lib/storage/azure.ts new 
file mode 100644 index 00000000..588826a2 --- /dev/null +++ b/app/lib/storage/azure.ts @@ -0,0 +1,850 @@ +import { randomUUID, type UUID } from 'crypto'; +import fs from 'fs/promises'; +import path, { posix as posixPath } from 'node:path'; +import { PassThrough, Readable } from 'node:stream'; + +import { + BlobServiceClient, + BlobSASPermissions, + StorageSharedKeyCredential, + ContainerClient, +} from '@azure/storage-blob'; + +import { processBatch } from './batch'; +import { + Result, + Report, + ResultDetails, + ServerDataInfo, + isReportHistory, + ReadReportsInput, + ReadReportsOutput, + ReadResultsInput, + ReadResultsOutput, + ReportHistory, + ReportMetadata, + Storage, +} from './types'; +import { bytesToString } from './format'; +import { + REPORTS_FOLDER, + TMP_FOLDER, + REPORTS_BUCKET, + RESULTS_BUCKET, + REPORTS_PATH, + REPORT_METADATA_FILE, + APP_CONFIG_S3, + DATA_PATH, + DATA_FOLDER, +} from './constants'; +import { handlePagination } from './pagination'; +import { getFileReportID } from './file'; + +import { parse } from '@/app/lib/parser'; +import { serveReportRoute } from '@/app/lib/constants'; +import { generatePlaywrightReport } from '@/app/lib/pw'; +import { withError } from '@/app/lib/withError'; +import { env } from '@/app/config/env'; +import { SiteWhiteLabelConfig } from '@/app/types'; +import { defaultConfig, isConfigValid } from '@/app/lib/config'; + +const getTimestamp = (date?: Date | string) => { + if (!date) return 0; + if (typeof date === 'string') return new Date(date).getTime(); + + return date.getTime(); +}; + +const createClient = (): { serviceClient: BlobServiceClient; credential: StorageSharedKeyCredential } => { + const accountName = env.AZURE_ACCOUNT_NAME; + const accountKey = env.AZURE_ACCOUNT_KEY; + + if (!accountName) { + throw new Error('AZURE_ACCOUNT_NAME is required'); + } + + if (!accountKey) { + throw new Error('AZURE_ACCOUNT_KEY is required'); + } + + console.log('[azure] creating client'); + + const credential = new 
StorageSharedKeyCredential(accountName, accountKey); + const serviceClient = new BlobServiceClient(`https://${accountName}.blob.core.windows.net`, credential); + + return { serviceClient, credential }; +}; + +export class AzureBlob implements Storage { + private static instance: AzureBlob; + private readonly container: ContainerClient; + private readonly credential: StorageSharedKeyCredential; + private readonly containerName: string; + private readonly batchSize: number; + + private constructor() { + const { serviceClient, credential } = createClient(); + this.containerName = env.AZURE_CONTAINER; + this.batchSize = env.AZURE_BATCH_SIZE; + this.container = serviceClient.getContainerClient(this.containerName); + this.credential = credential; + } + + public static getInstance() { + if (!AzureBlob.instance) { + AzureBlob.instance = new AzureBlob(); + } + + return AzureBlob.instance; + } + + private async ensureContainerExists() { + await this.container.createIfNotExists(); + } + + private async write(dir: string, files: { name: string; content: Readable | Buffer | string; size?: number }[]) { + await this.ensureContainerExists(); + + await Promise.all( + files.map(async (file) => { + const blobPath = posixPath.join(dir, file.name); + + console.log(`[azure] writing ${blobPath}`); + + const blockBlobClient = this.container.getBlockBlobClient(blobPath); + + if (typeof file.content === 'string') { + const buffer = Buffer.from(file.content); + + await blockBlobClient.upload(buffer, buffer.length); + } else if (Buffer.isBuffer(file.content)) { + await blockBlobClient.upload(file.content, file.content.length); + } else { + await blockBlobClient.uploadStream(file.content); + } + }), + ); + } + + private async read(targetPath: string, contentType?: string | null) { + await this.ensureContainerExists(); + console.log(`[azure] read ${targetPath}`); + + const remotePath = targetPath.includes(REPORTS_BUCKET) ? 
targetPath : `${REPORTS_BUCKET}/${targetPath}`; + + console.log(`[azure] reading from remote path: ${remotePath}`); + + const blobClient = this.container.getBlobClient(remotePath); + const { result: downloadResponse, error } = await withError(blobClient.download()); + + if (error ?? !downloadResponse?.readableStreamBody) { + return { result: null, error }; + } + + const readStream = new Promise((resolve, reject) => { + const chunks: Uint8Array[] = []; + const stream = downloadResponse.readableStreamBody!; + + stream.on('data', (chunk: Uint8Array) => { + chunks.push(chunk); + }); + + stream.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + + stream.on('error', (err: Error) => { + console.error(`[azure] failed to read stream: ${err.message}`); + reject(err); + }); + }); + + const { result, error: readError } = await withError(readStream); + + return { + result: contentType === 'text/html' ? result?.toString('utf-8') : result, + error: error ?? readError ?? null, + }; + } + + async clear(...paths: string[]) { + console.log(`[azure] clearing ${paths}`); + await processBatch(this, paths, this.batchSize, async (blobPath) => { + await this.container.getBlobClient(blobPath).deleteIfExists(); + }); + } + + async getFolderSize(folderPath: string): Promise<{ size: number; resultCount: number; indexCount: number }> { + let resultCount = 0; + let indexCount = 0; + let totalSize = 0; + + for await (const blob of this.container.listBlobsFlat({ prefix: folderPath })) { + if (blob.name?.endsWith('.zip')) { + resultCount += 1; + } + + if (blob.name?.endsWith('index.html') && !blob.name.includes('/trace/index.html')) { + indexCount += 1; + } + + totalSize += blob.properties.contentLength ?? 
0; + } + + return { size: totalSize, resultCount, indexCount }; + } + + async getServerDataInfo(): Promise<ServerDataInfo> { + await this.ensureContainerExists(); + console.log('[azure] getting server data'); + + const [results, reports] = await Promise.all([ + this.getFolderSize(RESULTS_BUCKET), + this.getFolderSize(REPORTS_BUCKET), + ]); + + const dataSize = results.size + reports.size; + + return { + dataFolderSizeinMB: bytesToString(dataSize), + numOfResults: results.resultCount, + resultsFolderSizeinMB: bytesToString(results.size), + numOfReports: reports.indexCount, + reportsFolderSizeinMB: bytesToString(reports.size), + }; + } + + async readFile(targetPath: string, contentType: string | null): Promise<Buffer | string> { + console.log(`[azure] reading ${targetPath} | ${contentType}`); + const { result, error } = await this.read(targetPath, contentType); + + if (error) { + console.error(`[azure] failed to read file ${targetPath}: ${error.message}`); + throw new Error(`[azure] failed to read file: ${error.message}`); + } + + return result!; + } + + async readResults(input?: ReadResultsInput): Promise<ReadResultsOutput> { + await this.ensureContainerExists(); + console.log('[azure] reading results'); + + const jsonFiles: { name: string; lastModified?: Date }[] = []; + const resultSizes = new Map<string, number>(); + + for await (const blob of this.container.listBlobsFlat({ prefix: RESULTS_BUCKET })) { + if (!blob.name) continue; + + if (blob.name.endsWith('.zip')) { + const resultID = path.basename(blob.name, '.zip'); + + resultSizes.set(resultID, blob.properties.contentLength ?? 
0); + } + + if (!blob.name.endsWith('.json')) continue; + + jsonFiles.push({ name: blob.name, lastModified: blob.properties.lastModified }); + } + + console.log(`[azure] found ${jsonFiles.length} json files`); + + if (!jsonFiles.length) { + return { results: [], total: 0 }; + } + + jsonFiles.sort((a, b) => getTimestamp(b.lastModified) - getTimestamp(a.lastModified)); + + const noFilters = !input?.project && !input?.pagination; + const resultFiles = noFilters ? handlePagination(jsonFiles, input?.pagination) : jsonFiles; + + const results = await processBatch<{ name: string }, Result>(this, resultFiles, this.batchSize, async (file) => { + console.log(`[azure.batch] reading result: ${file.name}`); + const blobClient = this.container.getBlobClient(file.name); + const downloadResponse = await blobClient.download(); + + const chunks: Uint8Array[] = []; + + for await (const chunk of downloadResponse.readableStreamBody ?? []) { + chunks.push(chunk as Uint8Array); + } + + return JSON.parse(Buffer.concat(chunks).toString()); + }); + + const notMetadataKeys = ['resultID', 'title', 'createdAt', 'size', 'sizeBytes', 'project']; + + let filteredResults = results.filter((file) => (input?.project ? 
file.project === input.project : file)); + + if (input?.tags && input.tags.length > 0) { + filteredResults = filteredResults.filter((result) => { + const resultTags = Object.entries(result) + .filter(([key]) => !notMetadataKeys.includes(key)) + .map(([key, value]) => `${key}: ${value}`); + + return input.tags!.some((selectedTag) => resultTags.includes(selectedTag)); + }); + } + + if (input?.search?.trim()) { + const searchTerm = input.search.toLowerCase().trim(); + + filteredResults = filteredResults.filter((result) => { + const searchableFields = [ + result.title, + result.resultID, + result.project, + ...Object.entries(result) + .filter(([key]) => !notMetadataKeys.includes(key)) + .map(([key, value]) => `${key}: ${value}`), + ].filter(Boolean); + + return searchableFields.some((field) => field?.toLowerCase().includes(searchTerm)); + }); + } + + if (input?.dateFrom || input?.dateTo) { + const fromTimestamp = input.dateFrom ? getTimestamp(input.dateFrom) : 0; + const toTimestamp = input.dateTo ? getTimestamp(input.dateTo) : Number.MAX_SAFE_INTEGER; + + filteredResults = filteredResults.filter((result) => { + const resultTimestamp = getTimestamp(result.createdAt); + + return resultTimestamp >= fromTimestamp && resultTimestamp <= toTimestamp; + }); + } + + const currentFiles = noFilters ? results : handlePagination(filteredResults, input?.pagination); + + return { + results: currentFiles.map((result) => { + const sizeBytes = resultSizes.get(result.resultID) ?? 0; + + return { + ...result, + sizeBytes, + size: result.size ?? bytesToString(sizeBytes), + }; + }) as Result[], + total: noFilters ? 
jsonFiles.length : filteredResults.length, + }; + } + + async readReports(input?: ReadReportsInput): Promise { + await this.ensureContainerExists(); + console.log('[azure] reading reports from external storage'); + + const reports: Report[] = []; + const reportSizes = new Map(); + + for await (const blob of this.container.listBlobsFlat({ prefix: REPORTS_BUCKET })) { + if (!blob.name) continue; + + const reportID = getFileReportID(blob.name); + const newSize = (reportSizes.get(reportID) ?? 0) + (blob.properties.contentLength ?? 0); + + reportSizes.set(reportID, newSize); + + if (!blob.name.endsWith('index.html') || blob.name.includes('trace')) continue; + + const dir = posixPath.dirname(blob.name); + const id = posixPath.basename(dir); + const parentDir = posixPath.basename(posixPath.dirname(dir)); + + const projectName = parentDir === REPORTS_PATH ? '' : parentDir; + + const noFilters = !input?.project && !input?.ids; + const shouldFilterByProject = input?.project && projectName === input.project; + const shouldFilterByID = input?.ids?.includes(id); + + const report = { + reportID: id, + project: projectName, + createdAt: blob.properties.lastModified, + reportUrl: `${serveReportRoute}/${projectName ? 
encodeURIComponent(projectName) : ''}/${id}/index.html`, + size: '', + sizeBytes: 0, + }; + + if (noFilters || shouldFilterByProject || shouldFilterByID) { + reports.push(report); + } + } + + reports.sort((a, b) => getTimestamp(b.createdAt) - getTimestamp(a.createdAt)); + + const currentReports = handlePagination(reports, input?.pagination); + const withMetadata = await this.getReportsMetadata(currentReports as ReportHistory[]); + + let filteredReports = withMetadata; + + if (input?.search && input.search.trim()) { + const searchTerm = input.search.toLowerCase().trim(); + + filteredReports = filteredReports.filter((report) => { + const searchableFields = [ + report.title, + report.reportID, + report.project, + ...Object.entries(report) + .filter( + ([key]) => + !['reportID', 'title', 'createdAt', 'size', 'sizeBytes', 'project', 'reportUrl', 'stats'].includes( + key, + ), + ) + .map(([key, value]) => `${key}: ${value}`), + ].filter(Boolean); + + return searchableFields.some((field) => field?.toLowerCase().includes(searchTerm)); + }); + } + + if (input?.dateFrom || input?.dateTo) { + const fromTimestamp = input.dateFrom ? getTimestamp(input.dateFrom) : 0; + const toTimestamp = input.dateTo ? getTimestamp(input.dateTo) : Number.MAX_SAFE_INTEGER; + + filteredReports = filteredReports.filter((report) => { + const reportTimestamp = getTimestamp(report.createdAt); + + return reportTimestamp >= fromTimestamp && reportTimestamp <= toTimestamp; + }); + } + + const finalReports = handlePagination(filteredReports, input?.pagination); + + return { + reports: finalReports.map((report) => { + const sizeBytes = reportSizes.get(report.reportID) ?? 
0; + + return { + ...report, + sizeBytes, + size: bytesToString(sizeBytes), + }; + }), + total: filteredReports.length, + }; + } + + async getReportsMetadata(reports: ReportHistory[]): Promise { + return await processBatch(this, reports, this.batchSize, async (report) => { + console.log(`[azure.batch] reading report ${report.reportID} metadata`); + + const { result: metadata, error: metadataError } = await withError( + this.readOrParseReportMetadata(report.reportID, report.project), + ); + + if (metadataError) { + console.error(`[azure] failed to read or create metadata for ${report.reportID}: ${metadataError.message}`); + + return report; + } + + if (!metadata) { + return report; + } + + return Object.assign(metadata, report); + }); + } + + async readOrParseReportMetadata(id: string, projectName: string): Promise { + const { result: metadataContent, error: metadataError } = await withError( + this.readFile(posixPath.join(REPORTS_BUCKET, projectName, id, REPORT_METADATA_FILE), 'utf-8'), + ); + + if (metadataError) console.error(`[azure] failed to read metadata for ${id}: ${metadataError.message}`); + + const metadata = metadataContent && !metadataError ? 
JSON.parse(metadataContent.toString()) : {}; + + if (isReportHistory(metadata)) { + console.log(`metadata found for report ${id}`); + + return metadata; + } + + console.log(`metadata file not found for ${id}, creating new metadata`); + try { + const { result: htmlContent, error: htmlError } = await withError( + this.readFile(posixPath.join(REPORTS_BUCKET, projectName, id, 'index.html'), 'utf-8'), + ); + + if (htmlError) console.error(`[azure] failed to read index.html for ${id}: ${htmlError.message}`); + + const created = await this.parseReportMetadata( + id, + path.join(REPORTS_FOLDER, projectName, id), + { + project: projectName, + reportID: id, + }, + htmlContent?.toString(), + ); + + console.log(`metadata object created for ${id}: ${JSON.stringify(created)}`); + + await this.saveReportMetadata(id, path.join(REPORTS_FOLDER, projectName, id), created); + + Object.assign(metadata, created); + } catch (e) { + console.error(`failed to create metadata for ${id}: ${(e as Error).message}`); + } + + return metadata; + } + + async deleteResults(resultIDs: string[]): Promise<void> { + const objects = resultIDs.flatMap((id) => [`${RESULTS_BUCKET}/${id}.json`, `${RESULTS_BUCKET}/${id}.zip`]); + + await withError(this.clear(...objects)); + } + + private async getReportObjects(reportIDs: string[]): Promise<string[]> { + const files: string[] = []; + const reportIDSet = new Set(reportIDs); + + for await (const blob of this.container.listBlobsFlat({ prefix: REPORTS_BUCKET })) { + if (!blob.name) continue; + + const reportID = posixPath.basename(posixPath.dirname(blob.name)); + + if (reportIDSet.has(reportID)) { + files.push(blob.name); + } + } + + return files; + } + + async deleteReports(reportIDs: string[]): Promise<void> { + const objects = await this.getReportObjects(reportIDs); + + await withError(this.clear(...objects)); + } + + async generatePresignedUploadUrl(fileName: string) { + await this.ensureContainerExists(); + const blobPath = posixPath.join(RESULTS_BUCKET, fileName); + const 
blockBlobClient = this.container.getBlockBlobClient(blobPath); + + const expiresOn = new Date(); + + expiresOn.setSeconds(expiresOn.getSeconds() + 30 * 60); // 30 minutes + + return blockBlobClient.generateSasUrl({ + expiresOn, + permissions: BlobSASPermissions.parse('w'), + }); + } + + async saveResult(filename: string, stream: PassThrough) { + return await this.write(RESULTS_BUCKET, [ + { + name: filename, + content: stream, + }, + ]); + } + + async saveResultDetails(resultID: string, resultDetails: ResultDetails, size: number): Promise { + const metaData = { + resultID, + createdAt: new Date().toISOString(), + project: resultDetails?.project ?? '', + ...resultDetails, + size: bytesToString(size), + sizeBytes: size, + }; + + await this.write(RESULTS_BUCKET, [ + { + name: `${resultID}.json`, + content: JSON.stringify(metaData), + }, + ]); + + return metaData as Result; + } + + private async uploadReport(reportId: string, reportPath: string, remotePath: string) { + console.log(`[azure] upload report: ${reportPath}`); + + const files = await fs.readdir(reportPath, { recursive: true, withFileTypes: true }); + + await processBatch(this, files, this.batchSize, async (file) => { + if (!file.isFile()) { + return; + } + + console.log(`[azure] uploading file: ${JSON.stringify(file)}`); + + const nestedPath = (file as any).path.split(reportId).pop(); + const azurePath = posixPath.join(remotePath, nestedPath ?? 
'', file.name); + + console.log(`[azure] uploading to ${azurePath}`); + + const { error } = await withError(this.uploadFileWithRetry(azurePath, path.join((file as any).path, file.name))); + + if (error) { + console.error(`[azure] failed to upload report: ${error.message}`); + throw new Error(`[azure] failed to upload report: ${error.message}`); + } + }); + } + + private async uploadFileWithRetry(remotePath: string, filePath: string, attempt = 1): Promise { + if (attempt > 3) { + throw new Error(`[azure] failed to upload file after ${attempt} attempts: ${filePath}`); + } + + const blockBlobClient = this.container.getBlockBlobClient(remotePath); + const { error } = await withError(blockBlobClient.uploadFile(filePath)); + + if (error) { + console.error(`[azure] failed to upload file: ${error.message}`); + console.log(`[azure] will retry in 3s...`); + await new Promise((resolve) => setTimeout(resolve, 3000)); + + return await this.uploadFileWithRetry(remotePath, filePath, attempt + 1); + } + } + + private async clearTempFolders(id?: string) { + const withReportPathMaybe = id ? ` for report ${id}` : ''; + + console.log(`[azure] clear temp folders${withReportPathMaybe}`); + + await Promise.all([ + withError(fs.rm(path.join(TMP_FOLDER, id ?? 
''), { recursive: true, force: true })), + withError(fs.rm(REPORTS_FOLDER, { recursive: true, force: true })), + ]); + } + + async generateReport(resultsIds: string[], metadata?: ReportMetadata): Promise<UUID> { + console.log(`[azure] generate report from results: ${JSON.stringify(resultsIds)}`); + console.log(`[azure] create temp folders`); + + const { error: mkdirReportsError } = await withError(fs.mkdir(REPORTS_FOLDER, { recursive: true })); + + if (mkdirReportsError) { + console.error(`[azure] failed to create reports folder: ${mkdirReportsError.message}`); + } + + const reportId = randomUUID(); + const tempFolder = path.join(TMP_FOLDER, reportId); + + const { error: mkdirTempError } = await withError(fs.mkdir(tempFolder, { recursive: true })); + + if (mkdirTempError) { + console.error(`[azure] failed to create temporary folder: ${mkdirTempError.message}`); + } + + console.log(`[azure] start processing...`); + + const resultsIdSet = new Set(resultsIds); + const blobsToDownload: { blobName: string; localFilePath: string }[] = []; + + for await (const blob of this.container.listBlobsFlat({ prefix: RESULTS_BUCKET })) { + const fileName = path.basename(blob.name); + const id = fileName.replace(path.extname(fileName), ''); + + if (resultsIdSet.has(id)) { + blobsToDownload.push({ blobName: blob.name, localFilePath: path.join(tempFolder, fileName) }); + } + } + + await processBatch(this, blobsToDownload, this.batchSize, async ({ blobName, localFilePath }) => { + console.log(`[azure] downloading ${blobName}...`); + const blobClient = this.container.getBlobClient(blobName); + const { error } = await withError(blobClient.downloadToFile(localFilePath)); + + if (error) { + console.error(`[azure] failed to download ${blobName}: ${error.message}`); + throw new Error(`failed to download ${blobName}: ${error.message}`); + } + + console.log(`[azure] Downloaded: ${blobName} to ${localFilePath}`); + }); + + const { reportPath } = await generatePlaywrightReport(reportId, metadata!); + 
console.log(`[azure] report generated: ${reportId} | ${reportPath}`); + + const { result: info, error: parseReportMetadataError } = await withError( + this.parseReportMetadata(reportId, reportPath, metadata), + ); + + if (parseReportMetadataError) console.error(parseReportMetadataError.message); + + const remotePath = posixPath.join(REPORTS_BUCKET, metadata?.project ?? '', reportId); + + const { error: uploadError } = await withError(this.uploadReport(reportId, reportPath, remotePath)); + + if (uploadError) { + console.error(`[azure] failed to upload report: ${uploadError.message}`); + } else { + const { error } = await withError(this.saveReportMetadata(reportId, reportPath, info ?? metadata ?? {})); + + if (error) console.error(`[azure] failed to save report metadata: ${error.message}`); + } + + await this.clearTempFolders(reportId); + + return reportId; + } + + private async saveReportMetadata(reportId: string, reportPath: string, metadata: ReportMetadata) { + console.log(`[azure] report uploaded: ${reportId}, uploading metadata to ${reportPath}`); + const { error: metadataError } = await withError( + this.write(posixPath.join(REPORTS_BUCKET, metadata.project ?? '', reportId), [ + { + name: REPORT_METADATA_FILE, + content: JSON.stringify(metadata), + }, + ]), + ); + + if (metadataError) console.error(`[azure] failed to upload report metadata: ${metadataError.message}`); + } + + private async parseReportMetadata( + reportId: string, + reportPath: string, + metadata?: Record, + htmlContent?: string, + ): Promise { + console.log(`[azure] creating report metadata for ${reportId} and ${reportPath}`); + const html = htmlContent ?? 
(await fs.readFile(path.join(reportPath, 'index.html'), 'utf-8')); + + const info = await parse(html as string); + + const content = Object.assign(info, metadata, { + reportId, + createdAt: new Date().toISOString(), + }); + + return content; + } + + async readConfigFile(): Promise<{ result?: SiteWhiteLabelConfig; error: Error | null }> { + await this.ensureContainerExists(); + console.log(`[azure] checking config file`); + + const blobClient = this.container.getBlobClient(APP_CONFIG_S3); + const { result: downloadResponse, error } = await withError(blobClient.download()); + + if (error) { + console.error(`[azure] failed to read config file: ${error.message}`); + + return { error }; + } + + const configChunks: Uint8Array[] = []; + + for await (const chunk of downloadResponse?.readableStreamBody ?? []) { + configChunks.push(chunk as Uint8Array); + } + + const existingConfig = Buffer.concat(configChunks).toString(); + + try { + const parsed = JSON.parse(existingConfig); + const isValid = isConfigValid(parsed); + + if (!isValid) { + return { error: new Error('invalid config') }; + } + + for (const image of [ + { path: parsed.faviconPath, default: defaultConfig.faviconPath }, + { path: parsed.logoPath, default: defaultConfig.logoPath }, + ]) { + if (!image) continue; + if (image.path === image.default) continue; + + const localPath = path.join(DATA_FOLDER, image.path); + const { error: accessError } = await withError(fs.access(localPath)); + + if (accessError) { + const remotePath = posixPath.join(DATA_PATH, image.path); + + console.log(`[azure] downloading config image: ${remotePath} to ${localPath}`); + await this.container.getBlobClient(remotePath).downloadToFile(localPath); + } + } + + return { result: parsed, error: null }; + } catch (e) { + return { error: new Error(`failed to parse config: ${e instanceof Error ? 
e.message : e}`) }; + } + } + + async saveConfigFile(config: Partial<SiteWhiteLabelConfig>) { + console.log(`[azure] writing config file`); + + const { result: existingConfig, error: readExistingConfigError } = await this.readConfigFile(); + + if (readExistingConfigError) { + console.error(`[azure] failed to read existing config file: ${readExistingConfigError.message}`); + } + + const { error: clearExistingConfigError } = await withError(this.clear(APP_CONFIG_S3)); + + if (clearExistingConfigError) { + console.error(`[azure] failed to clear existing config file: ${clearExistingConfigError.message}`); + } + + const uploadConfig = { ...(existingConfig ?? {}), ...config } as SiteWhiteLabelConfig; + + const isDefaultImage = (key: keyof SiteWhiteLabelConfig) => config[key] && config[key] === defaultConfig[key]; + + const shouldBeUploaded = async (key: keyof SiteWhiteLabelConfig) => { + if (!config[key]) return false; + if (isDefaultImage(key)) return false; + + const blobClient = this.container.getBlobClient(uploadConfig[key] as string); + const { result: exists } = await withError(blobClient.exists()); + + return !exists; + }; + + if (await shouldBeUploaded('logoPath')) { + await this.uploadConfigImage(uploadConfig.logoPath); + } + + if (await shouldBeUploaded('faviconPath')) { + await this.uploadConfigImage(uploadConfig.faviconPath); + } + + const { error } = await withError( + this.write(DATA_PATH, [ + { + name: 'config.json', + content: JSON.stringify(uploadConfig, null, 2), + }, + ]), + ); + + if (error) console.error(`[azure] failed to write config file: ${error.message}`); + + return { result: uploadConfig, error }; + } + + private async uploadConfigImage(imagePath: string): Promise<Error | null> { + console.log(`[azure] uploading config image: ${imagePath}`); + + const localPath = path.join(DATA_FOLDER, imagePath); + const remotePath = posixPath.join(DATA_PATH, imagePath); + + const { error } = await withError(this.uploadFileWithRetry(remotePath, localPath)); + + if (error) { + 
console.error(`[azure] failed to upload config image: ${error.message}`); + + return error; + } + + return null; + } +} diff --git a/app/lib/storage/index.ts b/app/lib/storage/index.ts index cf3abbe7..2826b3da 100644 --- a/app/lib/storage/index.ts +++ b/app/lib/storage/index.ts @@ -1,7 +1,9 @@ export * from './types'; import { FS } from './fs'; import { S3 } from './s3'; +import { AzureBlob } from './azure'; import { env } from '@/app/config/env'; -export const storage = env.DATA_STORAGE === 's3' ? S3.getInstance() : FS; +export const storage = + env.DATA_STORAGE === 's3' ? S3.getInstance() : env.DATA_STORAGE === 'azure' ? AzureBlob.getInstance() : FS; diff --git a/package-lock.json b/package-lock.json index f51b0b05..1973511a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8,6 +8,7 @@ "name": "playwright-reports-server", "version": "5.8.0", "dependencies": { + "@azure/storage-blob": "^12.31.0", "@heroui/link": "2.2.12", "@heroui/navbar": "2.2.13", "@heroui/react": "2.7.4", @@ -117,6 +118,240 @@ } } }, + "node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-auth": { + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.10.1.tgz", + "integrity": "sha512-ykRMW8PjVAn+RS6ww5cmK9U2CyH9p4Q88YJwvUslfuMmN98w/2rdGRLPqJYObapBCdzBVeDgYWdJnFPFb7qzpg==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.1.2", + "@azure/core-util": "^1.13.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@azure/core-client": { + "version": "1.10.1", + "resolved": 
"https://registry.npmjs.org/@azure/core-client/-/core-client-1.10.1.tgz", + "integrity": "sha512-Nh5PhEOeY6PrnxNPsEHRr9eimxLwgLlpmguQaHKBinFYA/RU9+kOYVOQqOrTsCL+KSxrLLl1gD8Dk5BFW/7l/w==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.1.2", + "@azure/core-auth": "^1.10.0", + "@azure/core-rest-pipeline": "^1.22.0", + "@azure/core-tracing": "^1.3.0", + "@azure/core-util": "^1.13.0", + "@azure/logger": "^1.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@azure/core-http-compat": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/@azure/core-http-compat/-/core-http-compat-2.3.2.tgz", + "integrity": "sha512-Tf6ltdKzOJEgxZeWLCjMxrxbodB/ZeCbzzA1A2qHbhzAjzjHoBVSUeSl/baT/oHAxhc4qdqVaDKnc2+iE932gw==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.1.2" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "@azure/core-client": "^1.10.0", + "@azure/core-rest-pipeline": "^1.22.0" + } + }, + "node_modules/@azure/core-lro": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.7.2.tgz", + "integrity": "sha512-0YIpccoX8m/k00O7mDDMdJpbr6mf1yWo2dfmxt5A8XVZVVMz2SSKaEbMCeJRvgQ0IaSlqhjT47p4hVIRRy90xw==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.2.0", + "@azure/logger": "^1.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-paging": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.6.2.tgz", + "integrity": "sha512-YKWi9YuCU04B55h25cnOYZHxXYtEvQEbKST5vqRga7hWY9ydd3FZHdeQF8pyh+acWZvppw13M/LMGx0LABUVMA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-rest-pipeline": { + "version": "1.23.0", + "resolved": 
"https://registry.npmjs.org/@azure/core-rest-pipeline/-/core-rest-pipeline-1.23.0.tgz", + "integrity": "sha512-Evs1INHo+jUjwHi1T6SG6Ua/LHOQBCLuKEEE6efIpt4ZOoNonaT1kP32GoOcdNDbfqsD2445CPri3MubBy5DEQ==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.1.2", + "@azure/core-auth": "^1.10.0", + "@azure/core-tracing": "^1.3.0", + "@azure/core-util": "^1.13.0", + "@azure/logger": "^1.3.0", + "@typespec/ts-http-runtime": "^0.3.4", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@azure/core-tracing": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.3.1.tgz", + "integrity": "sha512-9MWKevR7Hz8kNzzPLfX4EAtGM2b8mr50HPDBvio96bURP/9C+HjdH3sBlLSNNrvRAr5/k/svoH457gB5IKpmwQ==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@azure/core-util": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.13.1.tgz", + "integrity": "sha512-XPArKLzsvl0Hf0CaGyKHUyVgF7oDnhKoP85Xv6M4StF/1AhfORhZudHtOyf2s+FcbuQ9dPRAjB8J2KvRRMUK2A==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.1.2", + "@typespec/ts-http-runtime": "^0.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@azure/core-xml": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@azure/core-xml/-/core-xml-1.5.0.tgz", + "integrity": "sha512-D/sdlJBMJfx7gqoj66PKVmhDDaU6TKA49ptcolxdas29X7AfvLTmfAGLjAcIMBK7UZ2o4lygHIqVckOlQU3xWw==", + "license": "MIT", + "dependencies": { + "fast-xml-parser": "^5.0.7", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@azure/core-xml/node_modules/fast-xml-parser": { + "version": "5.5.9", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.5.9.tgz", + "integrity": 
"sha512-jldvxr1MC6rtiZKgrFnDSvT8xuH+eJqxqOBThUVjYrxssYTo1avZLGql5l0a0BAERR01CadYzZ83kVEkbyDg+g==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT", + "dependencies": { + "fast-xml-builder": "^1.1.4", + "path-expression-matcher": "^1.2.0", + "strnum": "^2.2.2" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/@azure/core-xml/node_modules/strnum": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-2.2.2.tgz", + "integrity": "sha512-DnR90I+jtXNSTXWdwrEy9FakW7UX+qUZg28gj5fk2vxxl7uS/3bpI4fjFYVmdK9etptYBPNkpahuQnEwhwECqA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT" + }, + "node_modules/@azure/logger": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.3.0.tgz", + "integrity": "sha512-fCqPIfOcLE+CGqGPd66c8bZpwAji98tZ4JI9i/mlTNTlsIWslCfpg48s/ypyLxZTump5sypjrKn2/kY7q8oAbA==", + "license": "MIT", + "dependencies": { + "@typespec/ts-http-runtime": "^0.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@azure/storage-blob": { + "version": "12.31.0", + "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.31.0.tgz", + "integrity": "sha512-DBgNv10aCSxopt92DkTDD0o9xScXeBqPKGmR50FPZQaEcH4JLQ+GEOGEDv19V5BMkB7kxr+m4h6il/cCDPvmHg==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.1.2", + "@azure/core-auth": "^1.9.0", + "@azure/core-client": "^1.9.3", + "@azure/core-http-compat": "^2.2.0", + "@azure/core-lro": "^2.2.0", + "@azure/core-paging": "^1.6.2", + "@azure/core-rest-pipeline": "^1.19.1", + "@azure/core-tracing": "^1.2.0", + "@azure/core-util": "^1.11.0", + "@azure/core-xml": "^1.4.5", + "@azure/logger": "^1.1.4", + "@azure/storage-common": "^12.3.0", + "events": "^3.0.0", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=20.0.0" + } + 
}, + "node_modules/@azure/storage-common": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@azure/storage-common/-/storage-common-12.3.0.tgz", + "integrity": "sha512-/OFHhy86aG5Pe8dP5tsp+BuJ25JOAl9yaMU3WZbkeoiFMHFtJ7tu5ili7qEdBXNW9G5lDB19trwyI6V49F/8iQ==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.1.2", + "@azure/core-auth": "^1.9.0", + "@azure/core-http-compat": "^2.2.0", + "@azure/core-rest-pipeline": "^1.19.1", + "@azure/core-tracing": "^1.2.0", + "@azure/core-util": "^1.11.0", + "@azure/logger": "^1.1.4", + "events": "^3.3.0", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=20.0.0" + } + }, "node_modules/@babel/code-frame": { "version": "7.27.1", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", @@ -7276,6 +7511,20 @@ "url": "https://opencollective.com/typescript-eslint" } }, + "node_modules/@typespec/ts-http-runtime": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/@typespec/ts-http-runtime/-/ts-http-runtime-0.3.4.tgz", + "integrity": "sha512-CI0NhTrz4EBaa0U+HaaUZrJhPoso8sG7ZFya8uQoBA57fjzrjRSv87ekCjLZOFExN+gXE/z0xuN2QfH4H2HrLQ==", + "license": "MIT", + "dependencies": { + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, "node_modules/@ungap/structured-clone": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", @@ -7595,6 +7844,15 @@ "node": ">=0.4.0" } }, + "node_modules/agent-base": { + "version": "7.1.4", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", + "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==", + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, "node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", @@ -8862,7 +9120,6 @@ "version": "4.4.3", 
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", - "dev": true, "license": "MIT", "dependencies": { "ms": "^2.1.3" @@ -10185,6 +10442,15 @@ "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", "license": "MIT" }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "license": "MIT", + "engines": { + "node": ">=0.8.x" + } + }, "node_modules/execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -10307,6 +10573,21 @@ "dev": true, "license": "MIT" }, + "node_modules/fast-xml-builder": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/fast-xml-builder/-/fast-xml-builder-1.1.4.tgz", + "integrity": "sha512-f2jhpN4Eccy0/Uz9csxh3Nu6q4ErKxf0XIsasomfOihuSUa3/xw6w8dnOtCDgEItQFJG8KyXPzQXzcODDrrbOg==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT", + "dependencies": { + "path-expression-matcher": "^1.1.3" + } + }, "node_modules/fast-xml-parser": { "version": "4.5.3", "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.3.tgz", @@ -10901,6 +11182,32 @@ "dev": true, "license": "MIT" }, + "node_modules/http-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": 
"https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, "node_modules/human-signals": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", @@ -13367,6 +13674,21 @@ "node": ">=8" } }, + "node_modules/path-expression-matcher": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/path-expression-matcher/-/path-expression-matcher-1.2.0.tgz", + "integrity": "sha512-DwmPWeFn+tq7TiyJ2CxezCAirXjFxvaiD03npak3cRjlP9+OjTmSy1EpIrEbh+l6JgUundniloMLDQ/6VTdhLQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, "node_modules/path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", diff --git a/package.json b/package.json index 951d9ff5..5858a0be 100644 --- a/package.json +++ b/package.json @@ -14,6 +14,7 @@ "test:ui": "npx playwright test --project=ui" }, "dependencies": { + "@azure/storage-blob": "^12.31.0", "@heroui/link": "2.2.12", "@heroui/navbar": "2.2.13", "@heroui/react": "2.7.4",