AvanzaCast/e2e/publish_artifact.js
Cesar Mendivil 8b458a3ddf feat: add initial LiveKit Meet integration with utility scripts, configs, and core components
- Add Next.js app structure with base configs, linting, and formatting
- Implement LiveKit Meet page, types, and utility functions
- Add Docker, Compose, and deployment scripts for backend and token server
- Provide E2E and smoke test scaffolding with Puppeteer and Playwright helpers
- Include CSS modules and global styles for UI
- Add postMessage and studio integration utilities
- Update package.json with dependencies and scripts for development and testing
2025-11-20 12:50:38 -07:00

71 lines
2.5 KiB
JavaScript

#!/usr/bin/env node
// e2e/publish_artifact.js
// Packages a result JSON and its screenshot into e2e/artifacts/<timestamp>-<run>.tgz
// If AWS_S3_BUCKET is set and `aws` CLI is available, uploads it and prints the S3 URL.
const { execFileSync, execSync } = require('child_process');
const fs = require('fs');
const path = require('path');
/**
 * Print CLI usage to stderr and terminate the process with exit code 2.
 * Invoked when the required result-JSON argument is missing or invalid.
 */
function usage() {
  const message = 'Usage: node publish_artifact.js <resultJsonPath> [label]';
  process.stderr.write(`${message}\n`);
  process.exit(2);
}
// CLI arguments: argv[2] is the result JSON path (required),
// argv[3] is an optional label folded into the artifact name.
const [, , resultJsonPath, labelArg] = process.argv;
const label = labelArg || 'run';
if (!resultJsonPath || !fs.existsSync(resultJsonPath)) usage();

// Ensure e2e/artifacts/ exists; recursive mkdir is a no-op when it already does.
const outDir = path.resolve(__dirname, 'artifacts');
fs.mkdirSync(outDir, { recursive: true });
try {
  const results = JSON.parse(fs.readFileSync(resultJsonPath, 'utf8'));

  // Timestamped archive name, e.g. 2025-11-20T12-50-38-123Z-run.tgz
  // (colons/dots replaced so the name is filesystem- and S3-key-safe).
  const ts = new Date().toISOString().replace(/[:.]/g, '-');
  const tarName = `${ts}-${label}.tgz`;
  const tarPath = path.join(outDir, tarName);

  // Collect files: the result JSON itself plus any screenshot it references.
  // The typeof guard keeps a malformed (non-string) screenshot field from
  // making fs.existsSync throw.
  const files = [resultJsonPath];
  if (results && typeof results.screenshot === 'string' && fs.existsSync(results.screenshot)) {
    files.push(results.screenshot);
  }

  // Create the tar.gz with paths relative to cwd. execFileSync with an
  // argument array spawns tar WITHOUT a shell, so file names containing
  // spaces, quotes, or shell metacharacters can neither break the command
  // nor inject arbitrary shell code (the previous string-built
  // `tar -czf ${tarPath} '${f}'` was vulnerable to both: tarPath was
  // unquoted and a single quote in a filename escaped the quoting).
  const cwd = process.cwd();
  const relFiles = files.map((f) => path.relative(cwd, f));
  execFileSync('tar', ['-czf', tarPath, ...relFiles], { stdio: 'inherit' });

  // Optional S3 upload when a bucket is configured and the aws CLI exists.
  const bucket = process.env.AWS_S3_BUCKET || process.env.AWS_BUCKET;
  if (bucket) {
    try {
      // Probe for the aws CLI; throws (and falls through to the local path)
      // when it is not installed.
      execFileSync('aws', ['--version'], { stdio: 'ignore' });
      const s3Key = `e2e-artifacts/${tarName}`;
      // Argument-array form again: bucket/tarPath are passed verbatim, not
      // interpolated into a shell string.
      execFileSync(
        'aws',
        ['s3', 'cp', tarPath, `s3://${bucket}/${s3Key}`, '--acl', 'public-read'],
        { stdio: 'inherit' },
      );
      // Derive the public URL (best-effort); region-qualified when configured.
      const region = process.env.AWS_REGION || process.env.AWS_DEFAULT_REGION || '';
      const url = region
        ? `https://${bucket}.s3.${region}.amazonaws.com/${s3Key}`
        : `https://${bucket}.s3.amazonaws.com/${s3Key}`;
      console.log(url);
      process.exit(0);
    } catch (err) {
      // Deliberate best-effort: a failed upload still leaves a usable
      // local artifact, so report it instead of failing the run.
      console.warn('AWS upload failed or aws CLI missing, returning local artifact path');
      console.log(tarPath);
      process.exit(0);
    }
  }

  // No upload requested; print the local artifact path.
  console.log(tarPath);
  process.exit(0);
} catch (err) {
  console.error('publish_artifact failed:', err && err.stack ? err.stack : err);
  process.exit(1);
}