Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
52 changes: 52 additions & 0 deletions .github/workflows/live-smoke.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
# Live production smoke test.
# Runs on every push to main, and can be triggered manually from the Actions tab.
name: live-smoke

on:
  push:
    branches:
      - main
  workflow_dispatch:

jobs:
  smoke:
    runs-on: ubuntu-latest
    # Hard stop so a hung browser/deploy never blocks the queue indefinitely.
    timeout-minutes: 30

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Bun
        uses: oven-sh/setup-bun@v2
        with:
          bun-version: latest

      - name: Install Dependencies
        run: bun install

      - name: Install Vercel CLI
        run: npm install --global vercel@latest

      # Chromium only — the smoke script drives a single browser.
      - name: Install Playwright Chromium
        run: bunx playwright install --with-deps chromium

      # Synthesizes the TTS audio fixtures the smoke conversation replays.
      - name: Generate Smoke Audio Fixtures
        run: bun run smoke:prepare-audio
        env:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

      - name: Run Live Smoke
        run: bun run smoke:live
        env:
          CI: "true"
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }}
          VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }}
          VERCEL_TOKEN: ${{ secrets.VERCEL_TOKEN }}

      # Upload logs/screenshots even when the smoke run fails — that is when
      # the artifacts matter most.
      - name: Upload Smoke Artifacts
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: smoke-artifacts
          path: .artifacts/smoke
          if-no-files-found: ignore
5 changes: 4 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -33,4 +33,7 @@ yarn-error.log*

# bun
bun.lockb
bun.lock

# smoke artifacts
.artifacts
10 changes: 9 additions & 1 deletion biome.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,11 @@
{
	"css": {
		"parser": {
			"tailwindDirectives": true
		}
	},
	"extends": ["@rubriclab/config/biome"],
	"files": {
		"includes": ["**", "!next-env.d.ts"]
	}
}
23 changes: 23 additions & 0 deletions components.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
{
"$schema": "https://ui.shadcn.com/schema.json",
"aliases": {
"components": "@/components",
"hooks": "@/hooks",
"lib": "@/lib",
"ui": "@/components/ui",
"utils": "@/lib/utils"
},
"iconLibrary": "lucide",
"registries": {},
"rsc": true,
"rtl": false,
"style": "new-york",
"tailwind": {
"baseColor": "neutral",
"config": "tailwind.config.ts",
"css": "src/app/styles.css",
"cssVariables": true,
"prefix": ""
},
"tsx": true
}
21 changes: 20 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,20 +1,36 @@
{
"dependencies": {
"@prisma/client": "^6.19.0",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-scroll-area": "^1.2.10",
"@radix-ui/react-select": "^2.2.6",
"@radix-ui/react-separator": "^1.1.8",
"@radix-ui/react-slider": "^1.3.6",
"@radix-ui/react-slot": "^1.2.4",
"@radix-ui/react-switch": "^1.2.6",
"@radix-ui/react-tabs": "^1.1.13",
"@t3-oss/env-nextjs": "^0.13.8",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"dotenv": "^17.2.3",
"framer-motion": "^12.0.0",
"lucide-react": "^0.576.0",
"next": "16.0.10",
"react": "^19.2.0",
"react-dom": "^19.2.0",
"tailwind-merge": "^3.5.0",
"tailwindcss-animate": "^1.0.7"
},
"description": "This project was bootstrapped with create-rubric-app",
"devDependencies": {
"@rubriclab/config": "^0.0.22",
"@tailwindcss/postcss": "^4.2.1",
"@types/node": "^24.10.1",
"@types/react": "^19.2.3",
"@types/react-dom": "^19.2.3",
"playwright": "^1.58.2",
"prisma": "^6.19.0",
"tailwindcss": "^4.2.1",
"typescript": "^5.9.3",
"zod": "^4.1.12"
},
Expand All @@ -32,6 +48,9 @@
"db:studio": "prisma studio",
"dev": "next dev",
"format": "bun x biome check --write .",
"smoke:live": "bun run scripts/smoke/runLiveConversationSmoke.ts --target-url https://lilac.chat",
"smoke:local": "bun run scripts/smoke/runLiveConversationSmoke.ts --target-url http://localhost:3000 --skip-vercel-logs --skip-chat",
"smoke:prepare-audio": "bun run scripts/smoke/generateSmokeAudio.ts",
"start": "next start"
},
"version": "0.0.0"
Expand Down
84 changes: 84 additions & 0 deletions scripts/smoke/captureVercelRuntimeLogs.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
import { spawn } from 'node:child_process'
import { createWriteStream } from 'node:fs'
import { mkdir, readFile } from 'node:fs/promises'
import { join } from 'node:path'

/**
 * Handle for an in-flight runtime-log capture: the paths the child process's
 * stdout/stderr are being written to, plus a `stop` callback that terminates
 * the capture and flushes the writers.
 */
export type RuntimeLogCapture = {
	stderrPath: string
	stdoutPath: string
	stop: () => Promise<void>
}

/**
 * Inputs for starting a capture.
 * `vercelToken` is optional; when present it is injected into the child's
 * environment as VERCEL_TOKEN (see startVercelRuntimeLogCapture).
 */
export type StartRuntimeLogCaptureInput = {
	artifactDirectoryPath: string
	deploymentDomain: string
	vercelToken?: string
}

/**
 * Spawns `vercel logs <domain> --json` and streams its stdout/stderr into
 * files under `<artifactDirectoryPath>/runtime-logs`.
 *
 * Returns the two log file paths and a `stop()` that sends SIGINT to the CLI,
 * waits for it to close (with a 3s fallback), and flushes the file writers.
 *
 * Fixes over the previous version:
 * - The 3-second fallback `setTimeout` is now cleared once the child closes;
 *   previously it was never cancelled, which kept the event loop alive and
 *   delayed process exit by up to 3 seconds after every capture.
 * - If the child has already exited before `stop()` is called, we no longer
 *   wait the full 3 seconds for a 'close' event that will never re-fire.
 */
export async function startVercelRuntimeLogCapture(
	input: StartRuntimeLogCaptureInput
): Promise<RuntimeLogCapture> {
	const runtimeDirectoryPath = join(input.artifactDirectoryPath, 'runtime-logs')
	await mkdir(runtimeDirectoryPath, { recursive: true })

	const stdoutPath = join(runtimeDirectoryPath, 'vercel-runtime.jsonl')
	const stderrPath = join(runtimeDirectoryPath, 'vercel-runtime.stderr.log')
	const stdoutWriter = createWriteStream(stdoutPath, { flags: 'w' })
	const stderrWriter = createWriteStream(stderrPath, { flags: 'w' })

	const captureEnvironment = {
		...process.env
	}
	if (input.vercelToken) captureEnvironment.VERCEL_TOKEN = input.vercelToken

	const vercelProcess = spawn('vercel', ['logs', input.deploymentDomain, '--json'], {
		env: captureEnvironment,
		stdio: ['ignore', 'pipe', 'pipe']
	})

	// Track child exit so stop() can return immediately when the CLI has
	// already terminated (e.g. bad token, unknown deployment).
	let childHasClosed = false
	vercelProcess.once('close', () => {
		childHasClosed = true
	})

	vercelProcess.stdout.pipe(stdoutWriter)
	vercelProcess.stderr.pipe(stderrWriter)

	async function stop(): Promise<void> {
		if (!childHasClosed && !vercelProcess.killed) {
			vercelProcess.kill('SIGINT')
		}
		if (!childHasClosed) {
			await new Promise<void>(resolve => {
				// Fallback in case the CLI ignores SIGINT; cleared on close so the
				// timer never outlives the capture.
				const fallbackTimer = setTimeout(resolve, 3000)
				vercelProcess.once('close', () => {
					clearTimeout(fallbackTimer)
					resolve()
				})
			})
		}
		stdoutWriter.end()
		stderrWriter.end()
	}

	return {
		stderrPath,
		stdoutPath,
		stop
	}
}

/**
 * Scans a captured runtime log file and returns every non-empty, trimmed line
 * that matches one of the known error signatures (OpenAI parameter errors,
 * unhandled rejections, Server Components render failures, Next.js digests).
 */
export async function collectRuntimeErrorMatches(runtimeLogPath: string): Promise<string[]> {
	// Signatures of runtime failures the smoke run should surface.
	const errorSignatures = [
		/Unsupported parameter/i,
		/Unknown parameter/i,
		/Invalid 'item\.id'/i,
		/Unhandled/i,
		/An error occurred in the Server Components render/i,
		/\bdigest\b/i
	]

	const rawLogContent = await readFile(runtimeLogPath, 'utf8')

	return rawLogContent
		.split('\n')
		.map(logLine => logLine.trim())
		.filter(Boolean)
		.filter(logLine => errorSignatures.some(signature => signature.test(logLine)))
}
141 changes: 141 additions & 0 deletions scripts/smoke/generateSmokeAudio.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
import { spawnSync } from 'node:child_process'
import { mkdir, writeFile } from 'node:fs/promises'
import { join, resolve } from 'node:path'

// One scripted utterance to synthesize into a smoke-test audio fixture.
type SmokeAudioPrompt = {
	// Base name (no extension) for the generated .mp3/.wav pair.
	fileName: string
	// Language of the utterance. Informational only — not read by the visible
	// generation code; the TTS model infers language from the text itself.
	languageCode: 'en' | 'es'
	text: string
}

// The two utterances replayed (alternating, with silence gaps) to simulate a
// bilingual conversation. Text is intentionally ASCII-only (no accents).
const smokeAudioPrompts: SmokeAudioPrompt[] = [
	{
		fileName: 'en_smoke',
		languageCode: 'en',
		text:
			'Hello everyone. This is a Lilac translation smoke test. We are validating live production conversation flow.'
	},
	{
		fileName: 'es_smoke',
		languageCode: 'es',
		text:
			'Hola a todos. Esta es una prueba de humo de traduccion de Lilac. Estamos validando el flujo de conversacion en produccion.'
	}
]

// Output directory for all generated fixtures, relative to the repo root.
// Matches the `.artifacts` entry in .gitignore and the CI artifact upload path.
const fixturesDirectoryPath = resolve(process.cwd(), '.artifacts/smoke/fixtures')

/**
 * Returns the trimmed OPENAI_API_KEY from the environment.
 * Throws when the variable is unset, empty, or whitespace-only.
 */
function requireOpenAiApiKey(): string {
	const trimmedKey = (process.env.OPENAI_API_KEY ?? '').trim()
	if (trimmedKey === '') {
		throw new Error('OPENAI_API_KEY is required for smoke audio generation.')
	}
	return trimmedKey
}

/**
 * Runs `ffmpeg` synchronously with the given arguments, throwing on failure.
 *
 * Fix: when the binary cannot be spawned at all (ffmpeg not installed / not
 * on PATH), spawnSync sets `.error` and leaves `status` null with no
 * stdout/stderr — the old code then threw the useless "ffmpeg failed:
 * undefined". We now surface the spawn error explicitly.
 */
function runFfmpeg(args: string[]): void {
	const ffmpegResult = spawnSync('ffmpeg', args, { encoding: 'utf8' })
	if (ffmpegResult.error) {
		throw new Error(
			`ffmpeg could not be started (is it installed and on PATH?): ${ffmpegResult.error.message}`
		)
	}
	if (ffmpegResult.status !== 0) {
		throw new Error(`ffmpeg failed: ${ffmpegResult.stderr || ffmpegResult.stdout}`)
	}
}

/**
 * Synthesizes one prompt with the OpenAI TTS API (gpt-4o-mini-tts, voice
 * "alloy"), saves the raw MP3, then converts it to 48 kHz mono s16 WAV —
 * the format the conversation fixture concatenation expects.
 */
async function generatePromptAudio(prompt: SmokeAudioPrompt, openAiApiKey: string): Promise<void> {
	const mp3Path = join(fixturesDirectoryPath, `${prompt.fileName}.mp3`)
	const wavPath = join(fixturesDirectoryPath, `${prompt.fileName}.wav`)

	const requestPayload = {
		input: prompt.text,
		model: 'gpt-4o-mini-tts',
		voice: 'alloy'
	}

	const ttsResponse = await fetch('https://api.openai.com/v1/audio/speech', {
		body: JSON.stringify(requestPayload),
		headers: {
			Authorization: `Bearer ${openAiApiKey}`,
			'Content-Type': 'application/json'
		},
		method: 'POST'
	})

	if (!ttsResponse.ok) {
		throw new Error(`TTS generation failed for ${prompt.fileName}: ${await ttsResponse.text()}`)
	}

	await writeFile(mp3Path, Buffer.from(await ttsResponse.arrayBuffer()))

	// Normalize to the sample rate/channel layout shared by every fixture.
	runFfmpeg(['-y', '-i', mp3Path, '-ar', '48000', '-ac', '1', '-sample_fmt', 's16', wavPath])
}

/**
 * Concatenates the English and Spanish fixtures into one conversation WAV:
 * 4s silence, EN, 3s silence, ES, 3s silence, EN, 3s silence, ES.
 *
 * Fix: the previous filter graph declared 8 ffmpeg inputs but used
 * `concat=n=7` over `[0:a]`..`[6:a]`, silently dropping the final Spanish
 * utterance `[7:a]`. The concat now consumes all 8 inputs (n=8), so the
 * fixture actually contains both full EN/ES exchanges.
 */
function buildConversationFixture(): void {
	const conversationOutputPath = join(fixturesDirectoryPath, 'conversation_smoke.wav')
	const englishWavPath = join(fixturesDirectoryPath, 'en_smoke.wav')
	const spanishWavPath = join(fixturesDirectoryPath, 'es_smoke.wav')

	// Silent lavfi source matching the fixtures' 48 kHz mono format.
	const silenceInput = (seconds: number): string[] => [
		'-f',
		'lavfi',
		'-i',
		`anullsrc=r=48000:cl=mono:d=${seconds}`
	]

	runFfmpeg([
		'-y',
		...silenceInput(4),
		'-i',
		englishWavPath,
		...silenceInput(3),
		'-i',
		spanishWavPath,
		...silenceInput(3),
		'-i',
		englishWavPath,
		...silenceInput(3),
		'-i',
		spanishWavPath,
		'-filter_complex',
		// All 8 inputs, in declaration order: silence/EN/silence/ES/silence/EN/silence/ES.
		'[0:a][1:a][2:a][3:a][4:a][5:a][6:a][7:a]concat=n=8:v=0:a=1',
		'-ar',
		'48000',
		'-ac',
		'1',
		'-sample_fmt',
		's16',
		conversationOutputPath
	])
}

/**
 * Entry point: validates the API key, ensures the fixtures directory exists,
 * generates each utterance (sequentially, to keep TTS requests ordered and
 * rate-limit friendly), then builds the combined conversation fixture.
 */
async function main(): Promise<void> {
	const apiKey = requireOpenAiApiKey()
	await mkdir(fixturesDirectoryPath, { recursive: true })

	for (const audioPrompt of smokeAudioPrompts) {
		await generatePromptAudio(audioPrompt, apiKey)
	}

	buildConversationFixture()

	console.log(`Smoke fixtures generated in ${fixturesDirectoryPath}`)
}

// Fire-and-forget with explicit failure handling: log the message and exit
// non-zero so CI marks the step as failed.
void main().catch(caughtError => {
	console.error(caughtError instanceof Error ? caughtError.message : 'Smoke audio generation failed.')
	process.exit(1)
})
Loading