diff --git a/packages/engine/src/services/screenshotService.test.ts b/packages/engine/src/services/screenshotService.test.ts
index 30a711400..62fc4b740 100644
--- a/packages/engine/src/services/screenshotService.test.ts
+++ b/packages/engine/src/services/screenshotService.test.ts
@@ -1,7 +1,12 @@
// @vitest-environment node
import { describe, it, expect, vi } from "vitest";
+import { parseHTML } from "linkedom";
import { type Page } from "puppeteer-core";
-import { pageScreenshotCapture, cdpSessionCache } from "./screenshotService.js";
+import {
+ pageScreenshotCapture,
+ cdpSessionCache,
+ injectVideoFramesBatch,
+} from "./screenshotService.js";
// Stub a Page + CDPSession just enough that pageScreenshotCapture can call
// `client.send("Page.captureScreenshot", ...)` and we can inspect the args.
@@ -20,7 +25,7 @@ describe("pageScreenshotCapture supersample plumbing", () => {
const ONE_PIXEL_PNG_B64 =
"iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkAAIAAAoAAv/lxKUAAAAASUVORK5CYII=";
- it("omits `clip` when deviceScaleFactor is undefined (default 1)", async () => {
+ it("passes `clip` with scale 1 when deviceScaleFactor is undefined (default 1)", async () => {
const send = vi.fn().mockResolvedValue({ data: ONE_PIXEL_PNG_B64 });
const page = makeFakePageWithCdp(send);
@@ -34,11 +39,13 @@ describe("pageScreenshotCapture supersample plumbing", () => {
expect(send).toHaveBeenCalledWith(
"Page.captureScreenshot",
- expect.not.objectContaining({ clip: expect.anything() }),
+ expect.objectContaining({
+ clip: { x: 0, y: 0, width: 1920, height: 1080, scale: 1 },
+ }),
);
});
- it("omits `clip` when deviceScaleFactor is exactly 1", async () => {
+ it("passes `clip` with scale 1 when deviceScaleFactor is exactly 1", async () => {
const send = vi.fn().mockResolvedValue({ data: ONE_PIXEL_PNG_B64 });
const page = makeFakePageWithCdp(send);
@@ -50,8 +57,8 @@ describe("pageScreenshotCapture supersample plumbing", () => {
deviceScaleFactor: 1,
});
- const params = send.mock.calls[0]?.[1] as { clip?: unknown };
- expect(params.clip).toBeUndefined();
+ const params = send.mock.calls[0]?.[1] as { clip?: { scale: number } };
+ expect(params.clip).toEqual({ x: 0, y: 0, width: 1920, height: 1080, scale: 1 });
});
it("passes `clip` with `scale = dpr` when deviceScaleFactor > 1 (the supersample contract)", async () => {
@@ -90,3 +97,97 @@ describe("pageScreenshotCapture supersample plumbing", () => {
expect(params.clip?.scale).toBe(3);
});
});
+
+describe("injectVideoFramesBatch replacement layout", () => {
+ it("does not copy opposing inset constraints onto the injected frame image", async () => {
+ const { window, document } = parseHTML(
+ '<video id="clip"></video>',
+ );
+
+ Object.defineProperty(window.HTMLImageElement.prototype, "decode", {
+ configurable: true,
+ value: () => Promise.resolve(),
+ });
+
+ const video = document.getElementById("clip") as HTMLVideoElement;
+ Object.defineProperties(video, {
+ offsetLeft: { configurable: true, get: () => 0 },
+ offsetTop: { configurable: true, get: () => 0 },
+ offsetWidth: { configurable: true, get: () => 1920 },
+ offsetHeight: { configurable: true, get: () => 1080 },
+ });
+ video.getBoundingClientRect = () =>
+ ({
+ x: 0,
+ y: 0,
+ left: 0,
+ top: 0,
+ right: 1920,
+ bottom: 1080,
+ width: 1920,
+ height: 1080,
+ toJSON: () => ({}),
+ }) as DOMRect;
+
+ const computedStyle = document.createElement("div").style;
+ computedStyle.position = "absolute";
+ computedStyle.width = "1920px";
+ computedStyle.height = "1080px";
+ computedStyle.top = "0px";
+ computedStyle.left = "0px";
+ computedStyle.right = "0px";
+ computedStyle.bottom = "0px";
+ computedStyle.inset = "0px";
+ computedStyle.objectFit = "cover";
+ computedStyle.objectPosition = "center center";
+ computedStyle.zIndex = "3";
+ computedStyle.opacity = "1";
+ Object.defineProperty(window, "getComputedStyle", {
+ configurable: true,
+ value: () => computedStyle,
+ });
+
+ const globals = globalThis as unknown as {
+ window?: typeof window;
+ document?: Document;
+ };
+ const previousWindow = globals.window;
+ const previousDocument = globals.document;
+ globals.window = window;
+ globals.document = document;
+ try {
+ const page = {
+ evaluate: async (
+ fn: (
+ updates: Array<{ videoId: string; dataUri: string }>,
+ visualProperties: string[],
+ ) => Promise<void>,
+ updates: Array<{ videoId: string; dataUri: string }>,
+ visualProperties: string[],
+ ) => fn(updates, visualProperties),
+ } as unknown as Page;
+
+ await injectVideoFramesBatch(page, [
+ {
+ videoId: "clip",
+ dataUri:
+ "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkAAIAAAoAAv/lxKUAAAAASUVORK5CYII=",
+ },
+ ]);
+ } finally {
+ globals.window = previousWindow;
+ globals.document = previousDocument;
+ }
+
+ const img = video.nextElementSibling as HTMLImageElement | null;
+ expect(img).not.toBeNull();
+ expect(img?.style.position).toBe("absolute");
+ expect(img?.style.left).toBe("0px");
+ expect(img?.style.top).toBe("0px");
+ expect(img?.style.width).toBe("1920px");
+ expect(img?.style.height).toBe("1080px");
+ expect(img?.style.right).toBe("auto");
+ expect(img?.style.bottom).toBe("auto");
+ expect(img?.style.inset).toBe("auto");
+ });
+});
diff --git a/packages/engine/src/services/screenshotService.ts b/packages/engine/src/services/screenshotService.ts
index e2ff30bcd..acba0e500 100644
--- a/packages/engine/src/services/screenshotService.ts
+++ b/packages/engine/src/services/screenshotService.ts
@@ -130,19 +130,14 @@ export async function pageScreenshotCapture(page: Page, options: CaptureOptions)
const client = await getCdpSession(page);
const isPng = options.format === "png";
const dpr = options.deviceScaleFactor ?? 1;
- // When supersampling, pass an explicit clip with `scale` so Chrome emits a
- // screenshot at device-pixel dimensions (`width × height × dpr`). Without
- // this, `Page.captureScreenshot` returns at CSS dimensions regardless of
- // the viewport's deviceScaleFactor.
- const clip =
- dpr > 1 ? { x: 0, y: 0, width: options.width, height: options.height, scale: dpr } : undefined;
+ const clip = { x: 0, y: 0, width: options.width, height: options.height, scale: dpr };
const result = await client.send("Page.captureScreenshot", {
format: isPng ? "png" : "jpeg",
quality: isPng ? undefined : (options.quality ?? 80),
fromSurface: true,
captureBeyondViewport: false,
optimizeForSpeed: !isPng,
- ...(clip ? { clip } : {}),
+ clip,
});
return Buffer.from(result.data, "base64");
}
@@ -382,6 +377,15 @@ export async function injectVideoFramesBatch(
await page.evaluate(
async (items: Array<{ videoId: string; dataUri: string }>, visualProperties: string[]) => {
const pendingDecodes: Array<Promise<void>> = [];
+ const replacementLayoutProperties = new Set([
+ "width",
+ "height",
+ "top",
+ "left",
+ "right",
+ "bottom",
+ "inset",
+ ]);
for (const item of items) {
const video = document.getElementById(item.videoId) as HTMLVideoElement | null;
if (!video) continue;
@@ -395,7 +399,6 @@ export async function injectVideoFramesBatch(
// and accurately reflects the user's intent on every frame.
const opacityParsed = parseFloat(computedStyle.opacity);
const computedOpacity = Number.isNaN(opacityParsed) ? 1 : opacityParsed;
- const sourceIsStatic = !computedStyle.position || computedStyle.position === "static";
if (isNewImage) {
img = document.createElement("img");
@@ -406,10 +409,35 @@ export async function injectVideoFramesBatch(
}
if (!img) continue;
+ for (const property of visualProperties) {
+ // Opacity is handled explicitly via `computedOpacity` below — copying
+ // via the generic loop would race against the opacity:0 hide applied
+ // to the