Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
256 changes: 256 additions & 0 deletions lib/pipeline.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -642,6 +642,262 @@ describe("PipelineExecutor", () => {
});
});

describe("validation - error message prefixes", () => {
  /**
   * Builds a PipelineExecutor wired to fresh mocks with the standard
   * test limits (10 tasks, fetch disabled, default dimension limit).
   * Every test in this suite used an identical 12-line construction;
   * hoisting it keeps each test focused on its config and assertion.
   */
  const makeExecutor = () =>
    new PipelineExecutor(
      createMockEngine() as never,
      createMockClient() as never,
      mockS3Config,
      {
        maxTasks: 10,
        enableFetch: false,
        dimensionLimit: DEFAULT_DIMENSION_LIMIT,
      },
    );

  /**
   * Runs `execute()` with a 3-byte image, asserts it rejects with an
   * HttpError, and returns the error so callers can assert on `body`.
   * Throws a descriptive Error (instead of the opaque
   * `expect(true).toBe(false)` sentinel) if execute() resolves.
   */
  const expectHttpError = async (config: PipelineConfig): Promise<HttpError> => {
    try {
      await makeExecutor().execute(config, new Uint8Array([1, 2, 3]));
    } catch (e) {
      expect(e).toBeInstanceOf(HttpError);
      return e as HttpError;
    }
    throw new Error("expected execute() to reject with a validation error");
  };

  test("error for invalid width includes full task prefix", async () => {
    const config = createValidConfig({
      tasks: [
        {
          id: "t1",
          transform: { format: ImageType.Jpeg, width: -10 },
          output: { bucket: "b", key: "k" },
        },
      ],
    });

    const err = await expectHttpError(config);
    expect(err.body).toBe("task[0].transform.width: must be at least 1");
  });

  test("error for invalid format includes full task prefix", async () => {
    // Deliberately bypasses the type system: "bmp" is not a valid
    // ImageType, so the config is forced through `unknown`.
    const config = {
      tasks: [
        {
          id: "t1",
          transform: { format: "bmp" },
          output: { bucket: "b", key: "k" },
        },
      ],
    } as unknown as PipelineConfig;

    const err = await expectHttpError(config);
    expect(String(err.body)).toContain("task[0].transform.format:");
    expect(String(err.body)).toContain("unknown format 'bmp'");
  });

  test("error for second task uses correct index", async () => {
    // First task is valid; only the second should fail, proving the
    // prefix carries the failing task's index rather than always [0].
    const config = createValidConfig({
      tasks: [
        {
          id: "t1",
          transform: { format: ImageType.Jpeg },
          output: { bucket: "b", key: "k1" },
        },
        {
          id: "t2",
          transform: { format: ImageType.Jpeg, quality: 200 },
          output: { bucket: "b", key: "k2" },
        },
      ],
    });

    const err = await expectHttpError(config);
    expect(err.body).toBe("task[1].transform.quality: must be at most 100");
  });

  test("error for invalid effort includes format in message", async () => {
    // webp caps effort at 6; the message should name the format.
    const config = createValidConfig({
      tasks: [
        {
          id: "t1",
          transform: { format: ImageType.Webp, effort: 10 },
          output: { bucket: "b", key: "k" },
        },
      ],
    });

    const err = await expectHttpError(config);
    expect(err.body).toBe("task[0].transform.effort: must be at most 6 for webp");
  });
});

describe("validation - metadata options", () => {
  /**
   * Builds a PipelineExecutor wired to fresh mocks with the standard
   * test limits (10 tasks, fetch disabled, default dimension limit).
   * Replaces the identical construction boilerplate repeated in every
   * test of this suite.
   */
  const makeExecutor = () =>
    new PipelineExecutor(
      createMockEngine() as never,
      createMockClient() as never,
      mockS3Config,
      {
        maxTasks: 10,
        enableFetch: false,
        dimensionLimit: DEFAULT_DIMENSION_LIMIT,
      },
    );

  /**
   * Runs `execute()` with a 3-byte image, asserts it rejects with an
   * HttpError, and returns the error so callers can assert on `body`.
   * Throws a descriptive Error (instead of the opaque
   * `expect(true).toBe(false)` sentinel) if execute() resolves.
   */
  const expectHttpError = async (config: PipelineConfig): Promise<HttpError> => {
    try {
      await makeExecutor().execute(config, new Uint8Array([1, 2, 3]));
    } catch (e) {
      expect(e).toBeInstanceOf(HttpError);
      return e as HttpError;
    }
    throw new Error("expected execute() to reject with a validation error");
  };

  test("throws error for invalid metadata exif value", async () => {
    // `exif` must be boolean; a string forces the cast through `unknown`.
    const config = {
      tasks: [
        {
          id: "t1",
          transform: { format: ImageType.Jpeg },
          output: { bucket: "b", key: "k" },
          metadata: { exif: "invalid" },
        },
      ],
    } as unknown as PipelineConfig;

    const err = await expectHttpError(config);
    expect(err.body).toBe("task[0].metadata.exif: must be a boolean");
  });

  test("throws error for invalid metadata stats value", async () => {
    // `stats` must be boolean; a number is rejected with the same shape
    // of message as the exif case above.
    const config = {
      tasks: [
        {
          id: "t1",
          transform: { format: ImageType.Jpeg },
          output: { bucket: "b", key: "k" },
          metadata: { stats: 42 },
        },
      ],
    } as unknown as PipelineConfig;

    const err = await expectHttpError(config);
    expect(err.body).toBe("task[0].metadata.stats: must be a boolean");
  });

  test("metadata error for second task uses correct index", async () => {
    // First task is valid and has no metadata; only the second task's
    // bad `thumbhash` should fail, proving the index in the prefix.
    const config = {
      tasks: [
        {
          id: "t1",
          transform: { format: ImageType.Jpeg },
          output: { bucket: "b", key: "k1" },
        },
        {
          id: "t2",
          transform: { format: ImageType.Jpeg },
          output: { bucket: "b", key: "k2" },
          metadata: { thumbhash: "yes" },
        },
      ],
    } as unknown as PipelineConfig;

    const err = await expectHttpError(config);
    expect(err.body).toBe("task[1].metadata.thumbhash: must be a boolean");
  });
});

describe("image source", () => {
test("throws error when no image is provided", async () => {
const engine = createMockEngine();
Expand Down
64 changes: 23 additions & 41 deletions lib/pipeline.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,27 +3,21 @@ import type { ImageEngine } from "./image.ts";
import { createS3Client, getS3Url, uploadToS3 } from "./s3.ts";
import {
HttpError,
type ImageOptions,
type MetadataResult,
type PipelineConfig,
type PipelineResponse,
type PipelineTask,
type S3Config,
type TaskResult,
type TransformOptions,
} from "./types.ts";
import {
getMimetype,
type ParseContext,
parseMetadataFlags,
parseTransformOptions,
validateContentType,
validateBlurStrict,
validateBooleanStrict,
validateDimensionStrict,
validateEffortStrict,
validateFitStrict,
validateFormatStrict,
validateKernelStrict,
validatePositionStrict,
validatePresetStrict,
validateQualityStrict,
validateFormat,
} from "./validation.ts";

import type { S3Client } from "bun";
Expand Down Expand Up @@ -176,40 +170,28 @@ export class PipelineExecutor {
index: number,
dimensionLimit: number,
): ParsedTask {
const prefix = `task[${index}].transform`;
const t = task.transform;

// Parse format
const format = validateFormatStrict(t.format, prefix);

// Parse and validate all transform options
const options: Omit<ImageOptions, "data"> = {
format,
width: validateDimensionStrict(t.width, "width", prefix, dimensionLimit),
height: validateDimensionStrict(t.height, "height", prefix, dimensionLimit),
quality: validateQualityStrict(t.quality, prefix),
blur: validateBlurStrict(t.blur, prefix),
greyscale: validateBooleanStrict(t.greyscale, "greyscale", prefix),
lossless: validateBooleanStrict(t.lossless, "lossless", prefix),
progressive: validateBooleanStrict(t.progressive, "progressive", prefix),
effort: validateEffortStrict(t.effort, format, prefix),
fit: validateFitStrict(t.fit, prefix),
kernel: validateKernelStrict(t.kernel, prefix),
position: validatePositionStrict(t.position, prefix),
preset: validatePresetStrict(t.preset, prefix),
const ctx: ParseContext = {
strict: false,
failFast: true,
fromString: false,
prefix: `task[${index}].transform.`,
warnings: [],
dimensionLimit,
};
const format = validateFormat(task.transform.format, ctx);
const options = parseTransformOptions(
task.transform as Record<string, unknown>,
format,
ctx,
);

// Parse metadata options if provided (empty object returns basic metadata)
let metadata: ParsedTask["metadata"];
if (task.metadata) {
const metaPrefix = `task[${index}].metadata`;
metadata = {
exif: validateBooleanStrict(task.metadata.exif, "exif", metaPrefix) ?? false,
stats: validateBooleanStrict(task.metadata.stats, "stats", metaPrefix) ?? false,
thumbhash:
validateBooleanStrict(task.metadata.thumbhash, "thumbhash", metaPrefix) ??
false,
const metaCtx: ParseContext = {
...ctx,
prefix: `task[${index}].metadata.`,
};
metadata = parseMetadataFlags(task.metadata as Record<string, unknown>, metaCtx);
}

return {
Expand Down Expand Up @@ -286,7 +268,7 @@ export class PipelineExecutor {

interface ParsedTask {
id: string;
options: Omit<ImageOptions, "data">;
options: TransformOptions;
output: PipelineTask["output"];
metadata?: {
exif: boolean;
Expand Down
Loading