diff --git a/libsquoosh/src/codecs.ts b/libsquoosh/src/codecs.ts
index 868a889b..ebc7811f 100644
--- a/libsquoosh/src/codecs.ts
+++ b/libsquoosh/src/codecs.ts
@@ -257,7 +257,7 @@ export const preprocessors = {
       numRotations: 0,
     },
   },
-};
+} as const;
 
 export const codecs = {
   mozjpeg: {
@@ -444,4 +444,4 @@ export const codecs = {
       max: 1,
     },
   },
-};
+} as const;
diff --git a/libsquoosh/src/emscripten-utils.ts b/libsquoosh/src/emscripten-utils.ts
index a6481508..90589a3f 100644
--- a/libsquoosh/src/emscripten-utils.ts
+++ b/libsquoosh/src/emscripten-utils.ts
@@ -11,7 +11,7 @@ export function instantiateEmscriptenWasm(
   factory: EmscriptenWasm.ModuleFactory,
   path: string,
   workerJS: string = '',
-): Promise {
+): Promise {
   return factory({
     locateFile(requestPath) {
       // The glue code generated by emscripten uses the original
diff --git a/libsquoosh/src/index.js b/libsquoosh/src/index.ts
similarity index 67%
rename from libsquoosh/src/index.js
rename to libsquoosh/src/index.ts
index c627e9c4..9027900f 100644
--- a/libsquoosh/src/index.js
+++ b/libsquoosh/src/index.ts
@@ -5,10 +5,18 @@ import { promises as fsp } from 'fs';
 import { codecs as encoders, preprocessors } from './codecs.js';
 import WorkerPool from './worker_pool.js';
 import { autoOptimize } from './auto-optimizer.js';
+import type ImageData from './image_data';
 
 export { ImagePool, encoders, preprocessors };
 
+type EncoderKey = keyof typeof encoders;
+type PreprocessorKey = keyof typeof preprocessors;
+type FileLike = Buffer | ArrayBuffer | string | { buffer: Buffer };
-async function decodeFile({ file }) {
+async function decodeFile({
+  file,
+}: {
+  file: FileLike;
+}): Promise<{ bitmap: ImageData; size: number }> {
   let buffer;
   if (ArrayBuffer.isView(file)) {
     buffer = Buffer.from(file.buffer);
@@ -28,23 +36,33 @@
   const firstChunkString = Array.from(firstChunk)
     .map((v) => String.fromCodePoint(v))
     .join('');
-  const key = Object.entries(encoders).find(([name, { detectors }]) =>
+  const key = Object.entries(encoders).find(([_name, { detectors }]) =>
     detectors.some((detector) => detector.exec(firstChunkString)),
-  )?.[0];
+  )?.[0] as EncoderKey | undefined;
   if (!key) {
     throw Error(`${file} has an unsupported format`);
   }
-  const rgba = (await encoders[key].dec()).decode(new Uint8Array(buffer));
+  const encoder = encoders[key];
+  const mod = await encoder.dec();
+  const rgba = mod.decode(new Uint8Array(buffer));
   return {
     bitmap: rgba,
     size: buffer.length,
   };
 }
 
-async function preprocessImage({ preprocessorName, options, image }) {
+async function preprocessImage({
+  preprocessorName,
+  options,
+  image,
+}: {
+  preprocessorName: PreprocessorKey;
+  options: any;
+  image: { bitmap: ImageData };
+}) {
   const preprocessor = await preprocessors[preprocessorName].instantiate();
   image.bitmap = await preprocessor(
-    image.bitmap.data,
+    Uint8Array.from(image.bitmap.data),
     image.bitmap.width,
     image.bitmap.height,
     options,
@@ -58,14 +76,20 @@ async function encodeImage({
   encConfig,
   optimizerButteraugliTarget,
   maxOptimizerRounds,
+}: {
+  bitmap: ImageData;
+  encName: EncoderKey;
+  encConfig: string | { [key: string]: any };
+  optimizerButteraugliTarget: number;
+  maxOptimizerRounds: number;
 }) {
-  let binary;
+  let binary: Uint8Array;
   let optionsUsed = encConfig;
   const encoder = await encoders[encName].enc();
   if (encConfig === 'auto') {
     const optionToOptimize = encoders[encName].autoOptimize.option;
     const decoder = await encoders[encName].dec();
-    const encode = (bitmapIn, quality) =>
+    const encode = (bitmapIn: ImageData, quality: number) =>
       encoder.encode(
         bitmapIn.data,
         bitmapIn.width,
@@ -74,7 +98,7 @@ async function encodeImage({
           [optionToOptimize]: quality,
         }),
       );
-    const decode = (binary) => decoder.decode(binary);
+    const decode = (binary: Uint8Array) => decoder.decode(binary);
     const { binary: optimizedBinary, quality } = await autoOptimize(
       bitmapIn,
       encode,
@@ -107,10 +131,14 @@ async function encodeImage({
   };
 }
 
-// both decoding and encoding go through the worker pool
-function handleJob(params) {
-  const { operation } = params;
-  switch (operation) {
+type EncodeParams = { operation: 'encode' } & Parameters<typeof encodeImage>[0];
+type DecodeParams = { operation: 'decode' } & Parameters<typeof decodeFile>[0];
+type PreprocessParams = { operation: 'preprocess' } & Parameters<
+  typeof preprocessImage
+>[0];
+export type JobMessage = EncodeParams | DecodeParams | PreprocessParams;
+function handleJob(params: JobMessage) {
+  switch (params.operation) {
     case 'encode':
       return encodeImage(params);
     case 'decode':
@@ -118,7 +146,7 @@ function handleJob(params) {
     case 'preprocess':
       return preprocessImage(params);
     default:
-      throw Error(`Invalid job "${operation}"`);
+      throw Error(`Invalid job "${(params as any).operation}"`);
   }
 }
 
@@ -126,7 +154,12 @@ function handleJob(params) {
 /**
  * Represents an ingested image.
  */
 class Image {
-  constructor(workerPool, file) {
+  public file: FileLike;
+  public workerPool: WorkerPool;
+  public decoded: Promise<{ bitmap: ImageData }>;
+  public encodedWith: { [key: string]: any };
+
+  constructor(workerPool: WorkerPool, file: FileLike) {
     this.file = file;
     this.workerPool = workerPool;
     this.decoded = workerPool.dispatchJob({ operation: 'decode', file });
@@ -143,14 +176,15 @@ class Image {
     if (!Object.keys(preprocessors).includes(name)) {
       throw Error(`Invalid preprocessor "${name}"`);
     }
+    const preprocessorName = name as PreprocessorKey;
     const preprocessorOptions = Object.assign(
       {},
-      preprocessors[name].defaultOptions,
+      preprocessors[preprocessorName].defaultOptions,
       options,
     );
     this.decoded = this.workerPool.dispatchJob({
       operation: 'preprocess',
-      preprocessorName: name,
+      preprocessorName,
       image: await this.decoded,
       options: preprocessorOptions,
     });
@@ -161,14 +195,20 @@ class Image {
   /**
    * Define one or several encoders to use on the image.
    * @param {object} encodeOptions - An object with encoders to use, and their settings.
-   * @returns {Promise} - A promise that resolves when the image has been encoded with all the specified encoders.
+   * @returns {Promise} - A promise that resolves when the image has been encoded with all the specified encoders.
    */
-  async encode(encodeOptions = {}) {
+  async encode(
+    encodeOptions: {
+      optimizerButteraugliTarget?: number;
+      maxOptimizerRounds?: number;
+    } = {},
+  ): Promise<void> {
     const { bitmap } = await this.decoded;
-    for (const [encName, options] of Object.entries(encodeOptions)) {
-      if (!Object.keys(encoders).includes(encName)) {
+    for (const [name, options] of Object.entries(encodeOptions)) {
+      if (!Object.keys(encoders).includes(name)) {
         continue;
       }
+      const encName = name as EncoderKey;
       const encRef = encoders[encName];
       const encConfig =
         typeof options === 'string'
@@ -193,28 +233,30 @@ class Image {
  * A pool where images can be ingested and squooshed.
  */
 class ImagePool {
+  public workerPool: WorkerPool;
+
   /**
    * Create a new pool.
    * @param {number} [threads] - Number of concurrent image processes to run in the pool. Defaults to the number of CPU cores in the system.
    */
-  constructor(threads) {
+  constructor(threads: number) {
     this.workerPool = new WorkerPool(threads || cpus().length, __filename);
   }
 
   /**
    * Ingest an image into the image pool.
-   * @param {string | Buffer | URL | object} image - The image or path to the image that should be ingested and decoded.
+   * @param {FileLike} image - The image or path to the image that should be ingested and decoded.
    * @returns {Image} - A custom class reference to the decoded image.
    */
-  ingestImage(image) {
+  ingestImage(image: FileLike): Image {
     return new Image(this.workerPool, image);
   }
 
   /**
    * Closes the underlying image processing pipeline. The already processed images will still be there, but no new processing can start.
-   * @returns {Promise} - A promise that resolves when the underlying pipeline has closed.
+   * @returns {Promise} - A promise that resolves when the underlying pipeline has closed.
    */
-  async close() {
+  async close(): Promise<void> {
     await this.workerPool.join();
   }
 }
diff --git a/libsquoosh/src/worker_pool.js b/libsquoosh/src/worker_pool.ts
similarity index 80%
rename from libsquoosh/src/worker_pool.js
rename to libsquoosh/src/worker_pool.ts
index 646d3870..c0b41522 100644
--- a/libsquoosh/src/worker_pool.js
+++ b/libsquoosh/src/worker_pool.ts
@@ -1,5 +1,7 @@
 import { Worker, parentPort } from 'worker_threads';
+// @ts-ignore
 import { TransformStream } from 'web-streams-polyfill';
+import type { JobMessage } from './index';
 
 function uuid() {
   return Array.from({ length: 16 }, () =>
@@ -7,7 +9,7 @@
   ).join('');
 }
 
-function jobPromise(worker, msg) {
+function jobPromise(worker: Worker, msg: JobMessage) {
   return new Promise((resolve, reject) => {
     const id = uuid();
     worker.postMessage({ msg, id });
@@ -26,7 +28,12 @@
 }
 
 export default class WorkerPool {
-  constructor(numWorkers, workerFile) {
+  public numWorkers: number;
+  public jobQueue: TransformStream;
+  public workerQueue: TransformStream;
+  public done: Promise<void>;
+
+  constructor(numWorkers: number, workerFile: string) {
     this.numWorkers = numWorkers;
     this.jobQueue = new TransformStream();
     this.workerQueue = new TransformStream();
@@ -82,7 +89,7 @@
     await this.done;
   }
 
-  dispatchJob(msg) {
+  dispatchJob(msg: JobMessage): Promise<any> {
     return new Promise((resolve, reject) => {
       const writer = this.jobQueue.writable.getWriter();
       writer.write({ msg, resolve, reject });
@@ -90,14 +97,14 @@
     });
   }
 
-  static useThisThreadAsWorker(cb) {
-    parentPort.on('message', async (data) => {
+  static useThisThreadAsWorker(cb: (msg: JobMessage) => any) {
+    parentPort!.on('message', async (data) => {
       const { msg, id } = data;
       try {
         const result = await cb(msg);
-        parentPort.postMessage({ result, id });
+        parentPort!.postMessage({ result, id });
       } catch (e) {
-        parentPort.postMessage({ error: e.message, id });
+        parentPort!.postMessage({ error: e.message, id });
       }
     });
   }
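A minimal usage sketch of the typed API this patch introduces. It is not part of the patch: it assumes the published package name `@squoosh/lib` (use a relative import of `libsquoosh/src/index` when building from source), the `mozjpeg` encoder key shown in `codecs.ts`, and that encode results land on `encodedWith` keyed by encoder name, which the new `encodedWith: { [key: string]: any }` field suggests but the diff does not show. Note that `encode()`'s declared parameter type here only lists the optimizer options, while the runtime loops over `Object.entries(encodeOptions)` looking for encoder names, so passing an encoder map from an object literal may need a cast.

```ts
import { ImagePool } from '@squoosh/lib'; // assumed package name; adjust when importing from source

async function main(): Promise<void> {
  // constructor(threads: number); falsy values fall back to cpus().length.
  const imagePool = new ImagePool(2);

  // ingestImage accepts a FileLike: Buffer | ArrayBuffer | string | { buffer: Buffer }.
  const image = imagePool.ingestImage('./example.png');

  // The runtime iterates Object.entries(encodeOptions) and skips unknown encoder keys;
  // the declared parameter type only lists optimizer options, hence the cast.
  await image.encode({ mozjpeg: { quality: 75 } } as any);

  // encodedWith is typed { [key: string]: any }; awaiting covers the case where
  // each entry is a pending encode job. The exact result shape is not shown in this diff.
  const result = await image.encodedWith.mozjpeg;
  console.log(result);

  // close() resolves once the underlying worker pool has joined.
  await imagePool.close();
}

main().catch(console.error);
```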