Convert remaining JS to TS in libSquoosh

Steven
2021-07-26 22:37:00 -04:00
parent 3f2dd66726
commit 16a53caa48
4 changed files with 85 additions and 36 deletions

View File

@@ -257,7 +257,7 @@ export const preprocessors = {
       numRotations: 0,
     },
   },
-};
+} as const;
 
 export const codecs = {
   mozjpeg: {
@@ -444,4 +444,4 @@ export const codecs = {
       max: 1,
     },
   },
-};
+} as const;
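Note on the `as const` change: the assertion makes both lookup tables deeply readonly and keeps leaf values (such as each codec's `autoOptimize.option`) as literal types, which is what the `EncoderKey`/`PreprocessorKey` lookups added in index.ts build on. A minimal standalone sketch with a toy table (the entries below are illustrative, not the real codecs):

// Toy table, not the real codecs.ts entries: the const assertion makes the
// object deeply readonly and keeps leaf values as literal types.
const codecs = {
  mozjpeg: { extension: 'jpg', autoOptimize: { option: 'quality' } },
  oxipng: { extension: 'png', autoOptimize: { option: 'level' } },
} as const;

// A union of the literal keys: 'mozjpeg' | 'oxipng'.
type CodecKey = keyof typeof codecs;

function optionToOptimize(key: CodecKey) {
  // Inferred as 'quality' | 'level' rather than plain string, thanks to `as const`.
  return codecs[key].autoOptimize.option;
}

console.log(optionToOptimize('mozjpeg')); // 'quality'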

View File

@@ -11,7 +11,7 @@ export function instantiateEmscriptenWasm<T extends EmscriptenWasm.Module>(
   factory: EmscriptenWasm.ModuleFactory<T>,
   path: string,
   workerJS: string = '',
-): Promise<T> {
+): Promise<any> {
   return factory({
     locateFile(requestPath) {
       // The glue code generated by emscripten uses the original
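Note on `Promise<T>` vs `Promise<any>`: loosening the return type trades away the typed module that the generic used to carry. A standalone sketch of that tradeoff, using hypothetical factory/module types rather than the real EmscriptenWasm ones:

// Hypothetical types for illustration (not the real EmscriptenWasm typings):
// a generic return type keeps the instantiated module typed for callers, while
// Promise<any> leaves them to annotate or cast the result themselves.
interface MozJpegModule {
  encode(data: Uint8Array, width: number, height: number, options: object): Uint8Array;
}
type ModuleFactory<T> = () => Promise<T>;
declare const mozjpegFactory: ModuleFactory<MozJpegModule>;

function instantiateTyped<T>(factory: ModuleFactory<T>): Promise<T> {
  return factory();
}
function instantiateLoose<T>(factory: ModuleFactory<T>): Promise<any> {
  return factory();
}

async function demo() {
  (await instantiateTyped(mozjpegFactory)).encode(new Uint8Array(0), 1, 1, {}); // checked call
  (await instantiateLoose(mozjpegFactory)).anything(); // `any`: compiles, unchecked
}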

View File

@@ -5,10 +5,18 @@ import { promises as fsp } from 'fs';
 import { codecs as encoders, preprocessors } from './codecs.js';
 import WorkerPool from './worker_pool.js';
 import { autoOptimize } from './auto-optimizer.js';
+import type ImageData from './image_data';
 
 export { ImagePool, encoders, preprocessors };
 
-async function decodeFile({ file }) {
+type EncoderKey = keyof typeof encoders;
+type PreprocessorKey = keyof typeof preprocessors;
+type FileLike = Buffer | ArrayBuffer | string | { buffer: Buffer };
+async function decodeFile({
+  file,
+}: {
+  file: FileLike;
+}): Promise<{ bitmap: ImageData; size: number }> {
   let buffer;
   if (ArrayBuffer.isView(file)) {
     buffer = Buffer.from(file.buffer);
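The `FileLike` union above covers the inputs `decodeFile` normalizes into a `Buffer`. A standalone sketch of that normalization, as an assumed helper (`toBuffer` is not part of the commit):

import { promises as fsp } from 'fs';

type FileLike = Buffer | ArrayBuffer | string | { buffer: Buffer };

// Collapse the FileLike union into a Buffer before decoding.
async function toBuffer(file: FileLike): Promise<Buffer> {
  if (ArrayBuffer.isView(file)) {
    // Buffer (a Uint8Array) lands here; reuse its underlying ArrayBuffer.
    return Buffer.from(file.buffer);
  }
  if (file instanceof ArrayBuffer) {
    return Buffer.from(file);
  }
  if (typeof file === 'string') {
    // Strings are treated as paths, mirroring how libSquoosh ingests files from disk.
    return fsp.readFile(file);
  }
  return file.buffer;
}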
@@ -28,23 +36,33 @@ async function decodeFile({ file }) {
   const firstChunkString = Array.from(firstChunk)
     .map((v) => String.fromCodePoint(v))
     .join('');
-  const key = Object.entries(encoders).find(([name, { detectors }]) =>
+  const key = Object.entries(encoders).find(([_name, { detectors }]) =>
     detectors.some((detector) => detector.exec(firstChunkString)),
-  )?.[0];
+  )?.[0] as EncoderKey | undefined;
   if (!key) {
     throw Error(`${file} has an unsupported format`);
   }
-  const rgba = (await encoders[key].dec()).decode(new Uint8Array(buffer));
+  const encoder = encoders[key];
+  const mod = await encoder.dec();
+  const rgba = mod.decode(new Uint8Array(buffer));
   return {
     bitmap: rgba,
     size: buffer.length,
   };
 }
 
-async function preprocessImage({ preprocessorName, options, image }) {
+async function preprocessImage({
+  preprocessorName,
+  options,
+  image,
+}: {
+  preprocessorName: PreprocessorKey;
+  options: any;
+  image: { bitmap: ImageData };
+}) {
   const preprocessor = await preprocessors[preprocessorName].instantiate();
   image.bitmap = await preprocessor(
-    image.bitmap.data,
+    Uint8Array.from(image.bitmap.data),
     image.bitmap.width,
     image.bitmap.height,
     options,
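The `as EncoderKey | undefined` assertion is needed because `Object.entries()` types keys as plain `string`. A standalone sketch of the same pattern on a toy decoder table (names and magic-byte regexes are illustrative):

// Object.entries() widens keys to string, so the matching key has to be
// asserted back to the table's key union before it can index the table.
const decoders = {
  mozjpeg: { magic: /^\xFF\xD8\xFF/ },
  oxipng: { magic: /^\x89PNG/ },
} as const;

type DecoderKey = keyof typeof decoders;

function detect(firstChunkString: string): DecoderKey | undefined {
  return Object.entries(decoders).find(([, { magic }]) =>
    magic.test(firstChunkString),
  )?.[0] as DecoderKey | undefined;
}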
@@ -58,14 +76,20 @@ async function encodeImage({
   encConfig,
   optimizerButteraugliTarget,
   maxOptimizerRounds,
+}: {
+  bitmap: ImageData;
+  encName: EncoderKey;
+  encConfig: string | { [key: string]: any };
+  optimizerButteraugliTarget: number;
+  maxOptimizerRounds: number;
 }) {
-  let binary;
+  let binary: Uint8Array;
   let optionsUsed = encConfig;
   const encoder = await encoders[encName].enc();
   if (encConfig === 'auto') {
     const optionToOptimize = encoders[encName].autoOptimize.option;
     const decoder = await encoders[encName].dec();
-    const encode = (bitmapIn, quality) =>
+    const encode = (bitmapIn: ImageData, quality: number) =>
       encoder.encode(
         bitmapIn.data,
         bitmapIn.width,
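The inline annotation on `encodeImage`'s destructured parameter is equivalent to naming the shape; a short standalone sketch of the two spellings (toy fields, hypothetical function names):

interface EncodeJobOptions {
  encName: string;
  encConfig: string | { [key: string]: any };
  maxOptimizerRounds: number;
}

// Inline, as the commit does it:
function planEncodeInline({ encName, encConfig, maxOptimizerRounds }: {
  encName: string;
  encConfig: string | { [key: string]: any };
  maxOptimizerRounds: number;
}): string {
  return `${encName}:${typeof encConfig}:${maxOptimizerRounds}`;
}

// Same contract via the named interface:
function planEncodeNamed({ encName, encConfig, maxOptimizerRounds }: EncodeJobOptions): string {
  return `${encName}:${typeof encConfig}:${maxOptimizerRounds}`;
}

console.log(planEncodeInline({ encName: 'mozjpeg', encConfig: 'auto', maxOptimizerRounds: 6 }));
console.log(planEncodeNamed({ encName: 'mozjpeg', encConfig: 'auto', maxOptimizerRounds: 6 }));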
@@ -74,7 +98,7 @@ async function encodeImage({
           [optionToOptimize]: quality,
         }),
       );
-    const decode = (binary) => decoder.decode(binary);
+    const decode = (binary: Uint8Array) => decoder.decode(binary);
     const { binary: optimizedBinary, quality } = await autoOptimize(
       bitmapIn,
       encode,
@@ -107,10 +131,14 @@ async function encodeImage({
   };
 }
 
-// both decoding and encoding go through the worker pool
-function handleJob(params) {
-  const { operation } = params;
-  switch (operation) {
+type EncodeParams = { operation: 'encode' } & Parameters<typeof encodeImage>[0];
+type DecodeParams = { operation: 'decode' } & Parameters<typeof decodeFile>[0];
+type PreprocessParams = { operation: 'preprocess' } & Parameters<
+  typeof preprocessImage
+>[0];
+export type JobMessage = EncodeParams | DecodeParams | PreprocessParams;
+function handleJob(params: JobMessage) {
+  switch (params.operation) {
     case 'encode':
       return encodeImage(params);
     case 'decode':
@@ -118,7 +146,7 @@ function handleJob(params) {
     case 'preprocess':
       return preprocessImage(params);
     default:
-      throw Error(`Invalid job "${operation}"`);
+      throw Error(`Invalid job "${(params as any).operation}"`);
   }
 }
 
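`JobMessage` is a discriminated union keyed on `operation`, built from `Parameters<typeof …>[0]`, and switching on `params.operation` narrows the payload in each case. A standalone sketch with toy payloads, plus the optional `never` exhaustiveness guard (not used in the commit):

type EncodeJob = { operation: 'encode'; quality: number };
type DecodeJob = { operation: 'decode'; file: string };
type JobMessage = EncodeJob | DecodeJob;

function handle(params: JobMessage): string {
  switch (params.operation) {
    case 'encode':
      return `encode at quality ${params.quality}`; // params is EncodeJob here
    case 'decode':
      return `decode ${params.file}`; // params is DecodeJob here
    default: {
      // Exhaustiveness guard: adding a new operation without a case above
      // turns this assignment into a compile-time error.
      const unhandled: never = params;
      throw Error(`Invalid job ${JSON.stringify(unhandled)}`);
    }
  }
}

console.log(handle({ operation: 'decode', file: 'photo.png' }));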
@@ -126,7 +154,12 @@ function handleJob(params) {
  * Represents an ingested image.
  */
 class Image {
-  constructor(workerPool, file) {
+  public file: FileLike;
+  public workerPool: WorkerPool;
+  public decoded: Promise<{ bitmap: ImageData }>;
+  public encodedWith: { [key: string]: any };
+
+  constructor(workerPool: WorkerPool, file: FileLike) {
     this.file = file;
     this.workerPool = workerPool;
     this.decoded = workerPool.dispatchJob({ operation: 'decode', file });
@@ -143,14 +176,15 @@ class Image {
     if (!Object.keys(preprocessors).includes(name)) {
       throw Error(`Invalid preprocessor "${name}"`);
     }
+    const preprocessorName = name as PreprocessorKey;
     const preprocessorOptions = Object.assign(
       {},
-      preprocessors[name].defaultOptions,
+      preprocessors[preprocessorName].defaultOptions,
       options,
     );
     this.decoded = this.workerPool.dispatchJob({
       operation: 'preprocess',
-      preprocessorName: name,
+      preprocessorName,
       image: await this.decoded,
       options: preprocessorOptions,
     });
@@ -161,14 +195,20 @@ class Image {
   /**
    * Define one or several encoders to use on the image.
    * @param {object} encodeOptions - An object with encoders to use, and their settings.
-   * @returns {Promise<undefined>} - A promise that resolves when the image has been encoded with all the specified encoders.
+   * @returns {Promise<void>} - A promise that resolves when the image has been encoded with all the specified encoders.
    */
-  async encode(encodeOptions = {}) {
+  async encode(
+    encodeOptions: {
+      optimizerButteraugliTarget?: number;
+      maxOptimizerRounds?: number;
+    } = {},
+  ): Promise<void> {
     const { bitmap } = await this.decoded;
-    for (const [encName, options] of Object.entries(encodeOptions)) {
-      if (!Object.keys(encoders).includes(encName)) {
+    for (const [name, options] of Object.entries(encodeOptions)) {
+      if (!Object.keys(encoders).includes(name)) {
         continue;
       }
+      const encName = name as EncoderKey;
       const encRef = encoders[encName];
       const encConfig =
         typeof options === 'string'
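The `includes(...)` check followed by `name as EncoderKey` can also be expressed as a reusable type guard; a hedged standalone sketch (the `isEncoderKey` helper and toy table are ours, not part of the commit):

const encoders = { mozjpeg: {}, oxipng: {}, webp: {} } as const;
type EncoderKey = keyof typeof encoders;

// Type predicate: after a true return, the caller's `name` is narrowed to EncoderKey.
function isEncoderKey(name: string): name is EncoderKey {
  return Object.keys(encoders).includes(name);
}

for (const [name, options] of Object.entries({ mozjpeg: { quality: 75 } })) {
  if (!isEncoderKey(name)) continue;
  // `name` is EncoderKey here, so indexing the table needs no `as` cast.
  const encRef = encoders[name];
  console.log(name, options, encRef);
}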
@@ -193,28 +233,30 @@ class Image {
  * A pool where images can be ingested and squooshed.
  */
 class ImagePool {
+  public workerPool: WorkerPool;
+
   /**
    * Create a new pool.
    * @param {number} [threads] - Number of concurrent image processes to run in the pool. Defaults to the number of CPU cores in the system.
    */
-  constructor(threads) {
+  constructor(threads: number) {
     this.workerPool = new WorkerPool(threads || cpus().length, __filename);
   }
 
   /**
    * Ingest an image into the image pool.
-   * @param {string | Buffer | URL | object} image - The image or path to the image that should be ingested and decoded.
+   * @param {FileLike} image - The image or path to the image that should be ingested and decoded.
    * @returns {Image} - A custom class reference to the decoded image.
    */
-  ingestImage(image) {
+  ingestImage(image: FileLike): Image {
     return new Image(this.workerPool, image);
   }
 
   /**
    * Closes the underlying image processing pipeline. The already processed images will still be there, but no new processing can start.
-   * @returns {Promise<undefined>} - A promise that resolves when the underlying pipeline has closed.
+   * @returns {Promise<void>} - A promise that resolves when the underlying pipeline has closed.
    */
-  async close() {
+  async close(): Promise<void> {
     await this.workerPool.join();
   }
 }
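Rough usage sketch of the typed surface after this commit (file path and encoder options are illustrative, and the import path assumes a caller sitting next to `index.ts`). As typed here, `encode()` only declares the two optimizer knobs, so encoder-specific keys such as `mozjpeg` still need a cast:

import { ImagePool } from './index';

async function run() {
  const pool = new ImagePool(4); // threads: number (now a required parameter)
  const image = pool.ingestImage('./photo.png'); // FileLike accepts a path string
  await image.encode({ mozjpeg: { quality: 75 } } as any); // per-codec config: cast for now
  await pool.close(); // Promise<void>
}

run().catch(console.error);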

View File

@@ -1,5 +1,7 @@
 import { Worker, parentPort } from 'worker_threads';
+// @ts-ignore
 import { TransformStream } from 'web-streams-polyfill';
+import type { JobMessage } from './index';
 
 function uuid() {
   return Array.from({ length: 16 }, () =>
@@ -7,7 +9,7 @@ function uuid() {
   ).join('');
 }
 
-function jobPromise(worker, msg) {
+function jobPromise(worker: Worker, msg: JobMessage) {
   return new Promise((resolve, reject) => {
     const id = uuid();
     worker.postMessage({ msg, id });
@@ -26,7 +28,12 @@ function jobPromise(worker, msg) {
 }
 
 export default class WorkerPool {
-  constructor(numWorkers, workerFile) {
+  public numWorkers: number;
+  public jobQueue: TransformStream;
+  public workerQueue: TransformStream;
+  public done: Promise<void>;
+
+  constructor(numWorkers: number, workerFile: string) {
     this.numWorkers = numWorkers;
     this.jobQueue = new TransformStream();
     this.workerQueue = new TransformStream();
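`jobQueue` and `workerQueue` are typed as `TransformStream` because the pool uses them as FIFOs: with no transformer, whatever is written to the writable side comes out of the readable side in order (dispatchJob writes jobs, the pool drains them). A standalone sketch of just that trick (the `// @ts-ignore` mirrors the commit's handling of the polyfill import):

// @ts-ignore
import { TransformStream } from 'web-streams-polyfill';

async function demo() {
  // With no transformer, a TransformStream is a simple in-order queue.
  const queue = new TransformStream();
  const writer = queue.writable.getWriter();
  writer.write('job-1');
  writer.write('job-2');
  writer.close();

  // Consumer side: drain jobs in the order they were written.
  const reader = queue.readable.getReader();
  for (let r = await reader.read(); !r.done; r = await reader.read()) {
    console.log('processing', r.value);
  }
}

demo();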
@@ -82,7 +89,7 @@ export default class WorkerPool {
     await this.done;
   }
 
-  dispatchJob(msg) {
+  dispatchJob(msg: JobMessage): Promise<any> {
     return new Promise((resolve, reject) => {
       const writer = this.jobQueue.writable.getWriter();
       writer.write({ msg, resolve, reject });
@@ -90,14 +97,14 @@ export default class WorkerPool {
     });
   }
 
-  static useThisThreadAsWorker(cb) {
-    parentPort.on('message', async (data) => {
+  static useThisThreadAsWorker(cb: (msg: JobMessage) => any) {
+    parentPort!.on('message', async (data) => {
       const { msg, id } = data;
       try {
         const result = await cb(msg);
-        parentPort.postMessage({ result, id });
+        parentPort!.postMessage({ result, id });
       } catch (e) {
-        parentPort.postMessage({ error: e.message, id });
+        parentPort!.postMessage({ error: e.message, id });
       }
     });
   }
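The `parentPort!` assertions encode the assumption that this code only runs on a worker thread, where `parentPort` is non-null. A standalone sketch of the assertion-free alternative with an explicit guard (illustrative; the commit keeps the `!` form):

import { parentPort, isMainThread } from 'worker_threads';

export function useThisThreadAsWorker(cb: (msg: unknown) => unknown) {
  if (isMainThread || !parentPort) {
    throw Error('useThisThreadAsWorker() must be called from a worker thread');
  }
  const port = parentPort; // narrowed to MessagePort from here on, no `!` needed
  port.on('message', async (data: { msg: unknown; id: string }) => {
    try {
      port.postMessage({ result: await cb(data.msg), id: data.id });
    } catch (e: any) {
      port.postMessage({ error: e.message, id: data.id });
    }
  });
}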