mirror of
https://github.com/GoogleChromeLabs/squoosh.git
synced 2025-11-18 03:29:17 +00:00
Separate CLI and API
This commit is contained in:
3
api/.gitignore
vendored
Normal file
3
api/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
node_modules
|
||||
build
|
||||
.DS_Store
|
||||
1
api/.npmignore
Normal file
1
api/.npmignore
Normal file
@@ -0,0 +1 @@
|
||||
node_modules
|
||||
0
api/.npmrc
Normal file
0
api/.npmrc
Normal file
161
api/README.md
Normal file
161
api/README.md
Normal file
@@ -0,0 +1,161 @@
|
||||
# Squoosh API
|
||||
|
||||
Squoosh API is an _experimental_ way to run all the codecs you know from the [Squoosh] web app directly inside your own JavaScript program. The Squoosh API uses a worker pool to parallelize processing images. This way you can apply the same codec to many images at once.
|
||||
|
||||
Squoosh API is currently not the fastest image compression tool in town and doesn’t aim to be. It is, however, fast enough to compress many images sufficiently quick at once.
|
||||
|
||||
## Installation
|
||||
|
||||
The Squoosh API can be installed to your local project with the following command:
|
||||
|
||||
```
|
||||
$ npm install @squoosh/api
|
||||
```
|
||||
|
||||
You can start using the API by adding these lines to the top of your JS program:
|
||||
|
||||
```js
|
||||
import { ImagePool } from '@squoosh/api';
|
||||
const imagePool = new ImagePool();
|
||||
```
|
||||
|
||||
This will create an image pool with an underlying processing pipeline that you can use to ingest and encode images. The ImagePool constructor takes one argument that defines how many parallel operations it is allowed to run at any given time. By default, this number is set to the amount of CPU cores available in the system it is running on.
|
||||
|
||||
## Ingesting images
|
||||
|
||||
You can ingest a new image like so:
|
||||
|
||||
```js
|
||||
const imagePath = 'path/to/image.png';
|
||||
const image = imagePool.ingestImage(imagePath);
|
||||
```
|
||||
|
||||
The `ingestImage` function can take anything the node [`readFile`][readFile] function can take, including a buffer and `FileHandle`.
|
||||
|
||||
The returned `image` object is a representation of the original image, that you can now manipulate, encode, and extract information about.
|
||||
|
||||
## Manipulating and encoding images
|
||||
|
||||
When an image has been ingested, you can start manipulating it and encoding it to other formats. This example will resize the image and then encode it to a `.jpg` and `.jxl` image:
|
||||
|
||||
```js
|
||||
await image.decoded; //Wait until the image is decoded before running manipulations
|
||||
|
||||
const manipulateOptions = {
|
||||
resize: {
|
||||
enabled: true,
|
||||
width: 100,
|
||||
height: 50,
|
||||
}
|
||||
}
|
||||
await image.manipulate(manipulateOptions);
|
||||
|
||||
const encodeOptions = {
|
||||
mozjpeg: {}, //an empty object means 'use default settings'
|
||||
jxl: {
|
||||
quality: 90,
|
||||
},
|
||||
}
|
||||
await image.encode(encodeOptions);
|
||||
|
||||
```
|
||||
|
||||
The default values for each option can be found in the [`codecs.js`][codecs.js] file under `defaultEncoderOptions`. Every unspecified value will use the default value specified there. _Better documentation is needed here._
|
||||
|
||||
You can run your own code in between the different steps, if, for example, you want to change how much the image should be resized based on its original height. (See [Extracting image information](#extracting-image-information) to learn how to get the image dimensions).
|
||||
|
||||
## Closing the ImagePool
|
||||
|
||||
When you have encoded everything you need, it is recommended to close the processing pipeline in the ImagePool. This will not delete the images you have already encoded, but it will prevent you from ingesting and encoding new images.
|
||||
|
||||
Close the ImagePool pipeline with this line:
|
||||
|
||||
```js
|
||||
await imagePool.close();
|
||||
```
|
||||
|
||||
## Writing encoded images to the file system
|
||||
|
||||
When you have encoded an image, you normally want to write it to a file.
|
||||
|
||||
This example takes an image that has been encoded as a `jpg` and writes it to a file:
|
||||
|
||||
```js
|
||||
const rawEncodedImage = (await image.encodedAs.jpg).binary;
|
||||
|
||||
fs.writeFile('/path/to/new/image.jpg', rawEncodedImage);
|
||||
```
|
||||
|
||||
This example iterates through all encoded versions of the image and writes them to a specific path:
|
||||
|
||||
```js
|
||||
const newImagePath = '/path/to/image.'; //extension is added automatically
|
||||
|
||||
for(const [extension, encodedImage] of Object.entries(image.encodedAs)){
|
||||
fs.writeFile(newImagePath + extension, (await encodedImage).binary);
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Extracting image information
|
||||
|
||||
Information about a decoded image is available at `Image.decoded`. It looks something like this:
|
||||
|
||||
```js
|
||||
console.log(await image.decoded);
|
||||
// Returns:
|
||||
{
|
||||
bitmap: {
|
||||
data: Uint8ClampedArray(47736584) [
|
||||
225, 228, 237, 255, 225, 228, 237, 255, 225, 228, 237, 255,
|
||||
225, 228, 237, 255, 225, 228, 237, 255, 225, 228, 237, 255,
|
||||
225, 228, 237, 255,
|
||||
... //the entire raw image
|
||||
],
|
||||
width: 4606, //pixels
|
||||
height: 2591 //pixels
|
||||
},
|
||||
size: 2467795 //bytes
|
||||
}
|
||||
```
|
||||
|
||||
Information about an encoded image can be found at `Image.encodedAs[extension]`. It looks something like this:
|
||||
|
||||
|
||||
```js
|
||||
console.log(await image.encodedAs.jxl);
|
||||
// Returns:
|
||||
{
|
||||
optionsUsed: {
|
||||
quality: 75,
|
||||
baseline: false,
|
||||
arithmetic: false,
|
||||
progressive: true,
|
||||
... //all the possible options for this encoder
|
||||
},
|
||||
binary: Uint8Array(1266975) [
|
||||
1, 0, 0, 1, 0, 1, 0, 0, 255, 219, 0, 132,
|
||||
113, 119, 156, 156, 209, 1, 8, 8, 8, 8, 9, 8,
|
||||
9, 10, 10, 9,
|
||||
... //the entire raw encoded image
|
||||
],
|
||||
size: 1266975 //bytes
|
||||
}
|
||||
```
|
||||
|
||||
## Auto optimizer
|
||||
|
||||
Squoosh API has an _experimental_ auto optimizer that compresses an image as much as possible, trying to hit a specific [Butteraugli] target value. The higher the Butteraugli target value, the more artifacts can be introduced.
|
||||
|
||||
You can make use of the auto optimizer by using “auto” as the config object.
|
||||
|
||||
```
|
||||
const encodeOptions = {
|
||||
mozjpeg: 'auto',
|
||||
}
|
||||
```
|
||||
|
||||
[squoosh]: https://squoosh.app
|
||||
[codecs.js]: https://github.com/GoogleChromeLabs/squoosh/blob/dev/cli/src/codecs.js
|
||||
[butteraugli]: https://github.com/google/butteraugli
|
||||
[readFile]: https://nodejs.org/api/fs.html#fs_fspromises_readfile_path_options
|
||||
75
api/lib/asset-plugin.js
Normal file
75
api/lib/asset-plugin.js
Normal file
@@ -0,0 +1,75 @@
|
||||
/**
|
||||
* Copyright 2020 Google Inc. All Rights Reserved.
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
import { promises as fs } from "fs";
import { basename } from "path";

const defaultOpts = {
  prefix: "asset-url"
};

/**
 * Rollup plugin that turns `asset-url:`-prefixed imports into emitted file
 * assets, exporting the asset's resolved URL as the module's default export.
 * Also folds each asset's bytes into the hash of any chunk that imports it,
 * so chunk hashes change when the asset content changes.
 *
 * @param {{prefix?: string}} [opts] - `prefix` defaults to "asset-url".
 */
export default function assetPlugin(opts) {
  opts = { ...defaultOpts, ...opts };

  /** @type {Map<string, Buffer>} */
  let assetIdToSourceBuffer;

  const prefix = opts.prefix + ":";
  return {
    name: "asset-plugin",
    buildStart() {
      // Reset per build so watch-mode rebuilds don't see stale entries.
      assetIdToSourceBuffer = new Map();
    },
    augmentChunkHash(info) {
      // Get the sources for all assets imported by this chunk.
      const buffers = Object.keys(info.modules)
        .map(moduleId => assetIdToSourceBuffer.get(moduleId))
        .filter(Boolean);

      if (buffers.length === 0) return;

      // FIX: the original implementation looped over the modules a second
      // time and pushed every buffer again, doubling the hashed content.
      return buffers.length === 1 ? buffers[0] : Buffer.concat(buffers);
    },
    async resolveId(id, importer) {
      if (!id.startsWith(prefix)) return;
      const realId = id.slice(prefix.length);
      const resolveResult = await this.resolve(realId, importer);

      if (!resolveResult) {
        throw Error(`Cannot find ${realId}`);
      }
      // Add an additional .js to the end so it ends up with .js at the end in the _virtual folder.
      return prefix + resolveResult.id + ".js";
    },
    async load(id) {
      if (!id.startsWith(prefix)) return;
      // Strip both the prefix and the ".js" suffix added by resolveId.
      const realId = id.slice(prefix.length, -".js".length);
      const source = await fs.readFile(realId);
      assetIdToSourceBuffer.set(id, source);
      this.addWatchFile(realId);

      return `export default import.meta.ROLLUP_FILE_URL_${this.emitFile({
        type: "asset",
        source,
        name: basename(realId)
      })}`;
    }
  };
}
|
||||
12
api/lib/autojson-plugin.js
Normal file
12
api/lib/autojson-plugin.js
Normal file
@@ -0,0 +1,12 @@
|
||||
import { promises as fsp } from 'fs';

/**
 * Rollup plugin that turns any plain `.json` import into a JS module whose
 * default export is the file's JSON text inlined as a literal. Ids already
 * claimed by the `json:` prefix plugin are left for that plugin to handle.
 */
export default function autojsonPlugin() {
  return {
    name: 'autojson-plugin',
    async load(id) {
      // Ignore non-JSON files and `json:`-prefixed ids.
      if (!id.endsWith('.json') || id.startsWith('json:')) return;
      const jsonText = await fsp.readFile(id, 'utf8');
      return `export default ${jsonText}`;
    },
  };
}
|
||||
38
api/lib/json-plugin.js
Normal file
38
api/lib/json-plugin.js
Normal file
@@ -0,0 +1,38 @@
|
||||
import { promises as fsp } from 'fs';
|
||||
|
||||
const prefix = 'json:';
|
||||
|
||||
const reservedKeys = ['public'];
|
||||
|
||||
/**
 * Rollup plugin handling `json:`-prefixed imports: each top-level key of the
 * JSON file becomes a named export whose value is inlined as a literal.
 * Keys listed in the module-level `reservedKeys` (e.g. "public") are skipped.
 */
export default function jsonPlugin() {
  return {
    name: 'json-plugin',
    async resolveId(id, importer) {
      if (!id.startsWith(prefix)) return;
      const realId = id.slice(prefix.length);
      const resolveResult = await this.resolve(realId, importer);

      if (!resolveResult) {
        throw Error(`Cannot find ${realId}`);
      }
      // Keep the `json:` prefix on the resolved id so load() below can claim
      // it. (Unlike asset-plugin, no ".js" suffix is appended here.)
      return prefix + resolveResult.id;
    },
    async load(id) {
      if (!id.startsWith(prefix)) return;
      const realId = id.slice(prefix.length);
      const source = await fsp.readFile(realId, 'utf8');

      let code = '';
      for (const [key, value] of Object.entries(JSON.parse(source))) {
        if (reservedKeys.includes(key)) {
          continue;
        }
        // NOTE(review): `key` is used verbatim as a JS identifier — assumes
        // every non-reserved top-level JSON key is a valid identifier; verify
        // against the JSON files actually imported this way.
        code += `
export const ${key} = ${JSON.stringify(value)};
`;
      }
      return code;
    },
  };
}
|
||||
1869
api/package-lock.json
generated
Normal file
1869
api/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
28
api/package.json
Normal file
28
api/package.json
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"name": "@squoosh/api",
|
||||
"version": "0.1.0",
|
||||
"description": "An API for Squoosh",
|
||||
"public": true,
|
||||
"main": "/build/index.js",
|
||||
"files": [
|
||||
"/build/*"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "rollup -c"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "Google Chrome Developers <chromium-dev@google.com>",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"web-streams-polyfill": "^3.0.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.14.0",
|
||||
"@babel/preset-env": "^7.14.0",
|
||||
"@rollup/plugin-babel": "^5.3.0",
|
||||
"@rollup/plugin-commonjs": "^18.0.0",
|
||||
"@rollup/plugin-node-resolve": "^11.2.1",
|
||||
"rollup": "^2.46.0",
|
||||
"rollup-plugin-terser": "^7.0.2"
|
||||
}
|
||||
}
|
||||
42
api/rollup.config.js
Normal file
42
api/rollup.config.js
Normal file
@@ -0,0 +1,42 @@
|
||||
import resolve from '@rollup/plugin-node-resolve';
|
||||
import cjs from '@rollup/plugin-commonjs';
|
||||
import asset from './lib/asset-plugin.js';
|
||||
import json from './lib/json-plugin.js';
|
||||
import autojson from './lib/autojson-plugin.js';
|
||||
import { getBabelOutputPlugin } from '@rollup/plugin-babel';
|
||||
import { builtinModules } from 'module';
|
||||
|
||||
/** @type {import('rollup').RollupOptions} */
|
||||
export default {
  input: 'src/index.js',
  output: {
    dir: 'build',
    format: 'cjs',
    assetFileNames: '[name]-[hash][extname]',
  },
  plugins: [
    resolve(),
    cjs(),
    asset(),
    autojson(),
    json(),
    getBabelOutputPlugin({
      babelrc: false,
      configFile: false,
      // FIX: was `process.env.DEBUG != ''`, which is true even when DEBUG is
      // unset (`undefined != ''`), so output was effectively always minified.
      // Minify only when DEBUG is not set.
      minified: !process.env.DEBUG,
      comments: true,
      presets: [
        [
          '@babel/preset-env',
          {
            targets: {
              node: 12,
            },
            loose: true,
          },
        ],
      ],
    }),
  ],
  // Keep node built-ins and the runtime dependency out of the bundle.
  external: [...builtinModules, 'web-streams-polyfill'],
};
|
||||
70
api/src/auto-optimizer.js
Normal file
70
api/src/auto-optimizer.js
Normal file
@@ -0,0 +1,70 @@
|
||||
import { instantiateEmscriptenWasm } from "./emscripten-utils.js";
|
||||
|
||||
import visdif from "../../codecs/visdif/visdif.js";
|
||||
import visdifWasm from "asset-url:../../codecs/visdif/visdif.wasm";
|
||||
|
||||
/**
 * Find the parameter for which `measure` yields `measureGoal`, by binary
 * search.
 *
 * `measure` is an (async) function of a single numeric parameter, assumed to
 * be monotonic (increasing the parameter increases the returned value). The
 * search stops once the measured value is within `epsilon` of `measureGoal`,
 * or after `maxRounds` attempts, whichever comes first.
 *
 * @returns {Promise<{parameter: number, round: number, value: number}>}
 */
export async function binarySearch(
  measureGoal,
  measure,
  { min = 0, max = 100, epsilon = 0.1, maxRounds = 8 } = {}
) {
  let parameter = (max - min) / 2 + min;
  let step = (max - min) / 4;
  for (let round = 1; ; round++) {
    const value = await measure(parameter);
    if (Math.abs(value - measureGoal) < epsilon || round >= maxRounds) {
      return { parameter, round, value };
    }
    // Monotonicity: overshot the goal ⇒ lower the parameter, and vice versa.
    if (value > measureGoal) {
      parameter -= step;
    } else if (value < measureGoal) {
      parameter += step;
    }
    step /= 2;
  }
}
|
||||
|
||||
/**
 * Repeatedly encode/decode `bitmapIn`, binary-searching the encoder's quality
 * parameter until the Butteraugli distance to the original reaches
 * `butteraugliDistanceGoal`.
 *
 * @param bitmapIn - decoded source image ({data, width, height}).
 * @param encode - async (bitmap, quality) => encoded binary.
 * @param decode - async (binary) => decoded bitmap.
 * @returns {Promise<{bitmap, binary, quality}>} last decode result, encoded
 *   bytes, and the quality value the search settled on.
 */
export async function autoOptimize(
  bitmapIn,
  encode,
  decode,
  { butteraugliDistanceGoal = 1.4, ...otherOpts } = {}
) {
  const { VisDiff } = await instantiateEmscriptenWasm(visdif, visdifWasm);
  const comparator = new VisDiff(
    bitmapIn.data,
    bitmapIn.width,
    bitmapIn.height
  );

  let lastBitmap;
  let lastBinary;
  // Butteraugli distance *falls* as quality rises, but `binarySearch` expects
  // the measured value to rise with the parameter — so negate both sides.
  const { parameter: quality } = await binarySearch(
    -1 * butteraugliDistanceGoal,
    async (candidateQuality) => {
      lastBinary = await encode(bitmapIn, candidateQuality);
      lastBitmap = await decode(lastBinary);
      return -1 * comparator.distance(lastBitmap.data);
    },
    otherOpts
  );
  // Free the wasm-side comparator explicitly.
  comparator.delete();

  return {
    bitmap: lastBitmap,
    binary: lastBinary,
    quality
  };
}
|
||||
361
api/src/codecs.js
Normal file
361
api/src/codecs.js
Normal file
@@ -0,0 +1,361 @@
|
||||
import { promises as fsp } from 'fs';
|
||||
import { instantiateEmscriptenWasm, pathify } from './emscripten-utils.js';
|
||||
|
||||
// MozJPEG
|
||||
import mozEnc from '../../codecs/mozjpeg/enc/mozjpeg_node_enc.js';
|
||||
import mozEncWasm from 'asset-url:../../codecs/mozjpeg/enc/mozjpeg_node_enc.wasm';
|
||||
import mozDec from '../../codecs/mozjpeg/dec/mozjpeg_node_dec.js';
|
||||
import mozDecWasm from 'asset-url:../../codecs/mozjpeg/dec/mozjpeg_node_dec.wasm';
|
||||
|
||||
// WebP
|
||||
import webpEnc from '../../codecs/webp/enc/webp_node_enc.js';
|
||||
import webpEncWasm from 'asset-url:../../codecs/webp/enc/webp_node_enc.wasm';
|
||||
import webpDec from '../../codecs/webp/dec/webp_node_dec.js';
|
||||
import webpDecWasm from 'asset-url:../../codecs/webp/dec/webp_node_dec.wasm';
|
||||
|
||||
// AVIF
|
||||
import avifEnc from '../../codecs/avif/enc/avif_node_enc.js';
|
||||
import avifEncWasm from 'asset-url:../../codecs/avif/enc/avif_node_enc.wasm';
|
||||
import avifDec from '../../codecs/avif/dec/avif_node_dec.js';
|
||||
import avifDecWasm from 'asset-url:../../codecs/avif/dec/avif_node_dec.wasm';
|
||||
|
||||
// JXL
|
||||
import jxlEnc from '../../codecs/jxl/enc/jxl_node_enc.js';
|
||||
import jxlEncWasm from 'asset-url:../../codecs/jxl/enc/jxl_node_enc.wasm';
|
||||
import jxlDec from '../../codecs/jxl/dec/jxl_node_dec.js';
|
||||
import jxlDecWasm from 'asset-url:../../codecs/jxl/dec/jxl_node_dec.wasm';
|
||||
|
||||
// WP2
|
||||
import wp2Enc from '../../codecs/wp2/enc/wp2_node_enc.js';
|
||||
import wp2EncWasm from 'asset-url:../../codecs/wp2/enc/wp2_node_enc.wasm';
|
||||
import wp2Dec from '../../codecs/wp2/dec/wp2_node_dec.js';
|
||||
import wp2DecWasm from 'asset-url:../../codecs/wp2/dec/wp2_node_dec.wasm';
|
||||
|
||||
// PNG
|
||||
import * as pngEncDec from '../../codecs/png/pkg/squoosh_png.js';
|
||||
import pngEncDecWasm from 'asset-url:../../codecs/png/pkg/squoosh_png_bg.wasm';
|
||||
const pngEncDecPromise = pngEncDec.default(
|
||||
fsp.readFile(pathify(pngEncDecWasm)),
|
||||
);
|
||||
|
||||
// OxiPNG
|
||||
import * as oxipng from '../../codecs/oxipng/pkg/squoosh_oxipng.js';
|
||||
import oxipngWasm from 'asset-url:../../codecs/oxipng/pkg/squoosh_oxipng_bg.wasm';
|
||||
const oxipngPromise = oxipng.default(fsp.readFile(pathify(oxipngWasm)));
|
||||
|
||||
// Resize
|
||||
import * as resize from '../../codecs/resize/pkg/squoosh_resize.js';
|
||||
import resizeWasm from 'asset-url:../../codecs/resize/pkg/squoosh_resize_bg.wasm';
|
||||
const resizePromise = resize.default(fsp.readFile(pathify(resizeWasm)));
|
||||
|
||||
// rotate
|
||||
import rotateWasm from 'asset-url:../../codecs/rotate/rotate.wasm';
|
||||
|
||||
// ImageQuant
|
||||
import imageQuant from '../../codecs/imagequant/imagequant_node.js';
|
||||
import imageQuantWasm from 'asset-url:../../codecs/imagequant/imagequant_node.wasm';
|
||||
const imageQuantPromise = instantiateEmscriptenWasm(imageQuant, imageQuantWasm);
|
||||
|
||||
// Our decoders currently rely on a `ImageData` global.
|
||||
import ImageData from './image_data.js';
|
||||
globalThis.ImageData = ImageData;
|
||||
|
||||
/**
 * Map a resize algorithm name to the numeric index expected by the
 * squoosh_resize wasm module.
 * @throws {Error} for unrecognized algorithm names.
 */
function resizeNameToIndex(name) {
  const indexByName = new Map([
    ['triangle', 0],
    ['catrom', 1],
    ['mitchell', 2],
    ['lanczos3', 3],
  ]);
  if (!indexByName.has(name)) {
    throw Error(`Unknown resize algorithm "${name}"`);
  }
  return indexByName.get(name);
}
|
||||
|
||||
/**
 * Compute the final resize dimensions: when only one of the target
 * dimensions is given, the other is derived from the input aspect ratio.
 * @throws {Error} when neither target dimension is provided.
 */
function resizeWithAspect({
  input_width,
  input_height,
  target_width,
  target_height,
}) {
  if (!target_width && !target_height) {
    throw Error('Need to specify at least width or height when resizing');
  }
  // Each missing side is scaled from the other via the input aspect ratio.
  const width =
    target_width || Math.round((input_width / input_height) * target_height);
  const height =
    target_height || Math.round((input_height / input_width) * target_width);
  return { width, height };
}
|
||||
|
||||
// Registry of pre-encode image manipulations. Each entry exposes a lazy
// `instantiate()` returning a (possibly async) (buffer, width, height, opts)
// function that produces a new ImageData, plus that manipulation's defaults.
export const preprocessors = {
  resize: {
    name: 'Resize',
    description: 'Resize the image before compressing',
    instantiate: async () => {
      await resizePromise;
      return (
        buffer,
        input_width,
        input_height,
        { width, height, method, premultiply, linearRGB },
      ) => {
        // Fill in whichever of width/height was omitted, keeping aspect ratio.
        ({ width, height } = resizeWithAspect({
          input_width,
          input_height,
          target_width: width,
          target_height: height,
        }));
        return new ImageData(
          resize.resize(
            buffer,
            input_width,
            input_height,
            width,
            height,
            resizeNameToIndex(method),
            premultiply,
            linearRGB,
          ),
          width,
          height,
        );
      };
    },
    defaultOptions: {
      method: 'lanczos3',
      fitMethod: 'stretch',
      premultiply: true,
      linearRGB: true,
    },
  },
  // // TODO: Need to handle SVGs and HQX
  quant: {
    name: 'ImageQuant',
    description: 'Reduce the number of colors used (aka. paletting)',
    instantiate: async () => {
      const imageQuant = await imageQuantPromise;
      return (buffer, width, height, { numColors, dither }) =>
        new ImageData(
          imageQuant.quantize(buffer, width, height, numColors, dither),
          width,
          height,
        );
    },
    defaultOptions: {
      numColors: 255,
      dither: 1.0,
    },
  },
  rotate: {
    name: 'Rotate',
    description: 'Rotate image',
    instantiate: async () => {
      return async (buffer, width, height, { numRotations }) => {
        const degrees = (numRotations * 90) % 360;
        // 0° and 180° keep width/height; 90° and 270° swap them.
        const sameDimensions = degrees == 0 || degrees == 180;
        const size = width * height * 4; // RGBA bytes per image
        const { instance } = await WebAssembly.instantiate(
          await fsp.readFile(pathify(rotateWasm)),
        );
        const { memory } = instance.exports;
        // Memory must hold input + output copies of the image plus an 8-byte
        // header region; grow in 64 KiB wasm pages if it is too small.
        const additionalPagesNeeded = Math.ceil(
          (size * 2 - memory.buffer.byteLength + 8) / (64 * 1024),
        );
        if (additionalPagesNeeded > 0) {
          memory.grow(additionalPagesNeeded);
        }
        const view = new Uint8ClampedArray(memory.buffer);
        // NOTE(review): layout assumed from the offsets used here — input
        // pixels at byte offset 8, rotated output written immediately after
        // them; confirm against rotate.wasm's ABI.
        view.set(buffer, 8);
        instance.exports.rotate(width, height, degrees);
        return new ImageData(
          view.slice(size + 8, size * 2 + 8),
          sameDimensions ? width : height,
          sameDimensions ? height : width,
        );
      };
    },
    defaultOptions: {
      numRotations: 0,
    },
  },
};
|
||||
|
||||
// Codec registry: one entry per supported format. Each entry provides the
// output file extension, magic-byte `detectors` (regexes matched against a
// binary string of the file's first bytes), lazy decoder/encoder factories,
// the encoder's default options, and the option + range the auto optimizer
// binary-searches over.
export const codecs = {
  mozjpeg: {
    name: 'MozJPEG',
    extension: 'jpg',
    // JPEG magic bytes: FF D8 FF
    detectors: [/^\xFF\xD8\xFF/],
    dec: () => instantiateEmscriptenWasm(mozDec, mozDecWasm),
    enc: () => instantiateEmscriptenWasm(mozEnc, mozEncWasm),
    defaultEncoderOptions: {
      quality: 75,
      baseline: false,
      arithmetic: false,
      progressive: true,
      optimize_coding: true,
      smoothing: 0,
      color_space: 3 /*YCbCr*/,
      quant_table: 3,
      trellis_multipass: false,
      trellis_opt_zero: false,
      trellis_opt_table: false,
      trellis_loops: 1,
      auto_subsample: true,
      chroma_subsample: 2,
      separate_chroma_quality: false,
      chroma_quality: 75,
    },
    autoOptimize: {
      option: 'quality',
      min: 0,
      max: 100,
    },
  },
  webp: {
    name: 'WebP',
    extension: 'webp',
    // RIFF container with a WEBP chunk (VP8/VP8L/VP8X variants).
    detectors: [/^RIFF....WEBPVP8[LX ]/],
    dec: () => instantiateEmscriptenWasm(webpDec, webpDecWasm),
    enc: () => instantiateEmscriptenWasm(webpEnc, webpEncWasm),
    defaultEncoderOptions: {
      quality: 75,
      target_size: 0,
      target_PSNR: 0,
      method: 4,
      sns_strength: 50,
      filter_strength: 60,
      filter_sharpness: 0,
      filter_type: 1,
      partitions: 0,
      segments: 4,
      pass: 1,
      show_compressed: 0,
      preprocessing: 0,
      autofilter: 0,
      partition_limit: 0,
      alpha_compression: 1,
      alpha_filtering: 1,
      alpha_quality: 100,
      lossless: 0,
      exact: 0,
      image_hint: 0,
      emulate_jpeg_size: 0,
      thread_level: 0,
      low_memory: 0,
      near_lossless: 100,
      use_delta_palette: 0,
      use_sharp_yuv: 0,
    },
    autoOptimize: {
      option: 'quality',
      min: 0,
      max: 100,
    },
  },
  avif: {
    name: 'AVIF',
    extension: 'avif',
    // ISO-BMFF 'ftypavif' brand.
    detectors: [/^\x00\x00\x00 ftypavif\x00\x00\x00\x00/],
    dec: () => instantiateEmscriptenWasm(avifDec, avifDecWasm),
    enc: () => instantiateEmscriptenWasm(avifEnc, avifEncWasm),
    defaultEncoderOptions: {
      minQuantizer: 33,
      maxQuantizer: 63,
      minQuantizerAlpha: 33,
      maxQuantizerAlpha: 63,
      tileColsLog2: 0,
      tileRowsLog2: 0,
      speed: 8,
      subsample: 1,
    },
    autoOptimize: {
      option: 'maxQuantizer',
      min: 0,
      max: 62,
    },
  },
  jxl: {
    name: 'JPEG-XL',
    extension: 'jxl',
    // JPEG XL codestream signature: FF 0A
    detectors: [/^\xff\x0a/],
    dec: () => instantiateEmscriptenWasm(jxlDec, jxlDecWasm),
    enc: () => instantiateEmscriptenWasm(jxlEnc, jxlEncWasm),
    defaultEncoderOptions: {
      speed: 4,
      quality: 75,
      progressive: false,
      epf: -1,
      nearLossless: 0,
      lossyPalette: false,
    },
    autoOptimize: {
      option: 'quality',
      min: 0,
      max: 100,
    },
  },
  wp2: {
    name: 'WebP2',
    extension: 'wp2',
    detectors: [/^\xF4\xFF\x6F/],
    dec: () => instantiateEmscriptenWasm(wp2Dec, wp2DecWasm),
    enc: () => instantiateEmscriptenWasm(wp2Enc, wp2EncWasm),
    defaultEncoderOptions: {
      quality: 75,
      alpha_quality: 75,
      effort: 5,
      pass: 1,
      sns: 50,
      uv_mode: 0 /*UVMode.UVModeAuto*/,
      csp_type: 0 /*Csp.kYCoCg*/,
      error_diffusion: 0,
      use_random_matrix: false,
    },
    autoOptimize: {
      option: 'quality',
      min: 0,
      max: 100,
    },
  },
  oxipng: {
    name: 'OxiPNG',
    extension: 'png',
    // PNG signature: 89 'PNG' CR LF SUB LF
    detectors: [/^\x89PNG\x0D\x0A\x1A\x0A/],
    dec: async () => {
      await pngEncDecPromise;
      return { decode: pngEncDec.decode };
    },
    enc: async () => {
      await pngEncDecPromise;
      await oxipngPromise;
      return {
        // Encode to a baseline PNG first, then let oxipng optimise it.
        encode: (buffer, width, height, opts) => {
          const simplePng = pngEncDec.encode(new Uint8Array(buffer), width, height);
          return oxipng.optimise(simplePng, opts.level);
        },
      };
    },
    defaultEncoderOptions: {
      level: 2,
    },
    autoOptimize: {
      option: 'level',
      // NOTE(review): min > max looks intentional — a higher oxipng level
      // compresses harder, inverting the search direction for the auto
      // optimizer; verify against auto-optimizer's binarySearch usage.
      min: 6,
      max: 1,
    },
  },
};
|
||||
16
api/src/emscripten-utils.js
Normal file
16
api/src/emscripten-utils.js
Normal file
@@ -0,0 +1,16 @@
|
||||
import { fileURLToPath } from 'url';

/**
 * Convert a `file://` URL into a plain filesystem path; any other string is
 * returned untouched.
 */
export function pathify(path) {
  return path.startsWith('file://') ? fileURLToPath(path) : path;
}

/**
 * Instantiate an Emscripten-generated wasm module, pointing its file lookup
 * at the given `.wasm` path (which may be a `file://` URL).
 * @param factory - the Emscripten module factory function.
 * @param path - location of the module's `.wasm` binary.
 * @returns whatever the factory returns (typically a module promise).
 */
export function instantiateEmscriptenWasm(factory, path) {
  return factory({
    locateFile: () => pathify(path),
  });
}
|
||||
7
api/src/image_data.js
Normal file
7
api/src/image_data.js
Normal file
@@ -0,0 +1,7 @@
|
||||
/**
 * Minimal stand-in for the browser's ImageData: raw pixel bytes plus
 * dimensions. codecs.js installs this as a global for the decoders.
 */
export default class ImageData {
  /**
   * @param data - raw pixel data (typically a Uint8ClampedArray of RGBA bytes)
   * @param {number} width - width in pixels
   * @param {number} height - height in pixels
   */
  constructor(data, width, height) {
    Object.assign(this, { data, width, height });
  }
}
|
||||
217
api/src/index.js
Normal file
217
api/src/index.js
Normal file
@@ -0,0 +1,217 @@
|
||||
import { isMainThread } from 'worker_threads';
|
||||
import { cpus } from 'os';
|
||||
import { promises as fsp } from 'fs';
|
||||
|
||||
import { codecs as encoders, preprocessors as manipulations } from './codecs.js';
|
||||
import WorkerPool from './worker_pool.js';
|
||||
import { autoOptimize } from './auto-optimizer.js';
|
||||
|
||||
export { ImagePool, encoders, manipulations };
|
||||
|
||||
|
||||
/**
 * Read `file` from disk, sniff its format via the codecs' magic-byte
 * detectors, and decode it with the matching codec.
 * @returns {Promise<{bitmap, size: number}>} decoded pixels and original byte size.
 * @throws {Error} when no codec's detector matches.
 */
async function decodeFile({ file }) {
  const buffer = await fsp.readFile(file);
  // Detectors are regexes over a "binary string" of the first 16 bytes.
  const magicBytes = Array.from(buffer.slice(0, 16), (byte) =>
    String.fromCodePoint(byte),
  ).join('');
  const match = Object.entries(encoders).find(([, { detectors }]) =>
    detectors.some((detector) => detector.exec(magicBytes)),
  );
  if (!match) {
    throw Error(`${file} has an unsupported format`);
  }
  const [codecName] = match;
  const decoder = await encoders[codecName].dec();
  return {
    bitmap: decoder.decode(new Uint8Array(buffer)),
    size: buffer.length,
  };
}
|
||||
|
||||
/**
 * Apply a single manipulation (resize/quant/rotate) to a decoded image,
 * replacing its bitmap in place.
 * @returns the same image object with an updated bitmap.
 */
async function manipulateImage({ manipulationType, options, image }) {
  const applyManipulation = await manipulations[manipulationType].instantiate();
  const { data, width, height } = image.bitmap;
  image.bitmap = await applyManipulation(data, width, height, options);
  return image;
}
|
||||
|
||||
/**
 * Encode a decoded bitmap with one codec. When `encConfig === 'auto'` the
 * codec's tunable option (see `autoOptimize` in codecs.js) is binary-searched
 * until the Butteraugli distance reaches `optimizerButteraugliTarget`;
 * otherwise `encConfig` is passed to the encoder as-is.
 * @returns {{optionsUsed: object|string, binary: Uint8Array, size: number}}
 */
async function encodeImage({
  bitmap: bitmapIn,
  encName,
  encConfig,
  optimizerButteraugliTarget,
  maxOptimizerRounds,
}) {
  let binary;
  let optionsUsed = encConfig;
  const encoder = await encoders[encName].enc();
  if (encConfig === 'auto') {
    const optionToOptimize = encoders[encName].autoOptimize.option;
    const decoder = await encoders[encName].dec();
    // Encode with the codec's defaults, overriding only the optimized option.
    const encode = (bitmapIn, quality) =>
      encoder.encode(
        bitmapIn.data,
        bitmapIn.width,
        bitmapIn.height,
        Object.assign({}, encoders[encName].defaultEncoderOptions, {
          [optionToOptimize]: quality,
        }),
      );
    const decode = (binary) => decoder.decode(binary);
    const { binary: optimizedBinary, quality } = await autoOptimize(
      bitmapIn,
      encode,
      decode,
      {
        min: encoders[encName].autoOptimize.min,
        max: encoders[encName].autoOptimize.max,
        butteraugliDistanceGoal: optimizerButteraugliTarget,
        maxRounds: maxOptimizerRounds,
      },
    );
    binary = optimizedBinary;
    optionsUsed = {
      // 5 significant digits is enough
      [optionToOptimize]: Math.round(quality * 10000) / 10000,
    };
  } else {
    // NOTE(review): this branch passes `bitmapIn.data.buffer` while the
    // 'auto' branch above passes `bitmapIn.data` — confirm both forms are
    // accepted by the wasm encoders.
    binary = encoder.encode(
      bitmapIn.data.buffer,
      bitmapIn.width,
      bitmapIn.height,
      encConfig,
    );
  }
  return {
    optionsUsed,
    binary,
    size: binary.length,
  };
}
|
||||
|
||||
/**
 * Worker-side job dispatcher: both decoding and encoding go through the
 * worker pool, so each job message names the operation to perform.
 * @throws {Error} for unrecognized operations.
 */
function handleJob(params) {
  const { operation } = params;
  if (operation === 'encode') return encodeImage(params);
  if (operation === 'decode') return decodeFile(params);
  if (operation === 'manipulate') return manipulateImage(params);
  throw Error(`Invalid job "${operation}"`);
}
|
||||
|
||||
/**
 * Represents an ingested image.
 */
class Image {
  constructor (workerPool, file) {
    this.workerPool = workerPool;
    // Decoding starts immediately; `decoded` resolves to {bitmap, size}.
    this.decoded = workerPool.dispatchJob({operation: 'decode', file});
    this.encodedAs = {};
  }

  /**
   * Define one or several manipulations to apply to the image.
   * @param {object} manipulateOptions - An object with manipulations to apply, and their settings.
   * @returns {Promise<undefined>} - A promise that resolves when all manipulations have completed.
   */
  async manipulate (manipulateOptions = {}) {
    for (const [type, options] of Object.entries(manipulateOptions)) {
      if (!Object.keys(manipulations).includes(type)) {
        throw Error(`Invalid manipulation type "${type}"`);
      }
      const manipulatorOptions = Object.assign(
        {},
        manipulations[type].defaultOptions,
        options,
      );
      // Manipulations run sequentially, each on the previous result.
      this.decoded = this.workerPool.dispatchJob({
        operation: 'manipulate',
        manipulationType: type,
        image: await this.decoded,
        options: manipulatorOptions,
      });
      await this.decoded;
    }
  }

  /**
   * Define one or several encoders to use on the image.
   * @param {object} encodeOptions - An object with encoders to use, and their settings.
   * @returns {Promise<undefined>} - A promise that resolves when the image has been encoded with all the specified encoders.
   */
  async encode (encodeOptions = {}){
    const { bitmap } = await this.decoded;
    // FIX: the previous code always wrapped these options in Number(), so an
    // omitted option became Number(undefined) === NaN. NaN does NOT trigger
    // the destructuring defaults in autoOptimize/binarySearch, which broke
    // 'auto' encoding when the options were omitted. Pass `undefined` through
    // so the downstream defaults apply.
    const optimizerButteraugliTarget =
      encodeOptions.optimizerButteraugliTarget === undefined
        ? undefined
        : Number(encodeOptions.optimizerButteraugliTarget);
    const maxOptimizerRounds =
      encodeOptions.maxOptimizerRounds === undefined
        ? undefined
        : Number(encodeOptions.maxOptimizerRounds);
    for (const [encName, options] of Object.entries(encodeOptions)) {
      // Keys that aren't codec names (e.g. the optimizer options) are skipped.
      if (!Object.keys(encoders).includes(encName)) {
        continue;
      }
      const encRef = encoders[encName];
      // A string config (e.g. 'auto') is passed through verbatim; an object
      // config is layered over the codec's defaults.
      const encConfig =
        typeof options === 'string'
          ? options
          : Object.assign(
              {},
              encRef.defaultEncoderOptions,
              options,
            );
      this.encodedAs[encRef.extension] = this.workerPool.dispatchJob({
        operation: 'encode',
        bitmap,
        encName,
        encConfig,
        optimizerButteraugliTarget,
        maxOptimizerRounds,
      });
    }
    await Promise.all(Object.values(this.encodedAs));
  }
}
|
||||
|
||||
/**
 * A pool where images can be ingested and squooshed.
 */
class ImagePool {
  /**
   * Create a new pool.
   * @param {number} [threads] - Number of concurrent image processes to run in the pool. Defaults to the number of CPU cores in the system.
   */
  constructor(threads) {
    // Fall back to one worker per CPU core when no count is given.
    const workerCount = threads || cpus().length;
    this.workerPool = new WorkerPool(workerCount, __filename);
  }

  /**
   * Ingest an image into the image pool.
   * @param {string | Buffer | URL | object} image - The image or path to the image that should be ingested and decoded.
   * @returns {Image} - A custom class reference to the decoded image.
   */
  ingestImage(image) {
    return new Image(this.workerPool, image);
  }

  /**
   * Closes the underlying image processing pipeline. The already processed images will still be there, but no new processing can start.
   * @returns {Promise<undefined>} - A promise that resolves when the underlying pipeline has closed.
   */
  async close() {
    await this.workerPool.join();
  }
}
|
||||
|
||||
// Worker-thread entry point: when this module is loaded by WorkerPool as
// a worker (rather than as the main module), register handleJob so this
// thread processes dispatched encode/decode/manipulate jobs.
if (!isMainThread) {
  WorkerPool.useThisThreadAsWorker(handleJob);
}
|
||||
94
api/src/worker_pool.js
Normal file
94
api/src/worker_pool.js
Normal file
@@ -0,0 +1,94 @@
|
||||
import { Worker, parentPort } from 'worker_threads';
|
||||
import { TransformStream } from 'web-streams-polyfill';
|
||||
|
||||
/**
 * Generate a random 32-character lowercase-hex id used to correlate
 * worker request/response messages.
 *
 * Fixes the original's unpadded `toString(16)`: bytes below 0x10
 * produced a single hex digit, so ids varied from 16 to 32 characters
 * and the effective id space was smaller than intended.
 *
 * Not cryptographically secure (Math.random) — adequate only for
 * in-process message correlation.
 * @returns {string} 32 hex characters (16 random bytes).
 */
function uuid() {
  return Array.from({ length: 16 }, () =>
    Math.floor(Math.random() * 256)
      .toString(16)
      .padStart(2, '0'),
  ).join('');
}
|
||||
|
||||
/**
 * Post one job to a worker and resolve with its result.
 *
 * Messages are matched by a random id so concurrent jobs on the same
 * worker cannot be confused. Both listeners are removed once the
 * matching reply arrives — the original attached a fresh permanent
 * 'error' listener per job, leaking one listener per dispatch and
 * triggering Node's MaxListenersExceededWarning after ~10 jobs.
 *
 * NOTE(review): as in the original, a worker 'error' only logs; the
 * returned promise never settles in that case — confirm callers can
 * tolerate a forever-pending job on worker failure.
 * @param {import('worker_threads').Worker} worker - Target worker.
 * @param {any} msg - Job payload forwarded to the worker's handler.
 * @returns {Promise<any>} Resolves with the worker's `result`.
 */
function jobPromise(worker, msg) {
  return new Promise((resolve) => {
    const id = uuid();
    const onMessage = ({ result, id: rid }) => {
      if (rid !== id) {
        // Reply belongs to a different in-flight job on this worker.
        return;
      }
      // Deregister both handlers so repeated jobs don't accumulate
      // listeners on the worker.
      worker.off('message', onMessage);
      worker.off('error', onError);
      resolve(result);
    };
    const onError = (error) => console.error('Worker error: ', error);
    worker.on('message', onMessage);
    worker.on('error', onError);
    worker.postMessage({ msg, id });
  });
}
|
||||
|
||||
/**
 * A fixed-size pool of worker_threads Workers fed from a FIFO job queue.
 *
 * Two TransformStreams serve purely as unbounded async queues:
 * `jobQueue` carries pending jobs, `workerQueue` carries idle workers.
 * `_readLoop` pairs each incoming job with the next idle worker.
 */
export default class WorkerPool {
  /**
   * @param {number} numWorkers - Number of workers to spawn up front.
   * @param {string} workerFile - Module path each Worker executes.
   */
  constructor(numWorkers, workerFile) {
    this.numWorkers = numWorkers;
    this.jobQueue = new TransformStream();
    this.workerQueue = new TransformStream();

    // Seed the idle-worker queue with numWorkers fresh workers.
    const writer = this.workerQueue.writable.getWriter();
    for (let i = 0; i < numWorkers; i++) {
      writer.write(new Worker(workerFile));
    }
    writer.releaseLock();

    // Lifetime promise of the scheduling loop; settles after join().
    this.done = this._readLoop();
  }

  // Scheduling loop: read jobs until the job queue closes, run each on
  // the next idle worker, and recycle the worker when the job finishes.
  async _readLoop() {
    const reader = this.jobQueue.readable.getReader();
    while (true) {
      const { value, done } = await reader.read();
      if (done) {
        // join() closed the job queue: tear down every worker.
        await this._terminateAll();
        return;
      }
      const { msg, resolve } = value;
      // Waits here when all workers are busy — this is what caps
      // concurrency at numWorkers.
      const worker = await this._nextWorker();
      // Deliberately not awaited so jobs on different workers overlap.
      jobPromise(worker, msg).then((result) => {
        resolve(result);
        // Return the worker to the idle queue for the next job.
        const writer = this.workerQueue.writable.getWriter();
        writer.write(worker);
        writer.releaseLock();
      });
    }
  }

  // Take the next idle worker off the queue (waits if none are idle).
  async _nextWorker() {
    const reader = this.workerQueue.readable.getReader();
    const { value, done } = await reader.read();
    // NOTE(review): `done` is ignored — after _terminateAll() closes
    // the worker queue this would resolve to undefined; confirm no
    // caller can race with shutdown.
    reader.releaseLock();
    return value;
  }

  // Terminate every worker, then close the idle-worker queue.
  async _terminateAll() {
    for (let n = 0; n < this.numWorkers; n++) {
      const worker = await this._nextWorker();
      worker.terminate();
    }
    this.workerQueue.writable.close();
  }

  /**
   * Stop accepting new jobs and wait for the scheduling loop (and
   * worker teardown) to finish. In-flight jobs still complete.
   * @returns {Promise<undefined>}
   */
  async join() {
    // NOTE(review): the promise returned by close() is not awaited and
    // the writer lock is never released, so dispatchJob() after join()
    // would throw from getWriter() — confirm this is intended.
    this.jobQueue.writable.getWriter().close();
    await this.done;
  }

  /**
   * Queue a job for execution on the next idle worker.
   * @param {any} msg - Payload passed to the worker's job handler.
   * @returns {Promise<any>} Resolves with the worker's result.
   */
  dispatchJob(msg) {
    return new Promise((resolve) => {
      const writer = this.jobQueue.writable.getWriter();
      writer.write({ msg, resolve });
      writer.releaseLock();
    });
  }

  /**
   * Turn the current (worker) thread into a job processor: for every
   * message from the parent, run `cb(msg)` (may return a promise) and
   * post the result back tagged with the originating job id.
   * @param {(msg: any) => any} cb - Job handler.
   */
  static useThisThreadAsWorker(cb) {
    parentPort.on('message', async (data) => {
      const { msg, id } = data;
      const result = await cb(msg);
      parentPort.postMessage({ result, id });
    });
  }
}
|
||||
Reference in New Issue
Block a user