mirror of
https://github.com/GoogleChromeLabs/squoosh.git
synced 2025-11-11 16:26:20 +00:00
Merge branch 'API' into dev
This commit is contained in:
@@ -3,9 +3,9 @@
|
||||
[Squoosh] is an image compression web app that allows you to dive into the advanced options provided
|
||||
by various image compressors.
|
||||
|
||||
# CLI
|
||||
# API & CLI
|
||||
|
||||
[Squoosh now has a CLI](https://github.com/GoogleChromeLabs/squoosh/tree/dev/cli) that allows you to compress many images at once.
|
||||
Squoosh now has [an API](https://github.com/GoogleChromeLabs/squoosh/tree/dev/api) and [a CLI](https://github.com/GoogleChromeLabs/squoosh/tree/dev/cli) that allows you to compress many images at once.
|
||||
|
||||
# Privacy
|
||||
|
||||
|
||||
2445
cli/package-lock.json
generated
2445
cli/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,32 +1,24 @@
|
||||
{
|
||||
"name": "@squoosh/cli",
|
||||
"version": "0.6.0",
|
||||
"version": "0.7.0",
|
||||
"description": "A CLI for Squoosh",
|
||||
"public": true,
|
||||
"type": "module",
|
||||
"bin": {
|
||||
"squoosh-cli": "build/index.js",
|
||||
"@squoosh/cli": "build/index.js"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "rollup -c"
|
||||
"squoosh-cli": "src/index.js",
|
||||
"@squoosh/cli": "src/index.js"
|
||||
},
|
||||
"files": [
|
||||
"/src/index.js"
|
||||
],
|
||||
"keywords": [],
|
||||
"author": "Google Chrome Developers <chromium-dev@google.com>",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"web-streams-polyfill": "^3.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.11.6",
|
||||
"@babel/preset-env": "^7.11.5",
|
||||
"@rollup/plugin-babel": "^5.2.1",
|
||||
"@rollup/plugin-commonjs": "^15.0.0",
|
||||
"@rollup/plugin-node-resolve": "^9.0.0",
|
||||
"commander": "^6.0.0",
|
||||
"json5": "^2.1.3",
|
||||
"kleur": "^4.1.3",
|
||||
"ora": "^5.1.0",
|
||||
"rollup": "^2.26.11",
|
||||
"rollup-plugin-terser": "^7.0.2"
|
||||
"@squoosh/lib": "^0.2.0",
|
||||
"commander": "^7.2.0",
|
||||
"json5": "^2.2.0",
|
||||
"kleur": "^4.1.4",
|
||||
"ora": "^5.4.0"
|
||||
}
|
||||
}
|
||||
|
||||
326
cli/src/index.js
Normal file → Executable file
326
cli/src/index.js
Normal file → Executable file
@@ -1,17 +1,13 @@
|
||||
import { program } from 'commander';
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { program } from 'commander/esm.mjs';
|
||||
import JSON5 from 'json5';
|
||||
import { isMainThread } from 'worker_threads';
|
||||
import { cpus } from 'os';
|
||||
import { extname, join, basename } from 'path';
|
||||
import path from 'path';
|
||||
import { promises as fsp } from 'fs';
|
||||
import { resolve as resolvePath } from 'path';
|
||||
import { version } from 'json:../package.json';
|
||||
import ora from 'ora';
|
||||
import kleur from 'kleur';
|
||||
|
||||
import { codecs as supportedFormats, preprocessors } from './codecs.js';
|
||||
import WorkerPool from './worker_pool.js';
|
||||
import { autoOptimize } from './auto-optimizer.js';
|
||||
import { ImagePool, preprocessors, encoders } from '@squoosh/lib';
|
||||
|
||||
function clamp(v, min, max) {
|
||||
if (v < min) return min;
|
||||
@@ -26,114 +22,6 @@ function prettyPrintSize(size) {
|
||||
return (size / 2 ** (10 * index)).toFixed(2) + suffix[index];
|
||||
}
|
||||
|
||||
async function decodeFile(file) {
|
||||
const buffer = await fsp.readFile(file);
|
||||
const firstChunk = buffer.slice(0, 16);
|
||||
const firstChunkString = Array.from(firstChunk)
|
||||
.map((v) => String.fromCodePoint(v))
|
||||
.join('');
|
||||
const key = Object.entries(supportedFormats).find(([name, { detectors }]) =>
|
||||
detectors.some((detector) => detector.exec(firstChunkString)),
|
||||
)?.[0];
|
||||
if (!key) {
|
||||
throw Error(`${file} has an unsupported format`);
|
||||
}
|
||||
const rgba = (await supportedFormats[key].dec()).decode(
|
||||
new Uint8Array(buffer),
|
||||
);
|
||||
return {
|
||||
file,
|
||||
bitmap: rgba,
|
||||
size: buffer.length,
|
||||
};
|
||||
}
|
||||
|
||||
async function preprocessImage({ preprocessorName, options, file }) {
|
||||
const preprocessor = await preprocessors[preprocessorName].instantiate();
|
||||
file.bitmap = await preprocessor(
|
||||
file.bitmap.data,
|
||||
file.bitmap.width,
|
||||
file.bitmap.height,
|
||||
options,
|
||||
);
|
||||
return file;
|
||||
}
|
||||
|
||||
async function encodeFile({
|
||||
file,
|
||||
size,
|
||||
bitmap: bitmapIn,
|
||||
outputFile,
|
||||
encName,
|
||||
encConfig,
|
||||
optimizerButteraugliTarget,
|
||||
maxOptimizerRounds,
|
||||
}) {
|
||||
let out, infoText;
|
||||
const encoder = await supportedFormats[encName].enc();
|
||||
if (encConfig === 'auto') {
|
||||
const optionToOptimize = supportedFormats[encName].autoOptimize.option;
|
||||
const decoder = await supportedFormats[encName].dec();
|
||||
const encode = (bitmapIn, quality) =>
|
||||
encoder.encode(
|
||||
bitmapIn.data,
|
||||
bitmapIn.width,
|
||||
bitmapIn.height,
|
||||
Object.assign({}, supportedFormats[encName].defaultEncoderOptions, {
|
||||
[optionToOptimize]: quality,
|
||||
}),
|
||||
);
|
||||
const decode = (binary) => decoder.decode(binary);
|
||||
const { bitmap, binary, quality } = await autoOptimize(
|
||||
bitmapIn,
|
||||
encode,
|
||||
decode,
|
||||
{
|
||||
min: supportedFormats[encName].autoOptimize.min,
|
||||
max: supportedFormats[encName].autoOptimize.max,
|
||||
butteraugliDistanceGoal: optimizerButteraugliTarget,
|
||||
maxRounds: maxOptimizerRounds,
|
||||
},
|
||||
);
|
||||
out = binary;
|
||||
const opts = {
|
||||
// 5 significant digits is enough
|
||||
[optionToOptimize]: Math.round(quality * 10000) / 10000,
|
||||
};
|
||||
infoText = ` using --${encName} '${JSON5.stringify(opts)}'`;
|
||||
} else {
|
||||
out = encoder.encode(
|
||||
bitmapIn.data.buffer,
|
||||
bitmapIn.width,
|
||||
bitmapIn.height,
|
||||
encConfig,
|
||||
);
|
||||
}
|
||||
await fsp.writeFile(outputFile, out);
|
||||
return {
|
||||
infoText,
|
||||
inputSize: size,
|
||||
inputFile: file,
|
||||
outputFile,
|
||||
outputSize: out.length,
|
||||
};
|
||||
}
|
||||
|
||||
// both decoding and encoding go through the worker pool
|
||||
function handleJob(params) {
|
||||
const { operation } = params;
|
||||
switch (operation) {
|
||||
case 'encode':
|
||||
return encodeFile(params);
|
||||
case 'decode':
|
||||
return decodeFile(params.file);
|
||||
case 'preprocess':
|
||||
return preprocessImage(params);
|
||||
default:
|
||||
throw Error(`Invalid job "${operation}"`);
|
||||
}
|
||||
}
|
||||
|
||||
function progressTracker(results) {
|
||||
const spinner = ora();
|
||||
const tracker = {};
|
||||
@@ -163,13 +51,12 @@ function progressTracker(results) {
|
||||
};
|
||||
function getResultsText() {
|
||||
let out = '';
|
||||
for (const [filename, result] of results.entries()) {
|
||||
out += `\n ${kleur.cyan(filename)}: ${prettyPrintSize(result.size)}`;
|
||||
for (const { outputFile, outputSize, infoText } of result.outputs) {
|
||||
const name = (program.suffix + extname(outputFile)).padEnd(5);
|
||||
out += `\n ${kleur.dim('└')} ${kleur.cyan(name)} → ${prettyPrintSize(
|
||||
outputSize,
|
||||
)}`;
|
||||
for (const result of results.values()) {
|
||||
out += `\n ${kleur.cyan(result.file)}: ${prettyPrintSize(result.size)}`;
|
||||
for (const { outputFile, size: outputSize, infoText } of result.outputs) {
|
||||
out += `\n ${kleur.dim('└')} ${kleur.cyan(
|
||||
outputFile.padEnd(5),
|
||||
)} → ${prettyPrintSize(outputSize)}`;
|
||||
const percent = ((outputSize / result.size) * 100).toPrecision(3);
|
||||
out += ` (${kleur[outputSize > result.size ? 'red' : 'green'](
|
||||
percent + '%',
|
||||
@@ -186,17 +73,17 @@ function progressTracker(results) {
|
||||
async function getInputFiles(paths) {
|
||||
const validFiles = [];
|
||||
|
||||
for (const path of paths) {
|
||||
const files = (await fsp.lstat(path)).isDirectory()
|
||||
? (await fsp.readdir(path)).map(file => join(path, file))
|
||||
: [path];
|
||||
for (const inputPath of paths) {
|
||||
const files = (await fsp.lstat(inputPath)).isDirectory()
|
||||
? (await fsp.readdir(inputPath)).map((file) => path.join(inputPath, file))
|
||||
: [inputPath];
|
||||
for (const file of files) {
|
||||
try {
|
||||
await fsp.stat(file);
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
console.warn(
|
||||
`Warning: Input file does not exist: ${resolvePath(file)}`,
|
||||
`Warning: Input file does not exist: ${path.resolve(file)}`,
|
||||
);
|
||||
continue;
|
||||
} else {
|
||||
@@ -214,7 +101,7 @@ async function getInputFiles(paths) {
|
||||
async function processFiles(files) {
|
||||
files = await getInputFiles(files);
|
||||
|
||||
const parallelism = cpus().length;
|
||||
const imagePool = new ImagePool();
|
||||
|
||||
const results = new Map();
|
||||
const progress = progressTracker(results);
|
||||
@@ -223,140 +110,123 @@ async function processFiles(files) {
|
||||
progress.totalOffset = files.length;
|
||||
progress.setProgress(0, files.length);
|
||||
|
||||
const workerPool = new WorkerPool(parallelism, __filename);
|
||||
// Create output directory
|
||||
await fsp.mkdir(program.outputDir, { recursive: true });
|
||||
await fsp.mkdir(program.opts().outputDir, { recursive: true });
|
||||
|
||||
let decoded = 0;
|
||||
let decodedFiles = await Promise.all(
|
||||
files.map(async (file) => {
|
||||
const result = await workerPool.dispatchJob({
|
||||
operation: 'decode',
|
||||
const image = imagePool.ingestImage(file);
|
||||
await image.decoded;
|
||||
results.set(image, {
|
||||
file,
|
||||
});
|
||||
results.set(file, {
|
||||
file: result.file,
|
||||
size: result.size,
|
||||
size: (await image.decoded).size,
|
||||
outputs: [],
|
||||
});
|
||||
progress.setProgress(++decoded, files.length);
|
||||
return result;
|
||||
return image;
|
||||
}),
|
||||
);
|
||||
|
||||
for (const [preprocessorName, value] of Object.entries(preprocessors)) {
|
||||
if (!program[preprocessorName]) {
|
||||
const preprocessOptions = {};
|
||||
|
||||
for (const preprocessorName of Object.keys(preprocessors)) {
|
||||
if (!program.opts()[preprocessorName]) {
|
||||
continue;
|
||||
}
|
||||
const preprocessorParam = program[preprocessorName];
|
||||
const preprocessorOptions = Object.assign(
|
||||
{},
|
||||
value.defaultOptions,
|
||||
JSON5.parse(preprocessorParam),
|
||||
);
|
||||
|
||||
decodedFiles = await Promise.all(
|
||||
decodedFiles.map(async (file) => {
|
||||
return workerPool.dispatchJob({
|
||||
file,
|
||||
operation: 'preprocess',
|
||||
preprocessorName,
|
||||
options: preprocessorOptions,
|
||||
});
|
||||
}),
|
||||
preprocessOptions[preprocessorName] = JSON5.parse(
|
||||
program.opts()[preprocessorName],
|
||||
);
|
||||
}
|
||||
|
||||
for (const image of decodedFiles) {
|
||||
image.preprocess(preprocessOptions);
|
||||
}
|
||||
|
||||
await Promise.all(decodedFiles.map((image) => image.decoded));
|
||||
|
||||
progress.progressOffset = decoded;
|
||||
progress.setStatus('Encoding ' + kleur.dim(`(${parallelism} threads)`));
|
||||
progress.setStatus(
|
||||
'Encoding ' + kleur.dim(`(${imagePool.workerPool.numWorkers} threads)`),
|
||||
);
|
||||
progress.setProgress(0, files.length);
|
||||
|
||||
const jobs = [];
|
||||
let jobsStarted = 0;
|
||||
let jobsFinished = 0;
|
||||
for (const { file, bitmap, size } of decodedFiles) {
|
||||
const ext = extname(file);
|
||||
const base = basename(file, ext) + program.suffix;
|
||||
for (const image of decodedFiles) {
|
||||
const originalFile = results.get(image).file;
|
||||
|
||||
for (const [encName, value] of Object.entries(supportedFormats)) {
|
||||
if (!program[encName]) {
|
||||
const encodeOptions = {
|
||||
optimizerButteraugliTarget: Number(
|
||||
program.opts().optimizerButteraugliTarget,
|
||||
),
|
||||
maxOptimizerRounds: Number(program.opts().maxOptimizerRounds),
|
||||
};
|
||||
for (const encName of Object.keys(encoders)) {
|
||||
if (!program.opts()[encName]) {
|
||||
continue;
|
||||
}
|
||||
const encParam =
|
||||
typeof program[encName] === 'string' ? program[encName] : '{}';
|
||||
const encParam = program.opts()[encName];
|
||||
const encConfig =
|
||||
encParam.toLowerCase() === 'auto'
|
||||
? 'auto'
|
||||
: Object.assign(
|
||||
{},
|
||||
value.defaultEncoderOptions,
|
||||
JSON5.parse(encParam),
|
||||
);
|
||||
const outputFile = join(program.outputDir, `${base}.${value.extension}`);
|
||||
jobsStarted++;
|
||||
const p = workerPool
|
||||
.dispatchJob({
|
||||
operation: 'encode',
|
||||
file,
|
||||
size,
|
||||
bitmap,
|
||||
outputFile,
|
||||
encName,
|
||||
encConfig,
|
||||
optimizerButteraugliTarget: Number(
|
||||
program.optimizerButteraugliTarget,
|
||||
),
|
||||
maxOptimizerRounds: Number(program.maxOptimizerRounds),
|
||||
})
|
||||
.then((output) => {
|
||||
jobsFinished++;
|
||||
results.get(file).outputs.push(output);
|
||||
progress.setProgress(jobsFinished, jobsStarted);
|
||||
});
|
||||
jobs.push(p);
|
||||
encParam.toLowerCase() === 'auto' ? 'auto' : JSON5.parse(encParam);
|
||||
encodeOptions[encName] = encConfig;
|
||||
}
|
||||
jobsStarted++;
|
||||
const job = image.encode(encodeOptions).then(async () => {
|
||||
jobsFinished++;
|
||||
const outputPath = path.join(
|
||||
program.opts().outputDir,
|
||||
program.opts().suffix +
|
||||
path.basename(originalFile, path.extname(originalFile)),
|
||||
);
|
||||
for (const output of Object.values(image.encodedWith)) {
|
||||
const outputFile = `${outputPath}.${(await output).extension}`;
|
||||
await fsp.writeFile(outputFile, (await output).binary);
|
||||
results
|
||||
.get(image)
|
||||
.outputs.push(Object.assign(await output, { outputFile }));
|
||||
}
|
||||
progress.setProgress(jobsFinished, jobsStarted);
|
||||
});
|
||||
jobs.push(job);
|
||||
}
|
||||
|
||||
// update the progress to account for multi-format
|
||||
progress.setProgress(jobsFinished, jobsStarted);
|
||||
// Wait for all jobs to finish
|
||||
await workerPool.join();
|
||||
await Promise.all(jobs);
|
||||
await imagePool.close();
|
||||
progress.finish('Squoosh results:');
|
||||
}
|
||||
|
||||
if (isMainThread) {
|
||||
program
|
||||
.name('squoosh-cli')
|
||||
.version(version)
|
||||
.arguments('<files...>')
|
||||
.option('-d, --output-dir <dir>', 'Output directory', '.')
|
||||
.option('-s, --suffix <suffix>', 'Append suffix to output files', '')
|
||||
.option(
|
||||
'--max-optimizer-rounds <rounds>',
|
||||
'Maximum number of compressions to use for auto optimizations',
|
||||
'6',
|
||||
)
|
||||
.option(
|
||||
'--optimizer-butteraugli-target <butteraugli distance>',
|
||||
'Target Butteraugli distance for auto optimizer',
|
||||
'1.4',
|
||||
)
|
||||
.action(processFiles);
|
||||
program
|
||||
.name('squoosh-cli')
|
||||
.arguments('<files...>')
|
||||
.option('-d, --output-dir <dir>', 'Output directory', '.')
|
||||
.option('-s, --suffix <suffix>', 'Append suffix to output files', '')
|
||||
.option(
|
||||
'--max-optimizer-rounds <rounds>',
|
||||
'Maximum number of compressions to use for auto optimizations',
|
||||
'6',
|
||||
)
|
||||
.option(
|
||||
'--optimizer-butteraugli-target <butteraugli distance>',
|
||||
'Target Butteraugli distance for auto optimizer',
|
||||
'1.4',
|
||||
)
|
||||
.action(processFiles);
|
||||
|
||||
// Create a CLI option for each supported preprocessor
|
||||
for (const [key, value] of Object.entries(preprocessors)) {
|
||||
program.option(`--${key} [config]`, value.description);
|
||||
}
|
||||
// Create a CLI option for each supported encoder
|
||||
for (const [key, value] of Object.entries(supportedFormats)) {
|
||||
program.option(
|
||||
`--${key} [config]`,
|
||||
`Use ${value.name} to generate a .${value.extension} file with the given configuration`,
|
||||
);
|
||||
}
|
||||
|
||||
program.parse(process.argv);
|
||||
} else {
|
||||
WorkerPool.useThisThreadAsWorker(handleJob);
|
||||
// Create a CLI option for each supported preprocessor
|
||||
for (const [key, value] of Object.entries(preprocessors)) {
|
||||
program.option(`--${key} [config]`, value.description);
|
||||
}
|
||||
// Create a CLI option for each supported encoder
|
||||
for (const [key, value] of Object.entries(encoders)) {
|
||||
program.option(
|
||||
`--${key} [config]`,
|
||||
`Use ${value.name} to generate a .${value.extension} file with the given configuration`,
|
||||
);
|
||||
}
|
||||
|
||||
program.parse(process.argv);
|
||||
|
||||
3
libsquoosh/.gitignore
vendored
Normal file
3
libsquoosh/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
node_modules
|
||||
build
|
||||
.DS_Store
|
||||
1
libsquoosh/.npmignore
Normal file
1
libsquoosh/.npmignore
Normal file
@@ -0,0 +1 @@
|
||||
node_modules
|
||||
0
libsquoosh/.npmrc
Normal file
0
libsquoosh/.npmrc
Normal file
163
libsquoosh/README.md
Normal file
163
libsquoosh/README.md
Normal file
@@ -0,0 +1,163 @@
|
||||
# Squoosh API
|
||||
|
||||
Squoosh API is an _experimental_ way to run all the codecs you know from the [Squoosh] web app directly inside your own JavaScript program. The Squoosh API uses a worker pool to parallelize processing images. This way you can apply the same codec to many images at once.
|
||||
|
||||
Squoosh API is currently not the fastest image compression tool in town and doesn’t aim to be. It is, however, fast enough to compress many images sufficiently quick at once.
|
||||
|
||||
## Installation
|
||||
|
||||
The Squoosh API can be installed to your local project with the following command:
|
||||
|
||||
```
|
||||
$ npm install @squoosh/api
|
||||
```
|
||||
|
||||
You can start using the API by adding these lines to the top of your JS program:
|
||||
|
||||
```js
|
||||
import { ImagePool } from '@squoosh/api';
|
||||
const imagePool = new ImagePool();
|
||||
```
|
||||
|
||||
This will create an image pool with an underlying processing pipeline that you can use to ingest and encode images. The ImagePool constructor takes one argument that defines how many parallel operations it is allowed to run at any given time. By default, this number is set to the amount of CPU cores available in the system it is running on.
|
||||
|
||||
## Ingesting images
|
||||
|
||||
You can ingest a new image like so:
|
||||
|
||||
```js
|
||||
const imagePath = 'path/to/image.png';
|
||||
const image = imagePool.ingestImage(imagePath);
|
||||
```
|
||||
|
||||
These `ingestImage` function can take anything the node [`readFile`][readfile] function can take, uncluding a buffer and `FileHandle`.
|
||||
|
||||
The returned `image` object is a representation of the original image, that you can now preprocess, encode, and extract information about.
|
||||
|
||||
## Preprocessing and encoding images
|
||||
|
||||
When an image has been ingested, you can start preprocessing it and encoding it to other formats. This example will resize the image and then encode it to a `.jpg` and `.jxl` image:
|
||||
|
||||
```js
|
||||
await image.decoded; //Wait until the image is decoded before running preprocessors
|
||||
|
||||
const preprocessOptions: {
|
||||
resize: {
|
||||
enabled: true,
|
||||
width: 100,
|
||||
height: 50,
|
||||
}
|
||||
}
|
||||
await image.preprocess(preprocessOptions);
|
||||
|
||||
const encodeOptions: {
|
||||
mozjpeg: {}, //an empty object means 'use default settings'
|
||||
jxl: {
|
||||
quality: 90,
|
||||
},
|
||||
}
|
||||
await image.encode(encodeOptions);
|
||||
|
||||
```
|
||||
|
||||
The default values for each option can be found in the [`codecs.js`][codecs.js] file under `defaultEncoderOptions`. Every unspecified value will use the default value specified there. _Better documentation is needed here._
|
||||
|
||||
You can run your own code inbetween the different steps, if, for example, you want to change how much the image should be resized based on its original height. (See [Extracting image information](#extracting-image-information) to learn how to get the image dimensions).
|
||||
|
||||
## Closing the ImagePool
|
||||
|
||||
When you have encoded everything you need, it is recommended to close the processing pipeline in the ImagePool. This will not delete the images you have already encoded, but it will prevent you from ingesting and encoding new images.
|
||||
|
||||
Close the ImagePool pipeline with this line:
|
||||
|
||||
```js
|
||||
await imagePool.close();
|
||||
```
|
||||
|
||||
## Writing encoded images to the file system
|
||||
|
||||
When you have encoded an image, you normally want to write it to a file.
|
||||
|
||||
This example takes an image that has been encoded as a `jpg` and writes it to a file:
|
||||
|
||||
```js
|
||||
const rawEncodedImage = (await image.encodedWidth.mozjpeg).binary;
|
||||
|
||||
fs.writeFile('/path/to/new/image.jpg', rawEncodedImage);
|
||||
```
|
||||
|
||||
This example iterates through all encoded versions of the image and writes them to a specific path:
|
||||
|
||||
```js
|
||||
const newImagePath = '/path/to/image.'; //extension is added automatically
|
||||
|
||||
for (const encodedImage of Object.values(image.encodedWith)) {
|
||||
fs.writeFile(
|
||||
newImagePath + (await encodedImage).extension,
|
||||
(await encodedImage).binary,
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
## Extracting image information
|
||||
|
||||
Information about a decoded image is available at `Image.decoded`. It looks something like this:
|
||||
|
||||
```js
|
||||
console.log(await image.decoded);
|
||||
// Returns:
|
||||
{
|
||||
bitmap: {
|
||||
data: Uint8ClampedArray(47736584) [
|
||||
225, 228, 237, 255, 225, 228, 237, 255, 225, 228, 237, 255,
|
||||
225, 228, 237, 255, 225, 228, 237, 255, 225, 228, 237, 255,
|
||||
225, 228, 237, 255,
|
||||
... //the entire raw image
|
||||
],
|
||||
width: 4606, //pixels
|
||||
height: 2591 //pixels
|
||||
},
|
||||
size: 2467795 //bytes
|
||||
}
|
||||
```
|
||||
|
||||
Information about an encoded image can be found at `Image.encodedWith[encoderName]`. It looks something like this:
|
||||
|
||||
```js
|
||||
console.log(await image.encodedWith.jxl);
|
||||
// Returns:
|
||||
{
|
||||
optionsUsed: {
|
||||
quality: 75,
|
||||
baseline: false,
|
||||
arithmetic: false,
|
||||
progressive: true,
|
||||
... //all the possible options for this encoder
|
||||
},
|
||||
binary: Uint8Array(1266975) [
|
||||
1, 0, 0, 1, 0, 1, 0, 0, 255, 219, 0, 132,
|
||||
113, 119, 156, 156, 209, 1, 8, 8, 8, 8, 9, 8,
|
||||
9, 10, 10, 9,
|
||||
... //the entire raw encoded image
|
||||
],
|
||||
extension: 'jxl',
|
||||
size: 1266975 //bytes
|
||||
}
|
||||
```
|
||||
|
||||
## Auto optimizer
|
||||
|
||||
Squoosh API has an _experimental_ auto optimizer that compresses an image as much as possible, trying to hit a specific [Butteraugli] target value. The higher the Butteraugli target value, the more artifacts can be introduced.
|
||||
|
||||
You can make use of the auto optimizer by using “auto” as the config object.
|
||||
|
||||
```
|
||||
const encodeOptions: {
|
||||
mozjpeg: 'auto',
|
||||
}
|
||||
```
|
||||
|
||||
[squoosh]: https://squoosh.app
|
||||
[codecs.js]: https://github.com/GoogleChromeLabs/squoosh/blob/dev/cli/src/codecs.js
|
||||
[butteraugli]: https://github.com/google/butteraugli
|
||||
[readfile]: https://nodejs.org/api/fs.html#fs_fspromises_readfile_path_options
|
||||
@@ -10,11 +10,11 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
import { promises as fs } from "fs";
|
||||
import { basename } from "path";
|
||||
import { promises as fs } from 'fs';
|
||||
import { basename } from 'path';
|
||||
|
||||
const defaultOpts = {
|
||||
prefix: "asset-url"
|
||||
prefix: 'asset-url',
|
||||
};
|
||||
|
||||
export default function assetPlugin(opts) {
|
||||
@@ -23,16 +23,16 @@ export default function assetPlugin(opts) {
|
||||
/** @type {Map<string, Buffer>} */
|
||||
let assetIdToSourceBuffer;
|
||||
|
||||
const prefix = opts.prefix + ":";
|
||||
const prefix = opts.prefix + ':';
|
||||
return {
|
||||
name: "asset-plugin",
|
||||
name: 'asset-plugin',
|
||||
buildStart() {
|
||||
assetIdToSourceBuffer = new Map();
|
||||
},
|
||||
augmentChunkHash(info) {
|
||||
// Get the sources for all assets imported by this chunk.
|
||||
const buffers = Object.keys(info.modules)
|
||||
.map(moduleId => assetIdToSourceBuffer.get(moduleId))
|
||||
.map((moduleId) => assetIdToSourceBuffer.get(moduleId))
|
||||
.filter(Boolean);
|
||||
|
||||
if (buffers.length === 0) return;
|
||||
@@ -56,20 +56,20 @@ export default function assetPlugin(opts) {
|
||||
throw Error(`Cannot find ${realId}`);
|
||||
}
|
||||
// Add an additional .js to the end so it ends up with .js at the end in the _virtual folder.
|
||||
return prefix + resolveResult.id + ".js";
|
||||
return prefix + resolveResult.id + '.js';
|
||||
},
|
||||
async load(id) {
|
||||
if (!id.startsWith(prefix)) return;
|
||||
const realId = id.slice(prefix.length, -".js".length);
|
||||
const realId = id.slice(prefix.length, -'.js'.length);
|
||||
const source = await fs.readFile(realId);
|
||||
assetIdToSourceBuffer.set(id, source);
|
||||
this.addWatchFile(realId);
|
||||
|
||||
return `export default import.meta.ROLLUP_FILE_URL_${this.emitFile({
|
||||
type: "asset",
|
||||
type: 'asset',
|
||||
source,
|
||||
name: basename(realId)
|
||||
name: basename(realId),
|
||||
})}`;
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -5,8 +5,8 @@ export default function autojsonPlugin() {
|
||||
name: 'autojson-plugin',
|
||||
async load(id) {
|
||||
if (id.endsWith('.json') && !id.startsWith('json:')) {
|
||||
return 'export default ' + await fsp.readFile(id, 'utf8');
|
||||
return 'export default ' + (await fsp.readFile(id, 'utf8'));
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
};
|
||||
}
|
||||
1869
libsquoosh/package-lock.json
generated
Normal file
1869
libsquoosh/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
28
libsquoosh/package.json
Normal file
28
libsquoosh/package.json
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"name": "@squoosh/lib",
|
||||
"version": "0.2.0",
|
||||
"description": "A Node library for Squoosh",
|
||||
"public": true,
|
||||
"main": "/build/index.js",
|
||||
"files": [
|
||||
"/build/*"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "rollup -c"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "Google Chrome Developers <chromium-dev@google.com>",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"web-streams-polyfill": "^3.0.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.14.0",
|
||||
"@babel/preset-env": "^7.14.0",
|
||||
"@rollup/plugin-babel": "^5.3.0",
|
||||
"@rollup/plugin-commonjs": "^18.0.0",
|
||||
"@rollup/plugin-node-resolve": "^11.2.1",
|
||||
"rollup": "^2.46.0",
|
||||
"rollup-plugin-terser": "^7.0.2"
|
||||
}
|
||||
}
|
||||
@@ -13,9 +13,6 @@ export default {
|
||||
dir: 'build',
|
||||
format: 'cjs',
|
||||
assetFileNames: '[name]-[hash][extname]',
|
||||
// This is needed so the resulting `index.js` can be
|
||||
// executed by `npx`.
|
||||
banner: '#!/usr/bin/env node',
|
||||
},
|
||||
plugins: [
|
||||
resolve(),
|
||||
@@ -27,7 +24,7 @@ export default {
|
||||
babelrc: false,
|
||||
configFile: false,
|
||||
minified: process.env.DEBUG != '',
|
||||
comments: false,
|
||||
comments: true,
|
||||
presets: [
|
||||
[
|
||||
'@babel/preset-env',
|
||||
@@ -41,5 +38,5 @@ export default {
|
||||
],
|
||||
}),
|
||||
],
|
||||
external: builtinModules,
|
||||
external: [...builtinModules, 'web-streams-polyfill'],
|
||||
};
|
||||
@@ -1,7 +1,7 @@
|
||||
import { instantiateEmscriptenWasm } from "./emscripten-utils.js";
|
||||
import { instantiateEmscriptenWasm } from './emscripten-utils.js';
|
||||
|
||||
import visdif from "../../codecs/visdif/visdif.js";
|
||||
import visdifWasm from "asset-url:../../codecs/visdif/visdif.wasm";
|
||||
import visdif from '../../codecs/visdif/visdif.js';
|
||||
import visdifWasm from 'asset-url:../../codecs/visdif/visdif.wasm';
|
||||
|
||||
// `measure` is a (async) function that takes exactly one numeric parameter and
|
||||
// returns a value. The function is assumed to be monotonic (an increase in `parameter`
|
||||
@@ -11,7 +11,7 @@ import visdifWasm from "asset-url:../../codecs/visdif/visdif.wasm";
|
||||
export async function binarySearch(
|
||||
measureGoal,
|
||||
measure,
|
||||
{ min = 0, max = 100, epsilon = 0.1, maxRounds = 8 } = {}
|
||||
{ min = 0, max = 100, epsilon = 0.1, maxRounds = 8 } = {},
|
||||
) {
|
||||
let parameter = (max - min) / 2 + min;
|
||||
let delta = (max - min) / 4;
|
||||
@@ -36,14 +36,14 @@ export async function autoOptimize(
|
||||
bitmapIn,
|
||||
encode,
|
||||
decode,
|
||||
{ butteraugliDistanceGoal = 1.4, ...otherOpts } = {}
|
||||
{ butteraugliDistanceGoal = 1.4, ...otherOpts } = {},
|
||||
) {
|
||||
const { VisDiff } = await instantiateEmscriptenWasm(visdif, visdifWasm);
|
||||
|
||||
const comparator = new VisDiff(
|
||||
bitmapIn.data,
|
||||
bitmapIn.width,
|
||||
bitmapIn.height
|
||||
bitmapIn.height,
|
||||
);
|
||||
|
||||
let bitmapOut;
|
||||
@@ -53,18 +53,18 @@ export async function autoOptimize(
|
||||
// increase the metric value. So multipliy Butteraugli values by -1.
|
||||
const { parameter } = await binarySearch(
|
||||
-1 * butteraugliDistanceGoal,
|
||||
async quality => {
|
||||
async (quality) => {
|
||||
binaryOut = await encode(bitmapIn, quality);
|
||||
bitmapOut = await decode(binaryOut);
|
||||
return -1 * comparator.distance(bitmapOut.data);
|
||||
},
|
||||
otherOpts
|
||||
otherOpts,
|
||||
);
|
||||
comparator.delete();
|
||||
|
||||
return {
|
||||
bitmap: bitmapOut,
|
||||
binary: binaryOut,
|
||||
quality: parameter
|
||||
quality: parameter,
|
||||
};
|
||||
}
|
||||
@@ -344,7 +344,11 @@ export const codecs = {
|
||||
await oxipngPromise;
|
||||
return {
|
||||
encode: (buffer, width, height, opts) => {
|
||||
const simplePng = pngEncDec.encode(new Uint8Array(buffer), width, height);
|
||||
const simplePng = pngEncDec.encode(
|
||||
new Uint8Array(buffer),
|
||||
width,
|
||||
height,
|
||||
);
|
||||
return oxipng.optimise(simplePng, opts.level);
|
||||
},
|
||||
};
|
||||
209
libsquoosh/src/index.js
Normal file
209
libsquoosh/src/index.js
Normal file
@@ -0,0 +1,209 @@
|
||||
import { isMainThread } from 'worker_threads';
|
||||
import { cpus } from 'os';
|
||||
import { promises as fsp } from 'fs';
|
||||
|
||||
import { codecs as encoders, preprocessors } from './codecs.js';
|
||||
import WorkerPool from './worker_pool.js';
|
||||
import { autoOptimize } from './auto-optimizer.js';
|
||||
|
||||
export { ImagePool, encoders, preprocessors };
|
||||
|
||||
async function decodeFile({ file }) {
|
||||
const buffer = await fsp.readFile(file);
|
||||
const firstChunk = buffer.slice(0, 16);
|
||||
const firstChunkString = Array.from(firstChunk)
|
||||
.map((v) => String.fromCodePoint(v))
|
||||
.join('');
|
||||
const key = Object.entries(encoders).find(([name, { detectors }]) =>
|
||||
detectors.some((detector) => detector.exec(firstChunkString)),
|
||||
)?.[0];
|
||||
if (!key) {
|
||||
throw Error(`${file} has an unsupported format`);
|
||||
}
|
||||
const rgba = (await encoders[key].dec()).decode(new Uint8Array(buffer));
|
||||
return {
|
||||
bitmap: rgba,
|
||||
size: buffer.length,
|
||||
};
|
||||
}
|
||||
|
||||
async function preprocessImage({ preprocessorName, options, image }) {
|
||||
const preprocessor = await preprocessors[preprocessorName].instantiate();
|
||||
image.bitmap = await preprocessor(
|
||||
image.bitmap.data,
|
||||
image.bitmap.width,
|
||||
image.bitmap.height,
|
||||
options,
|
||||
);
|
||||
return image;
|
||||
}
|
||||
|
||||
/**
 * Encode a decoded bitmap with one encoder, optionally auto-tuning quality.
 *
 * When `encConfig` is the string 'auto', the codec's nominated quality option
 * is searched via `autoOptimize` for a value meeting the butteraugli distance
 * goal; otherwise `encConfig` is passed to the encoder verbatim.
 *
 * @param {object} job - Worker job payload.
 * @param {object} job.bitmap - Decoded bitmap (`data`, `width`, `height`).
 * @param {string} job.encName - Key into the `encoders` map.
 * @param {object|string} job.encConfig - Encoder options, or the string 'auto'.
 * @param {number} job.optimizerButteraugliTarget - Distance goal for 'auto'.
 * @param {number} job.maxOptimizerRounds - Iteration cap for 'auto'.
 * @returns {Promise<{optionsUsed: object, binary: *, extension: string, size: number}>}
 */
async function encodeImage({
  bitmap: bitmapIn,
  encName,
  encConfig,
  optimizerButteraugliTarget,
  maxOptimizerRounds,
}) {
  let binary;
  // Report back the options actually used so callers can see what 'auto' chose.
  let optionsUsed = encConfig;
  const encoder = await encoders[encName].enc();
  if (encConfig === 'auto') {
    // Each codec nominates one numeric option (its `autoOptimize.option`)
    // to search over; all other options stay at their defaults.
    const optionToOptimize = encoders[encName].autoOptimize.option;
    const decoder = await encoders[encName].dec();
    const encode = (bitmapIn, quality) =>
      encoder.encode(
        bitmapIn.data,
        bitmapIn.width,
        bitmapIn.height,
        Object.assign({}, encoders[encName].defaultEncoderOptions, {
          [optionToOptimize]: quality,
        }),
      );
    const decode = (binary) => decoder.decode(binary);
    const { binary: optimizedBinary, quality } = await autoOptimize(
      bitmapIn,
      encode,
      decode,
      {
        min: encoders[encName].autoOptimize.min,
        max: encoders[encName].autoOptimize.max,
        butteraugliDistanceGoal: optimizerButteraugliTarget,
        maxRounds: maxOptimizerRounds,
      },
    );
    binary = optimizedBinary;
    optionsUsed = {
      // Round the chosen quality to 4 decimal places for reporting.
      [optionToOptimize]: Math.round(quality * 10000) / 10000,
    };
  } else {
    // NOTE(review): this branch passes `bitmapIn.data.buffer` while the
    // 'auto' branch above passes `bitmapIn.data`. If `data` is a typed-array
    // view with a non-zero byteOffset the two differ — confirm which form
    // the encoders expect.
    binary = encoder.encode(
      bitmapIn.data.buffer,
      bitmapIn.width,
      bitmapIn.height,
      encConfig,
    );
  }
  return {
    optionsUsed,
    binary,
    extension: encoders[encName].extension,
    size: binary.length,
  };
}
|
||||
|
||||
// both decoding and encoding go through the worker pool
/**
 * Dispatch a worker-pool job payload to its handler.
 * @param {object} params - Job payload; `params.operation` selects the handler.
 * @returns {Promise<object>} The handler's result.
 * @throws {Error} If `params.operation` names no known job type.
 */
function handleJob(params) {
  const { operation } = params;
  const handlers = new Map([
    ['encode', encodeImage],
    ['decode', decodeFile],
    ['preprocess', preprocessImage],
  ]);
  if (!handlers.has(operation)) {
    throw Error(`Invalid job "${operation}"`);
  }
  return handlers.get(operation)(params);
}
|
||||
|
||||
/**
 * Represents an ingested image.
 *
 * Decoding starts immediately on construction via the shared worker pool;
 * `decoded` and every entry of `encodedWith` are promises of worker results.
 */
class Image {
  /**
   * @param {object} workerPool - Pool whose `dispatchJob` runs decode/preprocess/encode jobs.
   * @param {string | Buffer | URL | object} file - Path (or descriptor) of the image to decode.
   */
  constructor(workerPool, file) {
    this.workerPool = workerPool;
    // Kick off decoding right away; consumers await `this.decoded`.
    this.decoded = workerPool.dispatchJob({ operation: 'decode', file });
    this.encodedWith = {};
  }

  /**
   * Define one or several preprocessors to use on the image.
   * Preprocessors run sequentially, each consuming the previous result.
   * @param {object} preprocessOptions - An object with preprocessors to use, and their settings.
   * @returns {Promise<undefined>} - A promise that resolves when all preprocessors have completed their work.
   * @throws {Error} If an unknown preprocessor name is given.
   */
  async preprocess(preprocessOptions = {}) {
    for (const [name, options] of Object.entries(preprocessOptions)) {
      if (!Object.keys(preprocessors).includes(name)) {
        throw Error(`Invalid preprocessor "${name}"`);
      }
      const preprocessorOptions = Object.assign(
        {},
        preprocessors[name].defaultOptions,
        options,
      );
      this.decoded = this.workerPool.dispatchJob({
        operation: 'preprocess',
        preprocessorName: name,
        image: await this.decoded,
        options: preprocessorOptions,
      });
      await this.decoded;
    }
  }

  /**
   * Define one or several encoders to use on the image.
   * Keys that are not encoder names are skipped silently — that is what lets
   * the optimizer settings below live in the same options object.
   * @param {object} encodeOptions - An object with encoders to use, and their settings.
   * @param {number} [encodeOptions.optimizerButteraugliTarget=1.4] - Butteraugli distance goal for 'auto' encoder configs.
   * @param {number} [encodeOptions.maxOptimizerRounds=6] - Iteration cap for the auto optimizer.
   * @returns {Promise<undefined>} - A promise that resolves when the image has been encoded with all the specified encoders.
   */
  async encode(encodeOptions = {}) {
    const { bitmap } = await this.decoded;
    for (const [encName, options] of Object.entries(encodeOptions)) {
      if (!Object.keys(encoders).includes(encName)) {
        continue;
      }
      const encRef = encoders[encName];
      const encConfig =
        typeof options === 'string'
          ? options
          : Object.assign({}, encRef.defaultEncoderOptions, options);
      this.encodedWith[encName] = this.workerPool.dispatchJob({
        operation: 'encode',
        bitmap,
        encName,
        encConfig,
        // Fall back to defaults instead of Number(undefined) === NaN, which
        // previously poisoned the 'auto' optimizer when these were unset.
        optimizerButteraugliTarget: Number(
          encodeOptions.optimizerButteraugliTarget ?? 1.4,
        ),
        maxOptimizerRounds: Number(encodeOptions.maxOptimizerRounds ?? 6),
      });
    }
    await Promise.all(Object.values(this.encodedWith));
  }
}
|
||||
|
||||
/**
 * A pool where images can be ingested and squooshed.
 */
class ImagePool {
  /**
   * Create a new pool.
   * @param {number} [threads] - Number of concurrent image processes to run in the pool. Defaults to the number of CPU cores in the system.
   */
  constructor(threads) {
    const concurrency = threads || cpus().length;
    // NOTE(review): `__filename` is a CommonJS global and is undefined in a
    // plain ES module — confirm the build step injects it.
    this.workerPool = new WorkerPool(concurrency, __filename);
  }

  /**
   * Ingest an image into the image pool.
   * @param {string | Buffer | URL | object} image - The image or path to the image that should be ingested and decoded.
   * @returns {Image} - A custom class reference to the decoded image.
   */
  ingestImage(image) {
    return new Image(this.workerPool, image);
  }

  /**
   * Closes the underlying image processing pipeline. The already processed images will still be there, but no new processing can start.
   * @returns {Promise<undefined>} - A promise that resolves when the underlying pipeline has closed.
   */
  async close() {
    await this.workerPool.join();
  }
}
|
||||
|
||||
// The pool spawns worker_threads Workers pointing back at this same file
// (ImagePool passes __filename to WorkerPool). When loaded inside such a
// worker, register as a job handler instead of acting as the public API.
if (!isMainThread) {
  WorkerPool.useThisThreadAsWorker(handleJob);
}
|
||||
Reference in New Issue
Block a user