Introduce libSquoosh

This commit is contained in:
Surma
2021-05-19 12:15:00 +01:00
parent 25754b91b7
commit 392aced394
19 changed files with 86 additions and 100 deletions

12
cli/package-lock.json generated
View File

@@ -4,13 +4,6 @@
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"@squoosh/api": {
"version": "file:../api/squoosh-api-0.1.0.tgz",
"integrity": "sha512-fraw9j1Qq4MKhiA3VF+8djKcvgV42qCWaMQvLjfkn3r7jpFjAlHhoyHNpkfLDunKY3M55BHpBdn2/ozXZWt8kw==",
"requires": {
"web-streams-polyfill": "^3.0.3"
}
},
"ansi-regex": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz",
@@ -247,11 +240,6 @@
"requires": {
"defaults": "^1.0.3"
}
},
"web-streams-polyfill": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.0.3.tgz",
"integrity": "sha512-d2H/t0eqRNM4w2WvmTdoeIvzAUSpK7JmATB8Nr2lb7nQ9BTIJVjbQ/TRFVEh2gUH1HwclPdoPtfMoFfetXaZnA=="
}
}
}

View File

@@ -15,7 +15,7 @@
"author": "Google Chrome Developers <chromium-dev@google.com>",
"license": "Apache-2.0",
"dependencies": {
"@squoosh/api": "0.1.0",
"@squoosh/lib": "0.1.0",
"commander": "^7.2.0",
"json5": "^2.2.0",
"kleur": "^4.1.4",

View File

@@ -7,8 +7,7 @@ import { promises as fsp } from 'fs';
import ora from 'ora';
import kleur from 'kleur';
//Replace package name with '../../api/build/index.js' to test unpublished changes in the API
import { ImagePool, preprocessors, encoders } from '@squoosh/api';
import { ImagePool, preprocessors, encoders } from '@squoosh/lib';
function clamp(v, min, max) {
if (v < min) return min;
@@ -55,9 +54,9 @@ function progressTracker(results) {
for (const result of results.values()) {
out += `\n ${kleur.cyan(result.file)}: ${prettyPrintSize(result.size)}`;
for (const { outputFile, size: outputSize, infoText } of result.outputs) {
out += `\n ${kleur.dim('└')} ${kleur.cyan(outputFile.padEnd(5))}${prettyPrintSize(
outputSize,
)}`;
out += `\n ${kleur.dim('└')} ${kleur.cyan(
outputFile.padEnd(5),
)}${prettyPrintSize(outputSize)}`;
const percent = ((outputSize / result.size) * 100).toPrecision(3);
out += ` (${kleur[outputSize > result.size ? 'red' : 'green'](
percent + '%',
@@ -76,7 +75,7 @@ async function getInputFiles(paths) {
for (const inputPath of paths) {
const files = (await fsp.lstat(inputPath)).isDirectory()
? (await fsp.readdir(inputPath)).map(file => path.join(inputPath, file))
? (await fsp.readdir(inputPath)).map((file) => path.join(inputPath, file))
: [inputPath];
for (const file of files) {
try {
@@ -135,17 +134,21 @@ async function processFiles(files) {
if (!program.opts()[preprocessorName]) {
continue;
}
preprocessOptions[preprocessorName] = JSON5.parse(program.opts()[preprocessorName]);
preprocessOptions[preprocessorName] = JSON5.parse(
program.opts()[preprocessorName],
);
}
for(const image of decodedFiles){
for (const image of decodedFiles) {
image.preprocess(preprocessOptions);
}
await Promise.all(decodedFiles.map( (image) => image.decoded ));
await Promise.all(decodedFiles.map((image) => image.decoded));
progress.progressOffset = decoded;
progress.setStatus('Encoding ' + kleur.dim(`(${imagePool.workerPool.numWorkers} threads)`));
progress.setStatus(
'Encoding ' + kleur.dim(`(${imagePool.workerPool.numWorkers} threads)`),
);
progress.setProgress(0, files.length);
const jobs = [];
@@ -155,34 +158,37 @@ async function processFiles(files) {
const originalFile = results.get(image).file;
const encodeOptions = {
optimizerButteraugliTarget: Number(program.opts().optimizerButteraugliTarget),
optimizerButteraugliTarget: Number(
program.opts().optimizerButteraugliTarget,
),
maxOptimizerRounds: Number(program.opts().maxOptimizerRounds),
}
};
for (const encName of Object.keys(encoders)) {
if (!program.opts()[encName]) {
continue;
}
const encParam = program.opts()[encName];
const encConfig = encParam.toLowerCase() === 'auto' ? 'auto' : JSON5.parse(encParam);
const encConfig =
encParam.toLowerCase() === 'auto' ? 'auto' : JSON5.parse(encParam);
encodeOptions[encName] = encConfig;
}
jobsStarted++;
const job = image.encode(encodeOptions)
.then(async () => {
jobsFinished++;
const outputPath = path.join(program.opts().outputDir, program.opts().suffix + path.basename(originalFile, path.extname(originalFile)));
for(const output of Object.values(image.encodedWith)){
const outputFile = `${outputPath}.${(await output).extension}`;
await fsp.writeFile(outputFile, (await output).binary);
results.get(image).outputs.push(
Object.assign(
await output,
{outputFile},
)
);
}
progress.setProgress(jobsFinished, jobsStarted);
});
const job = image.encode(encodeOptions).then(async () => {
jobsFinished++;
const outputPath = path.join(
program.opts().outputDir,
program.opts().suffix +
path.basename(originalFile, path.extname(originalFile)),
);
for (const output of Object.values(image.encodedWith)) {
const outputFile = `${outputPath}.${(await output).extension}`;
await fsp.writeFile(outputFile, (await output).binary);
results
.get(image)
.outputs.push(Object.assign(await output, { outputFile }));
}
progress.setProgress(jobsFinished, jobsStarted);
});
jobs.push(job);
}
@@ -194,8 +200,6 @@ async function processFiles(files) {
progress.finish('Squoosh results:');
}
program
.name('squoosh-cli')
.arguments('<files...>')
@@ -226,5 +230,3 @@ for (const [key, value] of Object.entries(encoders)) {
}
program.parse(process.argv);

View File

@@ -30,7 +30,7 @@ const imagePath = 'path/to/image.png';
const image = imagePool.ingestImage(imagePath);
```
These `ingestImage` function can take anything the node [`readFile`][readFile] function can take, uncluding a buffer and `FileHandle`.
The `ingestImage` function can take anything the node [`readFile`][readfile] function can take, including a buffer and `FileHandle`.
The returned `image` object is a representation of the original image, that you can now preprocess, encode, and extract information about.
@@ -91,12 +91,14 @@ This example iterates through all encoded versions of the image and writes them
```js
const newImagePath = '/path/to/image.'; //extension is added automatically
for(const encodedImage of Object.values(image.encodedWith)){
fs.writeFile(newImagePath + (await encodedImage).extension, (await encodedImage).binary);
for (const encodedImage of Object.values(image.encodedWith)) {
fs.writeFile(
newImagePath + (await encodedImage).extension,
(await encodedImage).binary,
);
}
```
## Extracting image information
Information about a decoded image is available at `Image.decoded`. It looks something like this:
@@ -121,7 +123,6 @@ console.log(await image.decoded);
Information about an encoded image can be found at `Image.encodedWith[encoderName]`. It looks something like this:
```js
console.log(await image.encodedWith.jxl);
// Returns:
@@ -159,4 +160,4 @@ const encodeOptions: {
[squoosh]: https://squoosh.app
[codecs.js]: https://github.com/GoogleChromeLabs/squoosh/blob/dev/cli/src/codecs.js
[butteraugli]: https://github.com/google/butteraugli
[readFile]: https://nodejs.org/api/fs.html#fs_fspromises_readfile_path_options
[readfile]: https://nodejs.org/api/fs.html#fs_fspromises_readfile_path_options

View File

@@ -10,11 +10,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { promises as fs } from "fs";
import { basename } from "path";
import { promises as fs } from 'fs';
import { basename } from 'path';
const defaultOpts = {
prefix: "asset-url"
prefix: 'asset-url',
};
export default function assetPlugin(opts) {
@@ -23,16 +23,16 @@ export default function assetPlugin(opts) {
/** @type {Map<string, Buffer>} */
let assetIdToSourceBuffer;
const prefix = opts.prefix + ":";
const prefix = opts.prefix + ':';
return {
name: "asset-plugin",
name: 'asset-plugin',
buildStart() {
assetIdToSourceBuffer = new Map();
},
augmentChunkHash(info) {
// Get the sources for all assets imported by this chunk.
const buffers = Object.keys(info.modules)
.map(moduleId => assetIdToSourceBuffer.get(moduleId))
.map((moduleId) => assetIdToSourceBuffer.get(moduleId))
.filter(Boolean);
if (buffers.length === 0) return;
@@ -56,20 +56,20 @@ export default function assetPlugin(opts) {
throw Error(`Cannot find ${realId}`);
}
// Add an additional .js to the end so it ends up with .js at the end in the _virtual folder.
return prefix + resolveResult.id + ".js";
return prefix + resolveResult.id + '.js';
},
async load(id) {
if (!id.startsWith(prefix)) return;
const realId = id.slice(prefix.length, -".js".length);
const realId = id.slice(prefix.length, -'.js'.length);
const source = await fs.readFile(realId);
assetIdToSourceBuffer.set(id, source);
this.addWatchFile(realId);
return `export default import.meta.ROLLUP_FILE_URL_${this.emitFile({
type: "asset",
type: 'asset',
source,
name: basename(realId)
name: basename(realId),
})}`;
}
},
};
}

View File

@@ -5,8 +5,8 @@ export default function autojsonPlugin() {
name: 'autojson-plugin',
async load(id) {
if (id.endsWith('.json') && !id.startsWith('json:')) {
return 'export default ' + await fsp.readFile(id, 'utf8');
return 'export default ' + (await fsp.readFile(id, 'utf8'));
}
}
},
};
};
}

View File

@@ -1,5 +1,5 @@
{
"name": "@squoosh/api",
"name": "@squoosh/lib",
"version": "0.1.0",
"lockfileVersion": 1,
"requires": true,

View File

@@ -1,7 +1,7 @@
{
"name": "@squoosh/api",
"name": "@squoosh/lib",
"version": "0.1.0",
"description": "An API for Squoosh",
"description": "A Node library for Squoosh",
"public": true,
"main": "/build/index.js",
"files": [

View File

@@ -1,7 +1,7 @@
import { instantiateEmscriptenWasm } from "./emscripten-utils.js";
import { instantiateEmscriptenWasm } from './emscripten-utils.js';
import visdif from "../../codecs/visdif/visdif.js";
import visdifWasm from "asset-url:../../codecs/visdif/visdif.wasm";
import visdif from '../../codecs/visdif/visdif.js';
import visdifWasm from 'asset-url:../../codecs/visdif/visdif.wasm';
// `measure` is a (async) function that takes exactly one numeric parameter and
// returns a value. The function is assumed to be monotonic (an increase in `parameter`
@@ -11,7 +11,7 @@ import visdifWasm from "asset-url:../../codecs/visdif/visdif.wasm";
export async function binarySearch(
measureGoal,
measure,
{ min = 0, max = 100, epsilon = 0.1, maxRounds = 8 } = {}
{ min = 0, max = 100, epsilon = 0.1, maxRounds = 8 } = {},
) {
let parameter = (max - min) / 2 + min;
let delta = (max - min) / 4;
@@ -36,14 +36,14 @@ export async function autoOptimize(
bitmapIn,
encode,
decode,
{ butteraugliDistanceGoal = 1.4, ...otherOpts } = {}
{ butteraugliDistanceGoal = 1.4, ...otherOpts } = {},
) {
const { VisDiff } = await instantiateEmscriptenWasm(visdif, visdifWasm);
const comparator = new VisDiff(
bitmapIn.data,
bitmapIn.width,
bitmapIn.height
bitmapIn.height,
);
let bitmapOut;
@@ -53,18 +53,18 @@ export async function autoOptimize(
// increase the metric value. So multiply Butteraugli values by -1.
const { parameter } = await binarySearch(
-1 * butteraugliDistanceGoal,
async quality => {
async (quality) => {
binaryOut = await encode(bitmapIn, quality);
bitmapOut = await decode(binaryOut);
return -1 * comparator.distance(bitmapOut.data);
},
otherOpts
otherOpts,
);
comparator.delete();
return {
bitmap: bitmapOut,
binary: binaryOut,
quality: parameter
quality: parameter,
};
}

View File

@@ -344,7 +344,11 @@ export const codecs = {
await oxipngPromise;
return {
encode: (buffer, width, height, opts) => {
const simplePng = pngEncDec.encode(new Uint8Array(buffer), width, height);
const simplePng = pngEncDec.encode(
new Uint8Array(buffer),
width,
height,
);
return oxipng.optimise(simplePng, opts.level);
},
};

View File

@@ -2,12 +2,11 @@ import { isMainThread } from 'worker_threads';
import { cpus } from 'os';
import { promises as fsp } from 'fs';
import { codecs as encoders, preprocessors} from './codecs.js';
import { codecs as encoders, preprocessors } from './codecs.js';
import WorkerPool from './worker_pool.js';
import { autoOptimize } from './auto-optimizer.js';
export { ImagePool, encoders, preprocessors};
export { ImagePool, encoders, preprocessors };
async function decodeFile({ file }) {
const buffer = await fsp.readFile(file);
@@ -21,9 +20,7 @@ async function decodeFile({ file }) {
if (!key) {
throw Error(`${file} has an unsupported format`);
}
const rgba = (await encoders[key].dec()).decode(
new Uint8Array(buffer),
);
const rgba = (await encoders[key].dec()).decode(new Uint8Array(buffer));
return {
bitmap: rgba,
size: buffer.length,
@@ -115,9 +112,9 @@ function handleJob(params) {
* Represents an ingested image.
*/
class Image {
constructor (workerPool, file) {
constructor(workerPool, file) {
this.workerPool = workerPool;
this.decoded = workerPool.dispatchJob({operation: 'decode', file});
this.decoded = workerPool.dispatchJob({ operation: 'decode', file });
this.encodedWith = {};
}
@@ -126,7 +123,7 @@ class Image {
* @param {object} preprocessOptions - An object with preprocessors to use, and their settings.
* @returns {Promise<undefined>} - A promise that resolves when all preprocessors have completed their work.
*/
async preprocess (preprocessOptions = {}) {
async preprocess(preprocessOptions = {}) {
for (const [name, options] of Object.entries(preprocessOptions)) {
if (!Object.keys(preprocessors).includes(name)) {
throw Error(`Invalid preprocessor "${name}"`);
@@ -151,7 +148,7 @@ class Image {
* @param {object} encodeOptions - An object with encoders to use, and their settings.
* @returns {Promise<undefined>} - A promise that resolves when the image has been encoded with all the specified encoders.
*/
async encode (encodeOptions = {}){
async encode(encodeOptions = {}) {
const { bitmap } = await this.decoded;
for (const [encName, options] of Object.entries(encodeOptions)) {
if (!Object.keys(encoders).includes(encName)) {
@@ -161,11 +158,7 @@ class Image {
const encConfig =
typeof options === 'string'
? options
: Object.assign(
{},
encRef.defaultEncoderOptions,
options,
);
: Object.assign({}, encRef.defaultEncoderOptions, options);
this.encodedWith[encName] = this.workerPool.dispatchJob({
operation: 'encode',
bitmap,
@@ -174,9 +167,7 @@ class Image {
optimizerButteraugliTarget: Number(
encodeOptions.optimizerButteraugliTarget,
),
maxOptimizerRounds: Number(
encodeOptions.maxOptimizerRounds
),
maxOptimizerRounds: Number(encodeOptions.maxOptimizerRounds),
});
}
await Promise.all(Object.values(this.encodedWith));
@@ -191,7 +182,7 @@ class ImagePool {
* Create a new pool.
* @param {number} [threads] - Number of concurrent image processes to run in the pool. Defaults to the number of CPU cores in the system.
*/
constructor (threads) {
constructor(threads) {
this.workerPool = new WorkerPool(threads || cpus().length, __filename);
}
@@ -200,7 +191,7 @@ class ImagePool {
* @param {string | Buffer | URL | object} image - The image or path to the image that should be ingested and decoded.
* @returns {Image} - A custom class reference to the decoded image.
*/
ingestImage (image) {
ingestImage(image) {
return new Image(this.workerPool, image);
}
@@ -208,7 +199,7 @@ class ImagePool {
* Closes the underlying image processing pipeline. The already processed images will still be there, but no new processing can start.
* @returns {Promise<undefined>} - A promise that resolves when the underlying pipeline has closed.
*/
async close () {
async close() {
await this.workerPool.join();
}
}