mirror of https://github.com/GoogleChromeLabs/squoosh.git
Add ImageQuant support
@@ -26,14 +26,21 @@ const pngEncDecPromise = pngEncDec.default(
   fsp.readFile(pathify(pngEncDecWasm))
 );
 
+// OxiPNG
 import * as oxipng from "../../codecs/oxipng/pkg/squoosh_oxipng.js";
 import oxipngWasm from "asset-url:../../codecs/oxipng/pkg/squoosh_oxipng_bg.wasm";
 const oxipngPromise = oxipng.default(fsp.readFile(pathify(oxipngWasm)));
 
+// Resize
 import * as resize from "../../codecs/resize/pkg/squoosh_resize.js";
 import resizeWasm from "asset-url:../../codecs/resize/pkg/squoosh_resize_bg.wasm";
 const resizePromise = resize.default(fsp.readFile(pathify(resizeWasm)));
 
+// ImageQuant
+import imageQuant from "../../codecs/imagequant/imagequant.js";
+import imageQuantWasm from "asset-url:../../codecs/imagequant/imagequant.wasm";
+const imageQuantPromise = instantiateEmscriptenWasm(imageQuant, imageQuantWasm);
+
 // Our decoders currently rely on a `ImageData` global.
 import ImageData from "./image_data.js";
 globalThis.ImageData = ImageData;
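
Note on the new loader: oxipng and resize are wasm-bindgen-style packages whose default export takes the raw wasm bytes, whereas imagequant.js is an Emscripten-emitted module, so it is wired up through the instantiateEmscriptenWasm helper instead (the helper itself lives elsewhere in the CLI and is not part of this diff). A minimal sketch of what such a helper typically looks like, stated purely as an assumption for illustration:

// Hypothetical sketch, not the repo's implementation: Emscripten module
// factories accept a locateFile hook, which lets the generated glue code
// load the bundled .wasm asset instead of a hard-coded relative path.
function instantiateEmscriptenWasm(factory, wasmUrl) {
  return factory({
    locateFile(requestedPath) {
      return requestedPath.endsWith(".wasm") ? wasmUrl : requestedPath;
    }
  });
}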
@@ -114,12 +121,28 @@ export const preprocessors = {
       };
     },
     defaultOptions: {
-      // This will be set to 'vector' if the input is SVG.
       method: "lanczos3",
       fitMethod: "stretch",
       premultiply: true,
       linearRGB: true
     }
+  },
+  quant: {
+    name: "ImageQuant",
+    description: "Reduce the number of colors used (aka. paletting)",
+    instantiate: async () => {
+      const imageQuant = await imageQuantPromise;
+      return (buffer, width, height, { numColors, dither }) =>
+        new ImageData(
+          imageQuant.quantize(buffer, width, height, numColors, dither),
+          width,
+          height
+        );
+    },
+    defaultOptions: {
+      numColors: 255,
+      dither: 1.0
+    }
   }
 };
 
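
The quant entry lands in the codec registry (the module imported below as "./codecs.js"), so callers use it exactly like the existing resize preprocessor: await instantiate(), then call the returned function on raw RGBA bytes. A rough usage sketch; the input buffer, dimensions, and the 64-color override are illustrative assumptions:

import { preprocessors } from "./codecs.js";

// Illustrative input: a 2x2 fully opaque red square, 4 bytes per pixel.
const width = 2;
const height = 2;
const rgba = new Uint8Array([
  255, 0, 0, 255,  255, 0, 0, 255,
  255, 0, 0, 255,  255, 0, 0, 255
]);

const quantize = await preprocessors.quant.instantiate();
const paletted = quantize(rgba, width, height, {
  ...preprocessors.quant.defaultOptions, // { numColors: 255, dither: 1.0 }
  numColors: 64
});
// `paletted` is an ImageData backed by the color-reduced pixels.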
@@ -5,10 +5,10 @@ import { cpus } from "os";
 import { extname, join, basename } from "path";
 import { promises as fsp } from "fs";
 import { version } from "json:../package.json";
-import ora from 'ora';
-import kleur from 'kleur';
+import ora from "ora";
+import kleur from "kleur";
 
-import {codecs as supportedFormats, preprocessors} from "./codecs.js";
+import { codecs as supportedFormats, preprocessors } from "./codecs.js";
 import WorkerPool from "./worker_pool.js";
 import { autoOptimize } from "./auto-optimizer.js";
 
@@ -47,13 +47,14 @@ async function decodeFile(file) {
   };
 }
 
-async function preprocessImage({
-  preprocessorName,
-  options,
-  file
-}) {
+async function preprocessImage({ preprocessorName, options, file }) {
   const preprocessor = await preprocessors[preprocessorName].instantiate();
-  file.bitmap= await preprocessor(file.bitmap.data, file.bitmap.width, file.bitmap.height, options);
+  file.bitmap = await preprocessor(
+    file.bitmap.data,
+    file.bitmap.width,
+    file.bitmap.height,
+    options
+  );
   return file;
 }
 
@@ -120,7 +121,7 @@ async function encodeFile({
 // both decoding and encoding go through the worker pool
 function handleJob(params) {
   const { operation } = params;
-  switch(operation) {
+  switch (operation) {
     case "encode":
       return encodeFile(params);
     case "decode":
@@ -138,37 +139,44 @@ function progressTracker(results) {
   tracker.spinner = spinner;
   tracker.progressOffset = 0;
   tracker.totalOffset = 0;
-  let status = '';
-  tracker.setStatus = (text) => {
-    status = text || '';
+  let status = "";
+  tracker.setStatus = text => {
+    status = text || "";
     update();
   };
-  let progress = '';
+  let progress = "";
   tracker.setProgress = (done, total) => {
     spinner.prefixText = kleur.dim(`${done}/${total}`);
-    const completeness = (tracker.progressOffset + done) / (tracker.totalOffset + total);
-    progress = kleur.cyan(`▐${'▨'.repeat(completeness*10|0).padEnd(10, '╌')}▌ `);
+    const completeness =
+      (tracker.progressOffset + done) / (tracker.totalOffset + total);
+    progress = kleur.cyan(
+      `▐${"▨".repeat((completeness * 10) | 0).padEnd(10, "╌")}▌ `
+    );
     update();
   };
   function update() {
     spinner.text = progress + kleur.bold(status) + getResultsText();
   }
-  tracker.finish = (text) => {
+  tracker.finish = text => {
     spinner.succeed(kleur.bold(text) + getResultsText());
-  }
+  };
   function getResultsText() {
-    let out = '';
+    let out = "";
     for (const [filename, result] of results.entries()) {
       out += `\n ${kleur.cyan(filename)}: ${prettyPrintSize(result.size)}`;
       for (const { outputFile, outputSize, infoText } of result.outputs) {
         const name = (program.suffix + extname(outputFile)).padEnd(5);
-        out += `\n ${kleur.dim('└')} ${kleur.cyan(name)} → ${prettyPrintSize(outputSize)}`;
+        out += `\n ${kleur.dim("└")} ${kleur.cyan(name)} → ${prettyPrintSize(
+          outputSize
+        )}`;
         const percent = ((outputSize / result.size) * 100).toPrecision(3);
-        out += ` (${kleur[outputSize>result.size?'red':'green'](percent+'%')})`;
+        out += ` (${kleur[outputSize > result.size ? "red" : "green"](
+          percent + "%"
+        )})`;
         if (infoText) out += kleur.yellow(infoText);
       }
     }
-    return out || '\n';
+    return out || "\n";
   }
   spinner.start();
   return tracker;
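
The reformatted progress-bar line is easier to follow now: completeness is scaled to tenths, truncated with | 0, and padded to a fixed ten-cell bar. A worked example with made-up numbers:

// completeness = 7 / 19 ≈ 0.368
// (0.368 * 10) | 0               -> 3   (bitwise OR truncates toward zero)
// "▨".repeat(3).padEnd(10, "╌")  -> "▨▨▨╌╌╌╌╌╌╌"
// wrapped in kleur.cyan(`▐...▌ `) -> a cyan ▐▨▨▨╌╌╌╌╌╌╌▌ prefix on the spinner text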
@@ -180,7 +188,7 @@ async function processFiles(files) {
   const results = new Map();
   const progress = progressTracker(results);
 
-  progress.setStatus('Decoding...');
+  progress.setStatus("Decoding...");
   progress.totalOffset = files.length;
   progress.setProgress(0, files.length);
 
@@ -189,8 +197,12 @@ async function processFiles(files) {
   await fsp.mkdir(program.outputDir, { recursive: true });
 
   let decoded = 0;
-  let decodedFiles = await Promise.all(files.map(async file => {
-    const result = await workerPool.dispatchJob({ operation: 'decode', file });
+  let decodedFiles = await Promise.all(
+    files.map(async file => {
+      const result = await workerPool.dispatchJob({
+        operation: "decode",
+        file
+      });
       results.set(file, {
         file: result.file,
         size: result.size,
@@ -198,10 +210,11 @@ async function processFiles(files) {
       });
       progress.setProgress(++decoded, files.length);
       return result;
-  }));
+    })
+  );
 
   for (const [preprocessorName, value] of Object.entries(preprocessors)) {
-    if(!program[preprocessorName]) {
+    if (!program[preprocessorName]) {
       continue;
     }
     const preprocessorParam = program[preprocessorName];
@@ -211,22 +224,23 @@ async function processFiles(files) {
       JSON5.parse(preprocessorParam)
     );
 
-    decodedFiles = await Promise.all(decodedFiles.map(async file => {
+    decodedFiles = await Promise.all(
+      decodedFiles.map(async file => {
         return workerPool.dispatchJob({
           file,
           operation: "preprocess",
           preprocessorName,
           options: preprocessorOptions
         });
-    }));
+      })
+    );
 
     for (const { file, bitmap, size } of decodedFiles) {
 
     }
   }
 
   progress.progressOffset = decoded;
-  progress.setStatus('Encoding ' + kleur.dim(`(${parallelism} threads)`));
+  progress.setStatus("Encoding " + kleur.dim(`(${parallelism} threads)`));
   progress.setProgress(0, files.length);
 
   const jobs = [];
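
Because the preprocessor flag is parsed with JSON5 rather than strict JSON, the value given on the command line may use unquoted keys; the enclosing call just above this hunk presumably folds the parsed object over the preprocessor's defaultOptions. An illustrative trace with hypothetical values:

import JSON5 from "json5";

// e.g. the user passed:  --quant '{numColors: 16, dither: 0.5}'
const preprocessorParam = "{numColors: 16, dither: 0.5}";
const preprocessorOptions = JSON5.parse(preprocessorParam);
// -> { numColors: 16, dither: 0.5 }, dispatched to a worker as
//    { file, operation: "preprocess", preprocessorName: "quant", options: preprocessorOptions }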
@@ -254,17 +268,19 @@ async function processFiles(files) {
       jobsStarted++;
       const p = workerPool
         .dispatchJob({
-          operation: 'encode',
+          operation: "encode",
           file,
           size,
           bitmap,
           outputFile,
           encName,
           encConfig,
-          optimizerButteraugliTarget: Number(program.optimizerButteraugliTarget),
+          optimizerButteraugliTarget: Number(
+            program.optimizerButteraugliTarget
+          ),
           maxOptimizerRounds: Number(program.maxOptimizerRounds)
         })
-        .then((output) => {
+        .then(output => {
           jobsFinished++;
           results.get(file).outputs.push(output);
           progress.setProgress(jobsFinished, jobsStarted);
@@ -278,7 +294,7 @@ async function processFiles(files) {
   // Wait for all jobs to finish
   await workerPool.join();
   await Promise.all(jobs);
-  progress.finish('Squoosh results:');
+  progress.finish("Squoosh results:");
 }
 
 if (isMainThread) {
@@ -291,21 +307,18 @@ if (isMainThread) {
     .option(
       "--max-optimizer-rounds <rounds>",
       "Maximum number of compressions to use for auto optimizations",
-      '6'
+      "6"
     )
     .option(
       "--optimizer-butteraugli-target <butteraugli distance>",
       "Target Butteraugli distance for auto optimizer",
-      '1.4'
+      "1.4"
     )
     .action(processFiles);
 
   // Create a CLI option for each supported preprocessor
   for (const [key, value] of Object.entries(preprocessors)) {
-    program.option(
-      `--${key} [config]`,
-      value.description
-    );
+    program.option(`--${key} [config]`, value.description);
   }
   // Create a CLI option for each supported encoder
   for (const [key, value] of Object.entries(supportedFormats)) {
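
Because this option loop iterates over every registry entry, the ImageQuant preprocessor added in codecs.js becomes a CLI flag with no further wiring here. Roughly what that amounts to (flag shape inferred from the loop above):

// The loop effectively registers, among others:
//   program.option("--quant [config]", "Reduce the number of colors used (aka. paletting)");
//   program.option("--resize [config]", /* resize description */);
// so running the CLI with  --quant '{numColors: 64}'  sets program.quant, which
// processFiles() picks up, JSON5-parses, and dispatches as a "preprocess" job.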