mirror of
https://github.com/GoogleChromeLabs/squoosh.git
synced 2025-11-12 00:37:19 +00:00
Use workers for parallelization
This commit is contained in:
79
cli/codecs.js
Normal file
79
cli/codecs.js
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
|
||||||
|
// Codec registry for the CLI.
// Each entry maps a codec key (also the CLI flag name) to:
//   name                  - human-readable name for help text
//   extension             - output file extension
//   detectors             - regexes matched against the file's first bytes
//   dec / enc             - factories for the WASM decoder / encoder modules
//   defaultEncoderOptions - defaults merged with user-supplied config

const mozjpeg = {
  name: "MozJPEG",
  extension: "jpg",
  // JPEG streams start with the SOI marker FF D8 FF.
  detectors: [/^\xFF\xD8\xFF/],
  dec: require("../codecs/mozjpeg/dec/mozjpeg_dec.js"),
  enc: require("../codecs/mozjpeg/enc/mozjpeg_enc.js"),
  defaultEncoderOptions: {
    quality: 75,
    baseline: false,
    arithmetic: false,
    progressive: true,
    optimize_coding: true,
    smoothing: 0,
    color_space: 3 /*YCbCr*/,
    quant_table: 3,
    trellis_multipass: false,
    trellis_opt_zero: false,
    trellis_opt_table: false,
    trellis_loops: 1,
    auto_subsample: true,
    chroma_subsample: 2,
    separate_chroma_quality: false,
    chroma_quality: 75
  }
};

const webp = {
  name: "WebP",
  extension: "webp",
  // RIFF container header followed by a WEBP chunk (VP8 / VP8L / VP8X).
  detectors: [/^RIFF....WEBPVP8[LX ]/],
  dec: require("../codecs/webp/dec/webp_dec.js"),
  enc: require("../codecs/webp/enc/webp_enc.js"),
  defaultEncoderOptions: {
    quality: 75,
    target_size: 0,
    target_PSNR: 0,
    method: 4,
    sns_strength: 50,
    filter_strength: 60,
    filter_sharpness: 0,
    filter_type: 1,
    partitions: 0,
    segments: 4,
    pass: 1,
    show_compressed: 0,
    preprocessing: 0,
    autofilter: 0,
    partition_limit: 0,
    alpha_compression: 1,
    alpha_filtering: 1,
    alpha_quality: 100,
    lossless: 0,
    exact: 0,
    image_hint: 0,
    emulate_jpeg_size: 0,
    thread_level: 0,
    low_memory: 0,
    near_lossless: 100,
    use_delta_palette: 0,
    use_sharp_yuv: 0
  }
};

const avif = {
  name: "AVIF",
  extension: "avif",
  // ISOBMFF box size followed by an "ftypavif" brand.
  detectors: [/^\x00\x00\x00 ftypavif\x00\x00\x00\x00/],
  dec: require("../codecs/avif/dec/avif_dec.js"),
  enc: require("../codecs/avif/enc/avif_enc.js"),
  defaultEncoderOptions: {
    minQuantizer: 16,
    maxQuantizer: 16,
    tileColsLog2: 0,
    tileRowsLog2: 0,
    speed: 10,
    subsample: 0
  }
};

module.exports = { mozjpeg, webp, avif };
|
||||||
210
cli/index.js
210
cli/index.js
@@ -1,93 +1,21 @@
|
|||||||
const { program } = require("commander");
|
const { program } = require("commander");
|
||||||
const JSON5 = require("json5");
|
const JSON5 = require("json5");
|
||||||
//const { Worker, isMainThread, parentPort } = require('worker_threads');
|
const {
|
||||||
//const {cpus} = require("os");
|
threadId,
|
||||||
|
Worker,
|
||||||
|
isMainThread,
|
||||||
|
parentPort
|
||||||
|
} = require("worker_threads");
|
||||||
|
const { cpus } = require("os");
|
||||||
const path = require("path");
|
const path = require("path");
|
||||||
const fsp = require("fs").promises;
|
const fsp = require("fs").promises;
|
||||||
|
|
||||||
const visdifModule = require("../codecs/visdif/visdif.js");
|
const visdifModule = require("../codecs/visdif/visdif.js");
|
||||||
|
const supportedFormats = require("./codecs.js");
|
||||||
|
|
||||||
// Our decoders currently rely on this.
|
// Our decoders currently rely on a `ImageData` global.
|
||||||
globalThis.ImageData = require("./image_data.js");
|
globalThis.ImageData = require("./image_data.js");
|
||||||
|
|
||||||
const supportedFormats = {
|
|
||||||
mozjpeg: {
|
|
||||||
name: "MozJPEG",
|
|
||||||
extension: "jpg",
|
|
||||||
detectors: [/^\xFF\xD8\xFF/],
|
|
||||||
dec: require("../codecs/mozjpeg/dec/mozjpeg_dec.js"),
|
|
||||||
enc: require("../codecs/mozjpeg/enc/mozjpeg_enc.js"),
|
|
||||||
defaultEncoderOptions: {
|
|
||||||
quality: 75,
|
|
||||||
baseline: false,
|
|
||||||
arithmetic: false,
|
|
||||||
progressive: true,
|
|
||||||
optimize_coding: true,
|
|
||||||
smoothing: 0,
|
|
||||||
color_space: 3 /*YCbCr*/,
|
|
||||||
quant_table: 3,
|
|
||||||
trellis_multipass: false,
|
|
||||||
trellis_opt_zero: false,
|
|
||||||
trellis_opt_table: false,
|
|
||||||
trellis_loops: 1,
|
|
||||||
auto_subsample: true,
|
|
||||||
chroma_subsample: 2,
|
|
||||||
separate_chroma_quality: false,
|
|
||||||
chroma_quality: 75
|
|
||||||
}
|
|
||||||
},
|
|
||||||
webp: {
|
|
||||||
name: "WebP",
|
|
||||||
extension: "webp",
|
|
||||||
detectors: [/^RIFF....WEBPVP8[LX ]/],
|
|
||||||
dec: require("../codecs/webp/dec/webp_dec.js"),
|
|
||||||
enc: require("../codecs/webp/enc/webp_enc.js"),
|
|
||||||
defaultEncoderOptions: {
|
|
||||||
quality: 75,
|
|
||||||
target_size: 0,
|
|
||||||
target_PSNR: 0,
|
|
||||||
method: 4,
|
|
||||||
sns_strength: 50,
|
|
||||||
filter_strength: 60,
|
|
||||||
filter_sharpness: 0,
|
|
||||||
filter_type: 1,
|
|
||||||
partitions: 0,
|
|
||||||
segments: 4,
|
|
||||||
pass: 1,
|
|
||||||
show_compressed: 0,
|
|
||||||
preprocessing: 0,
|
|
||||||
autofilter: 0,
|
|
||||||
partition_limit: 0,
|
|
||||||
alpha_compression: 1,
|
|
||||||
alpha_filtering: 1,
|
|
||||||
alpha_quality: 100,
|
|
||||||
lossless: 0,
|
|
||||||
exact: 0,
|
|
||||||
image_hint: 0,
|
|
||||||
emulate_jpeg_size: 0,
|
|
||||||
thread_level: 0,
|
|
||||||
low_memory: 0,
|
|
||||||
near_lossless: 100,
|
|
||||||
use_delta_palette: 0,
|
|
||||||
use_sharp_yuv: 0
|
|
||||||
}
|
|
||||||
},
|
|
||||||
avif: {
|
|
||||||
name: "AVIF",
|
|
||||||
extension: "avif",
|
|
||||||
detectors: [/^\x00\x00\x00 ftypavif\x00\x00\x00\x00/],
|
|
||||||
dec: require("../codecs/avif/dec/avif_dec.js"),
|
|
||||||
enc: require("../codecs/avif/enc/avif_enc.js"),
|
|
||||||
defaultEncoderOptions: {
|
|
||||||
minQuantizer: 16,
|
|
||||||
maxQuantizer: 16,
|
|
||||||
tileColsLog2: 0,
|
|
||||||
tileRowsLog2: 0,
|
|
||||||
speed: 10,
|
|
||||||
subsample: 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
async function decodeFile(file) {
|
async function decodeFile(file) {
|
||||||
const buffer = await fsp.readFile(file);
|
const buffer = await fsp.readFile(file);
|
||||||
const firstChunk = buffer.slice(0, 16);
|
const firstChunk = buffer.slice(0, 16);
|
||||||
@@ -106,6 +34,30 @@ async function decodeFile(file) {
|
|||||||
return rgba;
|
return rgba;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Generates a 32-character lowercase hex ID from 16 random bytes.
// Each byte is zero-padded to two hex digits: without padding, bytes
// below 0x10 render as a single digit, making the concatenation both
// variable-length and ambiguous (e.g. [0x1, 0x12] and [0x11, 0x2]
// would both join to "112"), which weakens uniqueness.
// NOTE(review): Math.random is not cryptographically secure; fine for
// job correlation IDs, but don't reuse this for security tokens.
function uuid() {
  return Array.from({ length: 16 }, () =>
    Math.floor(Math.random() * 256)
      .toString(16)
      .padStart(2, "0")
  ).join("");
}

// Adds a unique ID to the message payload and waits
// for the worker to respond with that id as a signal
// that the job is done. Resolves with the worker's reply.
// The caller's `msg` object is not mutated (the id is added to a copy).
function jobPromise(worker, msg) {
  return new Promise(resolve => {
    const id = uuid();
    worker.postMessage({ ...msg, id });
    worker.on("message", function f(reply) {
      // Ignore replies belonging to other in-flight jobs.
      if (reply.id !== id) {
        return;
      }
      worker.off("message", f);
      resolve(reply);
    });
  });
}
|
||||||
|
|
||||||
|
/*
|
||||||
const butteraugliGoal = 1.4;
|
const butteraugliGoal = 1.4;
|
||||||
const maxRounds = 8;
|
const maxRounds = 8;
|
||||||
async function optimize(bitmapIn, encode, decode) {
|
async function optimize(bitmapIn, encode, decode) {
|
||||||
@@ -153,63 +105,81 @@ async function optimize(bitmapIn, encode, decode) {
|
|||||||
quality,
|
quality,
|
||||||
attempts
|
attempts
|
||||||
};
|
};
|
||||||
}
|
}*/
|
||||||
|
|
||||||
//if(isMainThread) {
|
// Decodes each input file and fans encode jobs out to a worker pool,
// one worker per CPU, assigning jobs round-robin. For every file and
// every codec flag the user passed, one output file is produced in
// program.outputDir. Resolves when all jobs have settled and the pool
// has been torn down.
async function processFiles(files) {
  // Create output directory
  await fsp.mkdir(program.outputDir, { recursive: true });

  // One worker per CPU; each worker re-runs this script off the main thread.
  const pool = Array.from(
    { length: cpus().length },
    () => new Worker(process.argv[1])
  );
  let i = 0;
  const jobs = [];
  for (const file of files) {
    const ext = path.extname(file);
    const base = path.basename(file, ext);
    // Decode on the main thread; only encoding is parallelized.
    const bitmap = await decodeFile(file);

    for (const [encName, value] of Object.entries(supportedFormats)) {
      const optValue = program[encName];
      if (!optValue) {
        continue;
      }
      // commander sets a bare `--codec` flag to boolean true; only parse
      // when the user actually supplied a JSON5 config string, otherwise
      // JSON5.parse(true) would throw and the defaults should be used.
      const encConfig = Object.assign(
        {},
        value.defaultEncoderOptions,
        typeof optValue === "string" ? JSON5.parse(optValue) : {}
      );
      const outputFile = path.join(
        program.outputDir,
        `${base}.${value.extension}`
      );
      jobs.push(
        jobPromise(pool[i], {
          bitmap,
          outputFile,
          encName,
          encConfig
        })
      );
      // Round-robin assignment across the pool.
      i = (i + 1) % pool.length;
    }
  }
  // Wait for all jobs to finish
  await Promise.allSettled(jobs);
  pool.forEach(worker => worker.terminate());
}
|
||||||
|
|
||||||
program.parse(process.argv);
|
// Entry point. The same script runs in two roles:
//  - main thread: parses CLI arguments and dispatches via processFiles
//  - worker thread: encodes jobs posted by the main thread
if (isMainThread) {
  program
    .version(require("./package.json").version)
    .arguments("<files...>")
    .option("-d, --output-dir <dir>", "Output directory", ".")
    .action(processFiles);

  // Create a CLI option for each supported encoder
  for (const [key, value] of Object.entries(supportedFormats)) {
    program.option(
      `--${key} [config]`,
      `Use ${value.name} to generate a .${value.extension} file with the given configuration`
    );
  }

  program.parse(process.argv);
} else {
  parentPort.on(
    "message",
    async ({ id, bitmap, outputFile, encName, encConfig }) => {
      try {
        const encoder = await supportedFormats[encName].enc();
        const out = encoder.encode(
          bitmap.data.buffer,
          bitmap.width,
          bitmap.height,
          encConfig
        );
        await fsp.writeFile(outputFile, out);
      } catch (err) {
        // Surface the failure instead of dying with an unhandled rejection.
        console.error(`Failed to encode ${outputFile}:`, err);
      } finally {
        // Always signal we are done with this job — otherwise the main
        // thread's jobPromise for this id would stay pending forever and
        // the CLI would hang on Promise.allSettled.
        parentPort.postMessage({ id });
      }
    }
  );
}
|
||||||
|
|||||||
Reference in New Issue
Block a user