forked from external-repos/squoosh
Compare commits: one-pass-p...web-codecs
33 commits
| Author | SHA1 | Date |
|---|---|---|
| | 128096afd9 | |
| | 58661078e2 | |
| | cbfa503fcb | |
| | 96b6dc8e6e | |
| | 4033d1c965 | |
| | f779e13bc8 | |
| | 2f00fe2b1b | |
| | 118885cd26 | |
| | af80643809 | |
| | 1aba7b51ee | |
| | b0a7b21b0b | |
| | 8dfe35aa77 | |
| | 1a891072c0 | |
| | 720cb98872 | |
| | 4e1dcb819c | |
| | c36f4bebb8 | |
| | c04bb54f0d | |
| | 4890c56abb | |
| | 392aced394 | |
| | 8bcaeb2f78 | |
| | c417bd0a7a | |
| | eb8204d69b | |
| | 25754b91b7 | |
| | 875c24525b | |
| | 50ed5febd3 | |
| | 10c5082499 | |
| | f0fb891498 | |
| | b9b6e57581 | |
| | ff9dea465f | |
| | 912c1fac08 | |
| | ad0d46de3e | |
| | 8ed50d8f0c | |
| | b50402e3b3 | |
@@ -3,9 +3,9 @@
[Squoosh] is an image compression web app that allows you to dive into the advanced options provided
by various image compressors.

# CLI
# API & CLI

[Squoosh now has a CLI](https://github.com/GoogleChromeLabs/squoosh/tree/dev/cli) that allows you to compress many images at once.
Squoosh now has [an API](https://github.com/GoogleChromeLabs/squoosh/tree/dev/libsquoosh) and [a CLI](https://github.com/GoogleChromeLabs/squoosh/tree/dev/cli) that allows you to compress many images at once.

# Privacy

@@ -55,5 +55,5 @@ $ npx @squoosh/cli --wp2 auto test.png
```

[squoosh]: https://squoosh.app
[codecs.js]: https://github.com/GoogleChromeLabs/squoosh/blob/dev/cli/src/codecs.js
[codecs.js]: https://github.com/GoogleChromeLabs/squoosh/blob/dev/libsquoosh/src/codecs.js
[butteraugli]: https://github.com/google/butteraugli
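The README change above advertises the new libsquoosh API alongside the CLI. Below is a minimal sketch of that API, inferred from how the rewritten cli/src/index.js later in this diff drives it; the input file name, output name, and the mozjpeg quality value are illustrative assumptions, not values taken from the diff.

```js
import { promises as fsp } from 'fs';
import { ImagePool } from '@squoosh/lib';

// One pool of worker threads for all images (mirrors the CLI's usage).
const imagePool = new ImagePool();

// Ingest a source image; decoding happens lazily in a worker.
const image = imagePool.ingestImage('test.png'); // illustrative path
await image.decoded;

// Encode with one or more encoders; keys match the CLI flags (e.g. --mozjpeg).
await image.encode({ mozjpeg: { quality: 75 } }); // quality value is illustrative

// Each finished encode exposes a binary payload and a file extension.
const { binary, extension } = await image.encodedWith.mozjpeg;
await fsp.writeFile(`test.${extension}`, binary);

await imagePool.close();
```

The rewritten CLI wraps exactly this flow, adding option parsing, preprocessing, and progress reporting on top.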
cli/package-lock.json (2445 changed lines, generated): diff suppressed because it is too large.
@@ -1,32 +1,24 @@
{
"name": "@squoosh/cli",
"version": "0.6.0",
"version": "0.7.0",
"description": "A CLI for Squoosh",
"public": true,
"type": "module",
"bin": {
"squoosh-cli": "build/index.js",
"@squoosh/cli": "build/index.js"
},
"scripts": {
"build": "rollup -c"
"squoosh-cli": "src/index.js",
"@squoosh/cli": "src/index.js"
},
"files": [
"/src/index.js"
],
"keywords": [],
"author": "Google Chrome Developers <chromium-dev@google.com>",
"license": "Apache-2.0",
"dependencies": {
"web-streams-polyfill": "^3.0.0"
},
"devDependencies": {
"@babel/core": "^7.11.6",
"@babel/preset-env": "^7.11.5",
"@rollup/plugin-babel": "^5.2.1",
"@rollup/plugin-commonjs": "^15.0.0",
"@rollup/plugin-node-resolve": "^9.0.0",
"commander": "^6.0.0",
"json5": "^2.1.3",
"kleur": "^4.1.3",
"ora": "^5.1.0",
"rollup": "^2.26.11",
"rollup-plugin-terser": "^7.0.2"
"@squoosh/lib": "^0.2.0",
"commander": "^7.2.0",
"json5": "^2.2.0",
"kleur": "^4.1.4",
"ora": "^5.4.0"
}
}
cli/src/index.js (326 changed lines, Normal file → Executable file)
@@ -1,17 +1,13 @@
import { program } from 'commander';
#!/usr/bin/env node

import { program } from 'commander/esm.mjs';
import JSON5 from 'json5';
import { isMainThread } from 'worker_threads';
import { cpus } from 'os';
import { extname, join, basename } from 'path';
import path from 'path';
import { promises as fsp } from 'fs';
import { resolve as resolvePath } from 'path';
import { version } from 'json:../package.json';
import ora from 'ora';
import kleur from 'kleur';

import { codecs as supportedFormats, preprocessors } from './codecs.js';
import WorkerPool from './worker_pool.js';
import { autoOptimize } from './auto-optimizer.js';
import { ImagePool, preprocessors, encoders } from '@squoosh/lib';

function clamp(v, min, max) {
if (v < min) return min;
@@ -26,114 +22,6 @@ function prettyPrintSize(size) {
return (size / 2 ** (10 * index)).toFixed(2) + suffix[index];
}

async function decodeFile(file) {
const buffer = await fsp.readFile(file);
const firstChunk = buffer.slice(0, 16);
const firstChunkString = Array.from(firstChunk)
.map((v) => String.fromCodePoint(v))
.join('');
const key = Object.entries(supportedFormats).find(([name, { detectors }]) =>
detectors.some((detector) => detector.exec(firstChunkString)),
)?.[0];
if (!key) {
throw Error(`${file} has an unsupported format`);
}
const rgba = (await supportedFormats[key].dec()).decode(
new Uint8Array(buffer),
);
return {
file,
bitmap: rgba,
size: buffer.length,
};
}

async function preprocessImage({ preprocessorName, options, file }) {
const preprocessor = await preprocessors[preprocessorName].instantiate();
file.bitmap = await preprocessor(
file.bitmap.data,
file.bitmap.width,
file.bitmap.height,
options,
);
return file;
}

async function encodeFile({
file,
size,
bitmap: bitmapIn,
outputFile,
encName,
encConfig,
optimizerButteraugliTarget,
maxOptimizerRounds,
}) {
let out, infoText;
const encoder = await supportedFormats[encName].enc();
if (encConfig === 'auto') {
const optionToOptimize = supportedFormats[encName].autoOptimize.option;
const decoder = await supportedFormats[encName].dec();
const encode = (bitmapIn, quality) =>
encoder.encode(
bitmapIn.data,
bitmapIn.width,
bitmapIn.height,
Object.assign({}, supportedFormats[encName].defaultEncoderOptions, {
[optionToOptimize]: quality,
}),
);
const decode = (binary) => decoder.decode(binary);
const { bitmap, binary, quality } = await autoOptimize(
bitmapIn,
encode,
decode,
{
min: supportedFormats[encName].autoOptimize.min,
max: supportedFormats[encName].autoOptimize.max,
butteraugliDistanceGoal: optimizerButteraugliTarget,
maxRounds: maxOptimizerRounds,
},
);
out = binary;
const opts = {
// 5 significant digits is enough
[optionToOptimize]: Math.round(quality * 10000) / 10000,
};
infoText = ` using --${encName} '${JSON5.stringify(opts)}'`;
} else {
out = encoder.encode(
bitmapIn.data.buffer,
bitmapIn.width,
bitmapIn.height,
encConfig,
);
}
await fsp.writeFile(outputFile, out);
return {
infoText,
inputSize: size,
inputFile: file,
outputFile,
outputSize: out.length,
};
}

// both decoding and encoding go through the worker pool
function handleJob(params) {
const { operation } = params;
switch (operation) {
case 'encode':
return encodeFile(params);
case 'decode':
return decodeFile(params.file);
case 'preprocess':
return preprocessImage(params);
default:
throw Error(`Invalid job "${operation}"`);
}
}

function progressTracker(results) {
const spinner = ora();
const tracker = {};
@@ -163,13 +51,12 @@ function progressTracker(results) {
};
function getResultsText() {
let out = '';
for (const [filename, result] of results.entries()) {
out += `\n ${kleur.cyan(filename)}: ${prettyPrintSize(result.size)}`;
for (const { outputFile, outputSize, infoText } of result.outputs) {
const name = (program.suffix + extname(outputFile)).padEnd(5);
out += `\n ${kleur.dim('└')} ${kleur.cyan(name)} → ${prettyPrintSize(
outputSize,
)}`;
for (const result of results.values()) {
out += `\n ${kleur.cyan(result.file)}: ${prettyPrintSize(result.size)}`;
for (const { outputFile, size: outputSize, infoText } of result.outputs) {
out += `\n ${kleur.dim('└')} ${kleur.cyan(
outputFile.padEnd(5),
)} → ${prettyPrintSize(outputSize)}`;
const percent = ((outputSize / result.size) * 100).toPrecision(3);
out += ` (${kleur[outputSize > result.size ? 'red' : 'green'](
percent + '%',
@@ -186,17 +73,17 @@ function progressTracker(results) {
async function getInputFiles(paths) {
const validFiles = [];

for (const path of paths) {
const files = (await fsp.lstat(path)).isDirectory()
? (await fsp.readdir(path)).map(file => join(path, file))
: [path];
for (const inputPath of paths) {
const files = (await fsp.lstat(inputPath)).isDirectory()
? (await fsp.readdir(inputPath, {withFileTypes: true})).filter(dirent => dirent.isFile()).map(dirent => path.join(inputPath, dirent.name))
: [inputPath];
for (const file of files) {
try {
await fsp.stat(file);
} catch (err) {
if (err.code === 'ENOENT') {
console.warn(
`Warning: Input file does not exist: ${resolvePath(file)}`,
`Warning: Input file does not exist: ${path.resolve(file)}`,
);
continue;
} else {
@@ -214,7 +101,7 @@ async function getInputFiles(paths) {
async function processFiles(files) {
files = await getInputFiles(files);

const parallelism = cpus().length;
const imagePool = new ImagePool();

const results = new Map();
const progress = progressTracker(results);
@@ -223,140 +110,123 @@ async function processFiles(files) {
progress.totalOffset = files.length;
progress.setProgress(0, files.length);

const workerPool = new WorkerPool(parallelism, __filename);
// Create output directory
await fsp.mkdir(program.outputDir, { recursive: true });
await fsp.mkdir(program.opts().outputDir, { recursive: true });

let decoded = 0;
let decodedFiles = await Promise.all(
files.map(async (file) => {
const result = await workerPool.dispatchJob({
operation: 'decode',
const image = imagePool.ingestImage(file);
await image.decoded;
results.set(image, {
file,
});
results.set(file, {
file: result.file,
size: result.size,
size: (await image.decoded).size,
outputs: [],
});
progress.setProgress(++decoded, files.length);
return result;
return image;
}),
);

for (const [preprocessorName, value] of Object.entries(preprocessors)) {
if (!program[preprocessorName]) {
const preprocessOptions = {};

for (const preprocessorName of Object.keys(preprocessors)) {
if (!program.opts()[preprocessorName]) {
continue;
}
const preprocessorParam = program[preprocessorName];
const preprocessorOptions = Object.assign(
{},
value.defaultOptions,
JSON5.parse(preprocessorParam),
);

decodedFiles = await Promise.all(
decodedFiles.map(async (file) => {
return workerPool.dispatchJob({
file,
operation: 'preprocess',
preprocessorName,
options: preprocessorOptions,
});
}),
preprocessOptions[preprocessorName] = JSON5.parse(
program.opts()[preprocessorName],
);
}

for (const image of decodedFiles) {
image.preprocess(preprocessOptions);
}

await Promise.all(decodedFiles.map((image) => image.decoded));

progress.progressOffset = decoded;
progress.setStatus('Encoding ' + kleur.dim(`(${parallelism} threads)`));
progress.setStatus(
'Encoding ' + kleur.dim(`(${imagePool.workerPool.numWorkers} threads)`),
);
progress.setProgress(0, files.length);

const jobs = [];
let jobsStarted = 0;
let jobsFinished = 0;
for (const { file, bitmap, size } of decodedFiles) {
const ext = extname(file);
const base = basename(file, ext) + program.suffix;
for (const image of decodedFiles) {
const originalFile = results.get(image).file;

for (const [encName, value] of Object.entries(supportedFormats)) {
if (!program[encName]) {
const encodeOptions = {
optimizerButteraugliTarget: Number(
program.opts().optimizerButteraugliTarget,
),
maxOptimizerRounds: Number(program.opts().maxOptimizerRounds),
};
for (const encName of Object.keys(encoders)) {
if (!program.opts()[encName]) {
continue;
}
const encParam =
typeof program[encName] === 'string' ? program[encName] : '{}';
const encParam = program.opts()[encName];
const encConfig =
encParam.toLowerCase() === 'auto'
? 'auto'
: Object.assign(
{},
value.defaultEncoderOptions,
JSON5.parse(encParam),
);
const outputFile = join(program.outputDir, `${base}.${value.extension}`);
jobsStarted++;
const p = workerPool
.dispatchJob({
operation: 'encode',
file,
size,
bitmap,
outputFile,
encName,
encConfig,
optimizerButteraugliTarget: Number(
program.optimizerButteraugliTarget,
),
maxOptimizerRounds: Number(program.maxOptimizerRounds),
})
.then((output) => {
jobsFinished++;
results.get(file).outputs.push(output);
progress.setProgress(jobsFinished, jobsStarted);
});
jobs.push(p);
encParam.toLowerCase() === 'auto' ? 'auto' : JSON5.parse(encParam);
encodeOptions[encName] = encConfig;
}
jobsStarted++;
const job = image.encode(encodeOptions).then(async () => {
jobsFinished++;
const outputPath = path.join(
program.opts().outputDir,
program.opts().suffix +
path.basename(originalFile, path.extname(originalFile)),
);
for (const output of Object.values(image.encodedWith)) {
const outputFile = `${outputPath}.${(await output).extension}`;
await fsp.writeFile(outputFile, (await output).binary);
results
.get(image)
.outputs.push(Object.assign(await output, { outputFile }));
}
progress.setProgress(jobsFinished, jobsStarted);
});
jobs.push(job);
}

// update the progress to account for multi-format
progress.setProgress(jobsFinished, jobsStarted);
// Wait for all jobs to finish
await workerPool.join();
await Promise.all(jobs);
await imagePool.close();
progress.finish('Squoosh results:');
}

if (isMainThread) {
program
.name('squoosh-cli')
.version(version)
.arguments('<files...>')
.option('-d, --output-dir <dir>', 'Output directory', '.')
.option('-s, --suffix <suffix>', 'Append suffix to output files', '')
.option(
'--max-optimizer-rounds <rounds>',
'Maximum number of compressions to use for auto optimizations',
'6',
)
.option(
'--optimizer-butteraugli-target <butteraugli distance>',
'Target Butteraugli distance for auto optimizer',
'1.4',
)
.action(processFiles);
program
.name('squoosh-cli')
.arguments('<files...>')
.option('-d, --output-dir <dir>', 'Output directory', '.')
.option('-s, --suffix <suffix>', 'Append suffix to output files', '')
.option(
'--max-optimizer-rounds <rounds>',
'Maximum number of compressions to use for auto optimizations',
'6',
)
.option(
'--optimizer-butteraugli-target <butteraugli distance>',
'Target Butteraugli distance for auto optimizer',
'1.4',
)
.action(processFiles);

// Create a CLI option for each supported preprocessor
for (const [key, value] of Object.entries(preprocessors)) {
program.option(`--${key} [config]`, value.description);
}
// Create a CLI option for each supported encoder
for (const [key, value] of Object.entries(supportedFormats)) {
program.option(
`--${key} [config]`,
`Use ${value.name} to generate a .${value.extension} file with the given configuration`,
);
}

program.parse(process.argv);
} else {
WorkerPool.useThisThreadAsWorker(handleJob);
// Create a CLI option for each supported preprocessor
for (const [key, value] of Object.entries(preprocessors)) {
program.option(`--${key} [config]`, value.description);
}
// Create a CLI option for each supported encoder
for (const [key, value] of Object.entries(encoders)) {
program.option(
`--${key} [config]`,
`Use ${value.name} to generate a .${value.extension} file with the given configuration`,
);
}

program.parse(process.argv);
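A recurring mechanical change in this file is the move from commander v6's direct property access (for example program.outputDir) to v7's program.opts(), combined with JSON5-parsed per-encoder flags. A small sketch of that pattern in isolation; the flag names and example values are illustrative assumptions, not taken from the diff.

```js
import { program } from 'commander/esm.mjs';
import JSON5 from 'json5';

program
  .option('-d, --output-dir <dir>', 'Output directory', '.')
  .option('--mozjpeg [config]', 'Use MozJPEG with the given configuration');

// e.g. `node src/index.js --mozjpeg '{quality: 75}' -d out` (illustrative invocation)
program.parse(process.argv);

const opts = program.opts(); // commander v7: options live behind opts() by default
console.log(opts.outputDir); // '.', or whatever -d was set to

if (opts.mozjpeg) {
  // `[config]` makes the value optional, so it may be `true` when no config is given;
  // JSON5 accepts the relaxed object literals the CLI documents.
  const config = opts.mozjpeg === true ? {} : JSON5.parse(opts.mozjpeg);
  console.log(config);
}
```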
@@ -1,9 +1,9 @@
# libavif and libaom versions are from
# https://docs.google.com/document/d/1wEEA5rRU7wT54k41u3qyZIZHDCJArIMzLuzsrLAwaK8/edit
CODEC_URL = https://github.com/AOMediaCodec/libavif/archive/d37ef74127986184500e571bf1f9793cc0bdef50.tar.gz
CODEC_URL = https://github.com/AOMediaCodec/libavif/archive/1c39e772c2c0d687691dd4b589a12c323f5f767d.tar.gz
CODEC_PACKAGE = node_modules/libavif.tar.gz

LIBAOM_URL = https://aomedia.googlesource.com/aom/+archive/0a5da45c7f942908974f5ab8e107c9fa82048ae7.tar.gz
LIBAOM_URL = https://aomedia.googlesource.com/aom/+archive/v3.1.0.tar.gz
LIBAOM_PACKAGE = node_modules/libaom.tar.gz

export CODEC_DIR = node_modules/libavif
@@ -29,8 +29,10 @@ struct AvifOptions {
bool chromaDeltaQ;
// 0-7
int sharpness;
// Target ssim rather than psnr
bool targetSsim;
// 0 = auto
// 1 = PSNR
// 2 = SSIM
int tune;
// 0-50
int denoiseLevel;
};

@@ -98,7 +100,7 @@ val encode(std::string buffer, int width, int height, AvifOptions options) {
std::to_string(options.cqAlphaLevel).c_str());
}

if (options.targetSsim) {
if (options.tune == 2 || (options.tune == 0 && options.cqLevel <= 32)) {
avifEncoderSetCodecSpecificOption(encoder, "tune", "ssim");
}

@@ -136,7 +138,7 @@ EMSCRIPTEN_BINDINGS(my_module) {
.field("speed", &AvifOptions::speed)
.field("chromaDeltaQ", &AvifOptions::chromaDeltaQ)
.field("sharpness", &AvifOptions::sharpness)
.field("targetSsim", &AvifOptions::targetSsim)
.field("tune", &AvifOptions::tune)
.field("denoiseLevel", &AvifOptions::denoiseLevel)
.field("subsample", &AvifOptions::subsample);
codecs/avif/enc/avif_enc.d.ts (8 changed lines, vendored)

@@ -1,3 +1,9 @@
export const enum AVIFTune {
auto,
psnr,
ssim,
}

export interface EncodeOptions {
cqLevel: number;
denoiseLevel: number;
@@ -8,7 +14,7 @@ export interface EncodeOptions {
subsample: number;
chromaDeltaQ: boolean;
sharpness: number;
targetSsim: boolean;
tune: AVIFTune;
}

export interface AVIFModule extends EmscriptenWasm.Module {
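The boolean targetSsim flag is replaced by a three-valued tune option (0 = auto, 1 = PSNR, 2 = SSIM), and per the C++ change above the encoder only passes "tune=ssim" to libaom when tune is ssim, or when auto is selected and cqLevel is 32 or lower. A sketch of the corresponding options object on the JavaScript side; every numeric value below is an illustrative assumption, not a default taken from this diff.

```js
// AVIFTune is a const enum in avif_enc.d.ts: auto = 0, psnr = 1, ssim = 2.
const avifEncodeOptions = {
  cqLevel: 30,        // illustrative quality level
  denoiseLevel: 0,    // 0-50 per the C++ comment
  sharpness: 0,       // 0-7 per the C++ comment
  subsample: 1,       // illustrative
  chromaDeltaQ: false,
  speed: 6,           // illustrative
  tune: 2,            // AVIFTune.ssim; 0 (auto) lets the encoder decide based on cqLevel
};
```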
Binary file not shown.
codecs/avif/enc/avif_enc_mt.js (4 changed lines, generated)
@@ -18,7 +18,7 @@ function Ia(a){for(var b=0,c=0;c<a.length;++c){var d=a.charCodeAt(c);55296<=d&&5
|
||||
if(B)e=z.wasmMemory,l=z.buffer;else if(z.wasmMemory)e=z.wasmMemory;else if(e=new WebAssembly.Memory({initial:Ja/65536,maximum:32768,shared:!0}),!(e.buffer instanceof SharedArrayBuffer))throw D("requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag"),Error("bad memory");e&&(l=e.buffer);Ja=l.byteLength;n(l);var H,Ka=[],La=[],Ma=[],Na=[];
|
||||
function Oa(){var a=z.preRun.shift();Ka.unshift(a)}var I=0,Pa=null,Qa=null;z.preloadedImages={};z.preloadedAudios={};function E(a){if(z.onAbort)z.onAbort(a);B&&console.error("Pthread aborting at "+Error().stack);D(a);ya=!0;a=new WebAssembly.RuntimeError("abort("+a+"). Build with -s ASSERTIONS=1 for more info.");pa(a);throw a;}function Ra(){var a=K;return String.prototype.startsWith?a.startsWith("data:application/octet-stream;base64,"):0===a.indexOf("data:application/octet-stream;base64,")}var K="avif_enc_mt.wasm";
|
||||
Ra()||(K=sa(K));function Sa(){try{if(wa)return new Uint8Array(wa);if(ta)return ta(K);throw"both async and sync fetching of the wasm failed";}catch(a){E(a)}}function Ta(){return wa||"function"!==typeof fetch?Promise.resolve().then(Sa):fetch(K,{credentials:"same-origin"}).then(function(a){if(!a.ok)throw"failed to load wasm binary file at '"+K+"'";return a.arrayBuffer()}).catch(function(){return Sa()})}
|
||||
var Va={597269:function(a,b){setTimeout(function(){Ua(a,b)},0)},597347:function(){throw"Canceled!";}};function Wa(a){for(;0<a.length;){var b=a.shift();if("function"==typeof b)b(z);else{var c=b.Eb;"number"===typeof c?void 0===b.eb?H.get(c)():H.get(c)(b.eb):c(void 0===b.eb?null:b.eb)}}}function Xa(a,b,c){var d;-1!=a.indexOf("j")?d=c&&c.length?z["dynCall_"+a].apply(null,[b].concat(c)):z["dynCall_"+a].call(null,b):d=H.get(b).apply(null,c);return d}z.dynCall=Xa;var L=0,Ya=0,Za=0;
|
||||
var Va={597253:function(a,b){setTimeout(function(){Ua(a,b)},0)},597331:function(){throw"Canceled!";}};function Wa(a){for(;0<a.length;){var b=a.shift();if("function"==typeof b)b(z);else{var c=b.Eb;"number"===typeof c?void 0===b.eb?H.get(c)():H.get(c)(b.eb):c(void 0===b.eb?null:b.eb)}}}function Xa(a,b,c){var d;-1!=a.indexOf("j")?d=c&&c.length?z["dynCall_"+a].apply(null,[b].concat(c)):z["dynCall_"+a].call(null,b):d=H.get(b).apply(null,c);return d}z.dynCall=Xa;var L=0,Ya=0,Za=0;
|
||||
function $a(a,b,c){L=a|0;Za=b|0;Ya=c|0}z.registerPthreadPtr=$a;function ab(a,b){if(0>=a||a>aa().length||a&1||0>b)return-28;if(0==b)return 0;2147483647<=b&&(b=Infinity);var c=Atomics.load(w(),M.vb>>2),d=0;if(c==a&&Atomics.compareExchange(w(),M.vb>>2,c,0)==c&&(--b,d=1,0>=b))return 1;a=Atomics.notify(w(),a>>2,b);if(0<=a)return a+d;throw"Atomics.notify returned an unexpected value "+a;}z._emscripten_futex_wake=ab;
|
||||
function bb(a){if(B)throw"Internal Error! cleanupThread() can only ever be called from main application thread!";if(!a)throw"Internal Error! Null pthread_ptr in cleanupThread!";w()[a+12>>2]=0;(a=M.Ya[a])&&M.jb(a.worker)}
|
||||
var M={ac:1,kc:{yb:0,zb:0},Wa:[],$a:[],Lb:function(){for(var a=navigator.hardwareConcurrency,b=0;b<a;++b)M.pb()},Mb:function(){M.Ua=N(232);for(var a=0;58>a;++a)y()[M.Ua/4+a]=0;w()[M.Ua+12>>2]=M.Ua;a=M.Ua+156;w()[a>>2]=a;var b=N(512);for(a=0;128>a;++a)y()[b/4+a]=0;Atomics.store(y(),M.Ua+104>>2,b);Atomics.store(y(),M.Ua+40>>2,M.Ua);Atomics.store(y(),M.Ua+44>>2,42);M.tb();$a(M.Ua,!1,1);cb(M.Ua)},Nb:function(){M.tb();oa(z);M.receiveObjectTransfer=M.Sb;M.setThreadStatus=M.Tb;M.threadCancel=M.Yb;M.threadExit=
|
||||
@@ -86,7 +86,7 @@ z._emscripten_sync_run_in_main_thread_1=function(){return(z._emscripten_sync_run
|
||||
z._emscripten_sync_run_in_main_thread_3=function(){return(z._emscripten_sync_run_in_main_thread_3=z.asm.Ga).apply(null,arguments)};var Cc=z._emscripten_sync_run_in_main_thread_4=function(){return(Cc=z._emscripten_sync_run_in_main_thread_4=z.asm.Ha).apply(null,arguments)};z._emscripten_sync_run_in_main_thread_5=function(){return(z._emscripten_sync_run_in_main_thread_5=z.asm.Ia).apply(null,arguments)};
|
||||
z._emscripten_sync_run_in_main_thread_6=function(){return(z._emscripten_sync_run_in_main_thread_6=z.asm.Ja).apply(null,arguments)};z._emscripten_sync_run_in_main_thread_7=function(){return(z._emscripten_sync_run_in_main_thread_7=z.asm.Ka).apply(null,arguments)};
|
||||
var Wb=z._emscripten_run_in_main_runtime_thread_js=function(){return(Wb=z._emscripten_run_in_main_runtime_thread_js=z.asm.La).apply(null,arguments)},bc=z.__emscripten_call_on_thread=function(){return(bc=z.__emscripten_call_on_thread=z.asm.Ma).apply(null,arguments)};z._emscripten_tls_init=function(){return(z._emscripten_tls_init=z.asm.Na).apply(null,arguments)};z.dynCall_jiiiiiiiii=function(){return(z.dynCall_jiiiiiiiii=z.asm.Oa).apply(null,arguments)};
|
||||
z.dynCall_jiji=function(){return(z.dynCall_jiji=z.asm.Pa).apply(null,arguments)};z.dynCall_jiiiiiiii=function(){return(z.dynCall_jiiiiiiii=z.asm.Qa).apply(null,arguments)};z.dynCall_jiiiiii=function(){return(z.dynCall_jiiiiii=z.asm.Ra).apply(null,arguments)};z.dynCall_jiiiii=function(){return(z.dynCall_jiiiii=z.asm.Sa).apply(null,arguments)};z.dynCall_iiijii=function(){return(z.dynCall_iiijii=z.asm.Ta).apply(null,arguments)};var db=z._main_thread_futex=899124;
|
||||
z.dynCall_jiji=function(){return(z.dynCall_jiji=z.asm.Pa).apply(null,arguments)};z.dynCall_jiiiiiiii=function(){return(z.dynCall_jiiiiiiii=z.asm.Qa).apply(null,arguments)};z.dynCall_jiiiiii=function(){return(z.dynCall_jiiiiii=z.asm.Ra).apply(null,arguments)};z.dynCall_jiiiii=function(){return(z.dynCall_jiiiii=z.asm.Sa).apply(null,arguments)};z.dynCall_iiijii=function(){return(z.dynCall_iiijii=z.asm.Ta).apply(null,arguments)};var db=z._main_thread_futex=899108;
|
||||
function zc(a,b){var c=Y();try{H.get(a)(b)}catch(d){P(c);if(d!==d+0&&"longjmp"!==d)throw d;Z(1,0)}}function Bc(a,b,c,d,f){var g=Y();try{H.get(a)(b,c,d,f)}catch(h){P(g);if(h!==h+0&&"longjmp"!==h)throw h;Z(1,0)}}function Ac(a,b,c){var d=Y();try{H.get(a)(b,c)}catch(f){P(d);if(f!==f+0&&"longjmp"!==f)throw f;Z(1,0)}}function yc(a,b,c,d,f,g,h,k,m,q){var p=Y();try{return H.get(a)(b,c,d,f,g,h,k,m,q)}catch(v){P(p);if(v!==v+0&&"longjmp"!==v)throw v;Z(1,0)}}
|
||||
function vc(a,b,c){var d=Y();try{return H.get(a)(b,c)}catch(f){P(d);if(f!==f+0&&"longjmp"!==f)throw f;Z(1,0)}}function wc(a,b,c,d,f){var g=Y();try{return H.get(a)(b,c,d,f)}catch(h){P(g);if(h!==h+0&&"longjmp"!==h)throw h;Z(1,0)}}function uc(a,b){var c=Y();try{return H.get(a)(b)}catch(d){P(c);if(d!==d+0&&"longjmp"!==d)throw d;Z(1,0)}}function xc(a,b,c,d,f,g,h,k,m){var q=Y();try{return H.get(a)(b,c,d,f,g,h,k,m)}catch(p){P(q);if(p!==p+0&&"longjmp"!==p)throw p;Z(1,0)}}z.PThread=M;z.PThread=M;
|
||||
z._pthread_self=oc;z.wasmMemory=e;z.ExitStatus=Fc;var Gc;function Fc(a){this.name="ExitStatus";this.message="Program terminated with exit("+a+")";this.status=a}Qa=function Hc(){Gc||Ic();Gc||(Qa=Hc)};
|
||||
|
||||
Binary file not shown.
Binary file not shown.
@@ -141,6 +141,7 @@ val encode(std::string image_in, int image_width, int image_height, MozJpegOptio
jpeg_c_set_bool_param(&cinfo, JBOOLEAN_TRELLIS_EOB_OPT, opts.trellis_opt_zero);
jpeg_c_set_bool_param(&cinfo, JBOOLEAN_TRELLIS_Q_OPT, opts.trellis_opt_table);
jpeg_c_set_int_param(&cinfo, JINT_TRELLIS_NUM_LOOPS, opts.trellis_loops);
jpeg_c_set_int_param(&cinfo, JINT_DC_SCAN_OPT_MODE, 0);

// A little hacky to build a string for this, but it means we can use
// set_quality_ratings which does some useful heuristic stuff.
Binary file not shown.
Binary file not shown.
codecs/oxipng/Cargo.lock (69 changed lines, generated)
@@ -1,5 +1,7 @@
|
||||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "adler"
|
||||
version = "0.2.3"
|
||||
@@ -232,6 +234,15 @@ dependencies = [
|
||||
"either",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "js-sys"
|
||||
version = "0.3.48"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dc9f84f9b115ce7843d60706df1422a916680bfdfcbdb0447c5614ff9d7e4d78"
|
||||
dependencies = [
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lazy_static"
|
||||
version = "1.4.0"
|
||||
@@ -350,12 +361,6 @@ dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "13bd41f508810a131401606d54ac32a467c97172d74ba7662562ebba5ad07fa0"
|
||||
|
||||
[[package]]
|
||||
name = "oxipng"
|
||||
version = "4.0.3"
|
||||
@@ -401,9 +406,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.24"
|
||||
version = "1.0.26"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
|
||||
checksum = "a152013215dca273577e18d2bf00fa862b89b24169fb78c4c95aeb07992c9cec"
|
||||
dependencies = [
|
||||
"unicode-xid",
|
||||
]
|
||||
@@ -484,24 +489,28 @@ dependencies = [
|
||||
"pest",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "spmc"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "02a8428da277a8e3a15271d79943e80ccc2ef254e78813a166a08d65e4c3ece5"
|
||||
|
||||
[[package]]
|
||||
name = "squoosh-oxipng"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"crossbeam-channel",
|
||||
"libdeflater",
|
||||
"log",
|
||||
"once_cell",
|
||||
"oxipng",
|
||||
"rayon",
|
||||
"wasm-bindgen",
|
||||
"wasm-bindgen-rayon",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.58"
|
||||
version = "1.0.72"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cc60a3d73ea6594cd712d830cc1f0390fd71542d8c8cd24e70cc54cdfd5e05d5"
|
||||
checksum = "a1e8cdbefb79a9a5a65e0db8b47b723ee907b7c7f8496c76a1770b5c310bab82"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -522,9 +531,9 @@ checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen"
|
||||
version = "0.2.69"
|
||||
version = "0.2.73"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3cd364751395ca0f68cafb17666eee36b63077fb5ecd972bbcd74c90c4bf736e"
|
||||
checksum = "83240549659d187488f91f33c0f8547cbfef0b2088bc470c116d1d260ef623d9"
|
||||
dependencies = [
|
||||
"cfg-if 1.0.0",
|
||||
"wasm-bindgen-macro",
|
||||
@@ -532,9 +541,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-backend"
|
||||
version = "0.2.69"
|
||||
version = "0.2.73"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1114f89ab1f4106e5b55e688b828c0ab0ea593a1ea7c094b141b14cbaaec2d62"
|
||||
checksum = "ae70622411ca953215ca6d06d3ebeb1e915f0f6613e3b495122878d7ebec7dae"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"lazy_static",
|
||||
@@ -547,9 +556,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro"
|
||||
version = "0.2.69"
|
||||
version = "0.2.73"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7a6ac8995ead1f084a8dea1e65f194d0973800c7f571f6edd70adf06ecf77084"
|
||||
checksum = "3e734d91443f177bfdb41969de821e15c516931c3c3db3d318fa1b68975d0f6f"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"wasm-bindgen-macro-support",
|
||||
@@ -557,9 +566,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro-support"
|
||||
version = "0.2.69"
|
||||
version = "0.2.73"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b5a48c72f299d80557c7c62e37e7225369ecc0c963964059509fbafe917c7549"
|
||||
checksum = "d53739ff08c8a68b0fdbcd54c372b8ab800b1449ab3c9d706503bc7dd1621b2c"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -569,7 +578,19 @@ dependencies = [
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-shared"
|
||||
version = "0.2.69"
|
||||
name = "wasm-bindgen-rayon"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7e7811dd7f9398f14cc76efd356f98f03aa30419dea46aa810d71e819fc97158"
|
||||
checksum = "3069d2a42e7a7e3bfde668f84adb5fbc35701ca2b39b27a064cbd5ede4e78194"
|
||||
dependencies = [
|
||||
"js-sys",
|
||||
"rayon",
|
||||
"spmc",
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-shared"
|
||||
version = "0.2.73"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d9a543ae66aa233d14bb765ed9af4a33e81b8b58d1584cf1b47ff8cd0b9e4489"
|
||||
|
||||
@@ -12,17 +12,15 @@ wasm-opt = ["-O", "--no-validation"]
crate-type = ["cdylib"]

[dependencies]
oxipng = { version = "4.0.1", default-features = false, features = ["libdeflater"] }
oxipng = { version = "4.0.3", default-features = false, features = ["libdeflater"] }
libdeflater = { version = "0.7.1", features = ["freestanding"] }
wasm-bindgen = "0.2.68"
wasm-bindgen = "0.2.73"
log = { version = "0.4.11", features = ["release_max_level_off"] }
rayon = { version = "1.5.0", optional = true }
once_cell = { version = "1.5.2", optional = true }
crossbeam-channel = { version = "0.5.0", optional = true }
wasm-bindgen-rayon = { version = "1.0", optional = true }

[profile.release]
lto = true
opt-level = "s"

[features]
parallel = ["oxipng/parallel", "rayon", "crossbeam-channel", "once_cell"]
parallel = ["oxipng/parallel", "wasm-bindgen-rayon"]
@@ -6,6 +6,4 @@ rm -rf pkg{,-parallel}
export CFLAGS="${CFLAGS} -DUNALIGNED_ACCESS_IS_FAST=1"
wasm-pack build -t web
RUSTFLAGS='-C target-feature=+atomics,+bulk-memory' wasm-pack build -t web -d pkg-parallel -- -Z build-std=panic_abort,std --features=parallel
# Workaround https://github.com/rustwasm/wasm-bindgen/issues/2133:
sed -i "s|maybe_memory:|maybe_memory?:|" pkg-parallel/squoosh_oxipng.d.ts
rm pkg{,-parallel}/.gitignore
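The parallel build above now goes through wasm-bindgen-rayon, whose regenerated typings further below replace worker_initializer/start_main_thread/start_worker_thread with a single initThreadPool export. A minimal consumption sketch, assuming a cross-origin-isolated page so the shared WebAssembly.Memory is usable; the thread count is an illustrative choice.

```js
import init, { initThreadPool } from './pkg-parallel/squoosh_oxipng.js';

// Instantiate the wasm module first, then spin up the rayon-backed worker pool.
await init();
await initThreadPool(navigator.hardwareConcurrency); // thread count is an illustrative choice
```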
codecs/oxipng/pkg-parallel/snippets/wasm-bindgen-rayon-3d2df09ebec17a22/src/workerHelpers.js (98 changed lines, generated, new file)
@@ -0,0 +1,98 @@
|
||||
/**
|
||||
* Copyright 2021 Google Inc. All Rights Reserved.
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// Note: we use `wasm_bindgen_worker_`-prefixed message types to make sure
|
||||
// we can handle bundling into other files, which might happen to have their
|
||||
// own `postMessage`/`onmessage` communication channels.
|
||||
//
|
||||
// If we didn't take that into the account, we could send much simpler signals
|
||||
// like just `0` or whatever, but the code would be less resilient.
|
||||
|
||||
function waitForMsgType(target, type) {
|
||||
return new Promise(resolve => {
|
||||
target.addEventListener('message', function onMsg({ data }) {
|
||||
if (data == null || data.type !== type) return;
|
||||
target.removeEventListener('message', onMsg);
|
||||
resolve(data);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
waitForMsgType(self, 'wasm_bindgen_worker_init').then(async data => {
|
||||
// # Note 1
|
||||
// Our JS should have been generated in
|
||||
// `[out-dir]/snippets/wasm-bindgen-rayon-[hash]/workerHelpers.js`,
|
||||
// resolve the main module via `../../..`.
|
||||
//
|
||||
// This might need updating if the generated structure changes on wasm-bindgen
|
||||
// side ever in the future, but works well with bundlers today. The whole
|
||||
// point of this crate, after all, is to abstract away unstable features
|
||||
// and temporary bugs so that you don't need to deal with them in your code.
|
||||
//
|
||||
// # Note 2
|
||||
// This could be a regular import, but then some bundlers complain about
|
||||
// circular deps.
|
||||
//
|
||||
// Dynamic import could be cheap if this file was inlined into the parent,
|
||||
// which would require us just using `../../..` in `new Worker` below,
|
||||
// but that doesn't work because wasm-pack unconditionally adds
|
||||
// "sideEffects":false (see below).
|
||||
//
|
||||
// OTOH, even though it can't be inlined, it should be still reasonably
|
||||
// cheap since the requested file is already in cache (it was loaded by
|
||||
// the main thread).
|
||||
const pkg = await import('../../..');
|
||||
await pkg.default(data.module, data.memory);
|
||||
postMessage({ type: 'wasm_bindgen_worker_ready' });
|
||||
pkg.wbg_rayon_start_worker(data.receiver);
|
||||
});
|
||||
|
||||
export async function startWorkers(module, memory, builder) {
|
||||
const workerInit = {
|
||||
type: 'wasm_bindgen_worker_init',
|
||||
module,
|
||||
memory,
|
||||
receiver: builder.receiver()
|
||||
};
|
||||
|
||||
try {
|
||||
await Promise.all(
|
||||
Array.from({ length: builder.numThreads() }, () => {
|
||||
// Self-spawn into a new Worker.
|
||||
//
|
||||
// TODO: while `new URL('...', import.meta.url) becomes a semi-standard
|
||||
// way to get asset URLs relative to the module across various bundlers
|
||||
// and browser, ideally we should switch to `import.meta.resolve`
|
||||
// once it becomes a standard.
|
||||
//
|
||||
// Note: we could use `../../..` as the URL here to inline workerHelpers.js
|
||||
// into the parent entry instead of creating another split point -
|
||||
// this would be preferable from optimization perspective -
|
||||
// however, Webpack then eliminates all message handler code
|
||||
// because wasm-pack produces "sideEffects":false in package.json
|
||||
// unconditionally.
|
||||
//
|
||||
// The only way to work around that is to have side effect code
|
||||
// in an entry point such as Worker file itself.
|
||||
const worker = new Worker(new URL('./workerHelpers.js', import.meta.url), {
|
||||
type: 'module'
|
||||
});
|
||||
worker.postMessage(workerInit);
|
||||
return waitForMsgType(worker, 'wasm_bindgen_worker_ready');
|
||||
})
|
||||
);
|
||||
builder.build();
|
||||
} finally {
|
||||
builder.free();
|
||||
}
|
||||
}
|
||||
codecs/oxipng/pkg-parallel/squoosh_oxipng.d.ts (40 changed lines, generated, vendored)
@@ -3,29 +3,48 @@
|
||||
/**
|
||||
* @param {Uint8Array} data
|
||||
* @param {number} level
|
||||
* @param {boolean} interlace
|
||||
* @returns {Uint8Array}
|
||||
*/
|
||||
export function optimise(data: Uint8Array, level: number): Uint8Array;
|
||||
export function optimise(data: Uint8Array, level: number, interlace: boolean): Uint8Array;
|
||||
/**
|
||||
* @param {number} num
|
||||
* @returns {any}
|
||||
* @param {number} num_threads
|
||||
* @returns {Promise<any>}
|
||||
*/
|
||||
export function worker_initializer(num: number): any;
|
||||
export function initThreadPool(num_threads: number): Promise<any>;
|
||||
/**
|
||||
* @param {number} receiver
|
||||
*/
|
||||
export function wbg_rayon_start_worker(receiver: number): void;
|
||||
/**
|
||||
*/
|
||||
export function start_main_thread(): void;
|
||||
export class wbg_rayon_PoolBuilder {
|
||||
free(): void;
|
||||
/**
|
||||
* @returns {number}
|
||||
*/
|
||||
numThreads(): number;
|
||||
/**
|
||||
* @returns {number}
|
||||
*/
|
||||
receiver(): number;
|
||||
/**
|
||||
*/
|
||||
export function start_worker_thread(): void;
|
||||
build(): void;
|
||||
}
|
||||
|
||||
export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module;
|
||||
|
||||
export interface InitOutput {
|
||||
readonly optimise: (a: number, b: number, c: number, d: number) => void;
|
||||
readonly worker_initializer: (a: number) => number;
|
||||
readonly start_main_thread: () => void;
|
||||
readonly start_worker_thread: () => void;
|
||||
readonly optimise: (a: number, b: number, c: number, d: number, e: number) => void;
|
||||
readonly __wbg_wbg_rayon_poolbuilder_free: (a: number) => void;
|
||||
readonly wbg_rayon_poolbuilder_numThreads: (a: number) => number;
|
||||
readonly wbg_rayon_poolbuilder_receiver: (a: number) => number;
|
||||
readonly wbg_rayon_poolbuilder_build: (a: number) => void;
|
||||
readonly initThreadPool: (a: number) => number;
|
||||
readonly wbg_rayon_start_worker: (a: number) => void;
|
||||
readonly __wbindgen_export_0: WebAssembly.Memory;
|
||||
readonly __wbindgen_add_to_stack_pointer: (a: number) => number;
|
||||
readonly __wbindgen_malloc: (a: number) => number;
|
||||
readonly __wbindgen_free: (a: number, b: number) => void;
|
||||
readonly __wbindgen_start: () => void;
|
||||
@@ -41,4 +60,3 @@ export interface InitOutput {
|
||||
* @returns {Promise<InitOutput>}
|
||||
*/
|
||||
export default function init (module_or_path?: InitInput | Promise<InitInput>, maybe_memory?: WebAssembly.Memory): Promise<InitOutput>;
|
||||
|
||||
codecs/oxipng/pkg-parallel/squoosh_oxipng.js (116 changed lines, generated)
@@ -1,21 +1,6 @@
|
||||
import { startWorkers } from './snippets/wasm-bindgen-rayon-3d2df09ebec17a22/src/workerHelpers.js';
|
||||
|
||||
let wasm;
|
||||
let memory;
|
||||
|
||||
const heap = new Array(32).fill(undefined);
|
||||
|
||||
heap.push(undefined, null, true, false);
|
||||
|
||||
let heap_next = heap.length;
|
||||
|
||||
function addHeapObject(obj) {
|
||||
if (heap_next === heap.length) heap.push(heap.length + 1);
|
||||
const idx = heap_next;
|
||||
heap_next = heap[idx];
|
||||
|
||||
heap[idx] = obj;
|
||||
return idx;
|
||||
}
|
||||
|
||||
let cachedTextDecoder = new TextDecoder('utf-8', { ignoreBOM: true, fatal: true });
|
||||
|
||||
@@ -33,6 +18,21 @@ function getStringFromWasm0(ptr, len) {
|
||||
return cachedTextDecoder.decode(getUint8Memory0().slice(ptr, ptr + len));
|
||||
}
|
||||
|
||||
const heap = new Array(32).fill(undefined);
|
||||
|
||||
heap.push(undefined, null, true, false);
|
||||
|
||||
let heap_next = heap.length;
|
||||
|
||||
function addHeapObject(obj) {
|
||||
if (heap_next === heap.length) heap.push(heap.length + 1);
|
||||
const idx = heap_next;
|
||||
heap_next = heap[idx];
|
||||
|
||||
heap[idx] = obj;
|
||||
return idx;
|
||||
}
|
||||
|
||||
let WASM_VECTOR_LEN = 0;
|
||||
|
||||
function passArray8ToWasm0(arg, malloc) {
|
||||
@@ -56,22 +56,22 @@ function getArrayU8FromWasm0(ptr, len) {
|
||||
/**
|
||||
* @param {Uint8Array} data
|
||||
* @param {number} level
|
||||
* @param {boolean} interlace
|
||||
* @returns {Uint8Array}
|
||||
*/
|
||||
export function optimise(data, level) {
|
||||
export function optimise(data, level, interlace) {
|
||||
try {
|
||||
const retptr = wasm.__wbindgen_export_1.value - 16;
|
||||
wasm.__wbindgen_export_1.value = retptr;
|
||||
const retptr = wasm.__wbindgen_add_to_stack_pointer(-16);
|
||||
var ptr0 = passArray8ToWasm0(data, wasm.__wbindgen_malloc);
|
||||
var len0 = WASM_VECTOR_LEN;
|
||||
wasm.optimise(retptr, ptr0, len0, level);
|
||||
wasm.optimise(retptr, ptr0, len0, level, interlace);
|
||||
var r0 = getInt32Memory0()[retptr / 4 + 0];
|
||||
var r1 = getInt32Memory0()[retptr / 4 + 1];
|
||||
var v1 = getArrayU8FromWasm0(r0, r1).slice();
|
||||
wasm.__wbindgen_free(r0, r1 * 1);
|
||||
return v1;
|
||||
} finally {
|
||||
wasm.__wbindgen_export_1.value += 16;
|
||||
wasm.__wbindgen_add_to_stack_pointer(16);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -89,29 +89,66 @@ function takeObject(idx) {
|
||||
return ret;
|
||||
}
|
||||
/**
|
||||
* @param {number} num
|
||||
* @returns {any}
|
||||
* @param {number} num_threads
|
||||
* @returns {Promise<any>}
|
||||
*/
|
||||
export function worker_initializer(num) {
|
||||
var ret = wasm.worker_initializer(num);
|
||||
export function initThreadPool(num_threads) {
|
||||
var ret = wasm.initThreadPool(num_threads);
|
||||
return takeObject(ret);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} receiver
|
||||
*/
|
||||
export function start_main_thread() {
|
||||
wasm.start_main_thread();
|
||||
export function wbg_rayon_start_worker(receiver) {
|
||||
wasm.wbg_rayon_start_worker(receiver);
|
||||
}
|
||||
|
||||
/**
|
||||
*/
|
||||
export function start_worker_thread() {
|
||||
wasm.start_worker_thread();
|
||||
export class wbg_rayon_PoolBuilder {
|
||||
|
||||
static __wrap(ptr) {
|
||||
const obj = Object.create(wbg_rayon_PoolBuilder.prototype);
|
||||
obj.ptr = ptr;
|
||||
|
||||
return obj;
|
||||
}
|
||||
|
||||
__destroy_into_raw() {
|
||||
const ptr = this.ptr;
|
||||
this.ptr = 0;
|
||||
|
||||
return ptr;
|
||||
}
|
||||
|
||||
free() {
|
||||
const ptr = this.__destroy_into_raw();
|
||||
wasm.__wbg_wbg_rayon_poolbuilder_free(ptr);
|
||||
}
|
||||
/**
|
||||
* @returns {number}
|
||||
*/
|
||||
numThreads() {
|
||||
var ret = wasm.wbg_rayon_poolbuilder_numThreads(this.ptr);
|
||||
return ret >>> 0;
|
||||
}
|
||||
/**
|
||||
* @returns {number}
|
||||
*/
|
||||
receiver() {
|
||||
var ret = wasm.wbg_rayon_poolbuilder_receiver(this.ptr);
|
||||
return ret;
|
||||
}
|
||||
/**
|
||||
*/
|
||||
build() {
|
||||
wasm.wbg_rayon_poolbuilder_build(this.ptr);
|
||||
}
|
||||
}
|
||||
|
||||
async function load(module, imports, maybe_memory) {
|
||||
async function load(module, imports) {
|
||||
if (typeof Response === 'function' && module instanceof Response) {
|
||||
memory = imports.wbg.memory = new WebAssembly.Memory({initial:17,maximum:16384,shared:true});
|
||||
if (typeof WebAssembly.instantiateStreaming === 'function') {
|
||||
try {
|
||||
return await WebAssembly.instantiateStreaming(module, imports);
|
||||
@@ -130,7 +167,6 @@ async function load(module, imports, maybe_memory) {
|
||||
return await WebAssembly.instantiate(bytes, imports);
|
||||
|
||||
} else {
|
||||
memory = imports.wbg.memory = maybe_memory;
|
||||
const instance = await WebAssembly.instantiate(module, imports);
|
||||
|
||||
if (instance instanceof WebAssembly.Instance) {
|
||||
@@ -144,10 +180,13 @@ async function load(module, imports, maybe_memory) {
|
||||
|
||||
async function init(input, maybe_memory) {
|
||||
if (typeof input === 'undefined') {
|
||||
input = import.meta.url.replace(/\.js$/, '_bg.wasm');
|
||||
input = new URL('squoosh_oxipng_bg.wasm', import.meta.url);
|
||||
}
|
||||
const imports = {};
|
||||
imports.wbg = {};
|
||||
imports.wbg.__wbindgen_throw = function(arg0, arg1) {
|
||||
throw new Error(getStringFromWasm0(arg0, arg1));
|
||||
};
|
||||
imports.wbg.__wbindgen_module = function() {
|
||||
var ret = init.__wbindgen_wasm_module;
|
||||
return addHeapObject(ret);
|
||||
@@ -156,19 +195,18 @@ async function init(input, maybe_memory) {
|
||||
var ret = wasm.__wbindgen_export_0;
|
||||
return addHeapObject(ret);
|
||||
};
|
||||
imports.wbg.__wbg_of_6510501edc06d65e = function(arg0, arg1) {
|
||||
var ret = Array.of(takeObject(arg0), takeObject(arg1));
|
||||
imports.wbg.__wbg_startWorkers_914655bb4d5bb5e1 = function(arg0, arg1, arg2) {
|
||||
var ret = startWorkers(takeObject(arg0), takeObject(arg1), wbg_rayon_PoolBuilder.__wrap(arg2));
|
||||
return addHeapObject(ret);
|
||||
};
|
||||
imports.wbg.__wbindgen_throw = function(arg0, arg1) {
|
||||
throw new Error(getStringFromWasm0(arg0, arg1));
|
||||
};
|
||||
|
||||
if (typeof input === 'string' || (typeof Request === 'function' && input instanceof Request) || (typeof URL === 'function' && input instanceof URL)) {
|
||||
input = fetch(input);
|
||||
}
|
||||
|
||||
const { instance, module } = await load(await input, imports, maybe_memory);
|
||||
imports.wbg.memory = maybe_memory || new WebAssembly.Memory({initial:17,maximum:16384,shared:true});
|
||||
|
||||
const { instance, module } = await load(await input, imports);
|
||||
|
||||
wasm = instance.exports;
|
||||
init.__wbindgen_wasm_module = module;
|
||||
|
||||
Binary file not shown.
codecs/oxipng/pkg-parallel/squoosh_oxipng_bg.wasm.d.ts (12 changed lines, generated, vendored)
@@ -1,10 +1,14 @@
/* tslint:disable */
/* eslint-disable */
export function optimise(a: number, b: number, c: number, d: number): void;
export function worker_initializer(a: number): number;
export function start_main_thread(): void;
export function start_worker_thread(): void;
export function optimise(a: number, b: number, c: number, d: number, e: number): void;
export function __wbg_wbg_rayon_poolbuilder_free(a: number): void;
export function wbg_rayon_poolbuilder_numThreads(a: number): number;
export function wbg_rayon_poolbuilder_receiver(a: number): number;
export function wbg_rayon_poolbuilder_build(a: number): void;
export function initThreadPool(a: number): number;
export function wbg_rayon_start_worker(a: number): void;
export const __wbindgen_export_0: WebAssembly.Memory;
export function __wbindgen_add_to_stack_pointer(a: number): number;
export function __wbindgen_malloc(a: number): number;
export function __wbindgen_free(a: number, b: number): void;
export function __wbindgen_start(): void;
codecs/oxipng/pkg/squoosh_oxipng.d.ts (7 changed lines, generated, vendored)
@@ -3,15 +3,17 @@
/**
* @param {Uint8Array} data
* @param {number} level
* @param {boolean} interlace
* @returns {Uint8Array}
*/
export function optimise(data: Uint8Array, level: number): Uint8Array;
export function optimise(data: Uint8Array, level: number, interlace: boolean): Uint8Array;

export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module;

export interface InitOutput {
readonly memory: WebAssembly.Memory;
readonly optimise: (a: number, b: number, c: number, d: number) => void;
readonly optimise: (a: number, b: number, c: number, d: number, e: number) => void;
readonly __wbindgen_add_to_stack_pointer: (a: number) => number;
readonly __wbindgen_malloc: (a: number) => number;
readonly __wbindgen_free: (a: number, b: number) => void;
}
@@ -25,4 +27,3 @@ export interface InitOutput {
* @returns {Promise<InitOutput>}
*/
export default function init (module_or_path?: InitInput | Promise<InitInput>): Promise<InitOutput>;
codecs/oxipng/pkg/squoosh_oxipng.js (16 changed lines, generated)
@@ -40,28 +40,27 @@ function getArrayU8FromWasm0(ptr, len) {
|
||||
/**
|
||||
* @param {Uint8Array} data
|
||||
* @param {number} level
|
||||
* @param {boolean} interlace
|
||||
* @returns {Uint8Array}
|
||||
*/
|
||||
export function optimise(data, level) {
|
||||
export function optimise(data, level, interlace) {
|
||||
try {
|
||||
const retptr = wasm.__wbindgen_export_0.value - 16;
|
||||
wasm.__wbindgen_export_0.value = retptr;
|
||||
const retptr = wasm.__wbindgen_add_to_stack_pointer(-16);
|
||||
var ptr0 = passArray8ToWasm0(data, wasm.__wbindgen_malloc);
|
||||
var len0 = WASM_VECTOR_LEN;
|
||||
wasm.optimise(retptr, ptr0, len0, level);
|
||||
wasm.optimise(retptr, ptr0, len0, level, interlace);
|
||||
var r0 = getInt32Memory0()[retptr / 4 + 0];
|
||||
var r1 = getInt32Memory0()[retptr / 4 + 1];
|
||||
var v1 = getArrayU8FromWasm0(r0, r1).slice();
|
||||
wasm.__wbindgen_free(r0, r1 * 1);
|
||||
return v1;
|
||||
} finally {
|
||||
wasm.__wbindgen_export_0.value += 16;
|
||||
wasm.__wbindgen_add_to_stack_pointer(16);
|
||||
}
|
||||
}
|
||||
|
||||
async function load(module, imports) {
|
||||
if (typeof Response === 'function' && module instanceof Response) {
|
||||
|
||||
if (typeof WebAssembly.instantiateStreaming === 'function') {
|
||||
try {
|
||||
return await WebAssembly.instantiateStreaming(module, imports);
|
||||
@@ -80,7 +79,6 @@ async function load(module, imports) {
|
||||
return await WebAssembly.instantiate(bytes, imports);
|
||||
|
||||
} else {
|
||||
|
||||
const instance = await WebAssembly.instantiate(module, imports);
|
||||
|
||||
if (instance instanceof WebAssembly.Instance) {
|
||||
@@ -94,7 +92,7 @@ async function load(module, imports) {
|
||||
|
||||
async function init(input) {
|
||||
if (typeof input === 'undefined') {
|
||||
input = import.meta.url.replace(/\.js$/, '_bg.wasm');
|
||||
input = new URL('squoosh_oxipng_bg.wasm', import.meta.url);
|
||||
}
|
||||
const imports = {};
|
||||
imports.wbg = {};
|
||||
@@ -106,6 +104,8 @@ async function init(input) {
|
||||
input = fetch(input);
|
||||
}
|
||||
|
||||
|
||||
|
||||
const { instance, module } = await load(await input, imports);
|
||||
|
||||
wasm = instance.exports;
|
||||
|
||||
Binary file not shown.
codecs/oxipng/pkg/squoosh_oxipng_bg.wasm.d.ts (3 changed lines, generated, vendored)
@@ -1,6 +1,7 @@
/* tslint:disable */
/* eslint-disable */
export const memory: WebAssembly.Memory;
export function optimise(a: number, b: number, c: number, d: number): void;
export function optimise(a: number, b: number, c: number, d: number, e: number): void;
export function __wbindgen_add_to_stack_pointer(a: number): number;
export function __wbindgen_malloc(a: number): number;
export function __wbindgen_free(a: number, b: number): void;
@@ -1,11 +1,11 @@
#[cfg(feature = "parallel")]
pub use wasm_bindgen_rayon::init_thread_pool;

use oxipng::AlphaOptim;
use wasm_bindgen::prelude::*;

#[cfg(feature = "parallel")]
pub mod parallel;

#[wasm_bindgen]
pub fn optimise(data: &[u8], level: u8) -> Vec<u8> {
pub fn optimise(data: &[u8], level: u8, interlace: bool) -> Vec<u8> {
let mut options = oxipng::Options::from_preset(level);
options.alphas.insert(AlphaOptim::Black);
options.alphas.insert(AlphaOptim::White);
@@ -13,6 +13,7 @@ pub fn optimise(data: &[u8], level: u8) -> Vec<u8> {
options.alphas.insert(AlphaOptim::Down);
options.alphas.insert(AlphaOptim::Left);
options.alphas.insert(AlphaOptim::Right);
options.interlace = Some(if interlace { 1 } else { 0 });

options.deflate = oxipng::Deflaters::Libdeflater;
oxipng::optimize_from_memory(data, &options).unwrap_throw()
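The only functional change to the Rust entry point is the new interlace parameter, which maps directly onto oxipng's options.interlace (Some(1) for Adam7, Some(0) for none). From JavaScript this surfaces as a third argument to optimise() in the regenerated pkg bindings above; a small sketch, where the input fetch and the effort level are illustrative assumptions.

```js
import init, { optimise } from './pkg/squoosh_oxipng.js';

await init();

// Illustrative input: any PNG bytes as a Uint8Array.
const pngBytes = new Uint8Array(await (await fetch('in.png')).arrayBuffer());

// Same bytes, same effort level, with and without Adam7 interlacing.
const plain = optimise(pngBytes, 3, false);     // level 3 is an illustrative choice
const interlaced = optimise(pngBytes, 3, true); // forces options.interlace = Some(1)
```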
@@ -1,62 +0,0 @@
use crossbeam_channel::{bounded, Receiver, Sender};
use once_cell::sync::OnceCell;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsValue;

#[wasm_bindgen]
extern "C" {
#[wasm_bindgen(js_namespace = Array, js_name = of)]
fn array_of_2(a: JsValue, b: JsValue) -> JsValue;
}

// This is one of the parts that work around Chromium incorrectly implementing postMessage:
// https://bugs.chromium.org/p/chromium/issues/detail?id=1075645
//
// rayon::ThreadPoolBuilder (used below) executes spawn handler to populate the worker pool,
// and then blocks the current thread until each worker unblocks its (opaque) lock.
//
// Normally, we could use postMessage directly inside the spawn handler to
// post module + memory + threadPtr to each worker, and the block the current thread.
//
// However, that bug means that postMessage is currently delayed until the next event loop,
// which will never spin since we block the current thread, and so the other workers will
// never be able to unblock us.
//
// To work around this problem, we:
// 1) Expose `worker_initializer` that returns module + memory pair (without threadPtr)
// that workers can be initialised with to become native threads.
// JavaScript can postMessage this pair in advance, and asynchronously wait for workers
// to acknowledge the receipt.
// 2) Create a global communication channel on the Rust side using crossbeam.
// It will be used to send threadPtr to the pre-initialised workers
// instead of postMessage.
// 3) Provide a separate `start_main_thread` that expects all workers to be ready,
// and just uses the provided channel to send `threadPtr`s using the
// shared memory and blocks the current thread until they're all grabbed.
// 4) Provide a `worker_initializer` that is expected to be invoked from various workers,
// reads one `threadPtr` from the shared channel and starts running it.
static CHANNEL: OnceCell<(Sender<rayon::ThreadBuilder>, Receiver<rayon::ThreadBuilder>)> =
OnceCell::new();

#[wasm_bindgen]
pub fn worker_initializer(num: usize) -> JsValue {
CHANNEL.get_or_init(|| bounded(num));
array_of_2(wasm_bindgen::module(), wasm_bindgen::memory())
}

#[wasm_bindgen]
pub fn start_main_thread() {
let (sender, _) = CHANNEL.get().unwrap();

rayon::ThreadPoolBuilder::new()
.num_threads(sender.capacity().unwrap())
.spawn_handler(|thread| Ok(sender.send(thread).unwrap_throw()))
.build_global()
.unwrap_throw()
}

#[wasm_bindgen]
pub fn start_worker_thread() {
let (_, receiver) = CHANNEL.get().unwrap();
receiver.recv().unwrap_throw().run()
}
@@ -137,10 +137,24 @@ export default function (inputOptions, outputOptions, resolveFileUrl) {
|
||||
const dependencies = getDependencies(clientOutput, clientEntry);
|
||||
|
||||
if (property.startsWith(allSrcPlaceholder)) {
|
||||
const depCodes = dependencies.map(
|
||||
(name) => clientOutput.find((item) => item.fileName === name).code,
|
||||
);
|
||||
return JSON.stringify([clientEntry.code, ...depCodes].join(';'));
|
||||
const allModules = [
|
||||
clientEntry,
|
||||
...dependencies.map((name) =>
|
||||
clientOutput.find((item) => item.fileName === name),
|
||||
),
|
||||
];
|
||||
|
||||
const inlineDefines = [
|
||||
...allModules.map(
|
||||
(item) =>
|
||||
`self.nextDefineUri=location.origin+${resolveFileUrl(item)};${
|
||||
item.code
|
||||
}`,
|
||||
),
|
||||
'self.nextDefineUri=""',
|
||||
];
|
||||
|
||||
return JSON.stringify(inlineDefines.join(''));
|
||||
}
|
||||
|
||||
return (
|
||||
|
||||
@@ -48,7 +48,7 @@ const appendCssSource = `
|
||||
}
|
||||
`;
|
||||
|
||||
export default function (resolveFileUrl) {
|
||||
export default function () {
|
||||
/** @type {string[]} */
|
||||
let emittedCSSIds;
|
||||
/** @type {Map<string, string>} */
|
||||
|
lib/omt.ejs
@@ -1,5 +1,5 @@
|
||||
/**
|
||||
* Copyright 2020 Google Inc. All Rights Reserved.
|
||||
* Copyright 2021 Google Inc. All Rights Reserved.
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
@@ -13,80 +13,63 @@
|
||||
|
||||
// If the loader is already loaded, just stop.
|
||||
if (!self.<%- amdFunctionName %>) {
|
||||
const singleRequire = async name => {
|
||||
if (name === 'require') return require;
|
||||
let url;
|
||||
if (name.startsWith(location.origin)) {
|
||||
url = name.slice(location.origin.length);
|
||||
} else {
|
||||
url = name.slice(1) + '.js';
|
||||
}
|
||||
if (!url.startsWith('/c/')) {
|
||||
url = '/c' + url;
|
||||
}
|
||||
name = './static' + url;
|
||||
if (registry[name]) return registry[name];
|
||||
let registry = {};
|
||||
|
||||
if (!registry[name]) {
|
||||
const singleRequire = (uri, parentUri) => {
|
||||
uri = uri.startsWith(location.origin) ? uri : new URL(uri + ".js", parentUri).href;
|
||||
return registry[uri] || (
|
||||
<% if (useEval) { %>
|
||||
const text = await fetch(url).then(resp => resp.text());
|
||||
eval(text);
|
||||
fetch(uri)
|
||||
.then(resp => resp.text())
|
||||
.then(code => {
|
||||
self.nextDefineUri = uri;
|
||||
eval(code);
|
||||
})
|
||||
<% } else { %>
|
||||
if ("document" in self) {
|
||||
await new Promise(resolve => {
|
||||
new Promise(resolve => {
|
||||
if ("document" in self) {
|
||||
const script = document.createElement("script");
|
||||
script.src = url;
|
||||
document.head.appendChild(script);
|
||||
script.src = uri;
|
||||
script.onload = resolve;
|
||||
});
|
||||
} else {
|
||||
importScripts(url);
|
||||
}
|
||||
document.head.appendChild(script);
|
||||
} else {
|
||||
self.nextDefineUri = uri;
|
||||
importScripts(uri);
|
||||
resolve();
|
||||
}
|
||||
})
|
||||
<% } %>
|
||||
}
|
||||
if (!registry[name]) {
|
||||
throw new Error(`Module ${name} didn’t register its module`);
|
||||
}
|
||||
return registry[name];
|
||||
.then(() => {
|
||||
let promise = registry[uri];
|
||||
if (!promise) {
|
||||
throw new Error(`Module ${uri} didn’t register its module`);
|
||||
}
|
||||
return promise;
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
const require = (names, resolve) => {
|
||||
Promise.all(names.map(singleRequire))
|
||||
.then(modules => resolve(modules.length === 1 ? modules[0] : modules));
|
||||
};
|
||||
|
||||
const registry = {
|
||||
require: Promise.resolve(require)
|
||||
};
|
||||
|
||||
self.<%- amdFunctionName %> = (moduleName, depsNames, factory) => {
|
||||
if (registry[moduleName]) {
|
||||
self.<%- amdFunctionName %> = (depsNames, factory) => {
|
||||
const uri = self.nextDefineUri || ("document" in self ? document.currentScript.src : "") || location.href;
|
||||
if (registry[uri]) {
|
||||
// Module is already loading or loaded.
|
||||
return;
|
||||
}
|
||||
registry[moduleName] = Promise.resolve().then(() => {
|
||||
let exports = {};
|
||||
const module = {
|
||||
uri: location.origin + moduleName.slice(1)
|
||||
};
|
||||
return Promise.all(
|
||||
depsNames.map(depName => {
|
||||
switch(depName) {
|
||||
case "exports":
|
||||
return exports;
|
||||
case "module":
|
||||
return module;
|
||||
default:
|
||||
return singleRequire(depName);
|
||||
}
|
||||
})
|
||||
).then(deps => {
|
||||
const facValue = factory(...deps);
|
||||
if (!exports.default) {
|
||||
exports.default = facValue;
|
||||
}
|
||||
return exports;
|
||||
});
|
||||
let exports = {};
|
||||
const require = depUri => singleRequire(depUri, uri);
|
||||
const specialDeps = {
|
||||
module: { uri },
|
||||
exports,
|
||||
require
|
||||
};
|
||||
// Note: Promise.resolve() is necessary to delay loading until all the
|
||||
// `define`s on the current page had a chance to execute first.
|
||||
// This allows to inline some deps on the main page.
|
||||
registry[uri] = Promise.resolve().then(() => Promise.all(depsNames.map(
|
||||
depName => specialDeps[depName] || require(depName)
|
||||
))).then(deps => {
|
||||
factory(...deps);
|
||||
return exports;
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
libsquoosh/.gitignore
@@ -0,0 +1,3 @@
node_modules
build
.DS_Store

libsquoosh/.npmignore
@@ -0,0 +1 @@
node_modules

libsquoosh/.npmrc (empty file)

libsquoosh/README.md
@@ -0,0 +1,163 @@
# libSquoosh

libSquoosh is an _experimental_ way to run all the codecs you know from the [Squoosh] web app directly inside your own JavaScript program. libSquoosh uses a worker pool to parallelize the processing of images, so you can apply the same codec to many images at once.

libSquoosh is currently not the fastest image compression tool in town and doesn’t aim to be. It is, however, fast enough to compress many images at once, sufficiently quickly.

## Installation

libSquoosh can be installed to your local project with the following command:

```
$ npm install @squoosh/lib
```

You can start using libSquoosh by adding these lines to the top of your JS program:

```js
import { ImagePool } from '@squoosh/lib';
const imagePool = new ImagePool();
```

This will create an image pool with an underlying processing pipeline that you can use to ingest and encode images. The ImagePool constructor takes one argument that defines how many parallel operations it is allowed to run at any given time. By default, this number is set to the number of CPU cores available on the system it is running on.
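If you want to cap the parallelism yourself, pass that number explicitly. A minimal sketch, assuming you want to leave some cores free (the halving below is an illustrative choice, not a library default):

```js
import { cpus } from 'os';
import { ImagePool } from '@squoosh/lib';

// Use roughly half the available cores; with no argument, ImagePool uses all of them.
const imagePool = new ImagePool(Math.max(1, Math.floor(cpus().length / 2)));
```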
## Ingesting images

You can ingest a new image like so:

```js
const imagePath = 'path/to/image.png';
const image = imagePool.ingestImage(imagePath);
```

The `ingestImage` function can take anything the Node.js [`readFile`][readfile] function can take, including a buffer and a `FileHandle`.

The returned `image` object is a representation of the original image that you can now preprocess, encode, and extract information about.

## Preprocessing and encoding images

When an image has been ingested, you can start preprocessing it and encoding it to other formats. This example will resize the image and then encode it to a `.jpg` and a `.jxl` image:

```js
await image.decoded; // Wait until the image is decoded before running preprocessors.

const preprocessOptions = {
  resize: {
    enabled: true,
    width: 100,
    height: 50,
  },
};
await image.preprocess(preprocessOptions);

const encodeOptions = {
  mozjpeg: {}, // An empty object means 'use default settings'.
  jxl: {
    quality: 90,
  },
};
await image.encode(encodeOptions);
```

The default values for each option can be found in the [`codecs.js`][codecs.js] file under `defaultEncoderOptions`. Every unspecified value will use the default value specified there. _Better documentation is needed here._

You can run your own code in between the different steps, for example if you want to change how much the image should be resized based on its original height, as sketched below. (See [Extracting image information](#extracting-image-information) to learn how to get the image dimensions.)
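As a sketch of that idea (assuming the bitmap dimensions shown in [Extracting image information](#extracting-image-information)), you could scale an image down to half its original size before encoding:

```js
const {
  bitmap: { width, height },
} = await image.decoded;

// Halve both dimensions; the factor 2 is only an example.
await image.preprocess({
  resize: {
    enabled: true,
    width: Math.round(width / 2),
    height: Math.round(height / 2),
  },
});
```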
## Closing the ImagePool

When you have encoded everything you need, it is recommended to close the processing pipeline in the ImagePool. This will not delete the images you have already encoded, but it will prevent you from ingesting and encoding new images.

Close the ImagePool pipeline with this line:

```js
await imagePool.close();
```

## Writing encoded images to the file system

When you have encoded an image, you normally want to write it to a file.

This example takes an image that has been encoded as a `jpg` and writes it to a file:

```js
import { promises as fs } from 'fs';

const rawEncodedImage = (await image.encodedWith.mozjpeg).binary;

await fs.writeFile('/path/to/new/image.jpg', rawEncodedImage);
```

This example iterates through all encoded versions of the image and writes them to a specific path:

```js
const newImagePath = '/path/to/image.'; // The extension is added automatically.

for (const encodedImage of Object.values(image.encodedWith)) {
  await fs.writeFile(
    newImagePath + (await encodedImage).extension,
    (await encodedImage).binary,
  );
}
```
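Putting the pieces above together, here is a sketch of compressing a whole batch of images in parallel and writing the results back out. The file names and the `quality` value are made up for illustration:

```js
import { promises as fs } from 'fs';
import { ImagePool } from '@squoosh/lib';

const imagePool = new ImagePool();
const files = ['a.png', 'b.png', 'c.png']; // hypothetical input files

await Promise.all(
  files.map(async (file) => {
    const image = imagePool.ingestImage(file);
    await image.encode({ mozjpeg: { quality: 75 } });
    const { extension, binary } = await image.encodedWith.mozjpeg;
    await fs.writeFile(file.replace(/\.png$/, '.' + extension), binary);
  }),
);

await imagePool.close();
```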
## Extracting image information

Information about a decoded image is available at `Image.decoded`. It looks something like this:

```js
console.log(await image.decoded);
// Returns:
{
  bitmap: {
    data: Uint8ClampedArray(47736584) [
      225, 228, 237, 255, 225, 228, 237, 255, 225, 228, 237, 255,
      225, 228, 237, 255, 225, 228, 237, 255, 225, 228, 237, 255,
      225, 228, 237, 255,
      ... // the entire raw image
    ],
    width: 4606, // pixels
    height: 2591 // pixels
  },
  size: 2467795 // bytes
}
```

Information about an encoded image can be found at `Image.encodedWith[encoderName]`. It looks something like this:

```js
console.log(await image.encodedWith.jxl);
// Returns:
{
  optionsUsed: {
    quality: 75,
    baseline: false,
    arithmetic: false,
    progressive: true,
    ... // all the possible options for this encoder
  },
  binary: Uint8Array(1266975) [
    1, 0, 0, 1, 0, 1, 0, 0, 255, 219, 0, 132,
    113, 119, 156, 156, 209, 1, 8, 8, 8, 8, 9, 8,
    9, 10, 10, 9,
    ... // the entire raw encoded image
  ],
  extension: 'jxl',
  size: 1266975 // bytes
}
```

## Auto optimizer

libSquoosh has an _experimental_ auto optimizer that compresses an image as much as possible, trying to hit a specific [Butteraugli] target value. The higher the Butteraugli target value, the more artifacts can be introduced.

You can make use of the auto optimizer by passing the string `'auto'` as the config object for an encoder:

```js
const encodeOptions = {
  mozjpeg: 'auto',
};
```
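Judging from how `libsquoosh/src/index.js` reads the encode options, the Butteraugli target and the number of optimizer rounds appear to be configurable alongside the per-encoder settings. A hedged sketch (the values shown are illustrative, not documented defaults):

```js
const encodeOptions = {
  mozjpeg: 'auto',
  // Read by image.encode(); a lower target stays closer to the original at the cost of larger files.
  optimizerButteraugliTarget: 1.4,
  // Upper bound on the binary-search rounds the optimizer spends per image.
  maxOptimizerRounds: 6,
};
await image.encode(encodeOptions);
```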
[squoosh]: https://squoosh.app
[codecs.js]: https://github.com/GoogleChromeLabs/squoosh/blob/dev/libsquoosh/src/codecs.js
[butteraugli]: https://github.com/google/butteraugli
[readfile]: https://nodejs.org/api/fs.html#fs_fspromises_readfile_path_options
@@ -10,11 +10,11 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
import { promises as fs } from "fs";
|
||||
import { basename } from "path";
|
||||
import { promises as fs } from 'fs';
|
||||
import { basename } from 'path';
|
||||
|
||||
const defaultOpts = {
|
||||
prefix: "asset-url"
|
||||
prefix: 'asset-url',
|
||||
};
|
||||
|
||||
export default function assetPlugin(opts) {
|
||||
@@ -23,16 +23,16 @@ export default function assetPlugin(opts) {
|
||||
/** @type {Map<string, Buffer>} */
|
||||
let assetIdToSourceBuffer;
|
||||
|
||||
const prefix = opts.prefix + ":";
|
||||
const prefix = opts.prefix + ':';
|
||||
return {
|
||||
name: "asset-plugin",
|
||||
name: 'asset-plugin',
|
||||
buildStart() {
|
||||
assetIdToSourceBuffer = new Map();
|
||||
},
|
||||
augmentChunkHash(info) {
|
||||
// Get the sources for all assets imported by this chunk.
|
||||
const buffers = Object.keys(info.modules)
|
||||
.map(moduleId => assetIdToSourceBuffer.get(moduleId))
|
||||
.map((moduleId) => assetIdToSourceBuffer.get(moduleId))
|
||||
.filter(Boolean);
|
||||
|
||||
if (buffers.length === 0) return;
|
||||
@@ -56,20 +56,20 @@ export default function assetPlugin(opts) {
|
||||
throw Error(`Cannot find ${realId}`);
|
||||
}
|
||||
// Add an additional .js to the end so it ends up with .js at the end in the _virtual folder.
|
||||
return prefix + resolveResult.id + ".js";
|
||||
return prefix + resolveResult.id + '.js';
|
||||
},
|
||||
async load(id) {
|
||||
if (!id.startsWith(prefix)) return;
|
||||
const realId = id.slice(prefix.length, -".js".length);
|
||||
const realId = id.slice(prefix.length, -'.js'.length);
|
||||
const source = await fs.readFile(realId);
|
||||
assetIdToSourceBuffer.set(id, source);
|
||||
this.addWatchFile(realId);
|
||||
|
||||
return `export default import.meta.ROLLUP_FILE_URL_${this.emitFile({
|
||||
type: "asset",
|
||||
type: 'asset',
|
||||
source,
|
||||
name: basename(realId)
|
||||
name: basename(realId),
|
||||
})}`;
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -5,8 +5,8 @@ export default function autojsonPlugin() {
|
||||
name: 'autojson-plugin',
|
||||
async load(id) {
|
||||
if (id.endsWith('.json') && !id.startsWith('json:')) {
|
||||
return 'export default ' + await fsp.readFile(id, 'utf8');
|
||||
return 'export default ' + (await fsp.readFile(id, 'utf8'));
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
};
|
||||
}
|
||||
libsquoosh/package-lock.json (generated; diff too large, not shown)

libsquoosh/package.json
@@ -0,0 +1,28 @@
{
  "name": "@squoosh/lib",
  "version": "0.2.1",
  "description": "A Node library for Squoosh",
  "public": true,
  "main": "/build/index.js",
  "files": [
    "/build/*"
  ],
  "scripts": {
    "build": "rollup -c"
  },
  "keywords": [],
  "author": "Google Chrome Developers <chromium-dev@google.com>",
  "license": "Apache-2.0",
  "dependencies": {
    "web-streams-polyfill": "^3.0.3"
  },
  "devDependencies": {
    "@babel/core": "^7.14.0",
    "@babel/preset-env": "^7.14.0",
    "@rollup/plugin-babel": "^5.3.0",
    "@rollup/plugin-commonjs": "^18.0.0",
    "@rollup/plugin-node-resolve": "^11.2.1",
    "rollup": "^2.46.0",
    "rollup-plugin-terser": "^7.0.2"
  }
}
@@ -13,9 +13,6 @@ export default {
|
||||
dir: 'build',
|
||||
format: 'cjs',
|
||||
assetFileNames: '[name]-[hash][extname]',
|
||||
// This is needed so the resulting `index.js` can be
|
||||
// executed by `npx`.
|
||||
banner: '#!/usr/bin/env node',
|
||||
},
|
||||
plugins: [
|
||||
resolve(),
|
||||
@@ -27,7 +24,7 @@ export default {
|
||||
babelrc: false,
|
||||
configFile: false,
|
||||
minified: process.env.DEBUG != '',
|
||||
comments: false,
|
||||
comments: true,
|
||||
presets: [
|
||||
[
|
||||
'@babel/preset-env',
|
||||
@@ -41,5 +38,5 @@ export default {
|
||||
],
|
||||
}),
|
||||
],
|
||||
external: builtinModules,
|
||||
external: [...builtinModules, 'web-streams-polyfill'],
|
||||
};
|
||||
@@ -1,7 +1,7 @@
|
||||
import { instantiateEmscriptenWasm } from "./emscripten-utils.js";
|
||||
import { instantiateEmscriptenWasm } from './emscripten-utils.js';
|
||||
|
||||
import visdif from "../../codecs/visdif/visdif.js";
|
||||
import visdifWasm from "asset-url:../../codecs/visdif/visdif.wasm";
|
||||
import visdif from '../../codecs/visdif/visdif.js';
|
||||
import visdifWasm from 'asset-url:../../codecs/visdif/visdif.wasm';
|
||||
|
||||
// `measure` is a (async) function that takes exactly one numeric parameter and
|
||||
// returns a value. The function is assumed to be monotonic (an increase in `parameter`
|
||||
@@ -11,7 +11,7 @@ import visdifWasm from "asset-url:../../codecs/visdif/visdif.wasm";
|
||||
export async function binarySearch(
|
||||
measureGoal,
|
||||
measure,
|
||||
{ min = 0, max = 100, epsilon = 0.1, maxRounds = 8 } = {}
|
||||
{ min = 0, max = 100, epsilon = 0.1, maxRounds = 8 } = {},
|
||||
) {
|
||||
let parameter = (max - min) / 2 + min;
|
||||
let delta = (max - min) / 4;
|
||||
@@ -36,14 +36,14 @@ export async function autoOptimize(
|
||||
bitmapIn,
|
||||
encode,
|
||||
decode,
|
||||
{ butteraugliDistanceGoal = 1.4, ...otherOpts } = {}
|
||||
{ butteraugliDistanceGoal = 1.4, ...otherOpts } = {},
|
||||
) {
|
||||
const { VisDiff } = await instantiateEmscriptenWasm(visdif, visdifWasm);
|
||||
|
||||
const comparator = new VisDiff(
|
||||
bitmapIn.data,
|
||||
bitmapIn.width,
|
||||
bitmapIn.height
|
||||
bitmapIn.height,
|
||||
);
|
||||
|
||||
let bitmapOut;
|
||||
@@ -53,18 +53,18 @@ export async function autoOptimize(
|
||||
// increase the metric value. So multipliy Butteraugli values by -1.
|
||||
const { parameter } = await binarySearch(
|
||||
-1 * butteraugliDistanceGoal,
|
||||
async quality => {
|
||||
async (quality) => {
|
||||
binaryOut = await encode(bitmapIn, quality);
|
||||
bitmapOut = await decode(binaryOut);
|
||||
return -1 * comparator.distance(bitmapOut.data);
|
||||
},
|
||||
otherOpts
|
||||
otherOpts,
|
||||
);
|
||||
comparator.delete();
|
||||
|
||||
return {
|
||||
bitmap: bitmapOut,
|
||||
binary: binaryOut,
|
||||
quality: parameter
|
||||
quality: parameter,
|
||||
};
|
||||
}
|
||||
@@ -344,7 +344,11 @@ export const codecs = {
|
||||
await oxipngPromise;
|
||||
return {
|
||||
encode: (buffer, width, height, opts) => {
|
||||
const simplePng = pngEncDec.encode(new Uint8Array(buffer), width, height);
|
||||
const simplePng = pngEncDec.encode(
|
||||
new Uint8Array(buffer),
|
||||
width,
|
||||
height,
|
||||
);
|
||||
return oxipng.optimise(simplePng, opts.level);
|
||||
},
|
||||
};
|
||||
libsquoosh/src/index.js
@@ -0,0 +1,209 @@
|
||||
import { isMainThread } from 'worker_threads';
|
||||
import { cpus } from 'os';
|
||||
import { promises as fsp } from 'fs';
|
||||
|
||||
import { codecs as encoders, preprocessors } from './codecs.js';
|
||||
import WorkerPool from './worker_pool.js';
|
||||
import { autoOptimize } from './auto-optimizer.js';
|
||||
|
||||
export { ImagePool, encoders, preprocessors };
|
||||
|
||||
async function decodeFile({ file }) {
|
||||
const buffer = await fsp.readFile(file);
|
||||
const firstChunk = buffer.slice(0, 16);
|
||||
const firstChunkString = Array.from(firstChunk)
|
||||
.map((v) => String.fromCodePoint(v))
|
||||
.join('');
|
||||
const key = Object.entries(encoders).find(([name, { detectors }]) =>
|
||||
detectors.some((detector) => detector.exec(firstChunkString)),
|
||||
)?.[0];
|
||||
if (!key) {
|
||||
throw Error(`${file} has an unsupported format`);
|
||||
}
|
||||
const rgba = (await encoders[key].dec()).decode(new Uint8Array(buffer));
|
||||
return {
|
||||
bitmap: rgba,
|
||||
size: buffer.length,
|
||||
};
|
||||
}
|
||||
|
||||
async function preprocessImage({ preprocessorName, options, image }) {
|
||||
const preprocessor = await preprocessors[preprocessorName].instantiate();
|
||||
image.bitmap = await preprocessor(
|
||||
image.bitmap.data,
|
||||
image.bitmap.width,
|
||||
image.bitmap.height,
|
||||
options,
|
||||
);
|
||||
return image;
|
||||
}
|
||||
|
||||
async function encodeImage({
|
||||
bitmap: bitmapIn,
|
||||
encName,
|
||||
encConfig,
|
||||
optimizerButteraugliTarget,
|
||||
maxOptimizerRounds,
|
||||
}) {
|
||||
let binary;
|
||||
let optionsUsed = encConfig;
|
||||
const encoder = await encoders[encName].enc();
|
||||
if (encConfig === 'auto') {
|
||||
const optionToOptimize = encoders[encName].autoOptimize.option;
|
||||
const decoder = await encoders[encName].dec();
|
||||
const encode = (bitmapIn, quality) =>
|
||||
encoder.encode(
|
||||
bitmapIn.data,
|
||||
bitmapIn.width,
|
||||
bitmapIn.height,
|
||||
Object.assign({}, encoders[encName].defaultEncoderOptions, {
|
||||
[optionToOptimize]: quality,
|
||||
}),
|
||||
);
|
||||
const decode = (binary) => decoder.decode(binary);
|
||||
const { binary: optimizedBinary, quality } = await autoOptimize(
|
||||
bitmapIn,
|
||||
encode,
|
||||
decode,
|
||||
{
|
||||
min: encoders[encName].autoOptimize.min,
|
||||
max: encoders[encName].autoOptimize.max,
|
||||
butteraugliDistanceGoal: optimizerButteraugliTarget,
|
||||
maxRounds: maxOptimizerRounds,
|
||||
},
|
||||
);
|
||||
binary = optimizedBinary;
|
||||
optionsUsed = {
|
||||
// 5 significant digits is enough
|
||||
[optionToOptimize]: Math.round(quality * 10000) / 10000,
|
||||
};
|
||||
} else {
|
||||
binary = encoder.encode(
|
||||
bitmapIn.data.buffer,
|
||||
bitmapIn.width,
|
||||
bitmapIn.height,
|
||||
encConfig,
|
||||
);
|
||||
}
|
||||
return {
|
||||
optionsUsed,
|
||||
binary,
|
||||
extension: encoders[encName].extension,
|
||||
size: binary.length,
|
||||
};
|
||||
}
|
||||
|
||||
// both decoding and encoding go through the worker pool
|
||||
function handleJob(params) {
|
||||
const { operation } = params;
|
||||
switch (operation) {
|
||||
case 'encode':
|
||||
return encodeImage(params);
|
||||
case 'decode':
|
||||
return decodeFile(params);
|
||||
case 'preprocess':
|
||||
return preprocessImage(params);
|
||||
default:
|
||||
throw Error(`Invalid job "${operation}"`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents an ingested image.
|
||||
*/
|
||||
class Image {
|
||||
constructor(workerPool, file) {
|
||||
this.workerPool = workerPool;
|
||||
this.decoded = workerPool.dispatchJob({ operation: 'decode', file });
|
||||
this.encodedWith = {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Define one or several preprocessors to use on the image.
|
||||
* @param {object} preprocessOptions - An object with preprocessors to use, and their settings.
|
||||
* @returns {Promise<undefined>} - A promise that resolves when all preprocessors have completed their work.
|
||||
*/
|
||||
async preprocess(preprocessOptions = {}) {
|
||||
for (const [name, options] of Object.entries(preprocessOptions)) {
|
||||
if (!Object.keys(preprocessors).includes(name)) {
|
||||
throw Error(`Invalid preprocessor "${name}"`);
|
||||
}
|
||||
const preprocessorOptions = Object.assign(
|
||||
{},
|
||||
preprocessors[name].defaultOptions,
|
||||
options,
|
||||
);
|
||||
this.decoded = this.workerPool.dispatchJob({
|
||||
operation: 'preprocess',
|
||||
preprocessorName: name,
|
||||
image: await this.decoded,
|
||||
options: preprocessorOptions,
|
||||
});
|
||||
await this.decoded;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Define one or several encoders to use on the image.
|
||||
* @param {object} encodeOptions - An object with encoders to use, and their settings.
|
||||
* @returns {Promise<undefined>} - A promise that resolves when the image has been encoded with all the specified encoders.
|
||||
*/
|
||||
async encode(encodeOptions = {}) {
|
||||
const { bitmap } = await this.decoded;
|
||||
for (const [encName, options] of Object.entries(encodeOptions)) {
|
||||
if (!Object.keys(encoders).includes(encName)) {
|
||||
continue;
|
||||
}
|
||||
const encRef = encoders[encName];
|
||||
const encConfig =
|
||||
typeof options === 'string'
|
||||
? options
|
||||
: Object.assign({}, encRef.defaultEncoderOptions, options);
|
||||
this.encodedWith[encName] = this.workerPool.dispatchJob({
|
||||
operation: 'encode',
|
||||
bitmap,
|
||||
encName,
|
||||
encConfig,
|
||||
optimizerButteraugliTarget: Number(
|
||||
encodeOptions.optimizerButteraugliTarget,
|
||||
),
|
||||
maxOptimizerRounds: Number(encodeOptions.maxOptimizerRounds),
|
||||
});
|
||||
}
|
||||
await Promise.all(Object.values(this.encodedWith));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A pool where images can be ingested and squooshed.
|
||||
*/
|
||||
class ImagePool {
|
||||
/**
|
||||
* Create a new pool.
|
||||
* @param {number} [threads] - Number of concurrent image processes to run in the pool. Defaults to the number of CPU cores in the system.
|
||||
*/
|
||||
constructor(threads) {
|
||||
this.workerPool = new WorkerPool(threads || cpus().length, __filename);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ingest an image into the image pool.
|
||||
* @param {string | Buffer | URL | object} image - The image or path to the image that should be ingested and decoded.
|
||||
* @returns {Image} - A custom class reference to the decoded image.
|
||||
*/
|
||||
ingestImage(image) {
|
||||
return new Image(this.workerPool, image);
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the underlying image processing pipeline. The already processed images will still be there, but no new processing can start.
|
||||
* @returns {Promise<undefined>} - A promise that resolves when the underlying pipeline has closed.
|
||||
*/
|
||||
async close() {
|
||||
await this.workerPool.join();
|
||||
}
|
||||
}
|
||||
|
||||
if (!isMainThread) {
|
||||
WorkerPool.useThisThreadAsWorker(handleJob);
|
||||
}
|
||||
package-lock.json (generated)
@@ -179,13 +179,25 @@
|
||||
}
|
||||
},
|
||||
"@surma/rollup-plugin-off-main-thread": {
|
||||
"version": "1.4.2",
|
||||
"resolved": "https://registry.npmjs.org/@surma/rollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-1.4.2.tgz",
|
||||
"integrity": "sha512-yBMPqmd1yEJo/280PAMkychuaALyQ9Lkb5q1ck3mjJrFuEobIfhnQ4J3mbvBoISmR3SWMWV+cGB/I0lCQee79A==",
|
||||
"version": "2.2.1",
|
||||
"resolved": "https://registry.npmjs.org/@surma/rollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-2.2.1.tgz",
|
||||
"integrity": "sha512-7OU8wfyv18YPWVmecg2/0Jh+pm3lQbvPhIWHd1YQpoxPKPW/vsDNGBaCnMKsZbz29RjgCoXKugAjyagPncgdEw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"ejs": "^2.6.1",
|
||||
"ejs": "^3.1.6",
|
||||
"json5": "^2.2.0",
|
||||
"magic-string": "^0.25.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"json5": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz",
|
||||
"integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"minimist": "^1.2.5"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"@types/color-name": {
|
||||
@@ -405,6 +417,12 @@
|
||||
"integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==",
|
||||
"dev": true
|
||||
},
|
||||
"async": {
|
||||
"version": "0.9.2",
|
||||
"resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz",
|
||||
"integrity": "sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0=",
|
||||
"dev": true
|
||||
},
|
||||
"balanced-match": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
|
||||
@@ -1597,10 +1615,13 @@
|
||||
}
|
||||
},
|
||||
"ejs": {
|
||||
"version": "2.7.4",
|
||||
"resolved": "https://registry.npmjs.org/ejs/-/ejs-2.7.4.tgz",
|
||||
"integrity": "sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA==",
|
||||
"dev": true
|
||||
"version": "3.1.6",
|
||||
"resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.6.tgz",
|
||||
"integrity": "sha512-9lt9Zse4hPucPkoP7FHDF0LQAlGyF9JVpnClFLFH3aSSbxmyoqINRpp/9wePWJTUl4KOQwRL72Iw3InHPDkoGw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"jake": "^10.6.1"
|
||||
}
|
||||
},
|
||||
"electron-to-chromium": {
|
||||
"version": "1.3.538",
|
||||
@@ -1797,6 +1818,15 @@
|
||||
"integrity": "sha512-na2cwntTVgMsR+BZ2YBr/XQk941DKDw2LJKbV7g6TRdGBQ3rx8V53oEviG8zPWoBOySwK9w/SlZ/gb/F/48I8A==",
|
||||
"dev": true
|
||||
},
|
||||
"filelist": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.2.tgz",
|
||||
"integrity": "sha512-z7O0IS8Plc39rTCq6i6iHxk43duYOn8uFJiWSewIq0Bww1RNybVHSCjahmcC87ZqAm4OTvFzlzeGu3XAzG1ctQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"minimatch": "^3.0.4"
|
||||
}
|
||||
},
|
||||
"fill-range": {
|
||||
"version": "7.0.1",
|
||||
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
|
||||
@@ -2306,6 +2336,70 @@
|
||||
"integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=",
|
||||
"dev": true
|
||||
},
|
||||
"jake": {
|
||||
"version": "10.8.2",
|
||||
"resolved": "https://registry.npmjs.org/jake/-/jake-10.8.2.tgz",
|
||||
"integrity": "sha512-eLpKyrfG3mzvGE2Du8VoPbeSkRry093+tyNjdYaBbJS9v17knImYGNXQCUV0gLxQtF82m3E8iRb/wdSQZLoq7A==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"async": "0.9.x",
|
||||
"chalk": "^2.4.2",
|
||||
"filelist": "^1.0.1",
|
||||
"minimatch": "^3.0.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"ansi-styles": {
|
||||
"version": "3.2.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
|
||||
"integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"color-convert": "^1.9.0"
|
||||
}
|
||||
},
|
||||
"chalk": {
|
||||
"version": "2.4.2",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
|
||||
"integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"ansi-styles": "^3.2.1",
|
||||
"escape-string-regexp": "^1.0.5",
|
||||
"supports-color": "^5.3.0"
|
||||
}
|
||||
},
|
||||
"color-convert": {
|
||||
"version": "1.9.3",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
|
||||
"integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"color-name": "1.1.3"
|
||||
}
|
||||
},
|
||||
"color-name": {
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
|
||||
"integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=",
|
||||
"dev": true
|
||||
},
|
||||
"has-flag": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
|
||||
"integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=",
|
||||
"dev": true
|
||||
},
|
||||
"supports-color": {
|
||||
"version": "5.5.0",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
|
||||
"integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"has-flag": "^3.0.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"jest-worker": {
|
||||
"version": "26.3.0",
|
||||
"resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-26.3.0.tgz",
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
"@rollup/plugin-commonjs": "^17.0.0",
|
||||
"@rollup/plugin-node-resolve": "^11.1.0",
|
||||
"@rollup/plugin-replace": "^2.3.4",
|
||||
"@surma/rollup-plugin-off-main-thread": "^1.4.2",
|
||||
"@surma/rollup-plugin-off-main-thread": "^2.2.1",
|
||||
"@types/dedent": "^0.7.0",
|
||||
"@types/mime-types": "^2.1.0",
|
||||
"@types/node": "^14.14.7",
|
||||
|
||||
@@ -37,13 +37,6 @@ function resolveFileUrl({ fileName }) {
|
||||
return JSON.stringify(fileName.replace(/^static\//, '/'));
|
||||
}
|
||||
|
||||
// With AMD output, Rollup always uses document.baseURI, which breaks in workers.
|
||||
// This fixes it:
|
||||
function resolveImportMeta(property, { chunkId }) {
|
||||
if (property !== 'url') return;
|
||||
return `new URL(${resolveFileUrl({ fileName: chunkId })}, location).href`;
|
||||
}
|
||||
|
||||
const dir = '.tmp/build';
|
||||
const staticPath = 'static/c/[name]-[hash][extname]';
|
||||
const jsPath = staticPath.replace('[extname]', '.js');
|
||||
@@ -62,6 +55,7 @@ export default async function ({ watch }) {
|
||||
path.join(__dirname, 'lib', 'omt.ejs'),
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
await del('.tmp/build');
|
||||
|
||||
const isProduction = !watch;
|
||||
@@ -83,7 +77,7 @@ export default async function ({ watch }) {
|
||||
]),
|
||||
urlPlugin(),
|
||||
dataURLPlugin(),
|
||||
cssPlugin(resolveFileUrl),
|
||||
cssPlugin(),
|
||||
];
|
||||
|
||||
return {
|
||||
@@ -105,12 +99,12 @@ export default async function ({ watch }) {
|
||||
},
|
||||
preserveModules: true,
|
||||
plugins: [
|
||||
{ resolveFileUrl, resolveImportMeta },
|
||||
{ resolveFileUrl },
|
||||
clientBundlePlugin(
|
||||
{
|
||||
external: ['worker_threads'],
|
||||
plugins: [
|
||||
{ resolveFileUrl, resolveImportMeta },
|
||||
{ resolveFileUrl },
|
||||
OMT({ loader: await omtLoaderPromise }),
|
||||
serviceWorkerPlugin({
|
||||
output: 'static/serviceworker.js',
|
||||
|
||||
@@ -32,7 +32,6 @@ import Results from './Results';
|
||||
import WorkerBridge from '../worker-bridge';
|
||||
import { resize } from 'features/processors/resize/client';
|
||||
import type SnackBarElement from 'shared/custom-els/snack-bar';
|
||||
import { Arrow, ExpandIcon } from '../icons';
|
||||
import { generateCliInvocation } from '../util/cli';
|
||||
|
||||
export type OutputType = EncoderType | 'identity';
|
||||
@@ -70,7 +69,6 @@ interface State {
|
||||
sides: [Side, Side];
|
||||
/** Source image load */
|
||||
loading: boolean;
|
||||
error?: string;
|
||||
mobileView: boolean;
|
||||
preprocessorState: PreprocessorState;
|
||||
encodedPreprocessorState?: PreprocessorState;
|
||||
@@ -109,9 +107,9 @@ async function decodeImage(
|
||||
if (mimeType === 'image/webp2') {
|
||||
return await workerBridge.wp2Decode(signal, blob);
|
||||
}
|
||||
// If it's not one of those types, fall through and try built-in decoding for a laugh.
|
||||
}
|
||||
return await abortable(signal, builtinDecode(blob));
|
||||
// Otherwise fall through and try built-in decoding for a laugh.
|
||||
return await builtinDecode(signal, blob, mimeType);
|
||||
} catch (err) {
|
||||
if (err.name === 'AbortError') throw err;
|
||||
console.log(err);
|
||||
@@ -259,11 +257,6 @@ function processorStateEquivalent(a: ProcessorState, b: ProcessorState) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// These are only used in the mobile view
|
||||
const resultTitles = ['Top', 'Bottom'] as const;
|
||||
// These are only used in the desktop view
|
||||
const buttonPositions = ['download-left', 'download-right'] as const;
|
||||
|
||||
const originalDocumentTitle = document.title;
|
||||
|
||||
function updateDocumentTitle(filename: string = ''): void {
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import * as WebCodecs from '../util/web-codecs';
|
||||
|
||||
/**
|
||||
* Compare two objects, returning a boolean indicating if
|
||||
* they have the same properties and strictly equal values.
|
||||
@@ -192,17 +195,35 @@ interface DrawableToImageDataOptions {
|
||||
sh?: number;
|
||||
}
|
||||
|
||||
function getWidth(
|
||||
drawable: ImageBitmap | HTMLImageElement | VideoFrame,
|
||||
): number {
|
||||
if ('displayWidth' in drawable) {
|
||||
return drawable.displayWidth;
|
||||
}
|
||||
return drawable.width;
|
||||
}
|
||||
|
||||
function getHeight(
|
||||
drawable: ImageBitmap | HTMLImageElement | VideoFrame,
|
||||
): number {
|
||||
if ('displayHeight' in drawable) {
|
||||
return drawable.displayHeight;
|
||||
}
|
||||
return drawable.height;
|
||||
}
|
||||
|
||||
export function drawableToImageData(
|
||||
drawable: ImageBitmap | HTMLImageElement,
|
||||
drawable: ImageBitmap | HTMLImageElement | VideoFrame,
|
||||
opts: DrawableToImageDataOptions = {},
|
||||
): ImageData {
|
||||
const {
|
||||
width = drawable.width,
|
||||
height = drawable.height,
|
||||
width = getWidth(drawable),
|
||||
height = getHeight(drawable),
|
||||
sx = 0,
|
||||
sy = 0,
|
||||
sw = drawable.width,
|
||||
sh = drawable.height,
|
||||
sw = getWidth(drawable),
|
||||
sh = getHeight(drawable),
|
||||
} = opts;
|
||||
|
||||
// Make canvas same size as image
|
||||
@@ -216,13 +237,25 @@ export function drawableToImageData(
|
||||
return ctx.getImageData(0, 0, width, height);
|
||||
}
|
||||
|
||||
export async function builtinDecode(blob: Blob): Promise<ImageData> {
|
||||
// Prefer createImageBitmap as it's the off-thread option for Firefox.
|
||||
const drawable =
|
||||
'createImageBitmap' in self
|
||||
? await createImageBitmap(blob)
|
||||
: await blobToImg(blob);
|
||||
export async function builtinDecode(
|
||||
signal: AbortSignal,
|
||||
blob: Blob,
|
||||
mimeType: string,
|
||||
): Promise<ImageData> {
|
||||
// If WebCodecs are supported, use that.
|
||||
if (await WebCodecs.isTypeSupported(mimeType)) {
|
||||
assertSignal(signal);
|
||||
try {
|
||||
return await abortable(signal, WebCodecs.decode(blob, mimeType));
|
||||
} catch (e) {}
|
||||
}
|
||||
assertSignal(signal);
|
||||
|
||||
// Prefer createImageBitmap as it's the off-thread option for Firefox.
|
||||
const drawable = await abortable<HTMLImageElement | ImageBitmap>(
|
||||
signal,
|
||||
'createImageBitmap' in self ? createImageBitmap(blob) : blobToImg(blob),
|
||||
);
|
||||
return drawableToImageData(drawable);
|
||||
}
|
||||
|
||||
|
||||
src/client/lazy-app/util/web-codecs/index.ts
@@ -0,0 +1,26 @@
import { drawableToImageData } from 'client/lazy-app/util';

const hasImageDecoder = typeof ImageDecoder !== 'undefined';

export async function isTypeSupported(mimeType: string): Promise<boolean> {
  if (!hasImageDecoder) {
    return false;
  }
  return ImageDecoder.isTypeSupported(mimeType);
}

export async function decode(
  blob: Blob | File,
  mimeType: string,
): Promise<ImageData> {
  if (!hasImageDecoder) {
    throw Error(
      `This browser does not support ImageDecoder. This function should not have been called.`,
    );
  }
  const decoder = new ImageDecoder({
    type: mimeType,
    // Non-obvious way to turn a Blob into a ReadableStream
    data: new Response(blob).body!,
  });
  const { image } = await decoder.decode();
  return drawableToImageData(image);
}
src/client/lazy-app/util/web-codecs/missing-types.d.ts
@@ -0,0 +1,60 @@
|
||||
interface ImageDecoderInit {
|
||||
type: string;
|
||||
data: BufferSource | ReadableStream;
|
||||
premultiplyAlpha?: PremultiplyAlpha;
|
||||
colorSpaceConversion?: ColorSpaceConversion;
|
||||
desiredWidth?: number;
|
||||
desiredHeight?: number;
|
||||
preferAnimation?: boolean;
|
||||
}
|
||||
|
||||
interface ImageDecodeOptions {
|
||||
frameIndex: number;
|
||||
completeFramesOnly: boolean;
|
||||
}
|
||||
|
||||
interface ImageDecodeResult {
|
||||
image: VideoFrame;
|
||||
complete: boolean;
|
||||
}
|
||||
|
||||
// I didn’t do all the types because the class is kinda complex.
|
||||
// I focused on what we need.
|
||||
// See https://w3c.github.io/webcodecs/#videoframe
|
||||
declare class VideoFrame {
|
||||
displayWidth: number;
|
||||
displayHeight: number;
|
||||
}
|
||||
|
||||
// Add VideoFrame to canvas’ drawImage()
|
||||
interface CanvasDrawImage {
|
||||
drawImage(
|
||||
image: CanvasImageSource | VideoFrame,
|
||||
dx: number,
|
||||
dy: number,
|
||||
): void;
|
||||
drawImage(
|
||||
image: CanvasImageSource | VideoFrame,
|
||||
dx: number,
|
||||
dy: number,
|
||||
dw: number,
|
||||
dh: number,
|
||||
): void;
|
||||
drawImage(
|
||||
image: CanvasImageSource | VideoFrame,
|
||||
sx: number,
|
||||
sy: number,
|
||||
sw: number,
|
||||
sh: number,
|
||||
dx: number,
|
||||
dy: number,
|
||||
dw: number,
|
||||
dh: number,
|
||||
): void;
|
||||
}
|
||||
|
||||
declare class ImageDecoder {
|
||||
static isTypeSupported(type: string): Promise<boolean>;
|
||||
constructor(desc: ImageDecoderInit);
|
||||
decode(opts?: Partial<ImageDecodeOptions>): Promise<ImageDecodeResult>;
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import { EncodeOptions, defaultOptions } from '../shared/meta';
|
||||
import { EncodeOptions, defaultOptions, AVIFTune } from '../shared/meta';
|
||||
import type WorkerBridge from 'client/lazy-app/worker-bridge';
|
||||
import { h, Component } from 'preact';
|
||||
import { preventDefault, shallowEqual } from 'client/lazy-app/util';
|
||||
@@ -37,7 +37,7 @@ interface State {
|
||||
sharpness: number;
|
||||
denoiseLevel: number;
|
||||
aqMode: number;
|
||||
tune: 'ssim' | 'psnr';
|
||||
tune: AVIFTune;
|
||||
}
|
||||
|
||||
const maxQuant = 63;
|
||||
@@ -82,7 +82,7 @@ export class Options extends Component<Props, State> {
|
||||
chromaDeltaQ: options.chromaDeltaQ,
|
||||
sharpness: options.sharpness,
|
||||
denoiseLevel: options.denoiseLevel,
|
||||
tune: options.targetSsim ? 'ssim' : 'psnr',
|
||||
tune: options.tune,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -133,7 +133,7 @@ export class Options extends Component<Props, State> {
|
||||
chromaDeltaQ: optionState.chromaDeltaQ,
|
||||
sharpness: optionState.sharpness,
|
||||
denoiseLevel: optionState.denoiseLevel,
|
||||
targetSsim: optionState.tune === 'ssim',
|
||||
tune: optionState.tune,
|
||||
};
|
||||
|
||||
// Updating options, so we don't recalculate in getDerivedStateFromProps.
|
||||
@@ -268,13 +268,14 @@ export class Options extends Component<Props, State> {
|
||||
</Range>
|
||||
</div>
|
||||
<label class={style.optionTextFirst}>
|
||||
Tune for:
|
||||
Tuning:
|
||||
<Select
|
||||
value={tune}
|
||||
onChange={this._inputChange('tune', 'string')}
|
||||
onChange={this._inputChange('tune', 'number')}
|
||||
>
|
||||
<option value="psnr">PSNR</option>
|
||||
<option value="ssim">SSIM</option>
|
||||
<option value={AVIFTune.auto}>Auto</option>
|
||||
<option value={AVIFTune.psnr}>PSNR</option>
|
||||
<option value={AVIFTune.ssim}>SSIM</option>
|
||||
</Select>
|
||||
</label>
|
||||
</div>
|
||||
|
||||
@@ -10,9 +10,9 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
import type { EncodeOptions } from 'codecs/avif/enc/avif_enc';
|
||||
import { EncodeOptions, AVIFTune } from 'codecs/avif/enc/avif_enc';
|
||||
|
||||
export { EncodeOptions };
|
||||
export { EncodeOptions, AVIFTune };
|
||||
|
||||
export const label = 'AVIF';
|
||||
export const mimeType = 'image/avif';
|
||||
@@ -27,5 +27,5 @@ export const defaultOptions: EncodeOptions = {
|
||||
subsample: 1,
|
||||
chromaDeltaQ: false,
|
||||
sharpness: 0,
|
||||
targetSsim: false,
|
||||
tune: AVIFTune.auto,
|
||||
};
|
||||
|
||||
@@ -14,28 +14,17 @@ import type { JXLModule } from 'codecs/jxl/enc/jxl_enc';
|
||||
import type { EncodeOptions } from '../shared/meta';
|
||||
|
||||
import { initEmscriptenModule } from 'features/worker-utils';
|
||||
import { threads, simd } from 'wasm-feature-detect';
|
||||
import { threads } from 'wasm-feature-detect';
|
||||
|
||||
import wasmUrl from 'url:codecs/jxl/enc/jxl_enc.wasm';
|
||||
|
||||
import wasmUrlWithMT from 'url:codecs/jxl/enc/jxl_enc_mt.wasm';
|
||||
import workerUrl from 'omt:codecs/jxl/enc/jxl_enc_mt.worker.js';
|
||||
|
||||
import wasmUrlWithMTAndSIMD from 'url:codecs/jxl/enc/jxl_enc_mt_simd.wasm';
|
||||
import workerUrlWithSIMD from 'omt:codecs/jxl/enc/jxl_enc_mt_simd.worker.js';
|
||||
|
||||
let emscriptenModule: Promise<JXLModule>;
|
||||
|
||||
async function init() {
|
||||
if (await threads()) {
|
||||
if (await simd()) {
|
||||
const jxlEncoder = await import('codecs/jxl/enc/jxl_enc_mt_simd');
|
||||
return initEmscriptenModule(
|
||||
jxlEncoder.default,
|
||||
wasmUrlWithMTAndSIMD,
|
||||
workerUrlWithSIMD,
|
||||
);
|
||||
}
|
||||
const jxlEncoder = await import('codecs/jxl/enc/jxl_enc_mt');
|
||||
return initEmscriptenModule(jxlEncoder.default, wasmUrlWithMT, workerUrl);
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import {
|
||||
canvasEncode,
|
||||
abortable,
|
||||
blobToArrayBuffer,
|
||||
inputFieldChecked,
|
||||
} from 'client/lazy-app/util';
|
||||
import { EncodeOptions } from '../shared/meta';
|
||||
import type WorkerBridge from 'client/lazy-app/worker-bridge';
|
||||
@@ -9,6 +10,7 @@ import { h, Component } from 'preact';
|
||||
import { inputFieldValueAsNumber, preventDefault } from 'client/lazy-app/util';
|
||||
import * as style from 'client/lazy-app/Compress/Options/style.css';
|
||||
import Range from 'client/lazy-app/Compress/Options/Range';
|
||||
import Checkbox from 'client/lazy-app/Compress/Options/Checkbox';
|
||||
|
||||
export async function encode(
|
||||
signal: AbortSignal,
|
||||
@@ -34,6 +36,7 @@ export class Options extends Component<Props, {}> {
|
||||
|
||||
const options: EncodeOptions = {
|
||||
level: inputFieldValueAsNumber(form.level),
|
||||
interlace: inputFieldChecked(form.interlace),
|
||||
};
|
||||
this.props.onChange(options);
|
||||
};
|
||||
@@ -41,6 +44,14 @@ export class Options extends Component<Props, {}> {
|
||||
render({ options }: Props) {
|
||||
return (
|
||||
<form class={style.optionsSection} onSubmit={preventDefault}>
|
||||
<label class={style.optionToggle}>
|
||||
Interlace
|
||||
<Checkbox
|
||||
name="interlace"
|
||||
checked={options.interlace}
|
||||
onChange={this.onChange}
|
||||
/>
|
||||
</label>
|
||||
<div class={style.optionOneCell}>
|
||||
<Range
|
||||
name="level"
|
||||
|
||||
@@ -12,6 +12,7 @@
|
||||
*/
|
||||
export interface EncodeOptions {
|
||||
level: number;
|
||||
interlace: boolean;
|
||||
}
|
||||
|
||||
export const label = 'OxiPNG';
|
||||
@@ -20,4 +21,5 @@ export const extension = 'png';
|
||||
|
||||
export const defaultOptions: EncodeOptions = {
|
||||
level: 2,
|
||||
interlace: false,
|
||||
};
|
||||
|
||||
@@ -14,54 +14,17 @@ import initOxiWasmST, {
|
||||
optimise as optimiseST,
|
||||
} from 'codecs/oxipng/pkg/squoosh_oxipng';
|
||||
import initOxiWasmMT, {
|
||||
worker_initializer,
|
||||
start_main_thread,
|
||||
initThreadPool,
|
||||
optimise as optimiseMT,
|
||||
} from 'codecs/oxipng/pkg-parallel/squoosh_oxipng';
|
||||
import oxiWasmUrlST from 'url:codecs/oxipng/pkg/squoosh_oxipng_bg.wasm';
|
||||
import oxiWasmUrlMT from 'url:codecs/oxipng/pkg-parallel/squoosh_oxipng_bg.wasm';
|
||||
import { EncodeOptions } from '../shared/meta';
|
||||
import { threads } from 'wasm-feature-detect';
|
||||
import workerURL from 'omt:./sub-worker';
|
||||
import type { WorkerInit } from './sub-worker';
|
||||
|
||||
function initWorker(worker: Worker, workerInit: WorkerInit) {
|
||||
return new Promise<void>((resolve) => {
|
||||
worker.postMessage(workerInit);
|
||||
worker.addEventListener('message', () => resolve(), { once: true });
|
||||
});
|
||||
}
|
||||
|
||||
async function initMT() {
|
||||
const num = navigator.hardwareConcurrency;
|
||||
|
||||
// First, let browser fetch and spawn Workers for our pool in the background.
|
||||
// This is fairly expensive, so we want to start it as early as possible.
|
||||
const workers = Array.from({ length: num }, () => new Worker(workerURL));
|
||||
|
||||
// Meanwhile, asynchronously compile, instantiate and initialise Wasm on our main thread.
|
||||
await initOxiWasmMT(oxiWasmUrlMT);
|
||||
|
||||
// Get module+memory from the Wasm instance.
|
||||
//
|
||||
// Ideally we wouldn't go via Wasm bindings here, since both are just JS variables, but memory is
|
||||
// currently not exposed on the Wasm instance correctly by wasm-bindgen.
|
||||
const workerInit: WorkerInit = worker_initializer(num);
|
||||
|
||||
// Once done, we want to send module+memory to each Worker so that they instantiate Wasm too.
|
||||
// While doing so, we need to wait for Workers to acknowledge that they have received our message.
|
||||
// Ideally this shouldn't be necessary, but Chromium currently doesn't conform to the spec:
|
||||
// https://bugs.chromium.org/p/chromium/issues/detail?id=1075645
|
||||
//
|
||||
// If we didn't do this ping-pong game, the `start_main_thread` below would block the current
|
||||
// thread on an atomic before even *sending* the `postMessage` containing memory,
|
||||
// so Workers would never be able to unblock us back.
|
||||
await Promise.all(workers.map((worker) => initWorker(worker, workerInit)));
|
||||
|
||||
// Finally, instantiate rayon pool - this will use shared Wasm memory to send tasks to the
|
||||
// Workers and then block until they're all ready.
|
||||
start_main_thread();
|
||||
|
||||
await initThreadPool(navigator.hardwareConcurrency);
|
||||
return optimiseMT;
|
||||
}
|
||||
|
||||
@@ -77,9 +40,12 @@ export default async function encode(
|
||||
options: EncodeOptions,
|
||||
): Promise<ArrayBuffer> {
|
||||
if (!wasmReady) {
|
||||
wasmReady = (await threads()) ? initMT() : initST();
|
||||
wasmReady = threads().then((hasThreads: boolean) =>
|
||||
hasThreads ? initMT() : initST(),
|
||||
);
|
||||
}
|
||||
|
||||
const optimise = await wasmReady;
|
||||
return optimise(new Uint8Array(data), options.level).buffer;
|
||||
return optimise(new Uint8Array(data), options.level, options.interlace)
|
||||
.buffer;
|
||||
}
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
import initOxiPNG, {
|
||||
start_worker_thread,
|
||||
} from 'codecs/oxipng/pkg-parallel/squoosh_oxipng';
|
||||
|
||||
export type WorkerInit = [WebAssembly.Module, WebAssembly.Memory];
|
||||
|
||||
addEventListener(
|
||||
'message',
|
||||
async (event) => {
|
||||
// Tell the "main" thread that we've received the message.
|
||||
//
|
||||
// At this point, the "main" thread can run Wasm that
|
||||
// will synchronously block waiting on other atomics.
|
||||
//
|
||||
// Note that we don't need to wait for Wasm instantiation here - it's
|
||||
// better to start main thread as early as possible, and then it blocks
|
||||
// on a shared atomic anyway until Worker is fully ready.
|
||||
// @ts-ignore
|
||||
postMessage(null);
|
||||
|
||||
await initOxiPNG(...(event.data as WorkerInit));
|
||||
start_worker_thread();
|
||||
},
|
||||
{ once: true },
|
||||
);
|
||||
@@ -14,18 +14,12 @@ import type { WebPModule } from 'codecs/webp/enc/webp_enc';
|
||||
import type { EncodeOptions } from '../shared/meta';
|
||||
|
||||
import { initEmscriptenModule } from 'features/worker-utils';
|
||||
import { simd } from 'wasm-feature-detect';
|
||||
|
||||
import wasmUrl from 'url:codecs/webp/enc/webp_enc.wasm';
|
||||
import wasmUrlWithSIMD from 'url:codecs/webp/enc/webp_enc_simd.wasm';
|
||||
|
||||
let emscriptenModule: Promise<WebPModule>;
|
||||
|
||||
async function init() {
|
||||
if (await simd()) {
|
||||
const webpEncoder = await import('codecs/webp/enc/webp_enc_simd');
|
||||
return initEmscriptenModule(webpEncoder.default, wasmUrlWithSIMD);
|
||||
}
|
||||
const webpEncoder = await import('codecs/webp/enc/webp_enc');
|
||||
return initEmscriptenModule(webpEncoder.default, wasmUrl);
|
||||
}
|
||||
|
||||
@@ -14,28 +14,17 @@ import type { WP2Module } from 'codecs/wp2/enc/wp2_enc';
|
||||
import type { EncodeOptions } from '../shared/meta';
|
||||
|
||||
import { initEmscriptenModule } from 'features/worker-utils';
|
||||
import { threads, simd } from 'wasm-feature-detect';
|
||||
import { threads } from 'wasm-feature-detect';
|
||||
|
||||
import wasmUrl from 'url:codecs/wp2/enc/wp2_enc.wasm';
|
||||
|
||||
import wasmUrlWithMT from 'url:codecs/wp2/enc/wp2_enc_mt.wasm';
|
||||
import workerUrl from 'omt:codecs/wp2/enc/wp2_enc_mt.worker.js';
|
||||
|
||||
import wasmUrlWithMTAndSIMD from 'url:codecs/wp2/enc/wp2_enc_mt_simd.wasm';
|
||||
import workerUrlWithSIMD from 'omt:codecs/wp2/enc/wp2_enc_mt_simd.worker.js';
|
||||
|
||||
let emscriptenModule: Promise<WP2Module>;
|
||||
|
||||
async function init() {
|
||||
if (await threads()) {
|
||||
if (await simd()) {
|
||||
const wp2Encoder = await import('codecs/wp2/enc/wp2_enc_mt_simd');
|
||||
return initEmscriptenModule(
|
||||
wp2Encoder.default,
|
||||
wasmUrlWithMTAndSIMD,
|
||||
workerUrlWithSIMD,
|
||||
);
|
||||
}
|
||||
const wp2Encoder = await import('codecs/wp2/enc/wp2_enc_mt');
|
||||
return initEmscriptenModule(wp2Encoder.default, wasmUrlWithMT, workerUrl);
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { threads, simd } from 'wasm-feature-detect';
|
||||
import { threads } from 'wasm-feature-detect';
|
||||
import webpDataUrl from 'data-url:./tiny.webp';
|
||||
import avifDataUrl from 'data-url:./tiny.avif';
|
||||
|
||||
@@ -75,9 +75,6 @@ import avifEncWasm from 'url:codecs/avif/enc/avif_enc.wasm';
|
||||
import * as avifEnc from 'entry-data:codecs/avif/enc/avif_enc.js';
|
||||
|
||||
// JXL
|
||||
import * as jxlEncMtSimdWorker from 'entry-data:codecs/jxl/enc/jxl_enc_mt_simd.worker.js';
|
||||
import * as jxlEncMtSimd from 'entry-data:codecs/jxl/enc/jxl_enc_mt_simd';
|
||||
import jxlEncMtSimdWasm from 'url:codecs/jxl/enc/jxl_enc_mt_simd.wasm';
|
||||
import * as jxlEncMtWorker from 'entry-data:codecs/jxl/enc/jxl_enc_mt.worker.js';
|
||||
import * as jxlEncMt from 'entry-data:codecs/jxl/enc/jxl_enc_mt';
|
||||
import jxlEncMtWasm from 'url:codecs/jxl/enc/jxl_enc_mt.wasm';
|
||||
@@ -85,20 +82,14 @@ import jxlEncWasm from 'url:codecs/jxl/enc/jxl_enc.wasm';
|
||||
import * as jxlEnc from 'entry-data:codecs/jxl/enc/jxl_enc';
|
||||
|
||||
// OXI
|
||||
import * as oxiMtWorker from 'entry-data:features/encoders/oxiPNG/worker/sub-worker';
|
||||
import oxiMtWasm from 'url:codecs/oxipng/pkg-parallel/squoosh_oxipng_bg.wasm';
|
||||
import oxiWasm from 'url:codecs/oxipng/pkg/squoosh_oxipng_bg.wasm';
|
||||
|
||||
// WebP
|
||||
import * as webpEncSimd from 'entry-data:codecs/webp/enc/webp_enc_simd';
|
||||
import webpEncSimdWasm from 'url:codecs/webp/enc/webp_enc_simd.wasm';
|
||||
import * as webpEnc from 'entry-data:codecs/webp/enc/webp_enc';
|
||||
import webpEncWasm from 'url:codecs/webp/enc/webp_enc.wasm';
|
||||
|
||||
// WP2
|
||||
import * as wp2EncMtSimdWorker from 'entry-data:codecs/wp2/enc/wp2_enc_mt_simd.worker.js';
|
||||
import * as wp2EncMtSimd from 'entry-data:codecs/wp2/enc/wp2_enc_mt_simd';
|
||||
import wp2EncMtSimdWasm from 'url:codecs/wp2/enc/wp2_enc_mt_simd.wasm';
|
||||
import * as wp2EncMtWorker from 'entry-data:codecs/wp2/enc/wp2_enc_mt.worker.js';
|
||||
import * as wp2EncMt from 'entry-data:codecs/wp2/enc/wp2_enc_mt';
|
||||
import wp2EncMtWasm from 'url:codecs/wp2/enc/wp2_enc_mt.wasm';
|
||||
@@ -106,14 +97,8 @@ import * as wp2Enc from 'entry-data:codecs/wp2/enc/wp2_enc';
|
||||
import wp2EncWasm from 'url:codecs/wp2/enc/wp2_enc.wasm';
|
||||
|
||||
export const theRest = (async () => {
|
||||
const [
|
||||
supportsThreads,
|
||||
supportsSimd,
|
||||
supportsWebP,
|
||||
supportsAvif,
|
||||
] = await Promise.all([
|
||||
const [supportsThreads, supportsWebP, supportsAvif] = await Promise.all([
|
||||
threads(),
|
||||
simd(),
|
||||
...[webpDataUrl, avifDataUrl].map(async (dataUrl) => {
|
||||
if (!self.createImageBitmap) return false;
|
||||
const response = await fetch(dataUrl);
|
||||
@@ -154,15 +139,7 @@ export const theRest = (async () => {
|
||||
}
|
||||
|
||||
// JXL
|
||||
if (supportsThreads && supportsSimd) {
|
||||
items.push(
|
||||
jxlEncMtSimdWorker.main,
|
||||
...jxlEncMtSimdWorker.deps,
|
||||
jxlEncMtSimd.main,
|
||||
...jxlEncMtSimd.deps,
|
||||
jxlEncMtSimdWasm,
|
||||
);
|
||||
} else if (supportsThreads) {
|
||||
if (supportsThreads) {
|
||||
items.push(
|
||||
jxlEncMtWorker.main,
|
||||
...jxlEncMtWorker.deps,
|
||||
@@ -176,28 +153,16 @@ export const theRest = (async () => {
|
||||
|
||||
// OXI
|
||||
if (supportsThreads) {
|
||||
items.push(oxiMtWorker.main, ...oxiMtWorker.deps, oxiMtWasm);
|
||||
items.push(oxiMtWasm);
|
||||
} else {
|
||||
items.push(oxiWasm);
|
||||
}
|
||||
|
||||
// WebP
|
||||
if (supportsSimd) {
|
||||
items.push(webpEncSimd.main, ...webpEncSimd.deps, webpEncSimdWasm);
|
||||
} else {
|
||||
items.push(webpEnc.main, ...webpEnc.deps, webpEncWasm);
|
||||
}
|
||||
items.push(webpEnc.main, ...webpEnc.deps, webpEncWasm);
|
||||
|
||||
// WP2
|
||||
if (supportsThreads && supportsSimd) {
|
||||
items.push(
|
||||
wp2EncMtSimdWorker.main,
|
||||
...wp2EncMtSimdWorker.deps,
|
||||
wp2EncMtSimd.main,
|
||||
...wp2EncMtSimd.deps,
|
||||
wp2EncMtSimdWasm,
|
||||
);
|
||||
} else if (supportsThreads) {
|
||||
if (supportsThreads) {
|
||||
items.push(
|
||||
wp2EncMtWorker.main,
|
||||
...wp2EncMtWorker.deps,
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
},
|
||||
"include": [
|
||||
"src/features/**/worker/**/*",
|
||||
"src/features/**/sub-worker/**/*",
|
||||
"src/features/**/shared/**/*",
|
||||
"src/features/worker-utils/**/*",
|
||||
"src/features-worker/**/*",