Mirror of https://github.com/GoogleChromeLabs/squoosh.git (synced 2025-11-13 01:07:18 +00:00)
Introduce libSquoosh
cli/package-lock.json (generated; 12 changed lines)
@@ -4,13 +4,6 @@
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {
-    "@squoosh/api": {
-      "version": "file:../api/squoosh-api-0.1.0.tgz",
-      "integrity": "sha512-fraw9j1Qq4MKhiA3VF+8djKcvgV42qCWaMQvLjfkn3r7jpFjAlHhoyHNpkfLDunKY3M55BHpBdn2/ozXZWt8kw==",
-      "requires": {
-        "web-streams-polyfill": "^3.0.3"
-      }
-    },
     "ansi-regex": {
       "version": "5.0.0",
       "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz",
@@ -247,11 +240,6 @@
       "requires": {
         "defaults": "^1.0.3"
       }
-    },
-    "web-streams-polyfill": {
-      "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.0.3.tgz",
-      "integrity": "sha512-d2H/t0eqRNM4w2WvmTdoeIvzAUSpK7JmATB8Nr2lb7nQ9BTIJVjbQ/TRFVEh2gUH1HwclPdoPtfMoFfetXaZnA=="
     }
   }
 }
@@ -15,7 +15,7 @@
   "author": "Google Chrome Developers <chromium-dev@google.com>",
   "license": "Apache-2.0",
   "dependencies": {
-    "@squoosh/api": "0.1.0",
+    "@squoosh/lib": "0.1.0",
     "commander": "^7.2.0",
     "json5": "^2.2.0",
     "kleur": "^4.1.4",
@@ -7,8 +7,7 @@ import { promises as fsp } from 'fs';
 import ora from 'ora';
 import kleur from 'kleur';
 
-//Replace package name with '../../api/build/index.js' to test unpublished changes in the API
-import { ImagePool, preprocessors, encoders } from '@squoosh/api';
+import { ImagePool, preprocessors, encoders } from '@squoosh/lib';
 
 function clamp(v, min, max) {
   if (v < min) return min;
@@ -55,9 +54,9 @@ function progressTracker(results) {
   for (const result of results.values()) {
     out += `\n ${kleur.cyan(result.file)}: ${prettyPrintSize(result.size)}`;
     for (const { outputFile, size: outputSize, infoText } of result.outputs) {
-      out += `\n ${kleur.dim('└')} ${kleur.cyan(outputFile.padEnd(5))} → ${prettyPrintSize(
-        outputSize,
-      )}`;
+      out += `\n ${kleur.dim('└')} ${kleur.cyan(
+        outputFile.padEnd(5),
+      )} → ${prettyPrintSize(outputSize)}`;
       const percent = ((outputSize / result.size) * 100).toPrecision(3);
       out += ` (${kleur[outputSize > result.size ? 'red' : 'green'](
         percent + '%',
@@ -76,7 +75,7 @@ async function getInputFiles(paths) {
 
   for (const inputPath of paths) {
     const files = (await fsp.lstat(inputPath)).isDirectory()
-      ? (await fsp.readdir(inputPath)).map(file => path.join(inputPath, file))
+      ? (await fsp.readdir(inputPath)).map((file) => path.join(inputPath, file))
       : [inputPath];
     for (const file of files) {
       try {
@@ -135,7 +134,9 @@ async function processFiles(files) {
     if (!program.opts()[preprocessorName]) {
       continue;
     }
-    preprocessOptions[preprocessorName] = JSON5.parse(program.opts()[preprocessorName]);
+    preprocessOptions[preprocessorName] = JSON5.parse(
+      program.opts()[preprocessorName],
+    );
   }
 
   for (const image of decodedFiles) {
@@ -145,7 +146,9 @@ async function processFiles(files) {
   await Promise.all(decodedFiles.map((image) => image.decoded));
 
   progress.progressOffset = decoded;
-  progress.setStatus('Encoding ' + kleur.dim(`(${imagePool.workerPool.numWorkers} threads)`));
+  progress.setStatus(
+    'Encoding ' + kleur.dim(`(${imagePool.workerPool.numWorkers} threads)`),
+  );
   progress.setProgress(0, files.length);
 
   const jobs = [];
@@ -155,31 +158,34 @@ async function processFiles(files) {
     const originalFile = results.get(image).file;
 
     const encodeOptions = {
-      optimizerButteraugliTarget: Number(program.opts().optimizerButteraugliTarget),
+      optimizerButteraugliTarget: Number(
+        program.opts().optimizerButteraugliTarget,
+      ),
       maxOptimizerRounds: Number(program.opts().maxOptimizerRounds),
-    }
+    };
     for (const encName of Object.keys(encoders)) {
       if (!program.opts()[encName]) {
         continue;
       }
       const encParam = program.opts()[encName];
-      const encConfig = encParam.toLowerCase() === 'auto' ? 'auto' : JSON5.parse(encParam);
+      const encConfig =
+        encParam.toLowerCase() === 'auto' ? 'auto' : JSON5.parse(encParam);
       encodeOptions[encName] = encConfig;
     }
     jobsStarted++;
-    const job = image.encode(encodeOptions)
-      .then(async () => {
+    const job = image.encode(encodeOptions).then(async () => {
       jobsFinished++;
-      const outputPath = path.join(program.opts().outputDir, program.opts().suffix + path.basename(originalFile, path.extname(originalFile)));
+      const outputPath = path.join(
+        program.opts().outputDir,
+        program.opts().suffix +
+          path.basename(originalFile, path.extname(originalFile)),
+      );
       for (const output of Object.values(image.encodedWith)) {
         const outputFile = `${outputPath}.${(await output).extension}`;
         await fsp.writeFile(outputFile, (await output).binary);
-        results.get(image).outputs.push(
-          Object.assign(
-            await output,
-            {outputFile},
-          )
-        );
+        results
+          .get(image)
+          .outputs.push(Object.assign(await output, { outputFile }));
       }
       progress.setProgress(jobsFinished, jobsStarted);
     });
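
Note: the `encodeOptions` object assembled in the hunk above ends up shaped roughly like the sketch below. The two optimizer fields come straight from the code; the `mozjpeg` key and its values are illustrative assumptions (each enabled encoder flag contributes either the string 'auto' or a JSON5-parsed options object).

```js
const encodeOptions = {
  optimizerButteraugliTarget: 1.4, // Number(program.opts().optimizerButteraugliTarget)
  maxOptimizerRounds: 6, // Number(program.opts().maxOptimizerRounds)
  // One entry per enabled encoder: either 'auto' or a JSON5-parsed options object.
  mozjpeg: { quality: 75 }, // assumed encoder name and options
};
const job = image.encode(encodeOptions);
```
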
@@ -194,8 +200,6 @@ async function processFiles(files) {
   progress.finish('Squoosh results:');
 }
 
-
-
 program
   .name('squoosh-cli')
   .arguments('<files...>')
@@ -226,5 +230,3 @@ for (const [key, value] of Object.entries(encoders)) {
 }
 
 program.parse(process.argv);
-
-
api/.gitignore → libsquoosh/.gitignore (vendored; 0 changed lines)
@@ -30,7 +30,7 @@ const imagePath = 'path/to/image.png';
 const image = imagePool.ingestImage(imagePath);
 ```
 
-These `ingestImage` function can take anything the node [`readFile`][readFile] function can take, uncluding a buffer and `FileHandle`.
+These `ingestImage` function can take anything the node [`readFile`][readfile] function can take, uncluding a buffer and `FileHandle`.
 
 The returned `image` object is a representation of the original image, that you can now preprocess, encode, and extract information about.
 
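
Note: a minimal sketch of the ingestion step described in the README hunk above. It assumes an `ImagePool` can be constructed with default options (the pool setup sits outside this hunk); per the README text, `ingestImage` accepts anything `fs.promises.readFile` accepts.

```js
import { promises as fs } from 'fs';
import { ImagePool } from '@squoosh/lib';

// Assumption: default construction; the constructor signature is not shown in this hunk.
const imagePool = new ImagePool();

// ingestImage takes the same inputs as fs.promises.readFile:
const fromPath = imagePool.ingestImage('path/to/image.png'); // a path
const fromBuffer = imagePool.ingestImage(await fs.readFile('path/to/image.png')); // a Buffer
```
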
@@ -92,11 +92,13 @@ This example iterates through all encoded versions of the image and writes them
 const newImagePath = '/path/to/image.'; //extension is added automatically
 
 for (const encodedImage of Object.values(image.encodedWith)) {
-  fs.writeFile(newImagePath + (await encodedImage).extension, (await encodedImage).binary);
+  fs.writeFile(
+    newImagePath + (await encodedImage).extension,
+    (await encodedImage).binary,
+  );
 }
 ```
 
 
 ## Extracting image information
 
 Information about a decoded image is available at `Image.decoded`. It looks something like this:
@@ -121,7 +123,6 @@ console.log(await image.decoded);
 
 Information about an encoded image can be found at `Image.encodedWith[encoderName]`. It looks something like this:
 
-
 ```js
 console.log(await image.encodedWith.jxl);
 // Returns:
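
Note: tying the README snippets together, a hedged end-to-end sketch. The pool construction and the `mozjpeg` encoder name/options are assumptions; `decoded`, `encodedWith`, `extension`, and `binary` come from the snippets in this diff.

```js
import { promises as fs } from 'fs';
import { ImagePool } from '@squoosh/lib';

const imagePool = new ImagePool(); // assumed default construction
const image = imagePool.ingestImage('path/to/image.png');

await image.decoded; // wait for decoding to finish
await image.encode({ mozjpeg: { quality: 75 } }); // encoder name and options assumed

for (const encodedImage of Object.values(image.encodedWith)) {
  const { extension, binary } = await encodedImage;
  await fs.writeFile(`path/to/image.${extension}`, binary);
}
```
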
@@ -159,4 +160,4 @@ const encodeOptions: {
 [squoosh]: https://squoosh.app
 [codecs.js]: https://github.com/GoogleChromeLabs/squoosh/blob/dev/cli/src/codecs.js
 [butteraugli]: https://github.com/google/butteraugli
-[readFile]: https://nodejs.org/api/fs.html#fs_fspromises_readfile_path_options
+[readfile]: https://nodejs.org/api/fs.html#fs_fspromises_readfile_path_options
@@ -10,11 +10,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-import { promises as fs } from "fs";
-import { basename } from "path";
+import { promises as fs } from 'fs';
+import { basename } from 'path';
 
 const defaultOpts = {
-  prefix: "asset-url"
+  prefix: 'asset-url',
 };
 
 export default function assetPlugin(opts) {
@@ -23,16 +23,16 @@ export default function assetPlugin(opts) {
   /** @type {Map<string, Buffer>} */
   let assetIdToSourceBuffer;
 
-  const prefix = opts.prefix + ":";
+  const prefix = opts.prefix + ':';
   return {
-    name: "asset-plugin",
+    name: 'asset-plugin',
     buildStart() {
       assetIdToSourceBuffer = new Map();
     },
     augmentChunkHash(info) {
       // Get the sources for all assets imported by this chunk.
       const buffers = Object.keys(info.modules)
-        .map(moduleId => assetIdToSourceBuffer.get(moduleId))
+        .map((moduleId) => assetIdToSourceBuffer.get(moduleId))
         .filter(Boolean);
 
       if (buffers.length === 0) return;
@@ -56,20 +56,20 @@ export default function assetPlugin(opts) {
         throw Error(`Cannot find ${realId}`);
       }
       // Add an additional .js to the end so it ends up with .js at the end in the _virtual folder.
-      return prefix + resolveResult.id + ".js";
+      return prefix + resolveResult.id + '.js';
     },
     async load(id) {
       if (!id.startsWith(prefix)) return;
-      const realId = id.slice(prefix.length, -".js".length);
+      const realId = id.slice(prefix.length, -'.js'.length);
       const source = await fs.readFile(realId);
       assetIdToSourceBuffer.set(id, source);
       this.addWatchFile(realId);
 
       return `export default import.meta.ROLLUP_FILE_URL_${this.emitFile({
-        type: "asset",
+        type: 'asset',
         source,
-        name: basename(realId)
+        name: basename(realId),
       })}`;
-    }
+    },
   };
 }
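
Note: a minimal sketch of how this plugin might be wired into a Rollup config so that `asset-url:` imports (such as the `visdif.wasm` import later in this commit) resolve to emitted assets. The config shape and the plugin path are assumptions; only the 'asset-url' prefix comes from the code above.

```js
// rollup.config.js (hypothetical location for the plugin module)
import assetPlugin from './lib/asset-plugin.js';

export default {
  input: 'src/index.js',
  output: { dir: 'build', format: 'esm' },
  plugins: [
    // Resolves imports such as:
    //   import visdifWasm from 'asset-url:../../codecs/visdif/visdif.wasm';
    assetPlugin({ prefix: 'asset-url' }),
  ],
};
```
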
@@ -5,8 +5,8 @@ export default function autojsonPlugin() {
     name: 'autojson-plugin',
     async load(id) {
       if (id.endsWith('.json') && !id.startsWith('json:')) {
-        return 'export default ' + await fsp.readFile(id, 'utf8');
+        return 'export default ' + (await fsp.readFile(id, 'utf8'));
       }
-    }
-  };
-};
+    },
+  };
+}
@@ -1,5 +1,5 @@
 {
-  "name": "@squoosh/api",
+  "name": "@squoosh/lib",
   "version": "0.1.0",
   "lockfileVersion": 1,
   "requires": true,
@@ -1,7 +1,7 @@
 {
-  "name": "@squoosh/api",
+  "name": "@squoosh/lib",
   "version": "0.1.0",
-  "description": "An API for Squoosh",
+  "description": "A Node library for Squoosh",
   "public": true,
   "main": "/build/index.js",
   "files": [
@@ -1,7 +1,7 @@
-import { instantiateEmscriptenWasm } from "./emscripten-utils.js";
+import { instantiateEmscriptenWasm } from './emscripten-utils.js';
 
-import visdif from "../../codecs/visdif/visdif.js";
-import visdifWasm from "asset-url:../../codecs/visdif/visdif.wasm";
+import visdif from '../../codecs/visdif/visdif.js';
+import visdifWasm from 'asset-url:../../codecs/visdif/visdif.wasm';
 
 // `measure` is a (async) function that takes exactly one numeric parameter and
 // returns a value. The function is assumed to be monotonic (an increase in `parameter`
@@ -11,7 +11,7 @@ import visdifWasm from "asset-url:../../codecs/visdif/visdif.wasm";
 export async function binarySearch(
   measureGoal,
   measure,
-  { min = 0, max = 100, epsilon = 0.1, maxRounds = 8 } = {}
+  { min = 0, max = 100, epsilon = 0.1, maxRounds = 8 } = {},
 ) {
   let parameter = (max - min) / 2 + min;
   let delta = (max - min) / 4;
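
Note: to illustrate the contract from the comment above, `binarySearch` homes in on the parameter at which a monotonic `measure` function reaches `measureGoal`. The measure below is a toy stand-in, not part of the codebase.

```js
// Find the parameter in [0, 100] at which the measure reaches roughly 50.
const { parameter } = await binarySearch(
  50,
  async (parameter) => (parameter * parameter) / 100, // hypothetical monotonic measure
  { min: 0, max: 100, epsilon: 0.1, maxRounds: 8 },
);
```
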
@@ -36,14 +36,14 @@ export async function autoOptimize(
   bitmapIn,
   encode,
   decode,
-  { butteraugliDistanceGoal = 1.4, ...otherOpts } = {}
+  { butteraugliDistanceGoal = 1.4, ...otherOpts } = {},
 ) {
   const { VisDiff } = await instantiateEmscriptenWasm(visdif, visdifWasm);
 
   const comparator = new VisDiff(
     bitmapIn.data,
     bitmapIn.width,
-    bitmapIn.height
+    bitmapIn.height,
   );
 
   let bitmapOut;
@@ -53,18 +53,18 @@ export async function autoOptimize(
   // increase the metric value. So multipliy Butteraugli values by -1.
   const { parameter } = await binarySearch(
     -1 * butteraugliDistanceGoal,
-    async quality => {
+    async (quality) => {
       binaryOut = await encode(bitmapIn, quality);
       bitmapOut = await decode(binaryOut);
       return -1 * comparator.distance(bitmapOut.data);
     },
-    otherOpts
+    otherOpts,
   );
   comparator.delete();
 
   return {
     bitmap: bitmapOut,
     binary: binaryOut,
-    quality: parameter
+    quality: parameter,
   };
 }
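
Note: a hedged sketch of calling `autoOptimize` as declared above: `encode` receives a bitmap and a quality parameter, `decode` turns the encoded binary back into a bitmap, and the result carries the quality the search settled on. The codec bindings here are stand-ins, not real exports.

```js
const { bitmap, binary, quality } = await autoOptimize(
  bitmapIn, // an { data, width, height } bitmap already in scope
  async (bitmap, quality) => someEncoder.encode(bitmap, { quality }), // hypothetical encoder
  async (binary) => someDecoder.decode(binary), // hypothetical decoder
  { butteraugliDistanceGoal: 1.4, maxRounds: 8 },
);
```
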
@@ -344,7 +344,11 @@ export const codecs = {
       await oxipngPromise;
       return {
         encode: (buffer, width, height, opts) => {
-          const simplePng = pngEncDec.encode(new Uint8Array(buffer), width, height);
+          const simplePng = pngEncDec.encode(
+            new Uint8Array(buffer),
+            width,
+            height,
+          );
           return oxipng.optimise(simplePng, opts.level);
         },
       };
@@ -8,7 +8,6 @@ import { autoOptimize } from './auto-optimizer.js';
 
 export { ImagePool, encoders, preprocessors };
 
-
 async function decodeFile({ file }) {
   const buffer = await fsp.readFile(file);
   const firstChunk = buffer.slice(0, 16);
@@ -21,9 +20,7 @@ async function decodeFile({ file }) {
   if (!key) {
     throw Error(`${file} has an unsupported format`);
   }
-  const rgba = (await encoders[key].dec()).decode(
-    new Uint8Array(buffer),
-  );
+  const rgba = (await encoders[key].dec()).decode(new Uint8Array(buffer));
   return {
     bitmap: rgba,
     size: buffer.length,
@@ -161,11 +158,7 @@ class Image {
       const encConfig =
         typeof options === 'string'
           ? options
-          : Object.assign(
-              {},
-              encRef.defaultEncoderOptions,
-              options,
-            );
+          : Object.assign({}, encRef.defaultEncoderOptions, options);
       this.encodedWith[encName] = this.workerPool.dispatchJob({
         operation: 'encode',
         bitmap,
@@ -174,9 +167,7 @@ class Image {
         optimizerButteraugliTarget: Number(
           encodeOptions.optimizerButteraugliTarget,
         ),
-        maxOptimizerRounds: Number(
-          encodeOptions.maxOptimizerRounds
-        ),
+        maxOptimizerRounds: Number(encodeOptions.maxOptimizerRounds),
       });
     }
     await Promise.all(Object.values(this.encodedWith));
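
Note: as the `encConfig` handling above shows, each encoder entry passed to `Image.encode` may be either the string 'auto' or an options object merged over that encoder's `defaultEncoderOptions`. A short illustration ('avif' and `cqLevel` are assumed names):

```js
await image.encode({ avif: 'auto' }); // string config is passed through as-is
await image.encode({ avif: { cqLevel: 30 } }); // object config is merged over the encoder defaults
```
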