forked from external-repos/squoosh
Introduce libSquoosh
cli/package-lock.json (generated), 12 changes
@@ -4,13 +4,6 @@
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {
-    "@squoosh/api": {
-      "version": "file:../api/squoosh-api-0.1.0.tgz",
-      "integrity": "sha512-fraw9j1Qq4MKhiA3VF+8djKcvgV42qCWaMQvLjfkn3r7jpFjAlHhoyHNpkfLDunKY3M55BHpBdn2/ozXZWt8kw==",
-      "requires": {
-        "web-streams-polyfill": "^3.0.3"
-      }
-    },
     "ansi-regex": {
       "version": "5.0.0",
       "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz",
@@ -247,11 +240,6 @@
       "requires": {
         "defaults": "^1.0.3"
       }
     },
-    "web-streams-polyfill": {
-      "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.0.3.tgz",
-      "integrity": "sha512-d2H/t0eqRNM4w2WvmTdoeIvzAUSpK7JmATB8Nr2lb7nQ9BTIJVjbQ/TRFVEh2gUH1HwclPdoPtfMoFfetXaZnA=="
-    }
   }
 }
@@ -15,7 +15,7 @@
   "author": "Google Chrome Developers <chromium-dev@google.com>",
   "license": "Apache-2.0",
   "dependencies": {
-    "@squoosh/api": "0.1.0",
+    "@squoosh/lib": "0.1.0",
     "commander": "^7.2.0",
     "json5": "^2.2.0",
     "kleur": "^4.1.4",
@@ -7,8 +7,7 @@ import { promises as fsp } from 'fs';
 import ora from 'ora';
 import kleur from 'kleur';
 
-//Replace package name with '../../api/build/index.js' to test unpublished changes in the API
-import { ImagePool, preprocessors, encoders } from '@squoosh/api';
+import { ImagePool, preprocessors, encoders } from '@squoosh/lib';
 
 function clamp(v, min, max) {
   if (v < min) return min;
@@ -55,9 +54,9 @@ function progressTracker(results) {
   for (const result of results.values()) {
     out += `\n ${kleur.cyan(result.file)}: ${prettyPrintSize(result.size)}`;
     for (const { outputFile, size: outputSize, infoText } of result.outputs) {
-      out += `\n ${kleur.dim('└')} ${kleur.cyan(outputFile.padEnd(5))} → ${prettyPrintSize(
-        outputSize,
-      )}`;
+      out += `\n ${kleur.dim('└')} ${kleur.cyan(
+        outputFile.padEnd(5),
+      )} → ${prettyPrintSize(outputSize)}`;
       const percent = ((outputSize / result.size) * 100).toPrecision(3);
       out += ` (${kleur[outputSize > result.size ? 'red' : 'green'](
         percent + '%',
@@ -76,7 +75,7 @@ async function getInputFiles(paths) {
 
   for (const inputPath of paths) {
     const files = (await fsp.lstat(inputPath)).isDirectory()
-      ? (await fsp.readdir(inputPath)).map(file => path.join(inputPath, file))
+      ? (await fsp.readdir(inputPath)).map((file) => path.join(inputPath, file))
       : [inputPath];
     for (const file of files) {
       try {
@@ -135,7 +134,9 @@ async function processFiles(files) {
     if (!program.opts()[preprocessorName]) {
       continue;
     }
-    preprocessOptions[preprocessorName] = JSON5.parse(program.opts()[preprocessorName]);
+    preprocessOptions[preprocessorName] = JSON5.parse(
+      program.opts()[preprocessorName],
+    );
   }
 
   for (const image of decodedFiles) {
@@ -145,7 +146,9 @@ async function processFiles(files) {
   await Promise.all(decodedFiles.map((image) => image.decoded));
 
   progress.progressOffset = decoded;
-  progress.setStatus('Encoding ' + kleur.dim(`(${imagePool.workerPool.numWorkers} threads)`));
+  progress.setStatus(
+    'Encoding ' + kleur.dim(`(${imagePool.workerPool.numWorkers} threads)`),
+  );
   progress.setProgress(0, files.length);
 
   const jobs = [];
@@ -155,31 +158,34 @@ async function processFiles(files) {
     const originalFile = results.get(image).file;
 
     const encodeOptions = {
-      optimizerButteraugliTarget: Number(program.opts().optimizerButteraugliTarget),
+      optimizerButteraugliTarget: Number(
+        program.opts().optimizerButteraugliTarget,
+      ),
       maxOptimizerRounds: Number(program.opts().maxOptimizerRounds),
-    }
+    };
     for (const encName of Object.keys(encoders)) {
       if (!program.opts()[encName]) {
         continue;
       }
       const encParam = program.opts()[encName];
-      const encConfig = encParam.toLowerCase() === 'auto' ? 'auto' : JSON5.parse(encParam);
+      const encConfig =
+        encParam.toLowerCase() === 'auto' ? 'auto' : JSON5.parse(encParam);
       encodeOptions[encName] = encConfig;
     }
     jobsStarted++;
-    const job = image.encode(encodeOptions)
-      .then(async () => {
+    const job = image.encode(encodeOptions).then(async () => {
       jobsFinished++;
-      const outputPath = path.join(program.opts().outputDir, program.opts().suffix + path.basename(originalFile, path.extname(originalFile)));
+      const outputPath = path.join(
+        program.opts().outputDir,
+        program.opts().suffix +
+          path.basename(originalFile, path.extname(originalFile)),
+      );
       for (const output of Object.values(image.encodedWith)) {
         const outputFile = `${outputPath}.${(await output).extension}`;
         await fsp.writeFile(outputFile, (await output).binary);
-        results.get(image).outputs.push(
-          Object.assign(
-            await output,
-            {outputFile},
-          )
-        );
+        results
+          .get(image)
+          .outputs.push(Object.assign(await output, { outputFile }));
       }
       progress.setProgress(jobsFinished, jobsStarted);
     });
@@ -194,8 +200,6 @@ async function processFiles(files) {
   progress.finish('Squoosh results:');
 }
 
-
-
 program
   .name('squoosh-cli')
   .arguments('<files...>')
@@ -226,5 +230,3 @@ for (const [key, value] of Object.entries(encoders)) {
 }
 
 program.parse(process.argv);
-
-
api/.gitignore → libsquoosh/.gitignore (vendored), 0 changes
@@ -30,7 +30,7 @@ const imagePath = 'path/to/image.png';
 const image = imagePool.ingestImage(imagePath);
 ```
 
-These `ingestImage` function can take anything the node [`readFile`][readFile] function can take, uncluding a buffer and `FileHandle`.
+These `ingestImage` function can take anything the node [`readFile`][readfile] function can take, uncluding a buffer and `FileHandle`.
 
 The returned `image` object is a representation of the original image, that you can now preprocess, encode, and extract information about.
 
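For reference while reading this hunk, here is a minimal sketch of the ingestion step the README is describing. It assumes the published `@squoosh/lib` package and a no-argument `new ImagePool()` constructor (constructor arguments are not shown in this commit), and it relies on the README's statement that `ingestImage` accepts anything `fs.readFile` accepts:

```js
// Minimal sketch, not taken verbatim from the commit.
// Assumptions: ESM context (top-level await), `@squoosh/lib` installed,
// and `new ImagePool()` usable without arguments.
import { promises as fsp } from 'fs';
import { ImagePool } from '@squoosh/lib';

const imagePool = new ImagePool();

// Ingest from a path, exactly as in the README example above.
const fromPath = imagePool.ingestImage('path/to/image.png');

// Per the README, a Buffer should be accepted as well.
const buffer = await fsp.readFile('path/to/image.png');
const fromBuffer = imagePool.ingestImage(buffer);

// `image.decoded` resolves once the original image has been decoded.
console.log(await fromPath.decoded);
```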
@@ -92,11 +92,13 @@ This example iterates through all encoded versions of the image and writes them
 const newImagePath = '/path/to/image.'; //extension is added automatically
 
 for (const encodedImage of Object.values(image.encodedWith)) {
-  fs.writeFile(newImagePath + (await encodedImage).extension, (await encodedImage).binary);
+  fs.writeFile(
+    newImagePath + (await encodedImage).extension,
+    (await encodedImage).binary,
+  );
 }
 ```
 
-
 ## Extracting image information
 
 Information about a decoded image is available at `Image.decoded`. It looks something like this:
@@ -121,7 +123,6 @@ console.log(await image.decoded);
 
 Information about an encoded image can be found at `Image.encodedWith[encoderName]`. It looks something like this:
 
-
 ```js
 console.log(await image.encodedWith.jxl);
 // Returns:
@@ -159,4 +160,4 @@ const encodeOptions: {
 [squoosh]: https://squoosh.app
 [codecs.js]: https://github.com/GoogleChromeLabs/squoosh/blob/dev/cli/src/codecs.js
 [butteraugli]: https://github.com/google/butteraugli
-[readFile]: https://nodejs.org/api/fs.html#fs_fspromises_readfile_path_options
+[readfile]: https://nodejs.org/api/fs.html#fs_fspromises_readfile_path_options
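Putting the README excerpts from this file together, a full ingest-encode-write round trip might look like the sketch below. It is illustrative only: the `jxl` encoder name and the `extension`/`binary` fields come from the hunks above, while `imagePool.close()` is an assumed API for shutting down the worker pool and is not shown in this commit.

```js
// End-to-end sketch based on the README excerpts in this diff.
// Assumptions: ESM context, `@squoosh/lib` installed, `imagePool.close()` exists.
import { promises as fsp } from 'fs';
import { ImagePool } from '@squoosh/lib';

const imagePool = new ImagePool();
const image = imagePool.ingestImage('path/to/image.png');

await image.decoded; // wait for the decode to finish
await image.encode({ jxl: {} }); // empty options merge with the encoder defaults

// Each entry of image.encodedWith resolves to an object with `extension` and `binary`.
for (const encodedImage of Object.values(image.encodedWith)) {
  const { extension, binary } = await encodedImage;
  await fsp.writeFile(`/path/to/image.${extension}`, binary);
}

await imagePool.close(); // assumed: terminates the worker threads
```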
@@ -10,11 +10,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-import { promises as fs } from "fs";
-import { basename } from "path";
+import { promises as fs } from 'fs';
+import { basename } from 'path';
 
 const defaultOpts = {
-  prefix: "asset-url"
+  prefix: 'asset-url',
 };
 
 export default function assetPlugin(opts) {
@@ -23,16 +23,16 @@ export default function assetPlugin(opts) {
   /** @type {Map<string, Buffer>} */
   let assetIdToSourceBuffer;
 
-  const prefix = opts.prefix + ":";
+  const prefix = opts.prefix + ':';
   return {
-    name: "asset-plugin",
+    name: 'asset-plugin',
     buildStart() {
       assetIdToSourceBuffer = new Map();
     },
     augmentChunkHash(info) {
       // Get the sources for all assets imported by this chunk.
       const buffers = Object.keys(info.modules)
-        .map(moduleId => assetIdToSourceBuffer.get(moduleId))
+        .map((moduleId) => assetIdToSourceBuffer.get(moduleId))
         .filter(Boolean);
 
       if (buffers.length === 0) return;
@@ -56,20 +56,20 @@ export default function assetPlugin(opts) {
         throw Error(`Cannot find ${realId}`);
       }
       // Add an additional .js to the end so it ends up with .js at the end in the _virtual folder.
-      return prefix + resolveResult.id + ".js";
+      return prefix + resolveResult.id + '.js';
     },
     async load(id) {
       if (!id.startsWith(prefix)) return;
-      const realId = id.slice(prefix.length, -".js".length);
+      const realId = id.slice(prefix.length, -'.js'.length);
       const source = await fs.readFile(realId);
       assetIdToSourceBuffer.set(id, source);
       this.addWatchFile(realId);
 
       return `export default import.meta.ROLLUP_FILE_URL_${this.emitFile({
-        type: "asset",
+        type: 'asset',
         source,
-        name: basename(realId)
+        name: basename(realId),
       })}`;
-    }
+    },
   };
 }
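The plugin above resolves `asset-url:`-prefixed imports, records each asset's bytes so they contribute to the chunk hash, and emits the file so the import evaluates to the emitted asset's URL. A hypothetical Rollup config and consumer import might look like this; the plugin path below is illustrative and not taken from the commit:

```js
// rollup.config.js (sketch; './lib/asset-plugin.js' is an assumed location)
import assetPlugin from './lib/asset-plugin.js';

export default {
  input: 'src/index.js',
  output: { dir: 'build', format: 'esm' },
  // Passing the prefix explicitly matches the plugin's defaultOpts.
  plugins: [assetPlugin({ prefix: 'asset-url' })],
};

// In application code, an `asset-url:` import resolves to the emitted file's
// URL, as with visdif.wasm elsewhere in this commit:
//   import visdifWasm from 'asset-url:../../codecs/visdif/visdif.wasm';
```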
@@ -5,8 +5,8 @@ export default function autojsonPlugin() {
     name: 'autojson-plugin',
     async load(id) {
       if (id.endsWith('.json') && !id.startsWith('json:')) {
-        return 'export default ' + await fsp.readFile(id, 'utf8');
+        return 'export default ' + (await fsp.readFile(id, 'utf8'));
       }
-    }
-  };
-};
+    },
+  };
@@ -1,5 +1,5 @@
 {
-  "name": "@squoosh/api",
+  "name": "@squoosh/lib",
   "version": "0.1.0",
   "lockfileVersion": 1,
   "requires": true,
@@ -1,7 +1,7 @@
 {
-  "name": "@squoosh/api",
+  "name": "@squoosh/lib",
   "version": "0.1.0",
-  "description": "An API for Squoosh",
+  "description": "A Node library for Squoosh",
   "public": true,
   "main": "/build/index.js",
   "files": [
@@ -1,7 +1,7 @@
-import { instantiateEmscriptenWasm } from "./emscripten-utils.js";
+import { instantiateEmscriptenWasm } from './emscripten-utils.js';
 
-import visdif from "../../codecs/visdif/visdif.js";
-import visdifWasm from "asset-url:../../codecs/visdif/visdif.wasm";
+import visdif from '../../codecs/visdif/visdif.js';
+import visdifWasm from 'asset-url:../../codecs/visdif/visdif.wasm';
 
 // `measure` is a (async) function that takes exactly one numeric parameter and
 // returns a value. The function is assumed to be monotonic (an increase in `parameter`
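As that comment explains, `binarySearch` repeatedly evaluates a monotonic `measure` function, homing in on the parameter whose measured value comes close to `measureGoal`, bounded by `epsilon` and `maxRounds`. A small self-contained usage sketch, using a synthetic monotonic function rather than a real encode/compare pair, might look like this:

```js
// Illustrative only: the measure function below is synthetic, not a codec.
// Assumes this file sits next to auto-optimizer.js, which exports binarySearch.
import { binarySearch } from './auto-optimizer.js';

// A monotonically increasing "quality vs. score" curve standing in for
// encode + visual-difference measurement.
const measure = async (quality) => Math.log1p(quality);

// Search for the quality whose score is close to the goal (log1p(42)).
const { parameter } = await binarySearch(Math.log1p(42), measure, {
  min: 0,
  max: 100,
  epsilon: 0.01,
  maxRounds: 16,
});

console.log(parameter); // roughly 42 once enough rounds have run
```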
@@ -11,7 +11,7 @@ import visdifWasm from "asset-url:../../codecs/visdif/visdif.wasm";
 export async function binarySearch(
   measureGoal,
   measure,
-  { min = 0, max = 100, epsilon = 0.1, maxRounds = 8 } = {}
+  { min = 0, max = 100, epsilon = 0.1, maxRounds = 8 } = {},
 ) {
   let parameter = (max - min) / 2 + min;
   let delta = (max - min) / 4;
@@ -36,14 +36,14 @@ export async function autoOptimize(
   bitmapIn,
   encode,
   decode,
-  { butteraugliDistanceGoal = 1.4, ...otherOpts } = {}
+  { butteraugliDistanceGoal = 1.4, ...otherOpts } = {},
 ) {
   const { VisDiff } = await instantiateEmscriptenWasm(visdif, visdifWasm);
 
   const comparator = new VisDiff(
     bitmapIn.data,
     bitmapIn.width,
-    bitmapIn.height
+    bitmapIn.height,
   );
 
   let bitmapOut;
@@ -53,18 +53,18 @@ export async function autoOptimize(
   // increase the metric value. So multipliy Butteraugli values by -1.
   const { parameter } = await binarySearch(
     -1 * butteraugliDistanceGoal,
-    async quality => {
+    async (quality) => {
       binaryOut = await encode(bitmapIn, quality);
       bitmapOut = await decode(binaryOut);
       return -1 * comparator.distance(bitmapOut.data);
     },
-    otherOpts
+    otherOpts,
   );
   comparator.delete();
 
   return {
     bitmap: bitmapOut,
     binary: binaryOut,
-    quality: parameter
+    quality: parameter,
   };
 }
@@ -344,7 +344,11 @@ export const codecs = {
       await oxipngPromise;
       return {
         encode: (buffer, width, height, opts) => {
-          const simplePng = pngEncDec.encode(new Uint8Array(buffer), width, height);
+          const simplePng = pngEncDec.encode(
+            new Uint8Array(buffer),
+            width,
+            height,
+          );
           return oxipng.optimise(simplePng, opts.level);
         },
       };
@@ -8,7 +8,6 @@ import { autoOptimize } from './auto-optimizer.js';
 
 export { ImagePool, encoders, preprocessors };
 
-
 async function decodeFile({ file }) {
   const buffer = await fsp.readFile(file);
   const firstChunk = buffer.slice(0, 16);
@@ -21,9 +20,7 @@ async function decodeFile({ file }) {
   if (!key) {
     throw Error(`${file} has an unsupported format`);
   }
-  const rgba = (await encoders[key].dec()).decode(
-    new Uint8Array(buffer),
-  );
+  const rgba = (await encoders[key].dec()).decode(new Uint8Array(buffer));
   return {
     bitmap: rgba,
     size: buffer.length,
@@ -161,11 +158,7 @@ class Image {
       const encConfig =
         typeof options === 'string'
           ? options
-          : Object.assign(
-              {},
-              encRef.defaultEncoderOptions,
-              options,
-            );
+          : Object.assign({}, encRef.defaultEncoderOptions, options);
       this.encodedWith[encName] = this.workerPool.dispatchJob({
         operation: 'encode',
         bitmap,
@@ -174,9 +167,7 @@ class Image {
         optimizerButteraugliTarget: Number(
           encodeOptions.optimizerButteraugliTarget,
         ),
-        maxOptimizerRounds: Number(
-          encodeOptions.maxOptimizerRounds
-        ),
+        maxOptimizerRounds: Number(encodeOptions.maxOptimizerRounds),
       });
     }
     await Promise.all(Object.values(this.encodedWith));