Compare commits

..

4 Commits

Author  SHA1        Message                         Date
Surma   b6f69c57cf  moar stuff                      2019-02-07 17:35:48 +00:00
Surma   9e19c36b42  Add Xargo and config and stuff  2019-02-07 00:29:11 +00:00
Surma   cf45520be3  Cleanup and add pkg             2019-02-06 10:25:56 +00:00
Surma   8faf8a5b48  First attempt at Oxipng         2019-02-05 19:40:22 +00:00
54 changed files with 2839 additions and 2232 deletions

1
.nvmrc

@@ -1 +0,0 @@
v10.15.1


@@ -1,4 +1,7 @@
language: node_js
node_js:
- node
- 10
- 8
cache: npm
script: npm run build
after_success: npm run sizereport

0
codecs/oxipng/.cargo-ok Normal file

5
codecs/oxipng/.gitignore vendored Normal file

@@ -0,0 +1,5 @@
/target
**/*.rs.bk
Cargo.lock
bin/
wasm-pack.log

36
codecs/oxipng/Cargo.toml Normal file

@@ -0,0 +1,36 @@
[package]
name = "test"
version = "0.1.0"
authors = ["Surma <surma@surma.link>"]
[lib]
path = "lib.rs"
crate-type = ["cdylib", "rlib"]
[features]
default = ["console_error_panic_hook"]
[dependencies]
cfg-if = "0.1.2"
wasm-bindgen = "0.2"
oxipng = "2.2.0"
# The `console_error_panic_hook` crate provides better debugging of panics by
# logging them with `console.error`. This is great for development, but requires
# all the `std::fmt` and `std::panicking` infrastructure, so isn't great for
# code size when deploying.
console_error_panic_hook = { version = "0.1.1", optional = true }
# `wee_alloc` is a tiny allocator for wasm that is only ~1K in code size
# compared to the default allocator's ~10K. It is slower than the default
# allocator, however.
#
# Unfortunately, `wee_alloc` requires nightly Rust when targeting wasm for now.
wee_alloc = { version = "0.4.2", optional = true }
[dev-dependencies]
wasm-bindgen-test = "0.2"
[profile.release]
# Tell `rustc` to optimize for small code size.
opt-level = "s"

52
codecs/oxipng/README.md Normal file

@@ -0,0 +1,52 @@
# 🦀🕸️ `wasm-pack-template`
A template for kick starting a Rust and WebAssembly project using
[`wasm-pack`](https://github.com/rustwasm/wasm-pack).
This template is designed for compiling Rust libraries into WebAssembly and
publishing the resulting package to NPM.
* Want to use the published NPM package in a Website? [Check out
`create-wasm-app`.](https://github.com/rustwasm/create-wasm-app)
* Want to make a monorepo-style Website without publishing to NPM? Check out
[`rust-webpack-template`](https://github.com/rustwasm/rust-webpack-template)
and/or
[`rust-parcel-template`](https://github.com/rustwasm/rust-parcel-template).
## 🔋 Batteries Included
* [`wasm-bindgen`](https://github.com/rustwasm/wasm-bindgen) for communicating
between WebAssembly and JavaScript.
* [`console_error_panic_hook`](https://github.com/rustwasm/console_error_panic_hook)
for logging panic messages to the developer console.
* [`wee_alloc`](https://github.com/rustwasm/wee_alloc), an allocator optimized
for small code size.
## 🚴 Usage
### 🐑 Use `cargo generate` to Clone this Template
[Learn more about `cargo generate` here.](https://github.com/ashleygwilliams/cargo-generate)
```
cargo generate --git https://github.com/rustwasm/wasm-pack-template.git --name my-project
cd my-project
```
### 🛠️ Build with `wasm-pack build`
```
wasm-pack build
```
### 🔬 Test in Headless Browsers with `wasm-pack test`
```
wasm-pack test --headless --firefox
```
### 🎁 Publish to NPM with `wasm-pack publish`
```
wasm-pack publish
```

5
codecs/oxipng/Xargo.toml Normal file

@@ -0,0 +1,5 @@
[target.wasm32-unknown-unknown.dependencies]
time = {}
[target.wasm32-unknown-unknown.dependencies.std]
features = ["wasm_syscall"]


@@ -0,0 +1,52 @@
# 🦀🕸️ `wasm-pack-template`
A template for kick starting a Rust and WebAssembly project using
[`wasm-pack`](https://github.com/rustwasm/wasm-pack).
This template is designed for compiling Rust libraries into WebAssembly and
publishing the resulting package to NPM.
* Want to use the published NPM package in a Website? [Check out
`create-wasm-app`.](https://github.com/rustwasm/create-wasm-app)
* Want to make a monorepo-style Website without publishing to NPM? Check out
[`rust-webpack-template`](https://github.com/rustwasm/rust-webpack-template)
and/or
[`rust-parcel-template`](https://github.com/rustwasm/rust-parcel-template).
## 🔋 Batteries Included
* [`wasm-bindgen`](https://github.com/rustwasm/wasm-bindgen) for communicating
between WebAssembly and JavaScript.
* [`console_error_panic_hook`](https://github.com/rustwasm/console_error_panic_hook)
for logging panic messages to the developer console.
* [`wee_alloc`](https://github.com/rustwasm/wee_alloc), an allocator optimized
for small code size.
## 🚴 Usage
### 🐑 Use `cargo generate` to Clone this Template
[Learn more about `cargo generate` here.](https://github.com/ashleygwilliams/cargo-generate)
```
cargo generate --git https://github.com/rustwasm/wasm-pack-template.git --name my-project
cd my-project
```
### 🛠️ Build with `wasm-pack build`
```
wasm-pack build
```
### 🔬 Test in Headless Browsers with `wasm-pack test`
```
wasm-pack test --headless --firefox
```
### 🎁 Publish to NPM with `wasm-pack publish`
```
wasm-pack publish
```

BIN
codecs/oxipng/pkg/img.png Normal file

Binary file not shown.

Size: 29 KiB

14
codecs/oxipng/pkg/lol.js Normal file

@@ -0,0 +1,14 @@
const oxipng = require("./oxipng_wasm");
const repl = require("repl");
const fs = require("fs");
async function init() {
// const img = fs.readFileSync("img.png")
// const output = oxipng.compress(img, 0);
// fs.writeFileSync("output.png", output);
console.log(">>>", oxipng.doit());
const r = repl.start("node> ");
r.context.i = oxipng;
}
init();

3
codecs/oxipng/pkg/oxipng.d.ts vendored Normal file

@@ -0,0 +1,3 @@
/* tslint:disable */
export function compress(arg0: Uint8Array, arg1: number): Uint8Array;


@@ -0,0 +1,73 @@
/* tslint:disable */
var wasm;
const TextDecoder = require('util').TextDecoder;
let cachedTextDecoder = new TextDecoder('utf-8');
let cachegetUint8Memory = null;
function getUint8Memory() {
if (cachegetUint8Memory === null || cachegetUint8Memory.buffer !== wasm.memory.buffer) {
cachegetUint8Memory = new Uint8Array(wasm.memory.buffer);
}
return cachegetUint8Memory;
}
function getStringFromWasm(ptr, len) {
return cachedTextDecoder.decode(getUint8Memory().subarray(ptr, ptr + len));
}
module.exports.__wbg_log_64e6f53d8e6d5db5 = function(arg0, arg1) {
let varg0 = getStringFromWasm(arg0, arg1);
console.log(varg0);
};
let WASM_VECTOR_LEN = 0;
function passArray8ToWasm(arg) {
const ptr = wasm.__wbindgen_malloc(arg.length * 1);
getUint8Memory().set(arg, ptr / 1);
WASM_VECTOR_LEN = arg.length;
return ptr;
}
function getArrayU8FromWasm(ptr, len) {
return getUint8Memory().subarray(ptr / 1, ptr / 1 + len);
}
let cachedGlobalArgumentPtr = null;
function globalArgumentPtr() {
if (cachedGlobalArgumentPtr === null) {
cachedGlobalArgumentPtr = wasm.__wbindgen_global_argument_ptr();
}
return cachedGlobalArgumentPtr;
}
let cachegetUint32Memory = null;
function getUint32Memory() {
if (cachegetUint32Memory === null || cachegetUint32Memory.buffer !== wasm.memory.buffer) {
cachegetUint32Memory = new Uint32Array(wasm.memory.buffer);
}
return cachegetUint32Memory;
}
/**
* @param {Uint8Array} arg0
* @param {number} arg1
* @returns {Uint8Array}
*/
module.exports.compress = function(arg0, arg1) {
const ptr0 = passArray8ToWasm(arg0);
const len0 = WASM_VECTOR_LEN;
const retptr = globalArgumentPtr();
wasm.compress(retptr, ptr0, len0, arg1);
const mem = getUint32Memory();
const rustptr = mem[retptr / 4];
const rustlen = mem[retptr / 4 + 1];
const realRet = getArrayU8FromWasm(rustptr, rustlen).slice();
wasm.__wbindgen_free(rustptr, rustlen * 1);
return realRet;
};
wasm = require('./oxipng_bg');
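For orientation, here is a minimal Node sketch (not part of this diff; the file names and preset level are assumptions) of how the generated CommonJS binding above could be exercised — pkg/lol.js hints at the same flow in its commented-out lines:

```
// Hypothetical usage of the wasm-bindgen CommonJS glue above.
const fs = require("fs");
const oxipng = require("./oxipng"); // pkg/oxipng.js

const input = fs.readFileSync("img.png");   // any PNG on disk (assumed name)
const output = oxipng.compress(input, 2);   // Uint8Array in, Uint8Array out; 2 is an assumed preset level
fs.writeFileSync("out.png", output);
console.log(`in: ${input.length} bytes, out: ${output.length} bytes`);
```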

6
codecs/oxipng/pkg/oxipng_bg.d.ts vendored Normal file

@@ -0,0 +1,6 @@
/* tslint:disable */
export const memory: WebAssembly.Memory;
export function __wbindgen_global_argument_ptr(): number;
export function compress(a: number, b: number, c: number, d: number): void;
export function __wbindgen_malloc(a: number): number;
export function __wbindgen_free(a: number, b: number): void;


@@ -0,0 +1,9 @@
const path = require('path').join(__dirname, 'oxipng_bg.wasm');
const bytes = require('fs').readFileSync(path);
let imports = {};
imports['./oxipng'] = require('./oxipng');
const wasmModule = new WebAssembly.Module(bytes);
const wasmInstance = new WebAssembly.Instance(wasmModule, imports);
module.exports = wasmInstance.exports;

Binary file not shown.

3
codecs/oxipng/pkg/oxipng_manual.d.ts vendored Normal file

@@ -0,0 +1,3 @@
/* tslint:disable */
export function compress(arg0: Uint8Array, arg1: number): Promise<Uint8Array>;


@@ -0,0 +1,77 @@
/* tslint:disable */
import wasmUrl from './oxipng_bg.wasm';
let wasm;
const instancePromise = WebAssembly.instantiateStreaming(fetch(wasmUrl), {
"./oxipng": {__wbg_log_64e6f53d8e6d5db5}
});
let cachedTextDecoder = new TextDecoder('utf-8');
let cachegetUint8Memory = null;
function getUint8Memory() {
if (cachegetUint8Memory === null || cachegetUint8Memory.buffer !== wasm.memory.buffer) {
cachegetUint8Memory = new Uint8Array(wasm.memory.buffer);
}
return cachegetUint8Memory;
}
function getStringFromWasm(ptr, len) {
return cachedTextDecoder.decode(getUint8Memory().subarray(ptr, ptr + len));
}
export function __wbg_log_64e6f53d8e6d5db5(arg0, arg1) {
let varg0 = getStringFromWasm(arg0, arg1);
console.log(varg0);
}
let WASM_VECTOR_LEN = 0;
function passArray8ToWasm(arg) {
const ptr = wasm.__wbindgen_malloc(arg.length * 1);
getUint8Memory().set(arg, ptr / 1);
WASM_VECTOR_LEN = arg.length;
return ptr;
}
function getArrayU8FromWasm(ptr, len) {
return getUint8Memory().subarray(ptr / 1, ptr / 1 + len);
}
let cachedGlobalArgumentPtr = null;
function globalArgumentPtr() {
if (cachedGlobalArgumentPtr === null) {
cachedGlobalArgumentPtr = wasm.__wbindgen_global_argument_ptr();
}
return cachedGlobalArgumentPtr;
}
let cachegetUint32Memory = null;
function getUint32Memory() {
if (cachegetUint32Memory === null || cachegetUint32Memory.buffer !== wasm.memory.buffer) {
cachegetUint32Memory = new Uint32Array(wasm.memory.buffer);
}
return cachegetUint32Memory;
}
/**
* @param {Uint8Array} arg0
* @param {number} arg1
* @returns {Uint8Array}
*/
export async function compress(arg0, arg1) {
wasm = (await instancePromise).instance.exports;
debugger;
const ptr0 = passArray8ToWasm(arg0);
const len0 = WASM_VECTOR_LEN;
const retptr = globalArgumentPtr();
wasm.compress(retptr, ptr0, len0, arg1);
const mem = getUint32Memory();
const rustptr = mem[retptr / 4];
const rustlen = mem[retptr / 4 + 1];
const realRet = getArrayU8FromWasm(rustptr, rustlen).slice();
wasm.__wbindgen_free(rustptr, rustlen * 1);
return realRet;
}
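A rough sketch of how this hand-written async variant is meant to be consumed (assumptions: a bundler resolves the `.wasm` import, and level 2 is arbitrary) — the `src/codecs/oxipng/encoder.ts` added further down in this diff wraps it in essentially this way:

```
// Hypothetical consumer of oxipng_manual.js; `compress` here is async because
// the module instantiates the wasm lazily via WebAssembly.instantiateStreaming().
import { compress } from "./oxipng_manual";

async function optimizePng(bytes, level = 2) {
  const out = await compress(new Uint8Array(bytes), level); // Uint8Array in, Uint8Array out
  return out.buffer;                                        // hand back a plain ArrayBuffer
}
```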

3
codecs/oxipng/pkg/oxipng_wasm.d.ts vendored Normal file

@@ -0,0 +1,3 @@
/* tslint:disable */
export function doit(): number;


@@ -0,0 +1,11 @@
/* tslint:disable */
var wasm;
/**
* @returns {number}
*/
module.exports.doit = function() {
return wasm.doit();
};
wasm = require('./oxipng_wasm_bg');

3
codecs/oxipng/pkg/oxipng_wasm_bg.d.ts vendored Normal file

@@ -0,0 +1,3 @@
/* tslint:disable */
export const memory: WebAssembly.Memory;
export function doit(): number;


@@ -0,0 +1,8 @@
const path = require('path').join(__dirname, 'oxipng_wasm_bg.wasm');
const bytes = require('fs').readFileSync(path);
let imports = {};
const wasmModule = new WebAssembly.Module(bytes);
const wasmInstance = new WebAssembly.Instance(wasmModule, imports);
module.exports = wasmInstance.exports;

Binary file not shown.


@@ -0,0 +1,15 @@
{
"name": "oxipng-wasm",
"collaborators": [
"Surma <surma@surma.link>"
],
"version": "0.1.0",
"files": [
"oxipng_wasm_bg.wasm",
"oxipng_wasm.js",
"oxipng_wasm_bg.js",
"oxipng_wasm.d.ts"
],
"main": "oxipng_wasm.js",
"types": "oxipng_wasm.d.ts"
}
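If this manifest were published as-is, a consumer would resolve `main` to `oxipng_wasm.js`, so usage would look roughly like this (hypothetical sketch, not part of the diff):

```
// Hypothetical consumer of the package described above; `main` points at
// oxipng_wasm.js, whose only export is doit().
const oxipng = require("oxipng-wasm");
console.log(oxipng.doit());
```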


42
codecs/oxipng/src/lib.rs Normal file

@@ -0,0 +1,42 @@
extern crate cfg_if;
extern crate wasm_bindgen;
// extern crate oxipng;
mod utils;
use cfg_if::cfg_if;
use wasm_bindgen::prelude::*;
use std::time::{Instant};
cfg_if! {
// When the `wee_alloc` feature is enabled, use `wee_alloc` as the global
// allocator.
if #[cfg(feature = "wee_alloc")] {
extern crate wee_alloc;
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
}
}
// #[wasm_bindgen]
// extern {
// #[wasm_bindgen(js_namespace = console)]
// fn log(s: &str);
// }
// #[wasm_bindgen]
// pub fn compress(img: Vec<u8>, level: u8) -> Vec<u8> {
// log(&format!("len: {}, level: {}", img.len(), level));
// let mut options = oxipng::Options::from_preset(level);
// options.threads = 0;
// let result = oxipng::optimize_from_memory(img.as_slice(), &options);
// match result {
// Ok(v) => v,
// Err(e) => e.to_string().as_bytes().to_vec()
// }
// }
#[wasm_bindgen]
pub fn doit() -> u32 {
let start = Instant::now();
start.elapsed().as_secs() as u32
}


@@ -0,0 +1,17 @@
use cfg_if::cfg_if;
cfg_if! {
// When the `console_error_panic_hook` feature is enabled, we can call the
// `set_panic_hook` function at least once during initialization, and then
// we will get better error messages if our code ever panics.
//
// For more details see
// https://github.com/rustwasm/console_error_panic_hook#readme
if #[cfg(feature = "console_error_panic_hook")] {
extern crate console_error_panic_hook;
pub use self::console_error_panic_hook::set_once as set_panic_hook;
} else {
#[inline]
pub fn set_panic_hook() {}
}
}

1
codecs/oxipng/tmp/.gitignore vendored Normal file

@@ -0,0 +1 @@
target


@@ -0,0 +1,27 @@
[package]
name = "loltest"
version = "0.1.0"
authors = ["Surma <surma@surma.link>"]
[lib]
path = "lib.rs"
crate-type = ["cdylib", "rlib"]
[features]
default = ["console_error_panic_hook"]
[dependencies]
wasm-bindgen = "0.2"
# The `console_error_panic_hook` crate provides better debugging of panics by
# logging them with `console.error`. This is great for development, but requires
# all the `std::fmt` and `std::panicking` infrastructure, so isn't great for
# code size when deploying.
console_error_panic_hook = { version = "0.1.1", optional = true }
[dev-dependencies]
wasm-bindgen-test = "0.2"
[profile.release]
# Tell `rustc` to optimize for small code size.
opt-level = "s"


@@ -0,0 +1,2 @@
[dependencies.std]
features = ["wasm_syscall"]


@@ -0,0 +1,20 @@
I'm trying to activate [the `wasm_syscall` feature][1] in Rust's stdlib for WebAssembly.
Here are my `Cargo.toml` and my `Xargo.toml`. But even with this setup the generated wasm file is still hard-coded to panic.
**HELP?**
My current command to compile is:
```
xargo build --target wasm32-unknown-unknown --release
```
If you have [`wasm2wat`][2] installed, you can verify the generated code via
```
wasm2wat target/wasm32-unknown-unknown/release/loltest.wasm | grep -A5 perform::
```
[1]: https://github.com/rust-lang/rust/blob/b139669f374eb5024a50eb13f116ff763b1c5935/src/libstd/sys/wasm/mod.rs#L309
[2]: https://github.com/WebAssembly/wabt

14
codecs/oxipng/tmp/lib.rs Normal file

@@ -0,0 +1,14 @@
extern crate wasm_bindgen;
use wasm_bindgen::prelude::*;
use std::thread::spawn;
#[wasm_bindgen]
pub fn doit() {
// let child = spawn(move || -> u32 {
// 5
// });
// let result = child.join().unwrap();
let result = spawn();
println!("Result: {}", result);
}

47
codecs/oxipng/tmp/lol.js Normal file

@@ -0,0 +1,47 @@
// const oxipng = require("./oxipng_wasm");
const repl = require("repl");
const fs = require("fs");
const dec = new TextDecoder();
let buffer = '';
async function init() {
const { instance } = await WebAssembly.instantiate(
fs.readFileSync("./target/wasm32-unknown-unknown/release/loltest.wasm"),
{
__wbindgen_placeholder__: {
__wbindgen_describe(v) {
console.log(`__wbindgen_desribe(${v})`);
}
},
env: {
// See https://github.com/rust-lang/rust/blob/master/src/libstd/sys/wasm/mod.rs
rust_wasm_syscall(syscall, ptr) {
switch(syscall) {
case 1: // Write
const [fd, dataPtr, len] = new Uint32Array(instance.exports.memory.buffer, ptr, 3 * 4);
const fragment = new Uint8Array(instance.exports.memory.buffer, dataPtr, len);
buffer += dec.decode(fragment);
const idx = buffer.indexOf('\n');
if(idx !== -1) {
console.log(buffer.slice(0, idx));
buffer = buffer.slice(idx);
}
return 1;
case 6: // Time
return 1;
default:
return 0; // False, unimplemented
}
}
}
}
);
try {
instance.exports.doit();
} catch{}
const r = repl.start("node> ");
r.context.i = instance;
}
init();

File diff suppressed because it is too large


@@ -1,2 +0,0 @@
target
Cargo.lock


@@ -1,14 +0,0 @@
[package]
name = "rotate"
version = "0.1.0"
authors = ["Surma <surma@google.com>"]
edition = "2018"
[lib]
name = "rotate"
path = "rotate.rs"
crate-type = ["cdylib", "rlib"]
[profile.release]
lto = true
opt-level = "s"


@@ -1,17 +0,0 @@
FROM ubuntu
RUN apt-get update && \
apt-get install -qqy git build-essential cmake python2.7
RUN git clone --recursive https://github.com/WebAssembly/wabt /usr/src/wabt
RUN mkdir -p /usr/src/wabt/build
WORKDIR /usr/src/wabt/build
RUN cmake .. -DCMAKE_INSTALL_PREFIX=/opt/wabt && \
make && \
make install
FROM rust
RUN rustup install nightly && \
rustup target add --toolchain nightly wasm32-unknown-unknown
COPY --from=0 /opt/wabt /opt/wabt
ENV PATH="/opt/wabt/bin:${PATH}"
WORKDIR /src


@@ -1,45 +0,0 @@
// THIS IS NOT A NODE SCRIPT
// This is a d8 script. Please install jsvu[1] and install v8.
// Then run `npm run --silent benchmark`.
// [1]: https://github.com/GoogleChromeLabs/jsvu
async function init() {
// Adjustable constants.
const imageDimensions = 4096;
const iterations = new Array(100);
// Constants. Don't change.
const imageByteSize = imageDimensions * imageDimensions * 4;
const wasmPageSize = 64 * 1024;
const buffer = readbuffer("rotate.wasm");
const { instance } = await WebAssembly.instantiate(buffer);
const pagesAvailable = Math.floor(
instance.exports.memory.buffer.byteLength / wasmPageSize
);
const pagesNeeded = Math.floor((imageByteSize * 2 + 4) / wasmPageSize) + 1;
const additionalPagesNeeded = pagesNeeded - pagesAvailable;
if (additionalPagesNeeded > 0) {
instance.exports.memory.grow(additionalPagesNeeded);
}
[0, 90, 180, 270].forEach(rotation => {
print(`\n${rotation} degrees`);
print(`==============================`);
for (let i = 0; i < 100; i++) {
const start = Date.now();
instance.exports.rotate(imageDimensions, imageDimensions, rotation);
iterations[i] = Date.now() - start;
}
const average = iterations.reduce((sum, c) => sum + c) / iterations.length;
const stddev = Math.sqrt(
iterations
.map(i => Math.pow(i - average, 2))
.reduce((sum, c) => sum + c) / iterations.length
);
print(`n = ${iterations.length}`);
print(`Average: ${average}`);
print(`StdDev: ${stddev}`);
});
}
init().catch(e => console.error(e.stack));


@@ -1,25 +0,0 @@
#!/bin/bash
set -e
echo "============================================="
echo "Compiling wasm"
echo "============================================="
(
rustup run nightly \
cargo build \
--target wasm32-unknown-unknown \
--release
cp target/wasm32-unknown-unknown/release/rotate.wasm .
wasm-strip rotate.wasm
)
echo "============================================="
echo "Compiling wasm done"
echo "============================================="
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
echo "Did you update your docker image?"
echo "Run \`docker pull ubuntu\`"
echo "Run \`docker pull rust\`"
echo "Run \`docker build -t squoosh-rotate .\`"
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"


@@ -1,11 +0,0 @@
{
"name": "rotate",
"scripts": {
"build:image": "docker build -t squoosh-rotate .",
"build": "docker run --rm -v $(pwd):/src squoosh-rotate ./build.sh",
"benchmark": "echo File size after gzip && npm run benchmark:filesize && echo Optimizing && npm run -s benchmark:optimizing",
"benchmark:baseline": "v8 --liftoff --no-wasm-tier-up --no-opt ./benchmark.js",
"benchmark:optimizing": "v8 --no-liftoff --no-wasm-tier-up ./benchmark.js",
"benchmark:filesize": "cat rotate.wasm | gzip -c9n | wc -c"
}
}


@@ -1,113 +0,0 @@
use std::slice::{from_raw_parts, from_raw_parts_mut};
// This function is taken from Zachary Dremann
// https://github.com/GoogleChromeLabs/squoosh/pull/462
trait HardUnwrap<T> {
fn unwrap_hard(self) -> T;
}
impl<T> HardUnwrap<T> for Option<T> {
#[cfg(not(debug_assertions))]
#[inline]
fn unwrap_hard(self) -> T {
match self {
Some(t) => t,
None => std::process::abort(),
}
}
#[cfg(debug_assertions)]
fn unwrap_hard(self) -> T {
self.unwrap()
}
}
const TILE_SIZE: usize = 16;
fn get_buffers<'a>(width: usize, height: usize) -> (&'a [u32], &'a mut [u32]) {
let num_pixels = width * height;
let in_b: &[u32];
let out_b: &mut [u32];
unsafe {
in_b = from_raw_parts::<u32>(8 as *const u32, num_pixels);
out_b = from_raw_parts_mut::<u32>((num_pixels * 4 + 8) as *mut u32, num_pixels);
}
return (in_b, out_b);
}
#[inline(never)]
fn rotate_0(width: usize, height: usize) {
let (in_b, out_b) = get_buffers(width, height);
for (in_p, out_p) in in_b.iter().zip(out_b.iter_mut()) {
*out_p = *in_p;
}
}
#[inline(never)]
fn rotate_90(width: usize, height: usize) {
let (in_b, out_b) = get_buffers(width, height);
let new_width = height;
let _new_height = width;
for y_start in (0..height).step_by(TILE_SIZE) {
for x_start in (0..width).step_by(TILE_SIZE) {
for y in y_start..(y_start + TILE_SIZE).min(height) {
let in_offset = y * width;
let in_bounds = if x_start + TILE_SIZE < width {
(in_offset + x_start)..(in_offset + x_start + TILE_SIZE)
} else {
(in_offset + x_start)..(in_offset + width)
};
let in_chunk = in_b.get(in_bounds).unwrap_hard();
for (x, in_p) in in_chunk.iter().enumerate() {
let new_x = (new_width - 1) - y;
let new_y = x + x_start;
*out_b.get_mut(new_y * new_width + new_x).unwrap_hard() = *in_p;
}
}
}
}
}
#[inline(never)]
fn rotate_180(width: usize, height: usize) {
let (in_b, out_b) = get_buffers(width, height);
for (in_p, out_p) in in_b.iter().zip(out_b.iter_mut().rev()) {
*out_p = *in_p;
}
}
#[inline(never)]
fn rotate_270(width: usize, height: usize) {
let (in_b, out_b) = get_buffers(width, height);
let new_width = height;
let new_height = width;
for y_start in (0..height).step_by(TILE_SIZE) {
for x_start in (0..width).step_by(TILE_SIZE) {
for y in y_start..(y_start + TILE_SIZE).min(height) {
let in_offset = y * width;
let in_bounds = if x_start + TILE_SIZE < width {
(in_offset + x_start)..(in_offset + x_start + TILE_SIZE)
} else {
(in_offset + x_start)..(in_offset + width)
};
let in_chunk = in_b.get(in_bounds).unwrap_hard();
for (x, in_p) in in_chunk.iter().enumerate() {
let new_x = y;
let new_y = new_height - 1 - (x_start + x);
*out_b.get_mut(new_y * new_width + new_x).unwrap_hard() = *in_p;
}
}
}
}
}
#[no_mangle]
fn rotate(width: usize, height: usize, rotate: usize) {
match rotate {
0 => rotate_0(width, height),
90 => rotate_90(width, height),
180 => rotate_180(width, height),
270 => rotate_270(width, height),
_ => std::process::abort(),
}
}

Binary file not shown.


@@ -1,204 +0,0 @@
const path = require('path');
const { URL } = require('url');
const gzipSize = require('gzip-size');
const fetch = require('node-fetch');
const prettyBytes = require('pretty-bytes');
const escapeRE = require('escape-string-regexp');
const readdirp = require('readdirp');
const chalk = new require('chalk').constructor({ level: 4 });
function fetchTravis(path, options = {}) {
const url = new URL(path, 'https://api.travis-ci.org');
url.search = new URLSearchParams(options);
return fetch(url, {
headers: { 'Travis-API-Version': '3' },
});
}
function fetchTravisBuildInfo(user, repo, branch) {
return fetchTravis(`/repo/${encodeURIComponent(`${user}/${repo}`)}/builds`, {
'branch.name': branch,
state: 'passed',
limit: 1,
}).then(r => r.json());
}
function fetchTravisText(path) {
return fetchTravis(path).then(r => r.text());
}
/**
* Recursively-read a directory and turn it into an array of { name, size, gzipSize }
*/
async function dirToInfoArray(startPath, {
namePrefix = '',
} = {}) {
const results = await new Promise((resolve, reject) => {
readdirp({ root: startPath }, (err, results) => {
if (err) reject(err); else resolve(results);
});
});
return Promise.all(
results.files.map(async (entry) => ({
name: entry.path,
gzipSize: await gzipSize.file(entry.fullPath),
size: entry.stat.size,
})),
);
}
/**
* Try to treat two entries with different file name hashes as the same file.
*/
function findHashedMatch(name, buildInfo) {
const nameParts = /^(.+\.)[a-f0-9]+(\..+)$/.exec(name);
if (!nameParts) return;
const matchRe = new RegExp(`^${escapeRE(nameParts[1])}[a-f0-9]+${escapeRE(nameParts[2])}$`);
const matchingEntry = buildInfo.find(entry => matchRe.test(entry.name));
return matchingEntry;
}
const buildSizePrefix = '=== BUILD SIZES: ';
const buildSizePrefixRe = new RegExp(`^${escapeRE(buildSizePrefix)}(.+)$`, 'm');
async function getPreviousBuildInfo() {
const buildData = await fetchTravisBuildInfo('GoogleChromeLabs', 'squoosh', 'master');
const jobUrl = buildData.builds[0].jobs[0]['@href'];
const log = await fetchTravisText(jobUrl + '/log.txt');
const reResult = buildSizePrefixRe.exec(log);
if (!reResult) return;
return JSON.parse(reResult[1]);
}
/**
* Generate an array that represents the difference between builds.
* Returns an array of { beforeName, afterName, beforeSize, afterSize }.
* Sizes are gzipped size.
* Before/after properties are missing if resource isn't in the previous/new build.
*/
function getChanges(previousBuildInfo, buildInfo) {
const buildChanges = [];
const alsoInPreviousBuild = new Set();
for (const oldEntry of previousBuildInfo) {
const newEntry = buildInfo.find(entry => entry.name === oldEntry.name) ||
findHashedMatch(oldEntry.name, buildInfo);
// Entry is in previous build, but not the new build.
if (!newEntry) {
buildChanges.push({
beforeName: oldEntry.name,
beforeSize: oldEntry.gzipSize,
});
continue;
}
// Mark this entry so we know we've dealt with it.
alsoInPreviousBuild.add(newEntry);
// If they're the same, just ignore.
// Using size rather than gzip size. I've seen different platforms produce different zipped
// sizes.
if (
oldEntry.size === newEntry.size &&
oldEntry.name === newEntry.name
) continue;
// Entry is in both builds (maybe renamed).
buildChanges.push({
beforeName: oldEntry.name,
afterName: newEntry.name,
beforeSize: oldEntry.gzipSize,
afterSize: newEntry.gzipSize,
});
}
// Look for entries that are only in the new build.
for (const newEntry of buildInfo) {
if (alsoInPreviousBuild.has(newEntry)) continue;
buildChanges.push({
afterName: newEntry.name,
afterSize: newEntry.gzipSize,
});
}
return buildChanges;
}
async function main() {
// Output the current build sizes for later retrieval.
const buildInfo = await dirToInfoArray(__dirname + '/../build');
console.log(buildSizePrefix + JSON.stringify(buildInfo));
console.log('\nBuild change report:');
let previousBuildInfo;
try {
previousBuildInfo = await getPreviousBuildInfo();
} catch (err) {
console.log(` Couldn't parse previous build info`);
return;
}
if (!previousBuildInfo) {
console.log(` Couldn't find previous build info`);
return;
}
const buildChanges = getChanges(previousBuildInfo, buildInfo);
if (buildChanges.length === 0) {
console.log(' No changes');
return;
}
// One letter references, so it's easier to get the spacing right.
const y = chalk.yellow;
const g = chalk.green;
const r = chalk.red;
for (const change of buildChanges) {
// New file.
if (!change.beforeSize) {
console.log(` ${g('ADDED')} ${change.afterName} - ${prettyBytes(change.afterSize)}`);
continue;
}
// Removed file.
if (!change.afterSize) {
console.log(` ${r('REMOVED')} ${change.beforeName} - was ${prettyBytes(change.beforeSize)}`);
continue;
}
// Changed file.
let size;
if (change.beforeSize === change.afterSize) {
// Just renamed.
size = `${prettyBytes(change.afterSize)} -> no change`;
} else {
const color = change.afterSize > change.beforeSize ? r : g;
const sizeDiff = prettyBytes(change.afterSize - change.beforeSize, { signed: true });
const relativeDiff = Math.round((change.afterSize / change.beforeSize) * 1000) / 1000;
size = `${prettyBytes(change.beforeSize)} -> ${prettyBytes(change.afterSize)}` +
' (' +
color(`${sizeDiff}, ${relativeDiff}x`) +
')';
}
console.log(` ${y('CHANGED')} ${change.afterName} - ${size}`);
if (change.beforeName !== change.afterName) {
console.log(` Renamed from: ${change.beforeName}`);
}
}
}
main();

2115
package-lock.json generated

File diff suppressed because it is too large


@@ -1,14 +1,13 @@
{
"private": true,
"name": "squoosh",
"version": "1.3.3",
"version": "1.3.1",
"license": "apache-2.0",
"scripts": {
"start": "webpack-dev-server --host 0.0.0.0 --hot",
"build": "webpack -p",
"lint": "tslint -c tslint.json -p tsconfig.json -t verbose 'src/**/*.{ts,tsx,js,jsx}'",
"lintfix": "tslint -c tslint.json -p tsconfig.json -t verbose --fix 'src/**/*.{ts,tsx,js,jsx}'",
"sizereport": "node config/size-report.js"
"lintfix": "tslint -c tslint.json -p tsconfig.json -t verbose --fix 'src/**/*.{ts,tsx,js,jsx}'"
},
"husky": {
"hooks": {
@@ -16,34 +15,30 @@
}
},
"devDependencies": {
"@types/node": "10.12.26",
"@types/node": "10.12.21",
"@types/pretty-bytes": "5.1.0",
"@types/webassembly-js-api": "0.0.2",
"@webcomponents/custom-elements": "1.2.1",
"@webpack-cli/serve": "0.1.3",
"assets-webpack-plugin": "3.9.7",
"chokidar": "2.1.2",
"chalk": "2.4.2",
"chokidar": "2.0.4",
"classnames": "2.2.6",
"clean-webpack-plugin": "1.0.1",
"comlink": "3.1.1",
"copy-webpack-plugin": "5.0.0",
"critters-webpack-plugin": "2.3.0",
"copy-webpack-plugin": "4.6.0",
"critters-webpack-plugin": "2.2.0",
"css-loader": "1.0.1",
"ejs": "2.6.1",
"escape-string-regexp": "1.0.5",
"exports-loader": "0.7.0",
"file-drop-element": "0.0.9",
"file-loader": "3.0.1",
"gzip-size": "5.0.0",
"html-webpack-plugin": "3.2.0",
"husky": "1.3.1",
"idb-keyval": "3.1.0",
"linkstate": "1.1.1",
"loader-utils": "1.2.3",
"loader-utils": "1.2.0",
"mini-css-extract-plugin": "0.5.0",
"minimatch": "3.0.4",
"node-fetch": "2.3.0",
"node-sass": "4.11.0",
"optimize-css-assets-webpack-plugin": "5.0.1",
"pointer-tracker": "2.0.3",
@@ -52,7 +47,6 @@
"pretty-bytes": "5.1.0",
"progress-bar-webpack-plugin": "1.12.1",
"raw-loader": "1.0.0",
"readdirp": "2.2.1",
"sass-loader": "7.1.0",
"script-ext-html-webpack-plugin": "2.1.3",
"source-map-loader": "0.2.4",
@@ -67,9 +61,9 @@
"typescript": "3.2.4",
"url-loader": "1.1.2",
"webpack": "4.28.0",
"webpack-bundle-analyzer": "3.0.4",
"webpack-bundle-analyzer": "3.0.3",
"webpack-cli": "3.2.3",
"webpack-dev-server": "3.2.0",
"worker-plugin": "3.1.0"
"webpack-dev-server": "3.1.14",
"worker-plugin": "3.0.0"
}
}


@@ -1,5 +1,6 @@
import * as identity from './identity/encoder-meta';
import * as optiPNG from './optipng/encoder-meta';
import * as oxiPNG from './oxipng/encoder-meta';
import * as mozJPEG from './mozjpeg/encoder-meta';
import * as webP from './webp/encoder-meta';
import * as browserPNG from './browser-png/encoder-meta';
@@ -18,6 +19,7 @@ export interface EncoderSupportMap {
export type EncoderState =
identity.EncoderState |
optiPNG.EncoderState |
oxiPNG.EncoderState |
mozJPEG.EncoderState |
webP.EncoderState |
browserPNG.EncoderState |
@@ -32,6 +34,7 @@ export type EncoderState =
export type EncoderOptions =
identity.EncodeOptions |
optiPNG.EncodeOptions |
oxiPNG.EncodeOptions |
mozJPEG.EncodeOptions |
webP.EncodeOptions |
browserPNG.EncodeOptions |
@@ -48,6 +51,7 @@ export type EncoderType = keyof typeof encoderMap;
export const encoderMap = {
[identity.type]: identity,
[optiPNG.type]: optiPNG,
[oxiPNG.type]: oxiPNG,
[mozJPEG.type]: mozJPEG,
[webP.type]: webP,
[browserPNG.type]: browserPNG,


@@ -3,7 +3,7 @@ export interface EncodeOptions {
}
export interface EncoderState { type: typeof type; options: EncodeOptions; }
export const type = 'png';
export const type = 'optipng';
export const label = 'OptiPNG';
export const mimeType = 'image/png';
export const extension = 'png';


@@ -0,0 +1,13 @@
export interface EncodeOptions {
level: number;
}
export interface EncoderState { type: typeof type; options: EncodeOptions; }
export const type = 'oxipng';
export const label = 'OxiPNG';
export const mimeType = 'image/png';
export const extension = 'png';
export const defaultOptions: EncodeOptions = {
level: 6,
};


@@ -0,0 +1,18 @@
import * as oxipng from '../../../codecs/oxipng/pkg/oxipng_manual';
import { EncodeOptions } from './encoder-meta';
export async function compress(data: BufferSource, { level }: EncodeOptions): Promise<ArrayBuffer> {
let buffer: ArrayBuffer;
if (ArrayBuffer.isView(data)) {
buffer = data.buffer;
} else {
buffer = data;
}
debugger;
const resultView = await oxipng.compress(new Uint8Array(buffer), level);
const result = new Uint8Array(resultView);
// wasm can't run on SharedArrayBuffers, so we hard-cast to ArrayBuffer.
return result.buffer as ArrayBuffer;
}


@@ -0,0 +1,42 @@
import { h, Component } from 'preact';
import { bind } from '../../lib/initial-util';
import { inputFieldValueAsNumber, preventDefault } from '../../lib/util';
import { EncodeOptions } from './encoder-meta';
import Range from '../../components/range';
import * as style from '../../components/Options/style.scss';
type Props = {
options: EncodeOptions;
onChange(newOptions: EncodeOptions): void;
};
export default class OxiPNGEncoderOptions extends Component<Props, {}> {
@bind
onChange(event: Event) {
const form = (event.currentTarget as HTMLInputElement).closest('form') as HTMLFormElement;
const options: EncodeOptions = {
level: inputFieldValueAsNumber(form.level),
};
this.props.onChange(options);
}
render({ options }: Props) {
return (
<form class={style.optionsSection} onSubmit={preventDefault}>
<div class={style.optionOneCell}>
<Range
name="level"
min="0"
max="9"
step="1"
value={options.level}
onInput={this.onChange}
>
Effort:
</Range>
</div>
</form>
);
}
}


@@ -41,6 +41,16 @@ async function optiPngEncode(
return compress(data, options);
}
async function oxiPngEncode(
data: BufferSource, options: import('../oxipng/encoder-meta').EncodeOptions,
): Promise<ArrayBuffer> {
const { compress } = await import(
/* webpackChunkName: "process-optipng" */
'../oxipng/encoder',
);
return compress(data, options);
}
async function webpEncode(
data: ImageData, options: import('../webp/encoder-meta').EncodeOptions,
): Promise<ArrayBuffer> {
@@ -59,7 +69,15 @@ async function webpDecode(data: ArrayBuffer): Promise<ImageData> {
return decode(data);
}
const exports = { mozjpegEncode, quantize, rotate, optiPngEncode, webpEncode, webpDecode };
const exports = {
mozjpegEncode,
quantize,
rotate,
optiPngEncode,
oxiPngEncode,
webpEncode,
webpDecode,
};
export type ProcessorWorkerApi = typeof exports;
expose(exports, self);


@@ -3,6 +3,7 @@ import { QuantizeOptions } from './imagequant/processor-meta';
import { canvasEncode, blobToArrayBuffer } from '../lib/util';
import { EncodeOptions as MozJPEGEncoderOptions } from './mozjpeg/encoder-meta';
import { EncodeOptions as OptiPNGEncoderOptions } from './optipng/encoder-meta';
import { EncodeOptions as OxiPNGEncoderOptions } from './oxipng/encoder-meta';
import { EncodeOptions as WebPEncoderOptions } from './webp/encoder-meta';
import { EncodeOptions as BrowserJPEGOptions } from './browser-jpeg/encoder-meta';
import { EncodeOptions as BrowserWebpEncodeOptions } from './browser-webp/encoder-meta';
@@ -147,6 +148,16 @@ export default class Processor {
return this._workerApi!.optiPngEncode(pngBuffer, opts);
}
@Processor._processingJob({ needsWorker: true })
async oxiPngEncode(
data: ImageData, opts: OxiPNGEncoderOptions,
): Promise<ArrayBuffer> {
// OptiPNG expects PNG input.
const pngBlob = await canvasEncode(data, 'image/png');
const pngBuffer = await blobToArrayBuffer(pngBlob);
return this._workerApi!.oxiPngEncode(pngBuffer, opts);
}
@Processor._processingJob({ needsWorker: true })
webpEncode(data: ImageData, opts: WebPEncoderOptions): Promise<ArrayBuffer> {
return this._workerApi!.webpEncode(data, opts);


@@ -3,10 +3,3 @@ export interface RotateOptions {
}
export const defaultOptions: RotateOptions = { rotate: 0 };
export interface RotateModuleInstance {
exports: {
memory: WebAssembly.Memory;
rotate(width: number, height: number, rotate: 0 | 90 | 180 | 270): void;
};
}


@@ -1,33 +1,73 @@
import wasmUrl from '../../../codecs/rotate/rotate.wasm';
import { RotateOptions, RotateModuleInstance } from './processor-meta';
import { RotateOptions } from './processor-meta';
const instancePromise = (WebAssembly as any).instantiateStreaming(fetch(wasmUrl));
export function rotate(data: ImageData, opts: RotateOptions): ImageData {
const { rotate } = opts;
const flipDimensions = rotate % 180 !== 0;
const { width: inputWidth, height: inputHeight } = data;
const outputWidth = flipDimensions ? inputHeight : inputWidth;
const outputHeight = flipDimensions ? inputWidth : inputHeight;
const out = new ImageData(outputWidth, outputHeight);
let i = 0;
export async function rotate(
data: ImageData,
opts: RotateOptions,
): Promise<ImageData> {
const { instance } = (await instancePromise) as {instance: RotateModuleInstance};
// In the straight-copy case, d1 is x, d2 is y.
// x starts at 0 and increases.
// y starts at 0 and increases.
let d1Start = 0;
let d1Limit = inputWidth;
let d1Advance = 1;
let d1Multiplier = 1;
let d2Start = 0;
let d2Limit = inputHeight;
let d2Advance = 1;
let d2Multiplier = inputWidth;
// Number of wasm memory pages (á 64KiB) needed to store the image twice.
const bytesPerImage = data.width * data.height * 4;
const numPagesNeeded = Math.ceil((bytesPerImage * 2 + 8) / (64 * 1024));
// Only count full pages, just to be safe.
const numPagesAvailable = Math.floor(instance.exports.memory.buffer.byteLength / (64 * 1024));
const additionalPagesToAllocate = numPagesNeeded - numPagesAvailable;
if (additionalPagesToAllocate > 0) {
instance.exports.memory.grow(additionalPagesToAllocate);
if (rotate === 90) {
// d1 is y, d2 is x.
// y starts at its max value and decreases.
// x starts at 0 and increases.
d1Start = inputHeight - 1;
d1Limit = inputHeight;
d1Advance = -1;
d1Multiplier = inputWidth;
d2Start = 0;
d2Limit = inputWidth;
d2Advance = 1;
d2Multiplier = 1;
} else if (rotate === 180) {
// d1 is x, d2 is y.
// x starts at its max and decreases.
// y starts at its max and decreases.
d1Start = inputWidth - 1;
d1Limit = inputWidth;
d1Advance = -1;
d1Multiplier = 1;
d2Start = inputHeight - 1;
d2Limit = inputHeight;
d2Advance = -1;
d2Multiplier = inputWidth;
} else if (rotate === 270) {
// d1 is y, d2 is x.
// y starts at 0 and increases.
// x starts at its max and decreases.
d1Start = 0;
d1Limit = inputHeight;
d1Advance = 1;
d1Multiplier = inputWidth;
d2Start = inputWidth - 1;
d2Limit = inputWidth;
d2Advance = -1;
d2Multiplier = 1;
}
const view = new Uint8ClampedArray(instance.exports.memory.buffer);
view.set(data.data, 8);
instance.exports.rotate(data.width, data.height, opts.rotate);
const flipDimensions = opts.rotate % 180 !== 0;
return new ImageData(
view.slice(bytesPerImage + 8, bytesPerImage * 2 + 8),
flipDimensions ? data.height : data.width,
flipDimensions ? data.width : data.height,
);
const inB = new Uint32Array(data.data.buffer);
const outB = new Uint32Array(out.data.buffer);
for (let d2 = d2Start; d2 >= 0 && d2 < d2Limit; d2 += d2Advance) {
for (let d1 = d1Start; d1 >= 0 && d1 < d1Limit; d1 += d1Advance) {
const start = ((d1 * d1Multiplier) + (d2 * d2Multiplier));
outB[i] = inB[start];
i += 1;
}
}
return out;
}


@@ -8,6 +8,7 @@ import Options from '../Options';
import ResultCache from './result-cache';
import * as identity from '../../codecs/identity/encoder-meta';
import * as optiPNG from '../../codecs/optipng/encoder-meta';
import * as oxiPNG from '../../codecs/oxipng/encoder-meta';
import * as mozJPEG from '../../codecs/mozjpeg/encoder-meta';
import * as webP from '../../codecs/webp/encoder-meta';
import * as browserPNG from '../../codecs/browser-png/encoder-meta';
@@ -131,6 +132,7 @@ async function compressImage(
const compressedData = await (() => {
switch (encodeData.type) {
case optiPNG.type: return processor.optiPngEncode(image, encodeData.options);
case oxiPNG.type: return processor.oxiPngEncode(image, encodeData.options);
case mozJPEG.type: return processor.mozjpegEncode(image, encodeData.options);
case webP.type: return processor.webpEncode(image, encodeData.options);
case browserPNG.type: return processor.browserPngEncode(image);