Compare commits
3 Commits
Author | SHA1 | Date | |
---|---|---|---|
668acf2698 | |||
7042491257 | |||
307e1f9356 |
13
.babelrc
Normal file
@ -0,0 +1,13 @@
|
||||
{
|
||||
"plugins": [
|
||||
"transform-class-properties",
|
||||
"transform-react-constant-elements",
|
||||
"transform-react-remove-prop-types",
|
||||
[
|
||||
"transform-react-jsx",
|
||||
{
|
||||
"pragma": "h"
|
||||
}
|
||||
]
|
||||
]
|
||||
}
|
5
.firebaserc
Normal file
@ -0,0 +1,5 @@
|
||||
{
|
||||
"projects": {
|
||||
"default": "squoosh-beta"
|
||||
}
|
||||
}
|
36
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@ -1,36 +0,0 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Something is not working as expected
|
||||
labels:
|
||||
|
||||
---
|
||||
|
||||
**Before you start**
|
||||
Please take a look at the [FAQ](https://github.com/GoogleChromeLabs/squoosh/wiki/FAQ) as well as the already opened issues! If nothing fits your problem, go ahead and fill out the following template:
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**To Reproduce**
|
||||
Steps to reproduce the behavior:
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See error
|
||||
|
||||
**Expected behavior**
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
**Version:**
|
||||
- OS w/ version: [e.g. iOS 12]
|
||||
- Browser w/ version [e.g. Chrome 70]
|
||||
- Node version: [e.g. 10.11.0]
|
||||
- npm version: [e.g. 6.4.1]
|
||||
|
||||
**Is your issue related to the quality of image compression?**
|
||||
Please attach original and output images (you can drag & drop to attach).
|
||||
- Original image
|
||||
- Output image from Squoosh
|
||||
|
||||
**Additional context, screenshots, screencasts**
|
||||
Add any other context about the problem here.
|
18
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@ -1,18 +0,0 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
labels:
|
||||
|
||||
---
|
||||
|
||||
**Is your feature request related to a problem? Please describe.**
|
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||
|
||||
**Describe the solution you'd like**
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
**Does other service/app have this feature?**
|
||||
Add any service you know/use that has this feature (We want to know for research)
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
1
.gitignore
vendored
@ -3,4 +3,3 @@ node_modules
|
||||
/*.log
|
||||
*.scss.d.ts
|
||||
*.css.d.ts
|
||||
*.o
|
||||
|
@ -1,4 +0,0 @@
|
||||
language: node_js
|
||||
cache: npm
|
||||
script: npm run build
|
||||
after_success: npm run sizereport
|
20
Dockerfile
Normal file
@ -0,0 +1,20 @@
|
||||
FROM selenium/node-chrome:latest
|
||||
|
||||
USER root
|
||||
|
||||
RUN apt-get update -qqy \
|
||||
&& apt-get -qqy install python git build-essential \
|
||||
&& rm -rf /var/lib/apt/lists/* /var/cache/apt/* \
|
||||
&& rm /bin/sh && ln -s /bin/bash /bin/sh \
|
||||
&& chown seluser /usr/local
|
||||
|
||||
ENV NVM_DIR /usr/local/nvm
|
||||
RUN wget -qO- https://raw.githubusercontent.com/creationix/nvm/v0.33.2/install.sh | bash \
|
||||
&& source $NVM_DIR/nvm.sh \
|
||||
&& nvm install v8
|
||||
|
||||
ENV CHROME_BIN /opt/google/chrome/chrome
|
||||
ENV INSIDE_DOCKER=1
|
||||
|
||||
WORKDIR /usr/src
|
||||
ENTRYPOINT source $NVM_DIR/nvm.sh && npm i && npm test
|
32
README.md
@ -1,31 +1,5 @@
|
||||
# [Squoosh]!
|
||||
# Squoosh!
|
||||
|
||||
[Squoosh] is an image compression web app that allows you to dive into the advanced options provided
|
||||
by various image compressors.
|
||||
Squoosh will be an image compression web app that allows you to dive into the
|
||||
advanced options provided by various image compressors.
|
||||
|
||||
# Privacy
|
||||
|
||||
Google Analytics is used to record the following:
|
||||
|
||||
* [Basic visit data](https://support.google.com/analytics/answer/6004245?ref_topic=2919631).
|
||||
* Before and after image size once an image is downloaded. These values are rounded to the nearest
|
||||
kilobyte.
|
||||
|
||||
Image compression is handled locally; no additional data is sent to the server.
|
||||
|
||||
# Building locally
|
||||
|
||||
Clone the repo, and:
|
||||
|
||||
```sh
|
||||
npm install
|
||||
npm run build
|
||||
```
|
||||
|
||||
You can run the development server with:
|
||||
|
||||
```sh
|
||||
npm start
|
||||
```
|
||||
|
||||
[Squoosh]: https://squoosh.app
|
||||
|
18
_headers.ejs
@ -1,18 +0,0 @@
|
||||
# Long-term cache by default.
|
||||
/*
|
||||
Cache-Control: max-age=31536000
|
||||
|
||||
# And here are the exceptions:
|
||||
/
|
||||
Cache-Control: no-cache
|
||||
|
||||
/serviceworker.js
|
||||
Cache-Control: no-cache
|
||||
|
||||
/manifest.json
|
||||
Cache-Control: must-revalidate, max-age=3600
|
||||
|
||||
# URLs in /assets do not include a hash and are mutable.
|
||||
# But it isn't a big deal if the user gets an old version.
|
||||
/assets/*
|
||||
Cache-Control: must-revalidate, max-age=3600
|
@ -1,2 +0,0 @@
|
||||
/index.html / 301
|
||||
/* /index.html 301
|
@ -11,6 +11,6 @@ $ npm install
|
||||
$ npm run build
|
||||
```
|
||||
|
||||
This will build two files: `<codec name>_<enc or dec>.js` and `<codec name>_<enc or dec>.wasm`. It will most likely be necessary to set [`Module["locateFile"]`](https://kripken.github.io/emscripten-site/docs/api_reference/module.html#affecting-execution) to successfully load the `.wasm` file. When the `.js` file is loaded, a global `<codec name>_<enc or dec>` is created with the same API as an [Emscripten `Module`](https://kripken.github.io/emscripten-site/docs/api_reference/module.html).
|
||||
This will build two files: `<codec name>_<enc or dec>.js` and `<codec name>_<enc or dec>.wasm`. It will most likely be necessary to set [`Module["locateFile"]`](https://kripken.github.io/emscripten-site/docs/api_reference/module.html#affecting-execution) to sucessfully load the `.wasm` file. When the `.js` file is loaded, a global `<codec name>_<enc or dec>` is created with the same API as an [Emscripten `Module`](https://kripken.github.io/emscripten-site/docs/api_reference/module.html).
|
||||
|
||||
Each codec will document its API in its README.
|
||||
|
Before Width: | Height: | Size: 74 KiB |
Before Width: | Height: | Size: 239 KiB |
@ -1,30 +0,0 @@
|
||||
# ImageQuant
|
||||
|
||||
- Source: <https://github.com/ImageOptim/libimagequant>
|
||||
- Version: v2.12.1
|
||||
|
||||
## Dependencies
|
||||
|
||||
- Docker
|
||||
|
||||
## Example
|
||||
|
||||
See `example.html`
|
||||
|
||||
## API
|
||||
|
||||
### `int version()`
|
||||
|
||||
Returns the version of libimagequant as a number. va.b.c is encoded as 0x0a0b0c
|
||||
|
||||
### `RawImage quantize(std::string buffer, int image_width, int image_height, int numColors, float dithering)`
|
||||
|
||||
Quantizes the given images, using at most `numColors`, a value between 2 and 256. `dithering` is a value between 0 and 1 controlling the amount of dithering. `RawImage` is a class with 3 fields: `buffer`, `width`, and `height`.
|
||||
|
||||
### `RawImage zx_quantize(std::string buffer, int image_width, int image_height, float dithering)`
|
||||
|
||||
???
|
||||
|
||||
### `void free_result()`
|
||||
|
||||
Frees the result created by `quantize()`.
|
@ -1,48 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
export OPTIMIZE="-Os"
|
||||
export LDFLAGS="${OPTIMIZE}"
|
||||
export CFLAGS="${OPTIMIZE}"
|
||||
export CPPFLAGS="${OPTIMIZE}"
|
||||
|
||||
echo "============================================="
|
||||
echo "Compiling libimagequant"
|
||||
echo "============================================="
|
||||
(
|
||||
emcc \
|
||||
--bind \
|
||||
${OPTIMIZE} \
|
||||
-s ALLOW_MEMORY_GROWTH=1 \
|
||||
-s MODULARIZE=1 \
|
||||
-s 'EXPORT_NAME="imagequant"' \
|
||||
-I node_modules/libimagequant \
|
||||
--std=c99 \
|
||||
-c \
|
||||
node_modules/libimagequant/{libimagequant,pam,mediancut,blur,mempool,kmeans,nearest}.c
|
||||
)
|
||||
echo "============================================="
|
||||
echo "Compiling wasm module"
|
||||
echo "============================================="
|
||||
(
|
||||
emcc \
|
||||
--bind \
|
||||
${OPTIMIZE} \
|
||||
-s ALLOW_MEMORY_GROWTH=1 \
|
||||
-s MODULARIZE=1 \
|
||||
-s 'EXPORT_NAME="imagequant"' \
|
||||
-I node_modules/libimagequant \
|
||||
-o ./imagequant.js \
|
||||
--std=c++11 *.o \
|
||||
-x c++ \
|
||||
imagequant.cpp
|
||||
)
|
||||
echo "============================================="
|
||||
echo "Compiling wasm module done"
|
||||
echo "============================================="
|
||||
|
||||
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
|
||||
echo "Did you update your docker image?"
|
||||
echo "Run \`docker pull trzeci/emscripten\`"
|
||||
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
|
@ -1,41 +0,0 @@
|
||||
<!doctype html>
|
||||
<style>
|
||||
canvas {
|
||||
image-rendering: pixelated;
|
||||
}
|
||||
</style>
|
||||
<script src='imagequant.js'></script>
|
||||
<script>
|
||||
const Module = imagequant();
|
||||
|
||||
async function loadImage(src) {
|
||||
// Load image
|
||||
const img = document.createElement('img');
|
||||
img.src = src;
|
||||
await new Promise(resolve => img.onload = resolve);
|
||||
// Make canvas same size as image
|
||||
const canvas = document.createElement('canvas');
|
||||
[canvas.width, canvas.height] = [img.width, img.height];
|
||||
// Draw image onto canvas
|
||||
const ctx = canvas.getContext('2d');
|
||||
ctx.drawImage(img, 0, 0);
|
||||
return ctx.getImageData(0, 0, img.width, img.height);
|
||||
}
|
||||
|
||||
Module.onRuntimeInitialized = async _ => {
|
||||
console.log('Version:', Module.version().toString(16));
|
||||
const image = await loadImage('../example.png');
|
||||
// const rawImage = Module.quantize(image.data, image.width, image.height, 256, 1.0);
|
||||
const rawImage = Module.zx_quantize(image.data, image.width, image.height, 1.0);
|
||||
console.log('done');
|
||||
Module.free_result();
|
||||
|
||||
const imageData = new ImageData(new Uint8ClampedArray(rawImage.buffer), rawImage.width, rawImage.height);
|
||||
const canvas = document.createElement('canvas');
|
||||
canvas.width = image.width;
|
||||
canvas.height = image.height;
|
||||
const ctx = canvas.getContext('2d');
|
||||
ctx.putImageData(imageData, 0, 0);
|
||||
document.body.appendChild(canvas);
|
||||
};
|
||||
</script>
|
@ -1,245 +0,0 @@
|
||||
#include "emscripten/bind.h"
|
||||
#include "emscripten/val.h"
|
||||
#include <stdlib.h>
|
||||
#include <inttypes.h>
|
||||
#include <limits.h>
|
||||
#include <math.h>
|
||||
|
||||
#include "libimagequant.h"
|
||||
|
||||
using namespace emscripten;
|
||||
|
||||
int version() {
|
||||
return (((LIQ_VERSION/10000) % 100) << 16) |
|
||||
(((LIQ_VERSION/100 ) % 100) << 8) |
|
||||
(((LIQ_VERSION/1 ) % 100) << 0);
|
||||
}
|
||||
|
||||
class RawImage {
|
||||
public:
|
||||
val buffer;
|
||||
int width;
|
||||
int height;
|
||||
|
||||
RawImage(val b, int w, int h)
|
||||
: buffer(b), width(w), height(h) {}
|
||||
};
|
||||
|
||||
|
||||
liq_attr *attr;
|
||||
liq_image *image;
|
||||
liq_result *res;
|
||||
uint8_t* result;
|
||||
RawImage quantize(std::string rawimage, int image_width, int image_height, int num_colors, float dithering) {
|
||||
const uint8_t* image_buffer = (uint8_t*)rawimage.c_str();
|
||||
int size = image_width * image_height;
|
||||
attr = liq_attr_create();
|
||||
image = liq_image_create_rgba(attr, image_buffer, image_width, image_height, 0);
|
||||
liq_set_max_colors(attr, num_colors);
|
||||
liq_image_quantize(image, attr, &res);
|
||||
liq_set_dithering_level(res, dithering);
|
||||
uint8_t* image8bit = (uint8_t*) malloc(size);
|
||||
result = (uint8_t*) malloc(size * 4);
|
||||
liq_write_remapped_image(res, image, image8bit, size);
|
||||
const liq_palette *pal = liq_get_palette(res);
|
||||
// Turn palletted image back into an RGBA image
|
||||
for(int i = 0; i < size; i++) {
|
||||
result[i * 4 + 0] = pal->entries[image8bit[i]].r;
|
||||
result[i * 4 + 1] = pal->entries[image8bit[i]].g;
|
||||
result[i * 4 + 2] = pal->entries[image8bit[i]].b;
|
||||
result[i * 4 + 3] = pal->entries[image8bit[i]].a;
|
||||
}
|
||||
free(image8bit);
|
||||
liq_result_destroy(res);
|
||||
liq_image_destroy(image);
|
||||
liq_attr_destroy(attr);
|
||||
return {
|
||||
val(typed_memory_view(image_width*image_height*4, result)),
|
||||
image_width,
|
||||
image_height
|
||||
};
|
||||
}
|
||||
|
||||
const liq_color zx_colors[] = {
|
||||
{.a = 255, .r = 0, .g = 0, .b = 0}, // regular black
|
||||
{.a = 255, .r = 0, .g = 0, .b = 215}, // regular blue
|
||||
{.a = 255, .r = 215, .g = 0, .b = 0}, // regular red
|
||||
{.a = 255, .r = 215, .g = 0, .b = 215}, // regular magenta
|
||||
{.a = 255, .r = 0, .g = 215, .b = 0}, // regular green
|
||||
{.a = 255, .r = 0, .g = 215, .b = 215}, // regular cyan
|
||||
{.a = 255, .r = 215, .g = 215, .b = 0}, // regular yellow
|
||||
{.a = 255, .r = 215, .g = 215, .b = 215}, // regular white
|
||||
{.a = 255, .r = 0, .g = 0, .b = 255}, // bright blue
|
||||
{.a = 255, .r = 255, .g = 0, .b = 0}, // bright red
|
||||
{.a = 255, .r = 255, .g = 0, .b = 255}, // bright magenta
|
||||
{.a = 255, .r = 0, .g = 255, .b = 0}, // bright green
|
||||
{.a = 255, .r = 0, .g = 255, .b = 255}, // bright cyan
|
||||
{.a = 255, .r = 255, .g = 255, .b = 0}, // bright yellow
|
||||
{.a = 255, .r = 255, .g = 255, .b = 255} // bright white
|
||||
};
|
||||
|
||||
uint8_t block[8 * 8 * 4];
|
||||
|
||||
/**
|
||||
* The ZX has one bit per pixel, but can assign two colours to an 8x8 block. The two colours must
|
||||
* both be 'regular' or 'bright'. Black exists as both regular and bright.
|
||||
*/
|
||||
RawImage zx_quantize(std::string rawimage, int image_width, int image_height, float dithering) {
|
||||
const uint8_t* image_buffer = (uint8_t*) rawimage.c_str();
|
||||
int size = image_width * image_height;
|
||||
int bytes_per_pixel = 4;
|
||||
result = (uint8_t*) malloc(size * bytes_per_pixel);
|
||||
uint8_t* image8bit = (uint8_t*) malloc(8 * 8);
|
||||
|
||||
// For each 8x8 grid
|
||||
for (int block_start_y = 0; block_start_y < image_height; block_start_y += 8) {
|
||||
for (int block_start_x = 0; block_start_x < image_width; block_start_x += 8) {
|
||||
int color_popularity[15] = {0};
|
||||
int block_index = 0;
|
||||
int block_width = 8;
|
||||
int block_height = 8;
|
||||
|
||||
// If the block hangs off the right/bottom of the image dimensions, make it smaller to fit.
|
||||
if (block_start_y + block_height > image_height) {
|
||||
block_height = image_height - block_start_y;
|
||||
}
|
||||
|
||||
if (block_start_x + block_width > image_width) {
|
||||
block_width = image_width - block_start_x;
|
||||
}
|
||||
|
||||
// For each pixel in that block:
|
||||
for (int y = block_start_y; y < block_start_y + block_height; y++) {
|
||||
for (int x = block_start_x; x < block_start_x + block_width; x++) {
|
||||
int pixel_start = (y * image_width * bytes_per_pixel) + (x * bytes_per_pixel);
|
||||
int smallest_distance = INT_MAX;
|
||||
int winning_index = -1;
|
||||
|
||||
// Copy pixel data for quantizing later
|
||||
block[block_index++] = image_buffer[pixel_start];
|
||||
block[block_index++] = image_buffer[pixel_start + 1];
|
||||
block[block_index++] = image_buffer[pixel_start + 2];
|
||||
block[block_index++] = image_buffer[pixel_start + 3];
|
||||
|
||||
// Which zx color is this pixel closest to?
|
||||
for (int color_index = 0; color_index < 15; color_index++) {
|
||||
liq_color color = zx_colors[color_index];
|
||||
|
||||
// Using Euclidean distance. LibQuant has better methods, but it requires conversion to
|
||||
// LAB, so I don't think it's worth it.
|
||||
int distance =
|
||||
pow(color.r - image_buffer[pixel_start + 0], 2) +
|
||||
pow(color.g - image_buffer[pixel_start + 1], 2) +
|
||||
pow(color.b - image_buffer[pixel_start + 2], 2);
|
||||
|
||||
if (distance < smallest_distance) {
|
||||
winning_index = color_index;
|
||||
smallest_distance = distance;
|
||||
}
|
||||
}
|
||||
color_popularity[winning_index]++;
|
||||
}
|
||||
}
|
||||
|
||||
// Get the three most popular colours for the block.
|
||||
int first_color_index = 0;
|
||||
int second_color_index = 0;
|
||||
int third_color_index = 0;
|
||||
int highest_popularity = -1;
|
||||
int second_highest_popularity = -1;
|
||||
int third_highest_popularity = -1;
|
||||
|
||||
for (int color_index = 0; color_index < 15; color_index++) {
|
||||
if (color_popularity[color_index] > highest_popularity) {
|
||||
// Store this as the most popular pixel, and demote the current values:
|
||||
third_color_index = second_color_index;
|
||||
third_highest_popularity = second_highest_popularity;
|
||||
second_color_index = first_color_index;
|
||||
second_highest_popularity = highest_popularity;
|
||||
first_color_index = color_index;
|
||||
highest_popularity = color_popularity[color_index];
|
||||
} else if (color_popularity[color_index] > second_highest_popularity) {
|
||||
third_color_index = second_color_index;
|
||||
third_highest_popularity = second_highest_popularity;
|
||||
second_color_index = color_index;
|
||||
second_highest_popularity = color_popularity[color_index];
|
||||
} else if (color_popularity[color_index] > third_highest_popularity) {
|
||||
third_color_index = color_index;
|
||||
third_highest_popularity = color_popularity[color_index];
|
||||
}
|
||||
}
|
||||
|
||||
// ZX images can't mix bright and regular colours, except black which appears in both.
|
||||
// Resolve any conflict:
|
||||
while (1) {
|
||||
// If either colour is black, there's no conflict to resolve.
|
||||
if (first_color_index != 0 && second_color_index != 0) {
|
||||
if (first_color_index >= 8 && second_color_index < 8) {
|
||||
// Make the second color bright
|
||||
second_color_index = second_color_index + 7;
|
||||
} else if (first_color_index < 8 && second_color_index >= 8) {
|
||||
// Make the second color regular
|
||||
second_color_index = second_color_index - 7;
|
||||
}
|
||||
}
|
||||
|
||||
// If, during conflict resolving, we now have two of the same colour (because we initially
|
||||
// selected the bright & regular version of the same colour), retry again with the third
|
||||
// most popular colour.
|
||||
if (first_color_index == second_color_index) {
|
||||
second_color_index = third_color_index;
|
||||
} else break;
|
||||
}
|
||||
|
||||
// Quantize
|
||||
attr = liq_attr_create();
|
||||
image = liq_image_create_rgba(attr, block, block_width, block_height, 0);
|
||||
liq_set_max_colors(attr, 2);
|
||||
liq_image_add_fixed_color(image, zx_colors[first_color_index]);
|
||||
liq_image_add_fixed_color(image, zx_colors[second_color_index]);
|
||||
liq_image_quantize(image, attr, &res);
|
||||
liq_set_dithering_level(res, dithering);
|
||||
liq_write_remapped_image(res, image, image8bit, size);
|
||||
const liq_palette *pal = liq_get_palette(res);
|
||||
|
||||
// Turn palletted image back into an RGBA image, and write it into the full size result image.
|
||||
for(int y = 0; y < block_height; y++) {
|
||||
for(int x = 0; x < block_width; x++) {
|
||||
int image8BitPos = y * block_width + x;
|
||||
int resultStartPos = ((block_start_y + y) * bytes_per_pixel * image_width) + ((block_start_x + x) * bytes_per_pixel);
|
||||
result[resultStartPos + 0] = pal->entries[image8bit[image8BitPos]].r;
|
||||
result[resultStartPos + 1] = pal->entries[image8bit[image8BitPos]].g;
|
||||
result[resultStartPos + 2] = pal->entries[image8bit[image8BitPos]].b;
|
||||
result[resultStartPos + 3] = pal->entries[image8bit[image8BitPos]].a;
|
||||
}
|
||||
}
|
||||
|
||||
liq_result_destroy(res);
|
||||
liq_image_destroy(image);
|
||||
liq_attr_destroy(attr);
|
||||
}
|
||||
}
|
||||
|
||||
free(image8bit);
|
||||
return {
|
||||
val(typed_memory_view(image_width*image_height*4, result)),
|
||||
image_width,
|
||||
image_height
|
||||
};
|
||||
}
|
||||
|
||||
void free_result() {
|
||||
free(result);
|
||||
}
|
||||
|
||||
EMSCRIPTEN_BINDINGS(my_module) {
|
||||
class_<RawImage>("RawImage")
|
||||
.property("buffer", &RawImage::buffer)
|
||||
.property("width", &RawImage::width)
|
||||
.property("height", &RawImage::height);
|
||||
|
||||
function("quantize", &quantize);
|
||||
function("zx_quantize", &zx_quantize);
|
||||
function("version", &version);
|
||||
function("free_result", &free_result);
|
||||
}
|
15
codecs/imagequant/imagequant.d.ts
vendored
@ -1,15 +0,0 @@
|
||||
interface RawImage {
|
||||
buffer: Uint8Array;
|
||||
width: number;
|
||||
height: number;
|
||||
}
|
||||
|
||||
interface QuantizerModule extends EmscriptenWasm.Module {
|
||||
quantize(data: BufferSource, width: number, height: number, numColors: number, dither: number): RawImage;
|
||||
zx_quantize(data: BufferSource, width: number, height: number, dither: number): RawImage;
|
||||
free_result(): void;
|
||||
}
|
||||
|
||||
export default function(opts: EmscriptenWasm.ModuleOpts): QuantizerModule;
|
||||
|
||||
|
1147
codecs/imagequant/package-lock.json
generated
@ -1,13 +0,0 @@
|
||||
{
|
||||
"name": "imagequant",
|
||||
"scripts": {
|
||||
"install": "napa",
|
||||
"build": "docker run --rm -v $(pwd):/src trzeci/emscripten ./build.sh"
|
||||
},
|
||||
"napa": {
|
||||
"libimagequant": "ImageOptim/libimagequant#2.12.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"napa": "3.0.0"
|
||||
}
|
||||
}
|
@ -6,6 +6,8 @@
|
||||
## Dependencies
|
||||
|
||||
- Docker
|
||||
- Automake
|
||||
- pkg-config
|
||||
|
||||
## Example
|
||||
|
||||
@ -17,31 +19,26 @@ See `example.html`
|
||||
|
||||
Returns the version of MozJPEG as a number. va.b.c is encoded as 0x0a0b0c
|
||||
|
||||
### `uint8_t* create_buffer(int width, int height)`
|
||||
|
||||
Allocates an RGBA buffer for an image with the given dimension.
|
||||
|
||||
### `void destroy_buffer(uint8_t* p)`
|
||||
|
||||
Frees a buffer created with `create_buffer`.
|
||||
|
||||
### `void encode(uint8_t* image_buffer, int image_width, int image_height, int quality)`
|
||||
|
||||
Encodes the given image with given dimension to JPEG. `quality` is a number between 0 and 100. The higher the number, the better the quality of the encoded image. The result is implicitly stored and can be accessed using the `get_result_*()` functions.
|
||||
|
||||
### `void free_result()`
|
||||
|
||||
Frees the result created by `encode()`.
|
||||
|
||||
### `Uint8Array encode(std::string image_in, int image_width, int image_height, MozJpegOptions opts)`
|
||||
### `int get_result_pointer()`
|
||||
|
||||
Encodes the given image with given dimension to JPEG. Options looks like this:
|
||||
Returns the pointer to the start of the buffer holding the encoded data.
|
||||
|
||||
```c++
|
||||
struct MozJpegOptions {
|
||||
int quality;
|
||||
bool baseline;
|
||||
bool arithmetic;
|
||||
bool progressive;
|
||||
bool optimize_coding;
|
||||
int smoothing;
|
||||
int color_space;
|
||||
int quant_table;
|
||||
bool trellis_multipass;
|
||||
bool trellis_opt_zero;
|
||||
bool trellis_opt_table;
|
||||
int trellis_loops;
|
||||
bool auto_subsample;
|
||||
int chroma_subsample;
|
||||
bool separate_chroma_quality;
|
||||
int chroma_quality;
|
||||
};
|
||||
```
|
||||
### `int get_result_size()`
|
||||
|
||||
Returns the length of the buffer holding the encoded data.
|
||||
|
@ -1,53 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
export OPTIMIZE="-Os"
|
||||
export LDFLAGS="${OPTIMIZE}"
|
||||
export CFLAGS="${OPTIMIZE}"
|
||||
export CPPFLAGS="${OPTIMIZE}"
|
||||
|
||||
apt-get update
|
||||
apt-get install -qqy autoconf libtool libpng-dev pkg-config
|
||||
|
||||
echo "============================================="
|
||||
echo "Compiling mozjpeg"
|
||||
echo "============================================="
|
||||
(
|
||||
cd node_modules/mozjpeg
|
||||
autoreconf -fiv
|
||||
emconfigure ./configure --without-simd
|
||||
emmake make libjpeg.la
|
||||
)
|
||||
echo "============================================="
|
||||
echo "Compiling mozjpeg done"
|
||||
echo "============================================="
|
||||
|
||||
echo "============================================="
|
||||
echo "Compiling wasm bindings"
|
||||
echo "============================================="
|
||||
(
|
||||
emcc \
|
||||
--bind \
|
||||
${OPTIMIZE} \
|
||||
-s WASM=1 \
|
||||
-s ALLOW_MEMORY_GROWTH=1 \
|
||||
-s MODULARIZE=1 \
|
||||
-s 'EXPORT_NAME="mozjpeg_enc"' \
|
||||
-I node_modules/mozjpeg \
|
||||
-o ./mozjpeg_enc.js \
|
||||
-Wno-deprecated-register \
|
||||
-Wno-writable-strings \
|
||||
node_modules/mozjpeg/rdswitch.c \
|
||||
-x c++ -std=c++11 \
|
||||
mozjpeg_enc.cpp \
|
||||
node_modules/mozjpeg/.libs/libjpeg.a
|
||||
)
|
||||
echo "============================================="
|
||||
echo "Compiling wasm bindings done"
|
||||
echo "============================================="
|
||||
|
||||
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
|
||||
echo "Did you update your docker image?"
|
||||
echo "Run \`docker pull trzeci/emscripten\`"
|
||||
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
|
@ -1,7 +1,7 @@
|
||||
<!doctype html>
|
||||
<script src='mozjpeg_enc.js'></script>
|
||||
<script>
|
||||
const module = mozjpeg_enc();
|
||||
const Module = mozjpeg_enc();
|
||||
|
||||
async function loadImage(src) {
|
||||
// Load image
|
||||
@ -17,27 +17,26 @@
|
||||
return ctx.getImageData(0, 0, img.width, img.height);
|
||||
}
|
||||
|
||||
module.onRuntimeInitialized = async _ => {
|
||||
console.log('Version:', module.version().toString(16));
|
||||
Module.onRuntimeInitialized = async _ => {
|
||||
const api = {
|
||||
version: Module.cwrap('version', 'number', []),
|
||||
create_buffer: Module.cwrap('create_buffer', 'number', ['number', 'number']),
|
||||
destroy_buffer: Module.cwrap('destroy_buffer', '', ['number']),
|
||||
encode: Module.cwrap('encode', '', ['number', 'number', 'number', 'number']),
|
||||
free_result: Module.cwrap('free_result', '', ['number']),
|
||||
get_result_pointer: Module.cwrap('get_result_pointer', 'number', []),
|
||||
get_result_size: Module.cwrap('get_result_size', 'number', []),
|
||||
};
|
||||
const image = await loadImage('../example.png');
|
||||
const result = module.encode(image.data, image.width, image.height, {
|
||||
quality: 75,
|
||||
baseline: false,
|
||||
arithmetic: false,
|
||||
progressive: true,
|
||||
optimize_coding: true,
|
||||
smoothing: 0,
|
||||
color_space: 3,
|
||||
quant_table: 3,
|
||||
trellis_multipass: false,
|
||||
trellis_opt_zero: false,
|
||||
trellis_opt_table: false,
|
||||
trellis_loops: 1,
|
||||
auto_subsample: true,
|
||||
chroma_subsample: 2,
|
||||
separate_chroma_quality: false,
|
||||
chroma_quality: 75,
|
||||
});
|
||||
const p = api.create_buffer(image.width, image.height);
|
||||
Module.HEAP8.set(image.data, p);
|
||||
api.encode(p, image.width, image.height, 2);
|
||||
const resultPointer = api.get_result_pointer();
|
||||
const resultSize = api.get_result_size();
|
||||
const resultView = new Uint8Array(Module.HEAP8.buffer, resultPointer, resultSize);
|
||||
const result = new Uint8Array(resultView);
|
||||
api.free_result(resultPointer);
|
||||
api.destroy_buffer(p);
|
||||
|
||||
const blob = new Blob([result], {type: 'image/jpeg'});
|
||||
const blobURL = URL.createObjectURL(blob);
|
||||
|
@ -1,40 +1,17 @@
|
||||
#include <emscripten/bind.h>
|
||||
#include <emscripten/val.h>
|
||||
#include "emscripten.h"
|
||||
#include <stdlib.h>
|
||||
#include <inttypes.h>
|
||||
#include <stdio.h>
|
||||
#include <setjmp.h>
|
||||
#include <string.h>
|
||||
#include "config.h"
|
||||
#include "jpeglib.h"
|
||||
#include "cdjpeg.h"
|
||||
#include "config.h"
|
||||
|
||||
using namespace emscripten;
|
||||
|
||||
// MozJPEG doesn’t expose a numeric version, so I have to do some fun C macro hackery to turn it
|
||||
// into a string. More details here: https://gcc.gnu.org/onlinedocs/cpp/Stringizing.html
|
||||
// MozJPEG doesn’t expose a numeric version, so I have to do some fun C macro hackery to turn it into a string. More details here: https://gcc.gnu.org/onlinedocs/cpp/Stringizing.html
|
||||
#define xstr(s) str(s)
|
||||
#define str(s) #s
|
||||
|
||||
struct MozJpegOptions {
|
||||
int quality;
|
||||
bool baseline;
|
||||
bool arithmetic;
|
||||
bool progressive;
|
||||
bool optimize_coding;
|
||||
int smoothing;
|
||||
int color_space;
|
||||
int quant_table;
|
||||
bool trellis_multipass;
|
||||
bool trellis_opt_zero;
|
||||
bool trellis_opt_table;
|
||||
int trellis_loops;
|
||||
bool auto_subsample;
|
||||
int chroma_subsample;
|
||||
bool separate_chroma_quality;
|
||||
int chroma_quality;
|
||||
};
|
||||
|
||||
EMSCRIPTEN_KEEPALIVE
|
||||
int version() {
|
||||
char buffer[] = xstr(MOZJPEG_VERSION);
|
||||
int version = 0;
|
||||
@ -51,11 +28,27 @@ int version() {
|
||||
return version;
|
||||
}
|
||||
|
||||
uint8_t* last_result;
|
||||
struct jpeg_compress_struct cinfo;
|
||||
EMSCRIPTEN_KEEPALIVE
|
||||
uint8_t* create_buffer(int width, int height) {
|
||||
return malloc(width * height * 4 * sizeof(uint8_t));
|
||||
}
|
||||
|
||||
val encode(std::string image_in, int image_width, int image_height, MozJpegOptions opts) {
|
||||
uint8_t* image_buffer = (uint8_t*) image_in.c_str();
|
||||
EMSCRIPTEN_KEEPALIVE
|
||||
void destroy_buffer(uint8_t* p) {
|
||||
free(p);
|
||||
}
|
||||
|
||||
int result[2];
|
||||
EMSCRIPTEN_KEEPALIVE
|
||||
void encode(uint8_t* image_buffer, int image_width, int image_height, int quality) {
|
||||
// Manually convert RGBA data into RGB
|
||||
for(int y = 0; y < image_height; y++) {
|
||||
for(int x = 0; x < image_width; x++) {
|
||||
image_buffer[(y*image_width + x)*3 + 0] = image_buffer[(y*image_width + x)*4 + 0];
|
||||
image_buffer[(y*image_width + x)*3 + 1] = image_buffer[(y*image_width + x)*4 + 1];
|
||||
image_buffer[(y*image_width + x)*3 + 2] = image_buffer[(y*image_width + x)*4 + 2];
|
||||
}
|
||||
}
|
||||
|
||||
// The code below is basically the `write_JPEG_file` function from
|
||||
// https://github.com/mozilla/mozjpeg/blob/master/example.c
|
||||
@ -68,6 +61,7 @@ val encode(std::string image_in, int image_width, int image_height, MozJpegOptio
|
||||
* compression/decompression processes, in existence at once. We refer
|
||||
* to any one struct (and its associated working data) as a "JPEG object".
|
||||
*/
|
||||
struct jpeg_compress_struct cinfo;
|
||||
/* This struct represents a JPEG error handler. It is declared separately
|
||||
* because applications often want to supply a specialized error handler
|
||||
* (see the second half of this file for an example). But here we just
|
||||
@ -115,57 +109,18 @@ val encode(std::string image_in, int image_width, int image_height, MozJpegOptio
|
||||
*/
|
||||
cinfo.image_width = image_width; /* image width and height, in pixels */
|
||||
cinfo.image_height = image_height;
|
||||
cinfo.input_components = 4; /* # of color components per pixel */
|
||||
cinfo.in_color_space = JCS_EXT_RGBA; /* colorspace of input image */
|
||||
cinfo.input_components = 3; /* # of color components per pixel */
|
||||
cinfo.in_color_space = JCS_RGB; /* colorspace of input image */
|
||||
/* Now use the library's routine to set default compression parameters.
|
||||
* (You must set at least cinfo.in_color_space before calling this,
|
||||
* since the defaults depend on the source color space.)
|
||||
*/
|
||||
jpeg_set_defaults(&cinfo);
|
||||
/* Now you can set any non-default parameters you wish to.
|
||||
* Here we just illustrate the use of quality (quantization table) scaling:
|
||||
*/
|
||||
jpeg_set_quality(&cinfo, quality, TRUE /* limit to baseline-JPEG values */);
|
||||
|
||||
jpeg_set_colorspace(&cinfo, (J_COLOR_SPACE) opts.color_space);
|
||||
|
||||
if (opts.quant_table != -1) {
|
||||
jpeg_c_set_int_param(&cinfo, JINT_BASE_QUANT_TBL_IDX, opts.quant_table);
|
||||
}
|
||||
|
||||
cinfo.optimize_coding = opts.optimize_coding;
|
||||
|
||||
if (opts.arithmetic) {
|
||||
cinfo.arith_code = TRUE;
|
||||
cinfo.optimize_coding = FALSE;
|
||||
}
|
||||
|
||||
cinfo.smoothing_factor = opts.smoothing;
|
||||
|
||||
jpeg_c_set_bool_param(&cinfo, JBOOLEAN_USE_SCANS_IN_TRELLIS, opts.trellis_multipass);
|
||||
jpeg_c_set_bool_param(&cinfo, JBOOLEAN_TRELLIS_EOB_OPT, opts.trellis_opt_zero);
|
||||
jpeg_c_set_bool_param(&cinfo, JBOOLEAN_TRELLIS_Q_OPT, opts.trellis_opt_table);
|
||||
jpeg_c_set_int_param(&cinfo, JINT_TRELLIS_NUM_LOOPS, opts.trellis_loops);
|
||||
|
||||
// A little hacky to build a string for this, but it means we can use set_quality_ratings which
|
||||
// does some useful heuristic stuff.
|
||||
std::string quality_str = std::to_string(opts.quality);
|
||||
|
||||
if (opts.separate_chroma_quality && opts.color_space == JCS_YCbCr) {
|
||||
quality_str += "," + std::to_string(opts.chroma_quality);
|
||||
}
|
||||
|
||||
char const *pqual = quality_str.c_str();
|
||||
|
||||
set_quality_ratings(&cinfo, (char*) pqual, opts.baseline);
|
||||
|
||||
if (!opts.auto_subsample && opts.color_space == JCS_YCbCr) {
|
||||
cinfo.comp_info[0].h_samp_factor = opts.chroma_subsample;
|
||||
cinfo.comp_info[0].v_samp_factor = opts.chroma_subsample;
|
||||
}
|
||||
|
||||
if (!opts.baseline && opts.progressive) {
|
||||
jpeg_simple_progression(&cinfo);
|
||||
} else {
|
||||
cinfo.num_scans = 0;
|
||||
cinfo.scan_info = NULL;
|
||||
}
|
||||
/* Step 4: Start compressor */
|
||||
|
||||
/* TRUE ensures that we will write a complete interchange-JPEG file.
|
||||
@ -181,7 +136,7 @@ val encode(std::string image_in, int image_width, int image_height, MozJpegOptio
|
||||
* To keep things simple, we pass one scanline per call; you can pass
|
||||
* more if you wish, though.
|
||||
*/
|
||||
row_stride = image_width * 4; /* JSAMPLEs per row in image_buffer */
|
||||
row_stride = image_width * 3; /* JSAMPLEs per row in image_buffer */
|
||||
|
||||
while (cinfo.next_scanline < cinfo.image_height) {
|
||||
/* jpeg_write_scanlines expects an array of pointers to scanlines.
|
||||
@ -197,38 +152,27 @@ val encode(std::string image_in, int image_width, int image_height, MozJpegOptio
|
||||
jpeg_finish_compress(&cinfo);
|
||||
/* Step 7: release JPEG compression object */
|
||||
|
||||
last_result = output;
|
||||
result[0] = (int)output;
|
||||
result[1] = size;
|
||||
|
||||
/* And we're done! */
|
||||
return val(typed_memory_view(size, output));
|
||||
}
|
||||
|
||||
void free_result() {
|
||||
/* This is an important step since it will release a good deal of memory. */
|
||||
jpeg_destroy_compress(&cinfo);
|
||||
|
||||
/* And we're done! */
|
||||
}
|
||||
|
||||
EMSCRIPTEN_BINDINGS(my_module) {
|
||||
value_object<MozJpegOptions>("MozJpegOptions")
|
||||
.field("quality", &MozJpegOptions::quality)
|
||||
.field("baseline", &MozJpegOptions::baseline)
|
||||
.field("arithmetic", &MozJpegOptions::arithmetic)
|
||||
.field("progressive", &MozJpegOptions::progressive)
|
||||
.field("optimize_coding", &MozJpegOptions::optimize_coding)
|
||||
.field("smoothing", &MozJpegOptions::smoothing)
|
||||
.field("color_space", &MozJpegOptions::color_space)
|
||||
.field("quant_table", &MozJpegOptions::quant_table)
|
||||
.field("trellis_multipass", &MozJpegOptions::trellis_multipass)
|
||||
.field("trellis_opt_zero", &MozJpegOptions::trellis_opt_zero)
|
||||
.field("trellis_opt_table", &MozJpegOptions::trellis_opt_table)
|
||||
.field("trellis_loops", &MozJpegOptions::trellis_loops)
|
||||
.field("chroma_subsample", &MozJpegOptions::chroma_subsample)
|
||||
.field("auto_subsample", &MozJpegOptions::auto_subsample)
|
||||
.field("separate_chroma_quality", &MozJpegOptions::separate_chroma_quality)
|
||||
.field("chroma_quality", &MozJpegOptions::chroma_quality)
|
||||
;
|
||||
|
||||
function("version", &version);
|
||||
function("encode", &encode);
|
||||
function("free_result", &free_result);
|
||||
EMSCRIPTEN_KEEPALIVE
|
||||
void free_result() {
|
||||
free(result[0]); // not sure if this is right with mozjpeg
|
||||
}
|
||||
|
||||
EMSCRIPTEN_KEEPALIVE
|
||||
int get_result_pointer() {
|
||||
return result[0];
|
||||
}
|
||||
|
||||
EMSCRIPTEN_KEEPALIVE
|
||||
int get_result_size() {
|
||||
return result[1];
|
||||
}
|
||||
|
9
codecs/mozjpeg_enc/mozjpeg_enc.d.ts
vendored
@ -1,8 +1 @@
|
||||
import { EncodeOptions } from '../../src/codecs/mozjpeg/encoder-meta';
|
||||
|
||||
interface MozJPEGModule extends EmscriptenWasm.Module {
|
||||
encode(data: BufferSource, width: number, height: number, options: EncodeOptions): Uint8Array;
|
||||
free_result(): void;
|
||||
}
|
||||
|
||||
export default function(opts: EmscriptenWasm.ModuleOpts): MozJPEGModule;
|
||||
export default function(opts: EmscriptenWasm.ModuleOpts): EmscriptenWasm.Module;
|
||||
|
@ -2,12 +2,14 @@
|
||||
"name": "mozjpeg_enc",
|
||||
"scripts": {
|
||||
"install": "napa",
|
||||
"build": "docker run --rm -v $(pwd):/src trzeci/emscripten ./build.sh"
|
||||
"build": "npm run build:library && npm run build:wasm",
|
||||
"build:library": "cd node_modules/mozjpeg && autoreconf -fiv && docker run --rm -v $(pwd):/src trzeci/emscripten emconfigure ./configure --without-simd && docker run --rm -v $(pwd):/src trzeci/emscripten emmake make libjpeg.la",
|
||||
"build:wasm": "docker run --rm -v $(pwd):/src trzeci/emscripten emcc -O3 -s WASM=1 -s EXTRA_EXPORTED_RUNTIME_METHODS='[\"cwrap\"]' -s ALLOW_MEMORY_GROWTH=1 -s MODULARIZE=1 -s 'EXPORT_NAME=\"mozjpeg_enc\"' -I node_modules/mozjpeg -o ./mozjpeg_enc.js mozjpeg_enc.c node_modules/mozjpeg/.libs/libjpeg.a"
|
||||
},
|
||||
"napa": {
|
||||
"mozjpeg": "mozilla/mozjpeg#v3.3.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"napa": "3.0.0"
|
||||
"napa": "^3.0.0"
|
||||
}
|
||||
}
|
||||
|
2
codecs/optipng/.gitignore
vendored
@ -1,2 +0,0 @@
|
||||
build/
|
||||
*.o
|
@ -1,26 +0,0 @@
|
||||
# OptiPNG
|
||||
|
||||
- Source: <http://optipng.sourceforge.net/>
|
||||
- Version: v0.7.7
|
||||
|
||||
## Dependencies
|
||||
|
||||
- Docker
|
||||
|
||||
## Example
|
||||
|
||||
See `example.html`
|
||||
|
||||
## API
|
||||
|
||||
### `int version()`
|
||||
|
||||
Returns the version of optipng as a number. va.b.c is encoded as 0x0a0b0c
|
||||
|
||||
### `ArrayBuffer compress(std::string buffer, {level})`;
|
||||
|
||||
`compress` will re-compress the given PNG image via `buffer`. `level` is a number between 0 and 7.
|
||||
|
||||
### `void free_result()`
|
||||
|
||||
Frees the result created by `compress()`.
|
@ -1,87 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
export OPTIMIZE="-Os"
|
||||
export PREFIX="/src/build"
|
||||
export CFLAGS="${OPTIMIZE} -I${PREFIX}/include/"
|
||||
export CPPFLAGS="${OPTIMIZE} -I${PREFIX}/include/"
|
||||
export LDFLAGS="${OPTIMIZE} -L${PREFIX}/lib/"
|
||||
|
||||
apt-get update
|
||||
apt-get install -qqy autoconf libtool
|
||||
|
||||
echo "============================================="
|
||||
echo "Compiling zlib"
|
||||
echo "============================================="
|
||||
test -n "$SKIP_ZLIB" || (
|
||||
cd node_modules/zlib
|
||||
emconfigure ./configure --prefix=${PREFIX}/
|
||||
emmake make
|
||||
emmake make install
|
||||
)
|
||||
echo "============================================="
|
||||
echo "Compiling zlib done"
|
||||
echo "============================================="
|
||||
|
||||
echo "============================================="
|
||||
echo "Compiling libpng"
|
||||
echo "============================================="
|
||||
test -n "$SKIP_LIBPNG" || (
|
||||
cd node_modules/libpng
|
||||
autoreconf -i
|
||||
emconfigure ./configure --with-zlib-prefix=${PREFIX}/ --prefix=${PREFIX}/
|
||||
emmake make
|
||||
emmake make install
|
||||
)
|
||||
echo "============================================="
|
||||
echo "Compiling libpng done"
|
||||
echo "============================================="
|
||||
|
||||
echo "============================================="
|
||||
echo "Compiling optipng"
|
||||
echo "============================================="
|
||||
(
|
||||
emcc \
|
||||
${OPTIMIZE} \
|
||||
-Wno-implicit-function-declaration \
|
||||
-I ${PREFIX}/include \
|
||||
-I node_modules/optipng/src/opngreduc \
|
||||
-I node_modules/optipng/src/pngxtern \
|
||||
-I node_modules/optipng/src/cexcept \
|
||||
-I node_modules/optipng/src/gifread \
|
||||
-I node_modules/optipng/src/pnmio \
|
||||
-I node_modules/optipng/src/minitiff \
|
||||
--std=c99 -c \
|
||||
node_modules/optipng/src/opngreduc/*.c \
|
||||
node_modules/optipng/src/pngxtern/*.c \
|
||||
node_modules/optipng/src/gifread/*.c \
|
||||
node_modules/optipng/src/minitiff/*.c \
|
||||
node_modules/optipng/src/pnmio/*.c \
|
||||
node_modules/optipng/src/optipng/*.c
|
||||
|
||||
emcc \
|
||||
--bind \
|
||||
${OPTIMIZE} \
|
||||
-s ALLOW_MEMORY_GROWTH=1 -s MODULARIZE=1 -s 'EXPORT_NAME="optipng"' \
|
||||
-I ${PREFIX}/include \
|
||||
-I node_modules/optipng/src/opngreduc \
|
||||
-I node_modules/optipng/src/pngxtern \
|
||||
-I node_modules/optipng/src/cexcept \
|
||||
-I node_modules/optipng/src/gifread \
|
||||
-I node_modules/optipng/src/pnmio \
|
||||
-I node_modules/optipng/src/minitiff \
|
||||
-o "optipng.js" \
|
||||
--std=c++11 \
|
||||
optipng.cpp \
|
||||
*.o \
|
||||
${PREFIX}/lib/libz.so ${PREFIX}/lib/libpng.a
|
||||
)
|
||||
echo "============================================="
|
||||
echo "Compiling optipng done"
|
||||
echo "============================================="
|
||||
|
||||
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
|
||||
echo "Did you update your docker image?"
|
||||
echo "Run \`docker pull trzeci/emscripten\`"
|
||||
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
|
@ -1,19 +0,0 @@
|
||||
<!doctype html>
|
||||
<script src='optipng.js'></script>
|
||||
<script>
|
||||
const Module = optipng();
|
||||
|
||||
Module.onRuntimeInitialized = async _ => {
|
||||
console.log('Version:', Module.version().toString(16));
|
||||
const image = await fetch('../example_palette.png').then(r => r.arrayBuffer());
|
||||
const newImage = Module.compress(image, {level: 3});
|
||||
console.log('done');
|
||||
Module.free_result();
|
||||
|
||||
console.log(`Old size: ${image.byteLength}, new size: ${newImage.byteLength} (${newImage.byteLength/image.byteLength*100}%)`);
|
||||
const blobURL = URL.createObjectURL(new Blob([newImage], {type: 'image/png'}));
|
||||
const img = document.createElement('img');
|
||||
img.src = blobURL;
|
||||
document.body.appendChild(img);
|
||||
};
|
||||
</script>
|
@ -1,53 +0,0 @@
|
||||
#include "emscripten/bind.h"
|
||||
#include "emscripten/val.h"
|
||||
|
||||
#include <stdio.h>
|
||||
|
||||
using namespace emscripten;
|
||||
|
||||
extern "C" int main(int argc, char *argv[]);
|
||||
|
||||
int version() {
|
||||
// FIXME (@surma): Haven’t found a version in optipng :(
|
||||
return 0;
|
||||
}
|
||||
|
||||
struct OptiPngOpts {
|
||||
int level;
|
||||
};
|
||||
|
||||
uint8_t* result;
|
||||
val compress(std::string png, OptiPngOpts opts) {
|
||||
remove("input.png");
|
||||
remove("output.png");
|
||||
FILE* infile = fopen("input.png", "wb");
|
||||
fwrite(png.c_str(), png.length(), 1, infile);
|
||||
fflush(infile);
|
||||
fclose(infile);
|
||||
|
||||
char optlevel[8];
|
||||
sprintf(&optlevel[0], "-o%d", opts.level);
|
||||
char* args[] = {"optipng", optlevel, "-out", "output.png", "input.png"};
|
||||
main(5, args);
|
||||
|
||||
FILE *outfile = fopen("output.png", "rb");
|
||||
fseek(outfile, 0, SEEK_END);
|
||||
int fsize = ftell(outfile);
|
||||
result = (uint8_t*) malloc(fsize);
|
||||
fseek(outfile, 0, SEEK_SET);
|
||||
fread(result, fsize, 1, outfile);
|
||||
return val(typed_memory_view(fsize, result));
|
||||
}
|
||||
|
||||
void free_result() {
|
||||
free(result);
|
||||
}
|
||||
|
||||
EMSCRIPTEN_BINDINGS(my_module) {
|
||||
value_object<OptiPngOpts>("OptiPngOpts")
|
||||
.field("level", &OptiPngOpts::level);
|
||||
|
||||
function("version", &version);
|
||||
function("compress", &compress);
|
||||
function("free_result", &free_result);
|
||||
}
|
10
codecs/optipng/optipng.d.ts
vendored
@ -1,10 +0,0 @@
|
||||
import {EncodeOptions} from "src/codecs/optipng/encoder";
|
||||
|
||||
export interface OptiPngModule extends EmscriptenWasm.Module {
|
||||
compress(data: BufferSource, opts: EncodeOptions): Uint8Array;
|
||||
free_result(): void;
|
||||
}
|
||||
|
||||
export default function(opts: EmscriptenWasm.ModuleOpts): OptiPngModule;
|
||||
|
||||
|
1457
codecs/optipng/package-lock.json
generated
@ -1,22 +0,0 @@
|
||||
{
|
||||
"name": "optipng",
|
||||
"scripts": {
|
||||
"install": "tar-dependency install && napa",
|
||||
"build": "npm run build:wasm",
|
||||
"build:wasm": "docker run --rm -v $(pwd):/src -e SKIP_ZLIB=\"${SKIP_ZLIB}\" -e SKIP_LIBPNG=\"${SKIP_LIBPNG}\" trzeci/emscripten ./build.sh"
|
||||
},
|
||||
"tarDependencies": {
|
||||
"node_modules/optipng": {
|
||||
"url": "https://netcologne.dl.sourceforge.net/project/optipng/OptiPNG/optipng-0.7.7/optipng-0.7.7.tar.gz",
|
||||
"strip": 1
|
||||
}
|
||||
},
|
||||
"napa": {
|
||||
"libpng": "emscripten-ports/libpng",
|
||||
"zlib": "emscripten-ports/zlib"
|
||||
},
|
||||
"dependencies": {
|
||||
"napa": "3.0.0",
|
||||
"tar-dependency": "0.0.3"
|
||||
}
|
||||
}
|
2
codecs/rotate/.gitignore
vendored
@ -1,2 +0,0 @@
|
||||
target
|
||||
Cargo.lock
|
@ -1,14 +0,0 @@
|
||||
[package]
|
||||
name = "rotate"
|
||||
version = "0.1.0"
|
||||
authors = ["Surma <surma@google.com>"]
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
name = "rotate"
|
||||
path = "rotate.rs"
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[profile.release]
|
||||
lto = true
|
||||
opt-level = "s"
|
@ -1,17 +0,0 @@
|
||||
FROM ubuntu
|
||||
RUN apt-get update && \
|
||||
apt-get install -qqy git build-essential cmake python2.7
|
||||
RUN git clone --recursive https://github.com/WebAssembly/wabt /usr/src/wabt
|
||||
RUN mkdir -p /usr/src/wabt/build
|
||||
WORKDIR /usr/src/wabt/build
|
||||
RUN cmake .. -DCMAKE_INSTALL_PREFIX=/opt/wabt && \
|
||||
make && \
|
||||
make install
|
||||
|
||||
FROM rust
|
||||
RUN rustup install nightly && \
|
||||
rustup target add --toolchain nightly wasm32-unknown-unknown
|
||||
|
||||
COPY --from=0 /opt/wabt /opt/wabt
|
||||
ENV PATH="/opt/wabt/bin:${PATH}"
|
||||
WORKDIR /src
|
@ -1,45 +0,0 @@
|
||||
// THIS IS NOT A NODE SCRIPT
|
||||
// This is a d8 script. Please install jsvu[1] and install v8.
|
||||
// Then run `npm run --silent benchmark`.
|
||||
// [1]: https://github.com/GoogleChromeLabs/jsvu
|
||||
async function init() {
|
||||
// Adjustable constants.
|
||||
const imageDimensions = 4096;
|
||||
const iterations = new Array(100);
|
||||
|
||||
// Constants. Don’t change.
|
||||
const imageByteSize = imageDimensions * imageDimensions * 4;
|
||||
const wasmPageSize = 64 * 1024;
|
||||
|
||||
const buffer = readbuffer("rotate.wasm");
|
||||
const { instance } = await WebAssembly.instantiate(buffer);
|
||||
|
||||
const pagesAvailable = Math.floor(
|
||||
instance.exports.memory.buffer.byteLength / wasmPageSize
|
||||
);
|
||||
const pagesNeeded = Math.floor((imageByteSize * 2 + 4) / wasmPageSize) + 1;
|
||||
const additionalPagesNeeded = pagesNeeded - pagesAvailable;
|
||||
if (additionalPagesNeeded > 0) {
|
||||
instance.exports.memory.grow(additionalPagesNeeded);
|
||||
}
|
||||
|
||||
[0, 90, 180, 270].forEach(rotation => {
|
||||
print(`\n${rotation} degrees`);
|
||||
print(`==============================`);
|
||||
for (let i = 0; i < 100; i++) {
|
||||
const start = Date.now();
|
||||
instance.exports.rotate(imageDimensions, imageDimensions, rotation);
|
||||
iterations[i] = Date.now() - start;
|
||||
}
|
||||
const average = iterations.reduce((sum, c) => sum + c) / iterations.length;
|
||||
const stddev = Math.sqrt(
|
||||
iterations
|
||||
.map(i => Math.pow(i - average, 2))
|
||||
.reduce((sum, c) => sum + c) / iterations.length
|
||||
);
|
||||
print(`n = ${iterations.length}`);
|
||||
print(`Average: ${average}`);
|
||||
print(`StdDev: ${stddev}`);
|
||||
});
|
||||
}
|
||||
init().catch(e => console.error(e.stack));
|
@ -1,25 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
echo "============================================="
|
||||
echo "Compiling wasm"
|
||||
echo "============================================="
|
||||
(
|
||||
rustup run nightly \
|
||||
cargo build \
|
||||
--target wasm32-unknown-unknown \
|
||||
--release
|
||||
cp target/wasm32-unknown-unknown/release/rotate.wasm .
|
||||
wasm-strip rotate.wasm
|
||||
)
|
||||
echo "============================================="
|
||||
echo "Compiling wasm done"
|
||||
echo "============================================="
|
||||
|
||||
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
|
||||
echo "Did you update your docker image?"
|
||||
echo "Run \`docker pull ubuntu\`"
|
||||
echo "Run \`docker pull rust\`"
|
||||
echo "Run \`docker build -t squoosh-rotate .\`"
|
||||
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
|
@ -1,11 +0,0 @@
|
||||
{
|
||||
"name": "rotate",
|
||||
"scripts": {
|
||||
"build:image": "docker build -t squoosh-rotate .",
|
||||
"build": "docker run --rm -v $(pwd):/src squoosh-rotate ./build.sh",
|
||||
"benchmark": "echo File size after gzip && npm run benchmark:filesize && echo Optimizing && npm run -s benchmark:optimizing",
|
||||
"benchmark:baseline": "v8 --liftoff --no-wasm-tier-up --no-opt ./benchmark.js",
|
||||
"benchmark:optimizing": "v8 --no-liftoff --no-wasm-tier-up ./benchmark.js",
|
||||
"benchmark:filesize": "cat rotate.wasm | gzip -c9n | wc -c"
|
||||
}
|
||||
}
|
@ -1,113 +0,0 @@
|
||||
use std::slice::{from_raw_parts, from_raw_parts_mut};
|
||||
|
||||
// This function is taken from Zachary Dremann
|
||||
// https://github.com/GoogleChromeLabs/squoosh/pull/462
|
||||
trait HardUnwrap<T> {
|
||||
fn unwrap_hard(self) -> T;
|
||||
}
|
||||
|
||||
impl<T> HardUnwrap<T> for Option<T> {
|
||||
#[cfg(not(debug_assertions))]
|
||||
#[inline]
|
||||
fn unwrap_hard(self) -> T {
|
||||
match self {
|
||||
Some(t) => t,
|
||||
None => std::process::abort(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
fn unwrap_hard(self) -> T {
|
||||
self.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
const TILE_SIZE: usize = 16;
|
||||
|
||||
fn get_buffers<'a>(width: usize, height: usize) -> (&'a [u32], &'a mut [u32]) {
|
||||
let num_pixels = width * height;
|
||||
let in_b: &[u32];
|
||||
let out_b: &mut [u32];
|
||||
unsafe {
|
||||
in_b = from_raw_parts::<u32>(8 as *const u32, num_pixels);
|
||||
out_b = from_raw_parts_mut::<u32>((num_pixels * 4 + 8) as *mut u32, num_pixels);
|
||||
}
|
||||
return (in_b, out_b);
|
||||
}
|
||||
|
||||
#[inline(never)]
|
||||
fn rotate_0(width: usize, height: usize) {
|
||||
let (in_b, out_b) = get_buffers(width, height);
|
||||
for (in_p, out_p) in in_b.iter().zip(out_b.iter_mut()) {
|
||||
*out_p = *in_p;
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(never)]
|
||||
fn rotate_90(width: usize, height: usize) {
|
||||
let (in_b, out_b) = get_buffers(width, height);
|
||||
let new_width = height;
|
||||
let _new_height = width;
|
||||
for y_start in (0..height).step_by(TILE_SIZE) {
|
||||
for x_start in (0..width).step_by(TILE_SIZE) {
|
||||
for y in y_start..(y_start + TILE_SIZE).min(height) {
|
||||
let in_offset = y * width;
|
||||
let in_bounds = if x_start + TILE_SIZE < width {
|
||||
(in_offset + x_start)..(in_offset + x_start + TILE_SIZE)
|
||||
} else {
|
||||
(in_offset + x_start)..(in_offset + width)
|
||||
};
|
||||
let in_chunk = in_b.get(in_bounds).unwrap_hard();
|
||||
for (x, in_p) in in_chunk.iter().enumerate() {
|
||||
let new_x = (new_width - 1) - y;
|
||||
let new_y = x + x_start;
|
||||
*out_b.get_mut(new_y * new_width + new_x).unwrap_hard() = *in_p;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(never)]
|
||||
fn rotate_180(width: usize, height: usize) {
|
||||
let (in_b, out_b) = get_buffers(width, height);
|
||||
for (in_p, out_p) in in_b.iter().zip(out_b.iter_mut().rev()) {
|
||||
*out_p = *in_p;
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(never)]
|
||||
fn rotate_270(width: usize, height: usize) {
|
||||
let (in_b, out_b) = get_buffers(width, height);
|
||||
let new_width = height;
|
||||
let new_height = width;
|
||||
for y_start in (0..height).step_by(TILE_SIZE) {
|
||||
for x_start in (0..width).step_by(TILE_SIZE) {
|
||||
for y in y_start..(y_start + TILE_SIZE).min(height) {
|
||||
let in_offset = y * width;
|
||||
let in_bounds = if x_start + TILE_SIZE < width {
|
||||
(in_offset + x_start)..(in_offset + x_start + TILE_SIZE)
|
||||
} else {
|
||||
(in_offset + x_start)..(in_offset + width)
|
||||
};
|
||||
let in_chunk = in_b.get(in_bounds).unwrap_hard();
|
||||
for (x, in_p) in in_chunk.iter().enumerate() {
|
||||
let new_x = y;
|
||||
let new_y = new_height - 1 - (x_start + x);
|
||||
*out_b.get_mut(new_y * new_width + new_x).unwrap_hard() = *in_p;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
fn rotate(width: usize, height: usize, rotate: usize) {
|
||||
match rotate {
|
||||
0 => rotate_0(width, height),
|
||||
90 => rotate_90(width, height),
|
||||
180 => rotate_180(width, height),
|
||||
270 => rotate_270(width, height),
|
||||
_ => std::process::abort(),
|
||||
}
|
||||
}
|
@ -1,22 +0,0 @@
|
||||
# WebP decoder
|
||||
|
||||
- Source: <https://github.com/webmproject/libwebp>
|
||||
- Version: v1.0.2
|
||||
|
||||
## Example
|
||||
|
||||
See `example.html`
|
||||
|
||||
## API
|
||||
|
||||
### `int version()`
|
||||
|
||||
Returns the version of libwebp as a number. va.b.c is encoded as 0x0a0b0c
|
||||
|
||||
### `RawImage decode(std::string buffer)`
|
||||
|
||||
Decodes the given webp buffer into raw RGBA. `RawImage` is a class with 3 fields: `buffer`, `width`, and `height`.
|
||||
|
||||
### `void free_result()`
|
||||
|
||||
Frees the result created by `decode()`.
|
@ -1,60 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
export OPTIMIZE="-Os"
|
||||
export LDFLAGS="${OPTIMIZE}"
|
||||
export CFLAGS="${OPTIMIZE}"
|
||||
export CPPFLAGS="${OPTIMIZE}"
|
||||
apt-get update
|
||||
apt-get install -qqy autoconf libtool libpng-dev pkg-config
|
||||
|
||||
echo "============================================="
|
||||
echo "Compiling libwebp"
|
||||
echo "============================================="
|
||||
test -n "$SKIP_LIBWEBP" || (
|
||||
cd node_modules/libwebp
|
||||
autoreconf -fiv
|
||||
rm -rf build || true
|
||||
mkdir -p build && cd build
|
||||
emconfigure ../configure \
|
||||
--disable-libwebpdemux \
|
||||
--disable-wic \
|
||||
--disable-gif \
|
||||
--disable-tiff \
|
||||
--disable-jpeg \
|
||||
--disable-png \
|
||||
--disable-sdl \
|
||||
--disable-gl \
|
||||
--disable-threading \
|
||||
--disable-neon-rtcd \
|
||||
--disable-neon \
|
||||
--disable-sse2 \
|
||||
--disable-sse4.1
|
||||
emmake make
|
||||
)
|
||||
echo "============================================="
|
||||
echo "Compiling wasm bindings"
|
||||
echo "============================================="
|
||||
(
|
||||
emcc \
|
||||
${OPTIMIZE} \
|
||||
--bind \
|
||||
-s ALLOW_MEMORY_GROWTH=1 \
|
||||
-s MODULARIZE=1 \
|
||||
-s 'EXPORT_NAME="webp_dec"' \
|
||||
--std=c++11 \
|
||||
-I node_modules/libwebp \
|
||||
-o ./webp_dec.js \
|
||||
-x c++ \
|
||||
webp_dec.cpp \
|
||||
node_modules/libwebp/build/src/.libs/libwebp.a
|
||||
)
|
||||
echo "============================================="
|
||||
echo "Compiling wasm bindings done"
|
||||
echo "============================================="
|
||||
|
||||
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
|
||||
echo "Did you update your docker image?"
|
||||
echo "Run \`docker pull trzeci/emscripten\`"
|
||||
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
|
@ -1,24 +0,0 @@
|
||||
<!doctype html>
|
||||
<script src='webp_dec.js'></script>
|
||||
<script>
|
||||
const Module = webp_dec();
|
||||
|
||||
async function loadFile(src) {
|
||||
const resp = await fetch(src);
|
||||
return await resp.arrayBuffer();
|
||||
}
|
||||
|
||||
Module.onRuntimeInitialized = async _ => {
|
||||
console.log('Version:', Module.version().toString(16));
|
||||
const image = await loadFile('../example.webp');
|
||||
const result = Module.decode(image);
|
||||
const imageData = new ImageData(new Uint8ClampedArray(result.buffer), result.width, result.height);
|
||||
Module.free_result();
|
||||
const canvas = document.createElement('canvas');
|
||||
canvas.width = result.width;
|
||||
canvas.height = result.height;
|
||||
document.body.appendChild(canvas);
|
||||
const ctx = canvas.getContext('2d');
|
||||
ctx.putImageData(imageData, 0, 0);
|
||||
};
|
||||
</script>
|
1147
codecs/webp_dec/package-lock.json
generated
@@ -1,13 +0,0 @@
{
  "name": "webp_dec",
  "scripts": {
    "install": "napa",
    "build": "docker run --rm -v $(pwd):/src trzeci/emscripten ./build.sh"
  },
  "napa": {
    "libwebp": "webmproject/libwebp#v1.0.2"
  },
  "devDependencies": {
    "napa": "3.0.0"
  }
}
@@ -1,47 +0,0 @@
#include "emscripten/bind.h"
#include "emscripten/val.h"
#include "src/webp/decode.h"
#include "src/webp/demux.h"
#include <string>

using namespace emscripten;

int version() {
  return WebPGetDecoderVersion();
}

class RawImage {
 public:
  val buffer;
  int width;
  int height;

  RawImage(val b, int w, int h)
    : buffer(b), width(w), height(h) {}
};

uint8_t* last_result;
RawImage decode(std::string buffer) {
  int width, height;
  last_result = WebPDecodeRGBA((const uint8_t*)buffer.c_str(), buffer.size(), &width, &height);
  return RawImage(
    val(typed_memory_view(width*height*4, last_result)),
    width,
    height
  );
}

void free_result() {
  free(last_result);
}

EMSCRIPTEN_BINDINGS(my_module) {
  class_<RawImage>("RawImage")
    .property("buffer", &RawImage::buffer)
    .property("width", &RawImage::width)
    .property("height", &RawImage::height);

  function("decode", &decode);
  function("version", &version);
  function("free_result", &free_result);
}
13 codecs/webp_dec/webp_dec.d.ts (vendored)
@@ -1,13 +0,0 @@
interface RawImage {
  buffer: Uint8Array;
  width: number;
  height: number;
}

interface WebPModule extends EmscriptenWasm.Module {
  decode(data: BufferSource): RawImage;
  free_result(): void;
}

export default function(opts: EmscriptenWasm.ModuleOpts): WebPModule;
@@ -1,7 +1,7 @@
 # WebP encoder
 
 - Source: <https://github.com/webmproject/libwebp>
-- Version: v1.0.2
+- Version: v0.6.1
 
 ## Dependencies
 
@@ -17,10 +17,26 @@ See `example.html`
 
 Returns the version of libwebp as a number. va.b.c is encoded as 0x0a0b0c
 
-### `UInt8Array encode(uint8_t* image_buffer, int image_width, int image_height, WebPConfig config)`
+### `uint8_t* create_buffer(int width, int height)`
 
-Encodes the given image with given dimension to WebP.
+Allocates an RGBA buffer for an image with the given dimension.
+
+### `void destroy_buffer(uint8_t* p)`
+
+Frees a buffer created with `create_buffer`.
+
+### `void encode(uint8_t* image_buffer, int image_width, int image_height, float quality)`
+
+Encodes the given image with given dimension to WebP. `quality` is a number between 0 and 100. The higher the number, the better the quality of the encoded image. The result is implicitly stored and can be accessed using the `get_result_*()` functions.
 
 ### `void free_result()`
 
-Frees the last result created by `encode()`.
+Frees the result created by `encode()`.
+
+### `int get_result_pointer()`
+
+Returns the pointer to the start of the buffer holding the encoded data.
+
+### `int get_result_size()`
+
+Returns the length of the buffer holding the encoded data.
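A minimal sketch of driving this new API from JavaScript via `cwrap`, mirroring the updated `example.html` further down (the in-scope `image` ImageData and the quality value of 80 are assumptions for illustration):

const Module = webp_enc();

Module.onRuntimeInitialized = () => {
  const api = {
    create_buffer: Module.cwrap('create_buffer', 'number', ['number', 'number']),
    destroy_buffer: Module.cwrap('destroy_buffer', '', ['number']),
    encode: Module.cwrap('encode', '', ['number', 'number', 'number', 'number']),
    free_result: Module.cwrap('free_result', '', []),
    get_result_pointer: Module.cwrap('get_result_pointer', 'number', []),
    get_result_size: Module.cwrap('get_result_size', 'number', []),
  };

  // Copy the RGBA pixels of `image` (an ImageData) into wasm memory.
  const p = api.create_buffer(image.width, image.height);
  Module.HEAP8.set(image.data, p);

  api.encode(p, image.width, image.height, 80);

  // Copy the encoded bytes out before freeing the wasm-side buffers.
  const view = new Uint8Array(Module.HEAP8.buffer, api.get_result_pointer(), api.get_result_size());
  const webpBytes = new Uint8Array(view);
  api.free_result();
  api.destroy_buffer(p);

  const blob = new Blob([webpBytes], { type: 'image/webp' });
};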
@ -1,61 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
export OPTIMIZE="-Os"
|
||||
export LDFLAGS="${OPTIMIZE}"
|
||||
export CFLAGS="${OPTIMIZE}"
|
||||
export CPPFLAGS="${OPTIMIZE}"
|
||||
|
||||
apt-get update
|
||||
apt-get install -qqy autoconf libtool libpng-dev pkg-config
|
||||
|
||||
echo "============================================="
|
||||
echo "Compiling libwebp"
|
||||
echo "============================================="
|
||||
test -n "$SKIP_LIBWEBP" || (
|
||||
cd node_modules/libwebp
|
||||
autoreconf -fiv
|
||||
rm -rf build || true
|
||||
mkdir -p build && cd build
|
||||
emconfigure ../configure \
|
||||
--disable-libwebpdemux \
|
||||
--disable-wic \
|
||||
--disable-gif \
|
||||
--disable-tiff \
|
||||
--disable-jpeg \
|
||||
--disable-png \
|
||||
--disable-sdl \
|
||||
--disable-gl \
|
||||
--disable-threading \
|
||||
--disable-neon-rtcd \
|
||||
--disable-neon \
|
||||
--disable-sse2 \
|
||||
--disable-sse4.1
|
||||
emmake make
|
||||
)
|
||||
echo "============================================="
|
||||
echo "Compiling wasm bindings"
|
||||
echo "============================================="
|
||||
(
|
||||
emcc \
|
||||
${OPTIMIZE} \
|
||||
--bind \
|
||||
-s ALLOW_MEMORY_GROWTH=1 \
|
||||
-s MODULARIZE=1 \
|
||||
-s 'EXPORT_NAME="webp_enc"' \
|
||||
--std=c++11 \
|
||||
-I node_modules/libwebp \
|
||||
-o ./webp_enc.js \
|
||||
-x c++ \
|
||||
webp_enc.cpp \
|
||||
node_modules/libwebp/build/src/.libs/libwebp.a
|
||||
)
|
||||
echo "============================================="
|
||||
echo "Compiling wasm bindings done"
|
||||
echo "============================================="
|
||||
|
||||
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
|
||||
echo "Did you update your docker image?"
|
||||
echo "Run \`docker pull trzeci/emscripten\`"
|
||||
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
|
@ -1,7 +1,7 @@
|
||||
<!doctype html>
|
||||
<script src='webp_enc.js'></script>
|
||||
<script>
|
||||
const module = webp_enc();
|
||||
const Module = webp_enc();
|
||||
|
||||
async function loadImage(src) {
|
||||
// Load image
|
||||
@ -17,43 +17,28 @@
|
||||
return ctx.getImageData(0, 0, img.width, img.height);
|
||||
}
|
||||
|
||||
module.onRuntimeInitialized = async _ => {
|
||||
console.log('Version:', module.version().toString(16));
|
||||
Module.onRuntimeInitialized = async _ => {
|
||||
const api = {
|
||||
version: Module.cwrap('version', 'number', []),
|
||||
create_buffer: Module.cwrap('create_buffer', 'number', ['number', 'number']),
|
||||
destroy_buffer: Module.cwrap('destroy_buffer', '', ['number']),
|
||||
encode: Module.cwrap('encode', '', ['number', 'number', 'number', 'number']),
|
||||
free_result: Module.cwrap('free_result', '', ['number']),
|
||||
get_result_pointer: Module.cwrap('get_result_pointer', 'number', []),
|
||||
get_result_size: Module.cwrap('get_result_size', 'number', []),
|
||||
};
|
||||
const image = await loadImage('../example.png');
|
||||
const result = module.encode(image.data, image.width, image.height, {
|
||||
quality: 75,
|
||||
target_size: 0,
|
||||
target_PSNR: 0,
|
||||
method: 4,
|
||||
sns_strength: 50,
|
||||
filter_strength: 60,
|
||||
filter_sharpness: 0,
|
||||
filter_type: 1,
|
||||
partitions: 0,
|
||||
segments: 4,
|
||||
pass: 1,
|
||||
show_compressed: 0,
|
||||
preprocessing: 0,
|
||||
autofilter: 0,
|
||||
partition_limit: 0,
|
||||
alpha_compression: 1,
|
||||
alpha_filtering: 1,
|
||||
alpha_quality: 100,
|
||||
lossless: 0,
|
||||
exact: 0,
|
||||
image_hint: 0,
|
||||
emulate_jpeg_size: 0,
|
||||
thread_level: 0,
|
||||
low_memory: 0,
|
||||
near_lossless: 100,
|
||||
use_delta_palette: 0,
|
||||
use_sharp_yuv: 0,
|
||||
});
|
||||
console.log('size', result.length);
|
||||
const blob = new Blob([result], {type: 'image/webp'});
|
||||
|
||||
module.free_result();
|
||||
const p = api.create_buffer(image.width, image.height);
|
||||
Module.HEAP8.set(image.data, p);
|
||||
api.encode(p, image.width, image.height, 2);
|
||||
const resultPointer = api.get_result_pointer();
|
||||
const resultSize = api.get_result_size();
|
||||
const resultView = new Uint8Array(Module.HEAP8.buffer, resultPointer, resultSize);
|
||||
const result = new Uint8Array(resultView);
|
||||
api.free_result(resultPointer);
|
||||
api.destroy_buffer(p);
|
||||
|
||||
const blob = new Blob([result], {type: 'image/jpeg'});
|
||||
const blobURL = URL.createObjectURL(blob);
|
||||
const img = document.createElement('img');
|
||||
img.src = blobURL;
|
||||
|
@@ -2,12 +2,12 @@
   "name": "webp_enc",
   "scripts": {
     "install": "napa",
-    "build": "docker run --rm -v $(pwd):/src trzeci/emscripten ./build.sh"
+    "build": "docker run --rm -v $(pwd):/src trzeci/emscripten emcc -O3 -s WASM=1 -s EXTRA_EXPORTED_RUNTIME_METHODS='[\"cwrap\"]' -s ALLOW_MEMORY_GROWTH=1 -s MODULARIZE=1 -s 'EXPORT_NAME=\"webp_enc\"' -I node_modules/libwebp -o ./webp_enc.js webp_enc.c node_modules/libwebp/src/{dec,dsp,demux,enc,mux,utils}/*.c"
   },
   "napa": {
-    "libwebp": "webmproject/libwebp#v1.0.2"
+    "libwebp": "webmproject/libwebp#v0.6.1"
   },
   "devDependencies": {
-    "napa": "3.0.0"
+    "napa": "^3.0.0"
   }
 }
44 codecs/webp_enc/webp_enc.c (new file)
@@ -0,0 +1,44 @@
#include "emscripten.h"
#include "src/webp/encode.h"
#include <stdlib.h>

EMSCRIPTEN_KEEPALIVE
int version() {
  return WebPGetEncoderVersion();
}

EMSCRIPTEN_KEEPALIVE
uint8_t* create_buffer(int width, int height) {
  return malloc(width * height * 4 * sizeof(uint8_t));
}

EMSCRIPTEN_KEEPALIVE
void destroy_buffer(uint8_t* p) {
  free(p);
}

int result[2];
EMSCRIPTEN_KEEPALIVE
void encode(uint8_t* img_in, int width, int height, float quality) {
  uint8_t* img_out;
  size_t size;
  size = WebPEncodeRGBA(img_in, width, height, width * 4, quality, &img_out);
  result[0] = (int)img_out;
  result[1] = size;
}

EMSCRIPTEN_KEEPALIVE
void free_result() {
  WebPFree(result[0]);
}

EMSCRIPTEN_KEEPALIVE
int get_result_pointer() {
  return result[0];
}

EMSCRIPTEN_KEEPALIVE
int get_result_size() {
  return result[1];
}
@ -1,96 +0,0 @@
|
||||
#include <emscripten/bind.h>
|
||||
#include <emscripten/val.h>
|
||||
#include "src/webp/encode.h"
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <stdexcept>
|
||||
|
||||
using namespace emscripten;
|
||||
|
||||
int version() {
|
||||
return WebPGetEncoderVersion();
|
||||
}
|
||||
|
||||
uint8_t* last_result;
|
||||
|
||||
val encode(std::string img, int width, int height, WebPConfig config) {
|
||||
uint8_t* img_in = (uint8_t*) img.c_str();
|
||||
|
||||
// A lot of this is duplicated from Encode in picture_enc.c
|
||||
WebPPicture pic;
|
||||
WebPMemoryWriter wrt;
|
||||
int ok;
|
||||
|
||||
if (!WebPPictureInit(&pic)) {
|
||||
// shouldn't happen, except if system installation is broken
|
||||
throw std::runtime_error("Unexpected error");
|
||||
}
|
||||
|
||||
// Only use use_argb if we really need it, as it's slower.
|
||||
pic.use_argb = config.lossless || config.use_sharp_yuv || config.preprocessing > 0;
|
||||
pic.width = width;
|
||||
pic.height = height;
|
||||
pic.writer = WebPMemoryWrite;
|
||||
pic.custom_ptr = &wrt;
|
||||
|
||||
WebPMemoryWriterInit(&wrt);
|
||||
|
||||
ok = WebPPictureImportRGBA(&pic, (uint8_t*) img_in, width * 4) && WebPEncode(&config, &pic);
|
||||
WebPPictureFree(&pic);
|
||||
if (!ok) {
|
||||
WebPMemoryWriterClear(&wrt);
|
||||
throw std::runtime_error("Encode failed");
|
||||
}
|
||||
|
||||
last_result = wrt.mem;
|
||||
|
||||
return val(typed_memory_view(wrt.size, wrt.mem));
|
||||
}
|
||||
|
||||
void free_result() {
|
||||
WebPFree(last_result);
|
||||
}
|
||||
|
||||
|
||||
EMSCRIPTEN_BINDINGS(my_module) {
|
||||
enum_<WebPImageHint>("WebPImageHint")
|
||||
.value("WEBP_HINT_DEFAULT", WebPImageHint::WEBP_HINT_DEFAULT)
|
||||
.value("WEBP_HINT_PICTURE", WebPImageHint::WEBP_HINT_PICTURE)
|
||||
.value("WEBP_HINT_PHOTO", WebPImageHint::WEBP_HINT_PHOTO)
|
||||
.value("WEBP_HINT_GRAPH", WebPImageHint::WEBP_HINT_GRAPH)
|
||||
;
|
||||
|
||||
value_object<WebPConfig>("WebPConfig")
|
||||
.field("lossless", &WebPConfig::lossless)
|
||||
.field("quality", &WebPConfig::quality)
|
||||
.field("method", &WebPConfig::method)
|
||||
.field("image_hint", &WebPConfig::image_hint)
|
||||
.field("target_size", &WebPConfig::target_size)
|
||||
.field("target_PSNR", &WebPConfig::target_PSNR)
|
||||
.field("segments", &WebPConfig::segments)
|
||||
.field("sns_strength", &WebPConfig::sns_strength)
|
||||
.field("filter_strength", &WebPConfig::filter_strength)
|
||||
.field("filter_sharpness", &WebPConfig::filter_sharpness)
|
||||
.field("filter_type", &WebPConfig::filter_type)
|
||||
.field("autofilter", &WebPConfig::autofilter)
|
||||
.field("alpha_compression", &WebPConfig::alpha_compression)
|
||||
.field("alpha_filtering", &WebPConfig::alpha_filtering)
|
||||
.field("alpha_quality", &WebPConfig::alpha_quality)
|
||||
.field("pass", &WebPConfig::pass)
|
||||
.field("show_compressed", &WebPConfig::show_compressed)
|
||||
.field("preprocessing", &WebPConfig::preprocessing)
|
||||
.field("partitions", &WebPConfig::partitions)
|
||||
.field("partition_limit", &WebPConfig::partition_limit)
|
||||
.field("emulate_jpeg_size", &WebPConfig::emulate_jpeg_size)
|
||||
.field("thread_level", &WebPConfig::thread_level)
|
||||
.field("low_memory", &WebPConfig::low_memory)
|
||||
.field("near_lossless", &WebPConfig::near_lossless)
|
||||
.field("exact", &WebPConfig::exact)
|
||||
.field("use_delta_palette", &WebPConfig::use_delta_palette)
|
||||
.field("use_sharp_yuv", &WebPConfig::use_sharp_yuv)
|
||||
;
|
||||
|
||||
function("version", &version);
|
||||
function("encode", &encode);
|
||||
function("free_result", &free_result);
|
||||
}
|
9 codecs/webp_enc/webp_enc.d.ts (vendored)
@@ -1,9 +0,0 @@
import { EncodeOptions } from '../../src/codecs/webp/encoder-meta';

interface WebPModule extends EmscriptenWasm.Module {
  encode(data: BufferSource, width: number, height: number, options: EncodeOptions): Uint8Array;
  free_result(): void;
}


export default function(opts: EmscriptenWasm.ModuleOpts): WebPModule;
@ -1,74 +0,0 @@
|
||||
const DtsCreator = require('typed-css-modules');
|
||||
const chokidar = require('chokidar');
|
||||
const util = require('util');
|
||||
const sass = require('node-sass');
|
||||
|
||||
const sassRender = util.promisify(sass.render);
|
||||
|
||||
async function sassToCss(path) {
|
||||
const result = await sassRender({ file: path });
|
||||
return result.css;
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef {Object} Opts
|
||||
* @property {boolean} watch Watch for changes
|
||||
*/
|
||||
/**
|
||||
* Create typing files for CSS & SCSS.
|
||||
*
|
||||
* @param {string[]} rootPaths Paths to search within
|
||||
* @param {Opts} [opts={}] Options.
|
||||
*/
|
||||
function addCssTypes(rootPaths, opts = {}) {
|
||||
return new Promise((resolve) => {
|
||||
const { watch = false } = opts;
|
||||
|
||||
const paths = [];
|
||||
const preReadyPromises = [];
|
||||
let ready = false;
|
||||
|
||||
for (const rootPath of rootPaths) {
|
||||
// Look for scss & css in each path.
|
||||
paths.push(rootPath + '/**/*.scss');
|
||||
paths.push(rootPath + '/**/*.css');
|
||||
}
|
||||
|
||||
// For simplicity, the watcher is used even if we're not watching.
|
||||
// If we're not watching, we stop the watcher after the initial files are found.
|
||||
const watcher = chokidar.watch(paths, {
|
||||
// Avoid processing already-processed files.
|
||||
ignored: '*.d.*',
|
||||
// Without this, travis and netlify builds never complete. I'm not sure why, but it might be
|
||||
// related to https://github.com/paulmillr/chokidar/pull/758
|
||||
persistent: watch,
|
||||
});
|
||||
|
||||
function change(path) {
|
||||
const promise = (async function() {
|
||||
const creator = new DtsCreator({ camelCase: true });
|
||||
const result = path.endsWith('.scss') ?
|
||||
await creator.create(path, await sassToCss(path)) :
|
||||
await creator.create(path);
|
||||
|
||||
await result.writeFile();
|
||||
})();
|
||||
|
||||
if (!ready) preReadyPromises.push(promise);
|
||||
}
|
||||
|
||||
watcher.on('change', change);
|
||||
watcher.on('add', change);
|
||||
|
||||
// 'ready' is when events have been fired for file discovery.
|
||||
watcher.on('ready', () => {
|
||||
ready = true;
|
||||
// Wait for the current set of processing to finish.
|
||||
Promise.all(preReadyPromises).then(resolve);
|
||||
// And if we're not watching, close the watcher.
|
||||
if (!watch) watcher.close();
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = addCssTypes;
|
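For context, a hypothetical usage sketch for this helper; the `./add-css-types` require path and the watch condition are illustrative, only the `(rootPaths, { watch })` signature comes from the code above:

const addCssTypes = require('./add-css-types');

// Generate *.d.ts typings for all CSS/SCSS under src/, watching during development.
addCssTypes(['src'], { watch: process.env.NODE_ENV !== 'production' })
  .then(() => console.log('CSS typings up to date'));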
@ -1,47 +0,0 @@
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const ejs = require('ejs');
|
||||
const AssetsPlugin = require('assets-webpack-plugin');
|
||||
|
||||
module.exports = class AssetTemplatePlugin extends AssetsPlugin {
|
||||
constructor(options) {
|
||||
options = options || {};
|
||||
if (!options.template) throw Error('AssetTemplatePlugin: template option is required.');
|
||||
super({
|
||||
useCompilerPath: true,
|
||||
filename: options.filename,
|
||||
processOutput: files => this._processOutput(files)
|
||||
});
|
||||
this._template = path.resolve(process.cwd(), options.template);
|
||||
const ignore = options.ignore || /(manifest\.json|\.DS_Store)$/;
|
||||
this._ignore = typeof ignore === 'function' ? ({ test: ignore }) : ignore;
|
||||
}
|
||||
|
||||
_processOutput(files) {
|
||||
const mapping = {
|
||||
all: [],
|
||||
byType: {},
|
||||
entries: {}
|
||||
};
|
||||
for (const entryName in files) {
|
||||
// non-entry-point-derived assets are collected under an empty string key
|
||||
// since that's a bit awkward, we'll call them "assets"
|
||||
const name = entryName === '' ? 'assets' : entryName;
|
||||
const listing = files[entryName];
|
||||
const entry = mapping.entries[name] = {
|
||||
all: [],
|
||||
byType: {}
|
||||
};
|
||||
for (let type in listing) {
|
||||
const list = [].concat(listing[type]).filter(file => !this._ignore.test(file));
|
||||
if (!list.length) continue;
|
||||
mapping.all = mapping.all.concat(list);
|
||||
mapping.byType[type] = (mapping.byType[type] || []).concat(list);
|
||||
entry.all = entry.all.concat(list);
|
||||
entry.byType[type] = (entry.byType[type] || []).concat(list);
|
||||
}
|
||||
}
|
||||
mapping.files = mapping.all;
|
||||
return ejs.render(fs.readFileSync(this._template, 'utf8'), mapping);
|
||||
}
|
||||
};
|
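A sketch of how this plugin might be wired into a webpack config; the template path, output filename, and plugin require path are made up for illustration, while the option names and the `{ all, byType, entries }` mapping passed to the EJS template come from the code above:

// webpack.config.js (illustrative)
const AssetTemplatePlugin = require('./config/asset-template-plugin');

module.exports = {
  // ...
  plugins: [
    new AssetTemplatePlugin({
      template: 'src/index.ejs', // EJS template rendered with { all, byType, entries }
      filename: 'index.html',    // written relative to webpack's output path
    }),
  ],
};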
@ -1,158 +0,0 @@
|
||||
const util = require('util');
|
||||
const minimatch = require('minimatch');
|
||||
const SingleEntryPlugin = require('webpack/lib/SingleEntryPlugin');
|
||||
const WebWorkerTemplatePlugin = require('webpack/lib/webworker/WebWorkerTemplatePlugin');
|
||||
const ParserHelpers = require('webpack/lib/ParserHelpers');
|
||||
|
||||
const NAME = 'auto-sw-plugin';
|
||||
const JS_TYPES = ['auto', 'esm', 'dynamic'];
|
||||
|
||||
/**
|
||||
* Automatically finds and bundles Service Workers by looking for navigator.serviceWorker.register(..).
|
||||
* An Array of webpack assets is injected into the Service Worker bundle as a `BUILD_ASSETS` global.
|
||||
* Hidden and `.map` files are excluded by default, and this can be customized using the include & exclude options.
|
||||
* @example
|
||||
* // webpack config
|
||||
* plugins: [
|
||||
* new AutoSWPlugin({
|
||||
* exclude: [
|
||||
* '**\/.*', // don't expose hidden files (default)
|
||||
* '**\/*.map', // don't precache sourcemaps (default)
|
||||
* 'index.html' // don't cache the page itself
|
||||
* ]
|
||||
* })
|
||||
* ]
|
||||
* @param {Object} [options={}]
|
||||
* @param {string[]} [options.exclude] Minimatch pattern(s) of which assets to omit from BUILD_ASSETS.
|
||||
* @param {string[]} [options.include] Minimatch pattern(s) of assets to allow in BUILD_ASSETS.
|
||||
*/
|
||||
module.exports = class AutoSWPlugin {
|
||||
constructor(options) {
|
||||
this.options = Object.assign({
|
||||
exclude: [
|
||||
'**/*.map',
|
||||
'**/.*'
|
||||
]
|
||||
}, options || {});
|
||||
}
|
||||
|
||||
apply(compiler) {
|
||||
const serviceWorkers = [];
|
||||
|
||||
compiler.hooks.emit.tapPromise(NAME, compilation => this.emit(compiler, compilation, serviceWorkers));
|
||||
|
||||
compiler.hooks.normalModuleFactory.tap(NAME, (factory) => {
|
||||
for (const type of JS_TYPES) {
|
||||
factory.hooks.parser.for(`javascript/${type}`).tap(NAME, parser => {
|
||||
let counter = 0;
|
||||
|
||||
const processRegisterCall = expr => {
|
||||
const dep = parser.evaluateExpression(expr.arguments[0]);
|
||||
|
||||
if (!dep.isString()) {
|
||||
parser.state.module.warnings.push({
|
||||
message: 'navigator.serviceWorker.register() will only be bundled if passed a String literal.'
|
||||
});
|
||||
return false;
|
||||
}
|
||||
|
||||
const filename = dep.string;
|
||||
const outputFilename = this.options.filename || 'serviceworker.js'
|
||||
const context = parser.state.current.context;
|
||||
serviceWorkers.push({
|
||||
outputFilename,
|
||||
filename,
|
||||
context
|
||||
});
|
||||
|
||||
const id = `__webpack__serviceworker__${++counter}`;
|
||||
ParserHelpers.toConstantDependency(parser, id)(expr.arguments[0]);
|
||||
return ParserHelpers.addParsedVariableToModule(parser, id, '__webpack_public_path__ + ' + JSON.stringify(outputFilename));
|
||||
};
|
||||
|
||||
parser.hooks.call.for('navigator.serviceWorker.register').tap(NAME, processRegisterCall);
|
||||
parser.hooks.call.for('self.navigator.serviceWorker.register').tap(NAME, processRegisterCall);
|
||||
parser.hooks.call.for('window.navigator.serviceWorker.register').tap(NAME, processRegisterCall);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
createFilter(list) {
|
||||
const filters = [].concat(list);
|
||||
for (let i=0; i<filters.length; i++) {
|
||||
if (typeof filters[i] === 'string') {
|
||||
filters[i] = minimatch.filter(filters[i]);
|
||||
}
|
||||
}
|
||||
return filters;
|
||||
}
|
||||
|
||||
async emit(compiler, compilation, serviceWorkers) {
|
||||
let assetMapping = Object.keys(compilation.assets);
|
||||
if (this.options.include) {
|
||||
const filters = this.createFilter(this.options.include);
|
||||
assetMapping = assetMapping.filter(filename => {
|
||||
for (const filter of filters) {
|
||||
if (filter(filename)) return true;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
}
|
||||
if (this.options.exclude) {
|
||||
const filters = this.createFilter(this.options.exclude);
|
||||
assetMapping = assetMapping.filter(filename => {
|
||||
for (const filter of filters) {
|
||||
if (filter(filename)) return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
await Promise.all(serviceWorkers.map(
|
||||
(serviceWorker, index) => this.compileServiceWorker(compiler, compilation, serviceWorker, index, assetMapping)
|
||||
));
|
||||
}
|
||||
|
||||
async compileServiceWorker(compiler, compilation, options, index, assetMapping) {
|
||||
const entryFilename = options.filename;
|
||||
|
||||
const chunkFilename = compiler.options.output.chunkFilename.replace(/\.([a-z]+)$/i, '.serviceworker.$1');
|
||||
const workerOptions = {
|
||||
filename: options.outputFilename, // chunkFilename.replace(/\.?\[(?:chunkhash|contenthash|hash)(:\d+(?::\d+)?)?\]/g, ''),
|
||||
chunkFilename: this.options.chunkFilename || chunkFilename,
|
||||
globalObject: 'self'
|
||||
};
|
||||
|
||||
const childCompiler = compilation.createChildCompiler(NAME, { filename: workerOptions.filename });
|
||||
(new WebWorkerTemplatePlugin(workerOptions)).apply(childCompiler);
|
||||
|
||||
/* The duplication DefinePlugin ends up causing is problematic (it doesn't hoist injections), so we'll do it manually. */
|
||||
// (new DefinePlugin({
|
||||
// BUILD_ASSETS: JSON.stringify(assetMapping)
|
||||
// })).apply(childCompiler);
|
||||
(new SingleEntryPlugin(options.context, entryFilename, workerOptions.filename)).apply(childCompiler);
|
||||
|
||||
const subCache = `subcache ${__dirname} ${entryFilename} ${index}`;
|
||||
let childCompilation;
|
||||
childCompiler.hooks.compilation.tap(NAME, c => {
|
||||
childCompilation = c;
|
||||
if (childCompilation.cache) {
|
||||
if (!childCompilation.cache[subCache]) childCompilation.cache[subCache] = {};
|
||||
childCompilation.cache = childCompilation.cache[subCache];
|
||||
}
|
||||
});
|
||||
|
||||
await (util.promisify(childCompiler.runAsChild.bind(childCompiler)))();
|
||||
|
||||
const versionVar = this.options.version ?
|
||||
`var VERSION = ${JSON.stringify(this.options.version)};` : '';
|
||||
const original = childCompilation.assets[workerOptions.filename].source();
|
||||
const source = `${versionVar}var BUILD_ASSETS=${JSON.stringify(assetMapping)};${original}`;
|
||||
childCompilation.assets[workerOptions.filename] = {
|
||||
source: () => source,
|
||||
size: () => Buffer.byteLength(source, 'utf8')
|
||||
};
|
||||
|
||||
Object.assign(compilation.assets, childCompilation.assets);
|
||||
}
|
||||
};
|
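Given the `BUILD_ASSETS` (and optional `VERSION`) globals this plugin injects into the Service Worker bundle, the worker might consume them roughly like this; the cache name and precaching strategy are assumptions, not part of the plugin:

// serviceworker.js (sketch)
const CACHE = 'static-' + (typeof VERSION !== 'undefined' ? VERSION : 'dev');

self.addEventListener('install', (event) => {
  // BUILD_ASSETS is the filtered asset list injected by the plugin above.
  event.waitUntil(caches.open(CACHE).then(cache => cache.addAll(BUILD_ASSETS)));
});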
@ -1,203 +0,0 @@
|
||||
const path = require('path');
|
||||
const { URL } = require('url');
|
||||
|
||||
const gzipSize = require('gzip-size');
|
||||
const fetch = require('node-fetch');
|
||||
const prettyBytes = require('pretty-bytes');
|
||||
const escapeRE = require('escape-string-regexp');
|
||||
const readdirp = require('readdirp');
|
||||
const chalk = new require('chalk').constructor({ level: 4 });
|
||||
|
||||
function fetchTravis(path, options = {}) {
|
||||
const url = new URL(path, 'https://api.travis-ci.org');
|
||||
url.search = new URLSearchParams(options);
|
||||
|
||||
return fetch(url, {
|
||||
headers: { 'Travis-API-Version': '3' },
|
||||
});
|
||||
}
|
||||
|
||||
function fetchTravisBuildInfo(user, repo, branch) {
|
||||
return fetchTravis(`/repo/${encodeURIComponent(`${user}/${repo}`)}/builds`, {
|
||||
'branch.name': branch,
|
||||
state: 'passed',
|
||||
limit: 1,
|
||||
event_type: 'push',
|
||||
}).then(r => r.json());
|
||||
}
|
||||
|
||||
function fetchTravisText(path) {
|
||||
return fetchTravis(path).then(r => r.text());
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively-read a directory and turn it into an array of { name, size, gzipSize }
|
||||
*/
|
||||
async function dirToInfoArray(startPath) {
|
||||
const results = await new Promise((resolve, reject) => {
|
||||
readdirp({ root: startPath }, (err, results) => {
|
||||
if (err) reject(err); else resolve(results);
|
||||
});
|
||||
});
|
||||
|
||||
return Promise.all(
|
||||
results.files.map(async (entry) => ({
|
||||
name: entry.path,
|
||||
gzipSize: await gzipSize.file(entry.fullPath),
|
||||
size: entry.stat.size,
|
||||
})),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to treat two entries with different file name hashes as the same file.
|
||||
*/
|
||||
function findHashedMatch(name, buildInfo) {
|
||||
const nameParts = /^(.+\.)[a-f0-9]+(\..+)$/.exec(name);
|
||||
if (!nameParts) return;
|
||||
|
||||
const matchRe = new RegExp(`^${escapeRE(nameParts[1])}[a-f0-9]+${escapeRE(nameParts[2])}$`);
|
||||
const matchingEntry = buildInfo.find(entry => matchRe.test(entry.name));
|
||||
return matchingEntry;
|
||||
}
|
||||
|
||||
const buildSizePrefix = '=== BUILD SIZES: ';
|
||||
const buildSizePrefixRe = new RegExp(`^${escapeRE(buildSizePrefix)}(.+)$`, 'm');
|
||||
|
||||
async function getPreviousBuildInfo() {
|
||||
const buildData = await fetchTravisBuildInfo('GoogleChromeLabs', 'squoosh', 'master');
|
||||
const jobUrl = buildData.builds[0].jobs[0]['@href'];
|
||||
const log = await fetchTravisText(jobUrl + '/log.txt');
|
||||
const reResult = buildSizePrefixRe.exec(log);
|
||||
|
||||
if (!reResult) return;
|
||||
return JSON.parse(reResult[1]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate an array that represents the difference between builds.
|
||||
* Returns an array of { beforeName, afterName, beforeSize, afterSize }.
|
||||
* Sizes are gzipped size.
|
||||
* Before/after properties are missing if resource isn't in the previous/new build.
|
||||
*/
|
||||
function getChanges(previousBuildInfo, buildInfo) {
|
||||
const buildChanges = [];
|
||||
const alsoInPreviousBuild = new Set();
|
||||
|
||||
for (const oldEntry of previousBuildInfo) {
|
||||
const newEntry = buildInfo.find(entry => entry.name === oldEntry.name) ||
|
||||
findHashedMatch(oldEntry.name, buildInfo);
|
||||
|
||||
// Entry is in previous build, but not the new build.
|
||||
if (!newEntry) {
|
||||
buildChanges.push({
|
||||
beforeName: oldEntry.name,
|
||||
beforeSize: oldEntry.gzipSize,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
// Mark this entry so we know we've dealt with it.
|
||||
alsoInPreviousBuild.add(newEntry);
|
||||
|
||||
// If they're the same, just ignore.
|
||||
// Using size rather than gzip size. I've seen different platforms produce different zipped
|
||||
// sizes.
|
||||
if (
|
||||
oldEntry.size === newEntry.size &&
|
||||
oldEntry.name === newEntry.name
|
||||
) continue;
|
||||
|
||||
// Entry is in both builds (maybe renamed).
|
||||
buildChanges.push({
|
||||
beforeName: oldEntry.name,
|
||||
afterName: newEntry.name,
|
||||
beforeSize: oldEntry.gzipSize,
|
||||
afterSize: newEntry.gzipSize,
|
||||
});
|
||||
}
|
||||
|
||||
// Look for entries that are only in the new build.
|
||||
for (const newEntry of buildInfo) {
|
||||
if (alsoInPreviousBuild.has(newEntry)) continue;
|
||||
|
||||
buildChanges.push({
|
||||
afterName: newEntry.name,
|
||||
afterSize: newEntry.gzipSize,
|
||||
});
|
||||
}
|
||||
|
||||
return buildChanges;
|
||||
}
|
||||
|
||||
async function main() {
|
||||
// Output the current build sizes for later retrieval.
|
||||
const buildInfo = await dirToInfoArray(__dirname + '/../build');
|
||||
console.log(buildSizePrefix + JSON.stringify(buildInfo));
|
||||
console.log('\nBuild change report:');
|
||||
|
||||
let previousBuildInfo;
|
||||
|
||||
try {
|
||||
previousBuildInfo = await getPreviousBuildInfo();
|
||||
} catch (err) {
|
||||
console.log(` Couldn't parse previous build info`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!previousBuildInfo) {
|
||||
console.log(` Couldn't find previous build info`);
|
||||
return;
|
||||
}
|
||||
|
||||
const buildChanges = getChanges(previousBuildInfo, buildInfo);
|
||||
|
||||
if (buildChanges.length === 0) {
|
||||
console.log(' No changes');
|
||||
return;
|
||||
}
|
||||
|
||||
// One letter references, so it's easier to get the spacing right.
|
||||
const y = chalk.yellow;
|
||||
const g = chalk.green;
|
||||
const r = chalk.red;
|
||||
|
||||
for (const change of buildChanges) {
|
||||
// New file.
|
||||
if (!change.beforeSize) {
|
||||
console.log(` ${g('ADDED')} ${change.afterName} - ${prettyBytes(change.afterSize)}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Removed file.
|
||||
if (!change.afterSize) {
|
||||
console.log(` ${r('REMOVED')} ${change.beforeName} - was ${prettyBytes(change.beforeSize)}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Changed file.
|
||||
let size;
|
||||
|
||||
if (change.beforeSize === change.afterSize) {
|
||||
// Just renamed.
|
||||
size = `${prettyBytes(change.afterSize)} -> no change`;
|
||||
} else {
|
||||
const color = change.afterSize > change.beforeSize ? r : g;
|
||||
const sizeDiff = prettyBytes(change.afterSize - change.beforeSize, { signed: true });
|
||||
const relativeDiff = Math.round((change.afterSize / change.beforeSize) * 1000) / 1000;
|
||||
|
||||
size = `${prettyBytes(change.beforeSize)} -> ${prettyBytes(change.afterSize)}` +
|
||||
' (' +
|
||||
color(`${sizeDiff}, ${relativeDiff}x`) +
|
||||
')';
|
||||
}
|
||||
|
||||
console.log(` ${y('CHANGED')} ${change.afterName} - ${size}`);
|
||||
|
||||
if (change.beforeName !== change.afterName) {
|
||||
console.log(` Renamed from: ${change.beforeName}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
54 firebase.json (new file)
@ -0,0 +1,54 @@
|
||||
{
|
||||
"hosting": {
|
||||
"public": "build",
|
||||
"ignore": [
|
||||
"firebase.json",
|
||||
"**/.*",
|
||||
"**/node_modules/**"
|
||||
],
|
||||
"rewrites": [
|
||||
{
|
||||
"source": "**",
|
||||
"destination": "/index.html"
|
||||
}
|
||||
],
|
||||
"headers": [
|
||||
{
|
||||
"source": "**/*.@(eot|otf|ttf|ttc|woff|font.css)",
|
||||
"headers": [
|
||||
{
|
||||
"key": "Access-Control-Allow-Origin",
|
||||
"value": "*"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"source": "**/*.@(jpg|jpeg|gif|png|ico)",
|
||||
"headers": [
|
||||
{
|
||||
"key": "Cache-Control",
|
||||
"value": "max-age=7200"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"source": "**/*.@(js|css|json|manifest|map)",
|
||||
"headers": [
|
||||
{
|
||||
"key": "Cache-Control",
|
||||
"value": "max-age=31536000"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"source": "sw.js",
|
||||
"headers": [
|
||||
{
|
||||
"key": "Cache-Control",
|
||||
"value": "max-age=0"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
11 global.d.ts (vendored)
@ -1,23 +1,22 @@
|
||||
declare const __webpack_public_path__: string;
|
||||
declare const PRERENDER: boolean;
|
||||
|
||||
declare interface NodeModule {
|
||||
hot: any;
|
||||
}
|
||||
|
||||
declare interface Window {
|
||||
STATE: any;
|
||||
ga: typeof ga;
|
||||
STATE: any
|
||||
}
|
||||
|
||||
declare namespace JSX {
|
||||
interface Element { }
|
||||
interface IntrinsicElements { }
|
||||
interface HTMLAttributes {
|
||||
decoding?: string;
|
||||
}
|
||||
}
|
||||
|
||||
declare module 'preact-i18n';
|
||||
declare module 'preact-material-components-drawer';
|
||||
declare module 'material-radial-progress';
|
||||
|
||||
declare module 'classnames' {
|
||||
export default function classnames(...args: any[]): string;
|
||||
}
|
||||
|
113 karma.conf.js (new file)
@ -0,0 +1,113 @@
|
||||
const fs = require("fs");
|
||||
|
||||
function readJsonFile(path) {
|
||||
// TypeScript puts lots of comments in the default `tsconfig.json`, so you
|
||||
// can’t use `require()` to read it. Hence this hack.
|
||||
return eval("(" + fs.readFileSync(path).toString("utf-8") + ")");
|
||||
}
|
||||
|
||||
const typeScriptConfig = readJsonFile("./tsconfig.json");
|
||||
const babel = readJsonFile("./.babelrc");
|
||||
|
||||
module.exports = function(config) {
|
||||
const options = {
|
||||
// base path that will be used to resolve all patterns (eg. files, exclude)
|
||||
basePath: "",
|
||||
|
||||
// frameworks to use
|
||||
// available frameworks: https://npmjs.org/browse/keyword/karma-adapter
|
||||
frameworks: ["mocha", "chai", "karma-typescript"],
|
||||
|
||||
// list of files / patterns to load in the browser
|
||||
files: [
|
||||
{
|
||||
pattern: "test/**/*.ts",
|
||||
type: "module"
|
||||
},
|
||||
{
|
||||
pattern: "src/**/*.ts",
|
||||
included: false
|
||||
}
|
||||
],
|
||||
|
||||
// list of files / patterns to exclude
|
||||
exclude: [],
|
||||
// preprocess matching files before serving them to the browser // available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor
|
||||
preprocessors: {
|
||||
"src/**/*.ts": ["karma-typescript", "babel"],
|
||||
"test/**/*.ts": ["karma-typescript", "babel"]
|
||||
},
|
||||
babelPreprocessor: {
|
||||
options: babel
|
||||
},
|
||||
karmaTypescriptConfig: {
|
||||
// Inline `tsconfig.json` so that the right TS libs are loaded
|
||||
...typeScriptConfig,
|
||||
// Coverage is a thing that karma-typescript forces on you and only
|
||||
// creates problems. This is the simplest way of disabling it that I
|
||||
// could find.
|
||||
coverageOptions: {
|
||||
exclude: /.*/
|
||||
}
|
||||
},
|
||||
mime: {
|
||||
// Default mimetype for .ts files is video/mp2t but we need
|
||||
// text/javascript for modules to work.
|
||||
"text/javascript": ["ts"]
|
||||
},
|
||||
plugins: [
|
||||
// Load all modules whose name starts with "karma" (usually the default).
|
||||
"karma-*",
|
||||
// We don’t have file extensions on our imports as they are primarily
|
||||
// consumed by webpack. With Karma, however, this turns into a real HTTP
|
||||
// request for a non-existent file. This inline plugin is a middleware
|
||||
// that appends `.ts` to the request URL.
|
||||
{
|
||||
"middleware:redirect_to_ts": [
|
||||
"value",
|
||||
(req, res, next) => {
|
||||
if (req.url.startsWith("/base/src")) {
|
||||
req.url += '.ts';
|
||||
}
|
||||
next();
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
// Run our middleware before all other middlewares.
|
||||
beforeMiddleware: ["redirect_to_ts"],
|
||||
|
||||
// test results reporter to use
|
||||
// possible values: 'dots', 'progress'
|
||||
// available reporters: https://npmjs.org/browse/keyword/karma-reporter
|
||||
reporters: ["progress"],
|
||||
|
||||
// web server port
|
||||
port: 9876,
|
||||
|
||||
// enable / disable colors in the output (reporters and logs)
|
||||
colors: true,
|
||||
|
||||
// level of logging
|
||||
// possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
|
||||
logLevel: config.LOG_INFO,
|
||||
|
||||
// enable / disable watching file and executing tests whenever any file changes
|
||||
autoWatch: false,
|
||||
|
||||
// start these browsers
|
||||
// available browser launchers: https://npmjs.org/browse/keyword/karma-launcher
|
||||
browsers: ["ChromeHeadless"],
|
||||
|
||||
// Continuous Integration mode
|
||||
// if true, Karma captures browsers, runs the tests and exits
|
||||
singleRun: true,
|
||||
|
||||
// These custom files allow us to use ES6 modules in our tests.
|
||||
// Remove these 2 lines (and files) once https://github.com/karma-runner/karma/pull/2834 lands.
|
||||
customContextFile: "test/context.html",
|
||||
customDebugFile: "test/debug.html"
|
||||
};
|
||||
|
||||
config.set(options);
|
||||
};
|
18734 package-lock.json (generated)
160 package.json
@ -1,75 +1,109 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "squoosh",
|
||||
"version": "1.3.4",
|
||||
"version": "0.0.0",
|
||||
"license": "apache-2.0",
|
||||
"scripts": {
|
||||
"start": "webpack-dev-server --host 0.0.0.0 --hot",
|
||||
"build:mozjpeg_enc": "cd codecs/mozjpeg_enc && npm run build",
|
||||
"build:codecs": "npm run build:mozjpeg_enc",
|
||||
"start": "webpack serve --host 0.0.0.0 --hot",
|
||||
"build": "webpack -p",
|
||||
"lint": "tslint -c tslint.json -p tsconfig.json -t verbose 'src/**/*.{ts,tsx,js,jsx}'",
|
||||
"lintfix": "tslint -c tslint.json -p tsconfig.json -t verbose --fix 'src/**/*.{ts,tsx,js,jsx}'",
|
||||
"sizereport": "node config/size-report.js"
|
||||
"lint": "eslint src",
|
||||
"test": "npm run build && mocha -R spec && karma start"
|
||||
},
|
||||
"husky": {
|
||||
"hooks": {
|
||||
"pre-commit": "npm run lint"
|
||||
"eslintConfig": {
|
||||
"extends": [
|
||||
"standard",
|
||||
"standard-jsx"
|
||||
],
|
||||
"rules": {
|
||||
"indent": [
|
||||
2,
|
||||
2
|
||||
],
|
||||
"semi": [
|
||||
2,
|
||||
"always"
|
||||
],
|
||||
"prefer-const": 1
|
||||
}
|
||||
},
|
||||
"eslintIgnore": [
|
||||
"build/*"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@types/node": "10.12.29",
|
||||
"@types/pretty-bytes": "5.1.0",
|
||||
"@types/webassembly-js-api": "0.0.2",
|
||||
"@webcomponents/custom-elements": "1.2.1",
|
||||
"@webpack-cli/serve": "0.1.3",
|
||||
"assets-webpack-plugin": "3.9.10",
|
||||
"chokidar": "2.1.2",
|
||||
"chalk": "2.4.2",
|
||||
"classnames": "2.2.6",
|
||||
"clean-webpack-plugin": "1.0.1",
|
||||
"comlink": "3.1.1",
|
||||
"copy-webpack-plugin": "5.0.0",
|
||||
"critters-webpack-plugin": "2.3.0",
|
||||
"css-loader": "1.0.1",
|
||||
"ejs": "2.6.1",
|
||||
"escape-string-regexp": "1.0.5",
|
||||
"exports-loader": "0.7.0",
|
||||
"file-drop-element": "0.2.0",
|
||||
"file-loader": "3.0.1",
|
||||
"gzip-size": "5.0.0",
|
||||
"html-webpack-plugin": "3.2.0",
|
||||
"husky": "1.3.1",
|
||||
"idb-keyval": "3.1.0",
|
||||
"linkstate": "1.1.1",
|
||||
"loader-utils": "1.2.3",
|
||||
"mini-css-extract-plugin": "0.5.0",
|
||||
"minimatch": "3.0.4",
|
||||
"node-fetch": "2.3.0",
|
||||
"node-sass": "4.11.0",
|
||||
"optimize-css-assets-webpack-plugin": "5.0.1",
|
||||
"pointer-tracker": "2.0.3",
|
||||
"preact": "8.4.2",
|
||||
"prerender-loader": "1.3.0",
|
||||
"pretty-bytes": "5.1.0",
|
||||
"progress-bar-webpack-plugin": "1.12.1",
|
||||
"raw-loader": "1.0.0",
|
||||
"readdirp": "2.2.1",
|
||||
"sass-loader": "7.1.0",
|
||||
"script-ext-html-webpack-plugin": "2.1.3",
|
||||
"source-map-loader": "0.2.4",
|
||||
"style-loader": "0.23.1",
|
||||
"terser-webpack-plugin": "1.2.3",
|
||||
"ts-loader": "5.3.3",
|
||||
"tslint": "5.13.1",
|
||||
"tslint-config-airbnb": "5.11.1",
|
||||
"tslint-config-semistandard": "7.0.0",
|
||||
"tslint-react": "3.6.0",
|
||||
"typed-css-modules": "0.4.1",
|
||||
"typescript": "3.3.3333",
|
||||
"url-loader": "1.1.2",
|
||||
"webpack": "4.28.0",
|
||||
"webpack-bundle-analyzer": "3.1.0",
|
||||
"webpack-cli": "3.2.3",
|
||||
"webpack-dev-server": "3.2.1",
|
||||
"worker-plugin": "3.1.0"
|
||||
"@types/chai": "^4.1.3",
|
||||
"@types/karma": "^1.7.3",
|
||||
"@types/mocha": "^5.2.0",
|
||||
"@types/node": "^9.4.7",
|
||||
"@types/webassembly-js-api": "0.0.1",
|
||||
"babel-loader": "^7.1.4",
|
||||
"babel-plugin-jsx-pragmatic": "^1.0.2",
|
||||
"babel-plugin-syntax-dynamic-import": "^6.18.0",
|
||||
"babel-plugin-transform-class-properties": "^6.24.1",
|
||||
"babel-plugin-transform-decorators-legacy": "^1.3.4",
|
||||
"babel-plugin-transform-object-rest-spread": "^6.26.0",
|
||||
"babel-plugin-transform-react-constant-elements": "^6.23.0",
|
||||
"babel-plugin-transform-react-jsx": "^6.24.1",
|
||||
"babel-plugin-transform-react-remove-prop-types": "^0.4.13",
|
||||
"babel-preset-env": "^1.6.1",
|
||||
"babel-register": "^6.26.0",
|
||||
"chai": "^4.1.2",
|
||||
"clean-webpack-plugin": "^0.1.19",
|
||||
"copy-webpack-plugin": "^4.5.1",
|
||||
"css-loader": "^0.28.11",
|
||||
"eslint": "^4.18.2",
|
||||
"eslint-config-standard": "^11.0.0",
|
||||
"eslint-config-standard-jsx": "^5.0.0",
|
||||
"eslint-plugin-import": "^2.10.0",
|
||||
"eslint-plugin-node": "^6.0.1",
|
||||
"eslint-plugin-promise": "^3.7.0",
|
||||
"eslint-plugin-react": "^7.7.0",
|
||||
"eslint-plugin-standard": "^3.0.1",
|
||||
"exports-loader": "^0.7.0",
|
||||
"express": "^4.16.3",
|
||||
"file-loader": "^1.1.11",
|
||||
"html-webpack-plugin": "^3.0.6",
|
||||
"if-env": "^1.0.4",
|
||||
"karma": "^2.0.2",
|
||||
"karma-babel-preprocessor": "^7.0.0",
|
||||
"karma-chai": "^0.1.0",
|
||||
"karma-chrome-launcher": "^2.2.0",
|
||||
"karma-mocha": "^1.3.0",
|
||||
"karma-typescript": "^3.0.12",
|
||||
"loader-utils": "^1.1.0",
|
||||
"mini-css-extract-plugin": "^0.3.0",
|
||||
"mocha": "^5.2.0",
|
||||
"node-sass": "^4.7.2",
|
||||
"optimize-css-assets-webpack-plugin": "^4.0.0",
|
||||
"prettier": "^1.12.1",
|
||||
"progress-bar-webpack-plugin": "^1.11.0",
|
||||
"puppeteer": "^1.3.0",
|
||||
"raw-loader": "^0.5.1",
|
||||
"sass-loader": "^6.0.7",
|
||||
"script-ext-html-webpack-plugin": "^2.0.1",
|
||||
"source-map-loader": "^0.2.3",
|
||||
"style-loader": "^0.20.3",
|
||||
"ts-loader": "^4.0.1",
|
||||
"tslint": "^5.9.1",
|
||||
"tslint-config-semistandard": "^7.0.0",
|
||||
"tslint-react": "^3.5.1",
|
||||
"typescript": "^2.7.2",
|
||||
"typings-for-css-modules-loader": "^1.7.0",
|
||||
"webpack": "^4.3.0",
|
||||
"webpack-bundle-analyzer": "^2.11.1",
|
||||
"webpack-cli": "^2.0.13",
|
||||
"webpack-dev-server": "^3.1.1",
|
||||
"webpack-plugin-replace": "^1.1.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"classnames": "^2.2.5",
|
||||
"material-components-web": "^0.32.0",
|
||||
"material-radial-progress": "git+https://gist.github.com/02134901c77c5309924bfcf8b4435ebe.git",
|
||||
"preact": "^8.2.7",
|
||||
"preact-i18n": "^1.2.0",
|
||||
"preact-material-components": "^1.3.7",
|
||||
"preact-material-components-drawer": "git+https://gist.github.com/a78fceed440b98e62582e4440b86bfab.git",
|
||||
"preact-router": "^2.6.0"
|
||||
}
|
||||
}
|
||||
|
@ -1,5 +0,0 @@
|
||||
{
|
||||
"extends": [
|
||||
"config:base"
|
||||
]
|
||||
}
|
Binary image changes: one image replaced (15 KiB → 1.1 KiB), two images removed (26 KiB and 6.7 KiB), and src/assets/icon.png added (new file, 27 KiB).
@ -1,11 +0,0 @@
|
||||
import { canvasEncodeTest } from '../generic/util';
|
||||
|
||||
export interface EncodeOptions { }
|
||||
export interface EncoderState { type: typeof type; options: EncodeOptions; }
|
||||
|
||||
export const type = 'browser-bmp';
|
||||
export const label = 'Browser BMP';
|
||||
export const mimeType = 'image/bmp';
|
||||
export const extension = 'bmp';
|
||||
export const defaultOptions: EncodeOptions = {};
|
||||
export const featureTest = () => canvasEncodeTest(mimeType);
|
@ -1,6 +0,0 @@
|
||||
import { mimeType } from './encoder-meta';
|
||||
import { canvasEncode } from '../../lib/util';
|
||||
|
||||
export function encode(data: ImageData) {
|
||||
return canvasEncode(data, mimeType);
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
import { canvasEncodeTest } from '../generic/util';
|
||||
|
||||
export interface EncodeOptions {}
|
||||
export interface EncoderState { type: typeof type; options: EncodeOptions; }
|
||||
|
||||
export const type = 'browser-gif';
|
||||
export const label = 'Browser GIF';
|
||||
export const mimeType = 'image/gif';
|
||||
export const extension = 'gif';
|
||||
export const defaultOptions: EncodeOptions = {};
|
||||
export const featureTest = () => canvasEncodeTest(mimeType);
|
@ -1,6 +0,0 @@
|
||||
import { mimeType } from './encoder-meta';
|
||||
import { canvasEncode } from '../../lib/util';
|
||||
|
||||
export function encode(data: ImageData) {
|
||||
return canvasEncode(data, mimeType);
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
import { canvasEncodeTest } from '../generic/util';
|
||||
|
||||
export interface EncodeOptions { }
|
||||
export interface EncoderState { type: typeof type; options: EncodeOptions; }
|
||||
|
||||
export const type = 'browser-jp2';
|
||||
export const label = 'Browser JPEG 2000';
|
||||
export const mimeType = 'image/jp2';
|
||||
export const extension = 'jp2';
|
||||
export const defaultOptions: EncodeOptions = {};
|
||||
export const featureTest = () => canvasEncodeTest(mimeType);
|
@ -1,6 +0,0 @@
|
||||
import { mimeType } from './encoder-meta';
|
||||
import { canvasEncode } from '../../lib/util';
|
||||
|
||||
export function encode(data: ImageData) {
|
||||
return canvasEncode(data, mimeType);
|
||||
}
|
@ -1,8 +0,0 @@
|
||||
export interface EncodeOptions { quality: number; }
|
||||
export interface EncoderState { type: typeof type; options: EncodeOptions; }
|
||||
|
||||
export const type = 'browser-jpeg';
|
||||
export const label = 'Browser JPEG';
|
||||
export const mimeType = 'image/jpeg';
|
||||
export const extension = 'jpg';
|
||||
export const defaultOptions: EncodeOptions = { quality: 0.75 };
|
@ -1,6 +0,0 @@
|
||||
import { EncodeOptions, mimeType } from './encoder-meta';
|
||||
import { canvasEncode } from '../../lib/util';
|
||||
|
||||
export function encode(data: ImageData, { quality }: EncodeOptions) {
|
||||
return canvasEncode(data, mimeType, quality);
|
||||
}
|
@ -1,3 +0,0 @@
|
||||
import qualityOption from '../generic/quality-option';
|
||||
|
||||
export default qualityOption({ min: 0, max: 1, step: 0.01 });
|
@ -1,11 +0,0 @@
|
||||
import { canvasEncodeTest } from '../generic/util';
|
||||
|
||||
export interface EncodeOptions { }
|
||||
export interface EncoderState { type: typeof type; options: EncodeOptions; }
|
||||
|
||||
export const type = 'browser-pdf';
|
||||
export const label = 'Browser PDF';
|
||||
export const mimeType = 'application/pdf';
|
||||
export const extension = 'pdf';
|
||||
export const defaultOptions: EncodeOptions = {};
|
||||
export const featureTest = () => canvasEncodeTest(mimeType);
|
@ -1,6 +0,0 @@
|
||||
import { mimeType } from './encoder-meta';
|
||||
import { canvasEncode } from '../../lib/util';
|
||||
|
||||
export function encode(data: ImageData) {
|
||||
return canvasEncode(data, mimeType);
|
||||
}
|
@ -1,8 +0,0 @@
|
||||
export interface EncodeOptions {}
|
||||
export interface EncoderState { type: typeof type; options: EncodeOptions; }
|
||||
|
||||
export const type = 'browser-png';
|
||||
export const label = 'Browser PNG';
|
||||
export const mimeType = 'image/png';
|
||||
export const extension = 'png';
|
||||
export const defaultOptions: EncodeOptions = {};
|
@ -1,6 +0,0 @@
|
||||
import { mimeType } from './encoder-meta';
|
||||
import { canvasEncode } from '../../lib/util';
|
||||
|
||||
export function encode(data: ImageData) {
|
||||
return canvasEncode(data, mimeType);
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
import { canvasEncodeTest } from '../generic/util';
|
||||
|
||||
export interface EncodeOptions { }
|
||||
export interface EncoderState { type: typeof type; options: EncodeOptions; }
|
||||
|
||||
export const type = 'browser-tiff';
|
||||
export const label = 'Browser TIFF';
|
||||
export const mimeType = 'image/tiff';
|
||||
export const extension = 'tiff';
|
||||
export const defaultOptions: EncodeOptions = {};
|
||||
export const featureTest = () => canvasEncodeTest(mimeType);
|
@ -1,6 +0,0 @@
|
||||
import { mimeType } from './encoder-meta';
|
||||
import { canvasEncode } from '../../lib/util';
|
||||
|
||||
export function encode(data: ImageData) {
|
||||
return canvasEncode(data, mimeType);
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
import { canvasEncodeTest } from '../generic/util';
|
||||
|
||||
export interface EncodeOptions { quality: number; }
|
||||
export interface EncoderState { type: typeof type; options: EncodeOptions; }
|
||||
|
||||
export const type = 'browser-webp';
|
||||
export const label = 'Browser WebP';
|
||||
export const mimeType = 'image/webp';
|
||||
export const extension = 'webp';
|
||||
export const defaultOptions: EncodeOptions = { quality: 0.75 };
|
||||
export const featureTest = () => canvasEncodeTest(mimeType);
|
@ -1,6 +0,0 @@
|
||||
import { EncodeOptions, mimeType } from './encoder-meta';
|
||||
import { canvasEncode } from '../../lib/util';
|
||||
|
||||
export function encode(data: ImageData, { quality }: EncodeOptions) {
|
||||
return canvasEncode(data, mimeType, quality);
|
||||
}
|
@ -1,3 +0,0 @@
|
||||
import qualityOption from '../generic/quality-option';
|
||||
|
||||
export default qualityOption({ min: 0, max: 1, step: 0.01 });
|
@@ -1,20 +0,0 @@
import { nativeDecode, sniffMimeType, canDecodeImage } from '../lib/util';
import Processor from './processor';
import webpDataUrl from 'url-loader!./tiny.webp';

const nativeWebPSupported = canDecodeImage(webpDataUrl);

export async function decodeImage(blob: Blob, processor: Processor): Promise<ImageData> {
  const mimeType = await sniffMimeType(blob);

  try {
    if (mimeType === 'image/webp' && !(await nativeWebPSupported)) {
      return await processor.webpDecode(blob);
    }

    // Otherwise, just throw it at the browser's decoder.
    return await nativeDecode(blob);
  } catch (err) {
    throw Error("Couldn't decode image");
  }
}
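A brief sketch of calling this helper from a file-drop handler; the import path, the no-argument `Processor` construction, and the `handleFile` function are assumptions for illustration:

import { decodeImage } from './decode-image'; // path assumed
import Processor from './processor';

const processor = new Processor(); // construction details assumed

async function handleFile(file) {
  // Falls back to the wasm WebP decoder when the browser can't decode WebP natively.
  const data = await decodeImage(file, processor);
  console.log('Decoded', data.width, 'x', data.height);
}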