Add better output formatting
cli/src/index.js
@@ -13,6 +13,19 @@ import WorkerPool from "./worker_pool.js";
import ImageData from "./image_data.js";
globalThis.ImageData = ImageData;

function clamp(v, min, max) {
  if (v < min) return min;
  if (v > max) return max;
  return v;
}

const suffix = ["B", "KB", "MB"];
function prettyPrintSize(size) {
  const base = Math.floor(Math.log2(size) / 10);
  const index = clamp(base, 0, 2);
  return (size / 2 ** (10 * index)).toFixed(2) + suffix[index];
}
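// Examples of the formatting above:
//   prettyPrintSize(512)         === "512.00B"
//   prettyPrintSize(153600)      === "150.00KB"
//   prettyPrintSize(5 * 2 ** 20) === "5.00MB"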

async function decodeFile(file) {
  const buffer = await fsp.readFile(file);
  const firstChunk = buffer.slice(0, 16);
@@ -28,7 +41,111 @@ async function decodeFile(file) {
  const rgba = (await supportedFormats[key].dec()).decode(
    new Uint8Array(buffer)
  );
  return rgba;
  return {
    file,
    bitmap: rgba,
    size: buffer.length
  };
}

async function encodeFile({
  file,
  size,
  bitmap,
  outputFile,
  encName,
  encConfig
}) {
  const encoder = await supportedFormats[encName].enc();
  const out = encoder.encode(
    bitmap.data.buffer,
    bitmap.width,
    bitmap.height,
    encConfig
  );
  await fsp.writeFile(outputFile, out);
  return {
    inputSize: size,
    inputFile: file,
    outputFile,
    outputSize: out.length
  };
}
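// encodeFile is the job each worker thread runs (see WorkerPool.useThisThreadAsWorker
// below); it echoes the input file and size back so the main thread can report ratios.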

async function processFiles(files) {
  // Create output directory
  await fsp.mkdir(program.outputDir, { recursive: true });
  const workerPool = new WorkerPool(cpus().length, __filename);

  const decodedFiles = await Promise.all(files.map(file => decodeFile(file)));

  let jobsStarted = 0;
  let jobsFinished = 0;
  for (const { file, bitmap, size } of decodedFiles) {
    const ext = extname(file);
    const base = basename(file, ext);

    for (const [encName, value] of Object.entries(supportedFormats)) {
      if (!program[encName]) {
        continue;
      }
      const encConfig = Object.assign(
        {},
        value.defaultEncoderOptions,
        JSON5.parse(program[encName])
      );
      const outputFile = join(program.outputDir, `${base}.${value.extension}`);
      jobsStarted++;
      workerPool
        .dispatchJob({
          file,
          size,
          bitmap,
          outputFile,
          encName,
          encConfig
        })
        .then(({ outputFile, inputSize, outputSize }) => {
          jobsFinished++;
          const numDigits = jobsStarted.toString().length;
          console.log(
            `${jobsFinished
              .toString()
              .padStart(
                numDigits
              )}/${jobsStarted}: ${outputFile} ${prettyPrintSize(
              inputSize
            )} -> ${prettyPrintSize(outputSize)} (${(
              (outputSize / inputSize) *
              100
            ).toFixed(1)}%)`
          );
        });
    }
  }
  // Wait for all jobs to finish
  await workerPool.join();
}
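// Each finished job prints one progress line like (values illustrative):
//   " 3/12: out/photo.webp 2.34MB -> 456.78KB (19.1%)"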

if (isMainThread) {
  program
    .name("squoosh-cli")
    .version(version)
    .arguments("<files...>")
    .option("-d, --output-dir <dir>", "Output directory", ".")
    .action(processFiles);

  // Create a CLI option for each supported encoder
  for (const [key, value] of Object.entries(supportedFormats)) {
    program.option(
      `--${key} [config]`,
      `Use ${value.name} to generate a .${value.extension} file with the given configuration`
    );
  }

  program.parse(process.argv);
} else {
  WorkerPool.useThisThreadAsWorker(encodeFile);
}
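// The main thread parses the CLI and dispatches jobs; every other thread registers
// encodeFile as its job handler.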

/*
@@ -81,73 +198,3 @@ const visdifModule = require("../codecs/visdif/visdif.js");
    attempts
  };
}*/

async function processFiles(files) {
  // Create output directory
  await fsp.mkdir(program.outputDir, { recursive: true });
  const workerPool = new WorkerPool(cpus().length, __filename);

  for (const file of files) {
    const ext = extname(file);
    const base = basename(file, ext);
    const bitmap = await decodeFile(file);

    for (const [encName, value] of Object.entries(supportedFormats)) {
      if (!program[encName]) {
        continue;
      }
      const encConfig = Object.assign(
        {},
        value.defaultEncoderOptions,
        JSON5.parse(program[encName])
      );
      const outputFile = join(program.outputDir, `${base}.${value.extension}`);
      workerPool
        .dispatchJob({
          bitmap,
          outputFile,
          encName,
          encConfig
        })
        .then(({ outputSize }) => {
          console.log(`Written ${file}. Size: ${outputSize}`);
        });
    }
  }
  // Wait for all jobs to finish
  await workerPool.join();
}

if (isMainThread) {
  program
    .name("squoosh-cli")
    .version(version)
    .arguments("<files...>")
    .option("-d, --output-dir <dir>", "Output directory", ".")
    .action(processFiles);

  // Create a CLI option for each supported encoder
  for (const [key, value] of Object.entries(supportedFormats)) {
    program.option(
      `--${key} [config]`,
      `Use ${value.name} to generate a .${value.extension} file with the given configuration`
    );
  }

  program.parse(process.argv);
} else {
  WorkerPool.useThisThreadAsWorker(
    async ({ id, bitmap, outputFile, encName, encConfig }) => {
      console.log("received", { outputFile, encName });
      const encoder = await supportedFormats[encName].enc();
      const out = encoder.encode(
        bitmap.data.buffer,
        bitmap.width,
        bitmap.height,
        encConfig
      );
      await fsp.writeFile(outputFile, out);
      return { outputSize: out.length };
    }
  );
}
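For context, a sketch of an invocation that would exercise the new progress output. The "webp" encoder name and its "quality" option are assumptions, not part of this diff; the -d/--output-dir flag and the generated --<encoder> [config] flags come from the option setup above:

    squoosh-cli --webp '{quality: 75}' -d out photo.jpg logo.png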