Started migrating workflow controller
51  shared-operations/src/functions/common/detectEmptyPages.ts  Normal file
@@ -0,0 +1,51 @@
import { PdfFile } from '../../wrappers/PdfFile';
import { PDFPageProxy } from "pdfjs-dist/types/src/display/api.js";
import { Image } from 'image-js';

import { getImagesOnPage } from "./getImagesOnPage.js";

export async function detectEmptyPages(file: PdfFile, whiteThreshold: number): Promise<number[]> {
    const pdfDoc = await file.getAsPdfJs();

    const emptyPages: number[] = [];
    for (let i = 1; i <= pdfDoc.numPages; i++) {
        const page = await pdfDoc.getPage(i);
        console.log("Checking page " + i);

        if (await hasText(page)) {
            console.log(`Found text on page ${i}, page is not empty`);
            continue;
        }

        if (!await areImagesBlank(page, whiteThreshold)) {
            console.log(`Found non-white image on page ${i}, page is not empty`);
            continue;
        }

        console.log(`Page ${i} is empty.`);
        emptyPages.push(i - 1); // collect 0-based page indices
    }
    return emptyPages;
}

async function hasText(page: PDFPageProxy): Promise<boolean> {
    const textContent = await page.getTextContent();
    return textContent.items.length > 0;
}

async function areImagesBlank(page: PDFPageProxy, threshold: number): Promise<boolean> {
    const images = await getImagesOnPage(page);
    for (const image of images) {
        if (!await isImageBlank(image, threshold))
            return false;
    }
    return true;
}

async function isImageBlank(image: string | Uint8Array | ArrayBuffer, threshold: number): Promise<boolean> {
    const img = await Image.load(image);
    const grey = img.grey();
    const mean = grey.getMean();
    return mean[0] <= threshold;
}
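A minimal usage sketch (not part of the commit): the indices returned above are 0-based, so they can be passed straight to the removePages/selectPages functions added later in this diff. The helper name below is hypothetical, and the threshold is simply forwarded to isImageBlank.

import { PdfFile } from '../../wrappers/PdfFile';
import { detectEmptyPages } from './detectEmptyPages';

// Hypothetical helper: report which pages of an already-loaded PdfFile look empty.
export async function logEmptyPages(file: PdfFile, whiteThreshold: number): Promise<number[]> {
    const emptyPageIndices = await detectEmptyPages(file, whiteThreshold);
    console.log(`${file.filename}: ${emptyPageIndices.length} empty page(s)`, emptyPageIndices);
    return emptyPageIndices;
}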
15  shared-operations/src/functions/common/getImagesOnPage.ts  Normal file
@@ -0,0 +1,15 @@
import { PDFPageProxy } from "pdfjs-dist/types/src/display/api.js";
import * as PDFJS from 'pdfjs-dist';

export async function getImagesOnPage(page: PDFPageProxy) {
    const ops = await page.getOperatorList();
    const images: any[] = [];
    for (let j = 0; j < ops.fnArray.length; j++) {
        if (ops.fnArray[j] === PDFJS.OPS.paintImageXObject) {
            const image = page.objs.get(ops.argsArray[j][0]);
            images.push(image);
        }
    }
    return images;
}
47  shared-operations/src/functions/common/pdf-utils.ts  Normal file
@@ -0,0 +1,47 @@
import { PdfFile, convertAllToPdfLibFile } from '../../wrappers/PdfFile';

export async function sortPdfs(
    files: PdfFile[],
    sortType: "orderProvided"|"byFileName"|"byDateModified"|"byDateCreated"|"byPDFTitle" = "orderProvided"
): Promise<PdfFile[]> {

    const pdfLibFiles = await convertAllToPdfLibFile(files);

    switch (sortType) {
        case "byFileName":
            pdfLibFiles.sort((a, b) => {
                if (!a || !b) return 0;
                const ad = a.filename, bd = b.filename;
                if (!ad || !bd) return 0;
                return ad.localeCompare(bd);
            });
            break;
        case "byDateModified":
            pdfLibFiles.sort((a, b) => {
                const ad = a.pdfLib?.getModificationDate()?.getTime();
                const bd = b.pdfLib?.getModificationDate()?.getTime();
                if (!ad || !bd) return 0;
                return ad > bd ? 1 : -1;
            });
            break;
        case "byDateCreated":
            pdfLibFiles.sort((a, b) => {
                const ad = a.pdfLib?.getCreationDate()?.getTime();
                const bd = b.pdfLib?.getCreationDate()?.getTime();
                if (!ad || !bd) return 0;
                return ad > bd ? 1 : -1;
            });
            break;
        case "byPDFTitle":
            pdfLibFiles.sort((a, b) => {
                const ad = a.pdfLib?.getTitle();
                const bd = b.pdfLib?.getTitle();
                if (!ad || !bd) return 0;
                return ad.localeCompare(bd);
            });
            break;
    }

    return pdfLibFiles;
}
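A short usage sketch (not part of the commit; the wrapper name is made up): ordering a batch of loaded files before a merge. Omitting the second argument keeps the order the files were provided in.

import { PdfFile } from '../../wrappers/PdfFile';
import { sortPdfs } from './pdf-utils';

// Hypothetical helper: order a batch of loaded files by filename.
export async function sortByFilename(files: PdfFile[]): Promise<PdfFile[]> {
    return sortPdfs(files, "byFileName");
}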
12  shared-operations/src/functions/impose.ts  Normal file
@@ -0,0 +1,12 @@
export async function impose(snapshot: any, nup: number, format: string, pdfcpuWrapper: any) {
    return await pdfcpuWrapper.oneToOne([
        "pdfcpu.wasm",
        "nup",
        "-c",
        "disable",
        "f:" + format,
        "/output.pdf",
        String(nup),
        "input.pdf",
    ], snapshot);
}
18  shared-operations/src/functions/mergePDFs.ts  Normal file
@@ -0,0 +1,18 @@
import { PDFDocument } from 'pdf-lib';
import { PdfFile, convertAllToPdfLibFile, fromPdfLib } from '../wrappers/PdfFile';

export async function mergePDFs(files: PdfFile[]): Promise<PdfFile> {

    const pdfLibFiles = await convertAllToPdfLibFile(files);

    const mergedPdf = await PDFDocument.create();

    for (let i = 0; i < pdfLibFiles.length; i++) {
        const pdfToMerge = await pdfLibFiles[i].getAsPdfLib();
        const copiedPages = await mergedPdf.copyPages(pdfToMerge, pdfToMerge.getPageIndices());
        copiedPages.forEach((page) => mergedPdf.addPage(page));
    }

    return fromPdfLib(mergedPdf, files[0].filename);
}
26  shared-operations/src/functions/rotatePages.ts  Normal file
@@ -0,0 +1,26 @@
import { degrees } from 'pdf-lib';
import { PdfFile, fromPdfLib } from '../wrappers/PdfFile';

export async function rotatePages(file: PdfFile, rotation: number|number[]): Promise<PdfFile> {
    const pdfDoc = await file.getAsPdfLib();
    const pages = pdfDoc.getPages();

    if (Array.isArray(rotation)) {
        if (rotation.length != pages.length) {
            throw new Error(`Number of given rotations '${rotation.length}' is not the same as the number of pages '${pages.length}'`);
        }
        for (let i = 0; i < rotation.length; i++) {
            const oldRotation = pages[i].getRotation().angle;
            pages[i].setRotation(degrees(oldRotation + rotation[i]));
        }
    } else {
        pages.forEach(page => {
            // Apply the same rotation offset to every page
            const oldRotation = page.getRotation().angle;
            page.setRotation(degrees(oldRotation + rotation));
        });
    }

    return fromPdfLib(pdfDoc, file.filename);
}
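A sketch of the two calling conventions rotatePages supports (scalar vs. per-page array); the wrapper below is hypothetical and only illustrates the shapes of the second argument.

import { PdfFile } from '../wrappers/PdfFile';
import { rotatePages } from './rotatePages';

// Hypothetical helper: rotate every page of one file by 90 degrees, and give a
// three-page file individual rotations (array length must equal the page count).
export async function rotateExamples(file: PdfFile, threePageFile: PdfFile): Promise<PdfFile[]> {
    const all = await rotatePages(file, 90);
    const perPage = await rotatePages(threePageFile, [0, 90, 180]);
    return [all, perPage];
}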
36  shared-operations/src/functions/scaleContent.ts  Normal file
@@ -0,0 +1,36 @@
import { PDFPage } from 'pdf-lib';
import { PdfFile, fromPdfLib } from '../wrappers/PdfFile';

export async function scaleContent(file: PdfFile, scaleFactor: number|number[]): Promise<PdfFile> {
    const pdfDoc = await file.getAsPdfLib();
    const pages = pdfDoc.getPages();

    if (Array.isArray(scaleFactor)) {
        if (scaleFactor.length != pages.length) {
            throw new Error(`Number of given scale factors '${scaleFactor.length}' is not the same as the number of pages '${pages.length}'`);
        }
        for (let i = 0; i < scaleFactor.length; i++) {
            scalePage(pages[i], scaleFactor[i]);
        }
    } else {
        pages.forEach(page => scalePage(page, scaleFactor));
    }

    return fromPdfLib(pdfDoc, file.filename);
}

function scalePage(page: PDFPage, scaleFactor: number) {
    const width = page.getWidth();
    const height = page.getHeight();

    // Scale content
    page.scaleContent(scaleFactor, scaleFactor);
    const scaledDiff = {
        width: Math.round(width - scaleFactor * width),
        height: Math.round(height - scaleFactor * height),
    };

    // Center content in new page format
    page.translateContent(Math.round(scaledDiff.width / 2), Math.round(scaledDiff.height / 2));
}
56  shared-operations/src/functions/scalePage.ts  Normal file
@@ -0,0 +1,56 @@
import { PDFPage } from 'pdf-lib';
import { PdfFile, fromPdfLib } from '../wrappers/PdfFile';

export async function scalePage(file: PdfFile, pageSize: {width?:number,height?:number}|{width?:number,height?:number}[]): Promise<PdfFile> {
    const pdfDoc = await file.getAsPdfLib();
    const pages = pdfDoc.getPages();

    if (Array.isArray(pageSize)) {
        if (pageSize.length != pages.length) {
            throw new Error(`Number of given sizes '${pageSize.length}' is not the same as the number of pages '${pages.length}'`);
        }
        for (let i = 0; i < pageSize.length; i++) {
            resize(pages[i], pageSize[i]);
        }
    } else {
        pages.forEach(page => resize(page, pageSize));
    }

    return fromPdfLib(pdfDoc, file.filename);
}

function resize(page: PDFPage, newSize: {width?:number,height?:number}) {
    const calculatedSize = calculateSize(page, newSize);

    // Compute the scale ratios against the old size before changing it,
    // otherwise both ratios would always be 1 and the content would not scale.
    const xRatio = calculatedSize.width / page.getWidth();
    const yRatio = calculatedSize.height / page.getHeight();

    page.setSize(calculatedSize.width, calculatedSize.height);
    page.scaleContent(xRatio, yRatio);
}

function calculateSize(page: PDFPage, newSize: {width?:number,height?:number}): {width:number,height:number} {
    if (!newSize.width && !newSize.height) {
        throw new Error(`Sizes '${newSize}' cannot have null width and null height`);
    } else if (!newSize.width && newSize.height) {
        const oldSize = page.getSize();
        const ratio = oldSize.width / oldSize.height;
        return { width: newSize.height * ratio, height: newSize.height };
    } else if (newSize.width && !newSize.height) {
        const oldSize = page.getSize();
        const ratio = oldSize.height / oldSize.width;
        return { width: newSize.width, height: newSize.width * ratio };
    }
    return { width: newSize.width!, height: newSize.height! };
}

export const PageSize = Object.freeze({
    a4: {
        width: 594.96,
        height: 841.92
    },
    letter: {
        width: 612,
        height: 792
    }
});
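A usage sketch (not from the commit): PageSize provides A4/Letter presets, and calculateSize derives the missing dimension from the current aspect ratio when only one of width/height is given. The helper names are hypothetical.

import { PdfFile } from '../wrappers/PdfFile';
import { scalePage, PageSize } from './scalePage';

// Hypothetical helpers: normalise a document to A4, or fit it to a given width
// while keeping each page's aspect ratio.
export async function toA4(file: PdfFile): Promise<PdfFile> {
    return scalePage(file, PageSize.a4);
}

export async function toWidth(file: PdfFile, width: number): Promise<PdfFile> {
    return scalePage(file, { width });
}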
110  shared-operations/src/functions/splitOn.ts  Normal file
@@ -0,0 +1,110 @@
import jsQR from "jsqr";

import { detectEmptyPages } from "./common/detectEmptyPages.js";
import { getImagesOnPage } from "./common/getImagesOnPage.js";
import { selectPages } from "./subDocumentFunctions";
import { PdfFile } from '../wrappers/PdfFile.js';

export async function splitOn(
    file: PdfFile,
    type: "BAR_CODE"|"QR_CODE"|"BLANK_PAGE",
    whiteThreshold: number) {
    let splitAtPages: number[] = [];

    switch (type) {
        case "BAR_CODE":
            // TODO: Implement
            throw new Error("This split-type has not been implemented yet");

        case "QR_CODE":
            splitAtPages = await getPagesWithQRCode(file);
            break;

        case "BLANK_PAGE":
            splitAtPages = await detectEmptyPages(file, whiteThreshold);
            break;

        default:
            throw new Error("An invalid split-type was provided.");
    }

    console.log("Split At Pages: ", splitAtPages);

    // Remove detected pages & split
    const pdfDoc = await file.getAsPdfLib();
    const numberOfPages = pdfDoc.getPageCount();

    let pagesArray: number[] = [];
    let splitAfter = splitAtPages.shift();
    const subDocuments: PdfFile[] = [];

    for (let i = 0; i < numberOfPages; i++) {
        if (i == splitAfter) {
            // Separator page: close the current sub-document and skip the page itself
            if (pagesArray.length > 0) {
                subDocuments.push(await selectPages(file, pagesArray));
                pagesArray = [];
            }
            splitAfter = splitAtPages.shift();
        }
        else {
            pagesArray.push(i);
        }
    }
    if (pagesArray.length > 0) {
        subDocuments.push(await selectPages(file, pagesArray));
    }
    pagesArray = [];

    return subDocuments;

    async function getPagesWithQRCode(file: PdfFile) {
        const pdfDoc = await file.getAsPdfJs();

        const pagesWithQR: number[] = [];
        for (let i = 0; i < pdfDoc.numPages; i++) {
            console.log("Page:", i, "/", pdfDoc.numPages);
            const page = await pdfDoc.getPage(i + 1);

            const images = await getImagesOnPage(page);
            console.log("images:", images);
            for (const image of images) {
                const data = await checkForQROnImage(image);
                if (data == "https://github.com/Frooodle/Stirling-PDF") {
                    pagesWithQR.push(i);
                }
            }
        }
        if (pagesWithQR.length == 0) {
            console.warn("Could not find any QR Codes in the provided PDF.");
        }
        return pagesWithQR;
    }

    async function checkForQROnImage(image: any) {
        // TODO: There is an issue with the jsQR package (it expects rgba but sometimes we have rgb),
        // and the package seems to be stale; we could create a fork and fix the issue.
        // In the meanwhile we just force rgba:
        if (image.data.length == image.width * image.height * 3) {
            const tmpArray = new Uint8ClampedArray(image.width * image.height * 4);

            // Iterate through the original array and add an alpha channel
            for (let i = 0, j = 0; i < image.data.length; i += 3, j += 4) {
                tmpArray[j] = image.data[i];         // Red channel
                tmpArray[j + 1] = image.data[i + 1]; // Green channel
                tmpArray[j + 2] = image.data[i + 2]; // Blue channel
                tmpArray[j + 3] = 255;               // Alpha channel (fully opaque)
            }

            image.data = tmpArray;
        }

        const code = jsQR(image.data, image.width, image.height);
        return code ? code.data : null;
    }
}
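A sketch of the intended call (not part of the commit): splitting a scanned batch at blank separator sheets; as the loop above shows, the separator pages themselves are dropped from the resulting sub-documents. The helper name is hypothetical.

import { PdfFile } from '../wrappers/PdfFile';
import { splitOn } from './splitOn';

// Hypothetical helper: split at blank pages detected with the given threshold.
export async function splitAtBlankSheets(file: PdfFile, whiteThreshold: number): Promise<PdfFile[]> {
    return splitOn(file, "BLANK_PAGE", whiteThreshold);
}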
29  shared-operations/src/functions/splitPDF.ts  Normal file
@@ -0,0 +1,29 @@
import { selectPages } from "./subDocumentFunctions";
import { PdfFile } from '../wrappers/PdfFile';

export async function splitPDF(file: PdfFile, splitAfterPageArray: number[]): Promise<PdfFile[]> {
    const byteFile = await file.convertToPdfLibFile();
    if (!byteFile?.pdfLib) return [];

    const numberOfPages = byteFile.pdfLib.getPages().length;

    let pagesArray: number[] = [];
    let splitAfter = splitAfterPageArray.shift();
    const subDocuments: PdfFile[] = [];

    for (let i = 0; i < numberOfPages; i++) {
        // Compare against undefined so that splitting after page index 0 also works
        if (splitAfter !== undefined && i > splitAfter && pagesArray.length > 0) {
            subDocuments.push(await selectPages(byteFile, pagesArray));
            splitAfter = splitAfterPageArray.shift();
            pagesArray = [];
        }
        pagesArray.push(i);
    }
    subDocuments.push(await selectPages(byteFile, pagesArray));
    pagesArray = [];

    return subDocuments;
}
227  shared-operations/src/functions/subDocumentFunctions.ts  Normal file
@@ -0,0 +1,227 @@
import { PDFDocument } from 'pdf-lib';
import { PdfFile, fromPdfLib } from '../wrappers/PdfFile.js';
import { detectEmptyPages } from "./common/detectEmptyPages.js";


export async function sortPagesWithPreset(file: PdfFile, sortPreset: string, fancyPageSelector: string) {
    if (sortPreset === "CUSTOM_PAGE_ORDER") {
        return rearrangePages(file, fancyPageSelector);
    }

    const sortFunction = sorts[sortPreset];
    if (!sortFunction) {
        throw new Error("Operation not supported");
    }

    const byteFile = await file.convertToPdfLibFile();
    if (!byteFile?.pdfLib) return byteFile;

    const pageCount = byteFile.pdfLib.getPageCount();
    const sortIndices = sortFunction(pageCount);
    return selectPages(byteFile, sortIndices);
}

export async function rearrangePages(file: PdfFile, fancyPageSelector: string): Promise<PdfFile> {
    const byteFile = await file.convertToPdfLibFile();
    if (!byteFile?.pdfLib) return byteFile;

    const pagesToExtractArray = parseFancyPageSelector(fancyPageSelector, byteFile.pdfLib.getPageCount());
    const newDocument = selectPages(byteFile, pagesToExtractArray);
    return newDocument;
}

export async function selectPages(file: PdfFile, pagesToExtractArray: number[]): Promise<PdfFile> {
    const byteFile = await file.convertToPdfLibFile();
    if (!byteFile?.pdfLib) return byteFile;

    const subDocument = await PDFDocument.create();

    // Check that the highest requested page index is not larger than the page count
    if (Math.max(...pagesToExtractArray) >= byteFile.pdfLib.getPageCount()) {
        throw new Error(`The PDF document only has ${byteFile.pdfLib.getPageCount()} pages and you tried to extract page ${Math.max(...pagesToExtractArray)}`);
    }

    const copiedPages = await subDocument.copyPages(byteFile.pdfLib, pagesToExtractArray);

    for (let i = 0; i < copiedPages.length; i++) {
        subDocument.addPage(copiedPages[i]);
    }

    return fromPdfLib(subDocument, file.filename);
}

export async function removePages(file: PdfFile, pagesToRemoveArray: number[]): Promise<PdfFile> {
    const byteFile = await file.convertToPdfLibFile();
    if (!byteFile?.pdfLib) return byteFile;

    const pagesToExtractArray = invertSelection(pagesToRemoveArray, byteFile.pdfLib.getPageIndices());
    return selectPages(byteFile, pagesToExtractArray);
}

export async function removeBlankPages(file: PdfFile, whiteThreshold: number) {
    const emptyPages = await detectEmptyPages(file, whiteThreshold);
    console.log("Empty Pages: ", emptyPages);
    return removePages(file, emptyPages);
}


/**
 * Parse the page selector string used in the 'PDF Page Organizer'
 * @param pageNumbers  comma-separated selector, e.g. "1,3-5,2n,all"
 * @param totalPages   number of pages in the document
 * @returns            0-based page indices in the requested order
 */
function parseFancyPageSelector(pageNumbers: string, totalPages: number): number[] {
    // Translated to JS from the original Java function
    const pageOrderArr = pageNumbers.split(",");
    const newPageOrder: number[] = [];

    // loop through the page order array
    pageOrderArr.forEach(element => {
        if (element.toLocaleLowerCase() === "all") {
            for (let i = 0; i < totalPages; i++) {
                newPageOrder.push(i);
            }
            // As all pages are already added, no need to check further
            return;
        }
        else if (element.match("\\d*n\\+?-?\\d*|\\d*\\+?n")) {
            // Handle page order as a function, e.g. "2n" or "3n+1"
            let coefficient = 0;
            let constant = 0;
            let coefficientExists = false;
            let constantExists = false;

            if (element.includes("n")) {
                const parts = element.split("n");
                if (parts[0]) {
                    coefficient = parseInt(parts[0]);
                    coefficientExists = true;
                }
                if (parts.length > 1 && parts[1]) {
                    constant = parseInt(parts[1]);
                    constantExists = true;
                }
            } else if (element.includes("+")) {
                constant = parseInt(element.replace("+", ""));
                constantExists = true;
            }

            for (let i = 1; i <= totalPages; i++) {
                let pageNum = coefficientExists ? coefficient * i : i;
                pageNum += constantExists ? constant : 0;

                if (pageNum <= totalPages && pageNum > 0) {
                    newPageOrder.push(pageNum - 1);
                }
            }
        } else if (element.includes("-")) {
            // split the range into start and end page
            const range = element.split("-");
            const start = parseInt(range[0]);
            let end = parseInt(range[1]);
            // clamp the end page to the total number of pages
            if (end > totalPages) {
                end = totalPages;
            }
            // loop through the range of pages
            for (let j = start; j <= end; j++) {
                newPageOrder.push(j - 1);
            }
        } else {
            // if the element is a single page
            newPageOrder.push(parseInt(element) - 1);
        }
    });

    return newPageOrder;
}

function invertSelection(selection: number[], pageIndices: number[]): number[] {
    const pageIndicesCopy = [...pageIndices];
    return pageIndicesCopy.filter(x => !selection.includes(x));
}

//////////////////
// Page Sorters //
//////////////////
function reverseSort(totalPages: number): number[] {
    return [...Array(totalPages).keys()].reverse();
}

function duplexSort(totalPages: number): number[] {
    // Translated to JS from the original Java function
    const newPageOrder: number[] = [];
    const half = Math.floor((totalPages + 1) / 2); // This ensures proper behavior with odd numbers of pages

    for (let i = 1; i <= half; i++) {
        newPageOrder.push(i - 1);
        if (i <= totalPages - half) {
            // Avoid going out of bounds
            newPageOrder.push(totalPages - i);
        }
    }

    return newPageOrder;
}

function bookletSort(totalPages: number): number[] {
    const newPageOrder: number[] = [];
    for (let i = 0; i < totalPages / 2; i++) {
        newPageOrder.push(i);
        newPageOrder.push(totalPages - i - 1);
    }
    return newPageOrder;
}

function sideStitchBooklet(totalPages: number): number[] {
    const newPageOrder: number[] = [];
    for (let i = 0; i < (totalPages + 3) / 4; i++) {
        const begin = i * 4;
        newPageOrder.push(Math.min(begin + 3, totalPages - 1));
        newPageOrder.push(Math.min(begin, totalPages - 1));
        newPageOrder.push(Math.min(begin + 1, totalPages - 1));
        newPageOrder.push(Math.min(begin + 2, totalPages - 1));
    }
    return newPageOrder;
}

function oddEvenSplit(totalPages: number): number[] {
    const newPageOrder: number[] = [];
    for (let i = 1; i <= totalPages; i += 2) {
        newPageOrder.push(i - 1);
    }
    for (let i = 2; i <= totalPages; i += 2) {
        newPageOrder.push(i - 1);
    }
    return newPageOrder;
}

function removeFirst(totalPages: number): number[] {
    return [...Array(totalPages - 1).keys()].map(i => i + 1);
}

function removeLast(totalPages: number): number[] {
    return [...Array(totalPages - 1).keys()];
}

function removeFirstAndLast(totalPages: number): number[] {
    return [...Array(totalPages - 2).keys()].map(i => i + 1);
}

export type SortFunction = (totalPages: number) => number[];
type Sorts = {
    [key: string]: SortFunction;
};
export const sorts: Sorts = Object.freeze({
    "REVERSE_ORDER": reverseSort,
    "DUPLEX_SORT": duplexSort,
    "BOOKLET_SORT": bookletSort,
    "SIDE_STITCH_BOOKLET_SORT": sideStitchBooklet,
    "ODD_EVEN_SPLIT": oddEvenSplit,
    "REMOVE_FIRST": removeFirst,
    "REMOVE_LAST": removeLast,
    "REMOVE_FIRST_AND_LAST": removeFirstAndLast,
});
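Worked expansions of the page-selector syntax, derived from parseFancyPageSelector above for a 10-page document (the wrapper names are hypothetical, not part of the commit):

import { PdfFile } from '../wrappers/PdfFile';
import { rearrangePages, sortPagesWithPreset } from './subDocumentFunctions';

// For totalPages = 10 the selector pieces expand to 0-based indices as follows:
//   "all"  -> 0..9        "1-3" -> 0,1,2       "4" -> 3
//   "2n"   -> 1,3,5,7,9   (the even-numbered pages)
//   "3n+1" -> 3,6,9       (pages 4, 7 and 10)
export async function keepEvenNumberedPages(file: PdfFile): Promise<PdfFile> {
    return rearrangePages(file, "2n");
}

// Presets other than CUSTOM_PAGE_ORDER are looked up in the sorts table instead.
export async function reversePages(file: PdfFile): Promise<PdfFile> {
    return sortPagesWithPreset(file, "REVERSE_ORDER", "");
}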
56  shared-operations/src/functions/updateMetadata.ts  Normal file
@@ -0,0 +1,56 @@
import { PdfFile, fromPdfLib } from '../wrappers/PdfFile';

export type Metadata = {
    deleteAll?: boolean,        // Delete all metadata if set to true
    author?: string,            // The author of the document
    creationDate?: Date,        // The creation date of the document (format: yyyy/MM/dd HH:mm:ss)
    creator?: string,           // The creator of the document
    keywords?: string,          // The keywords for the document
    modificationDate?: Date,    // The modification date of the document (format: yyyy/MM/dd HH:mm:ss)
    producer?: string,          // The producer of the document
    subject?: string,           // The subject of the document
    title?: string,             // The title of the document
    //trapped?: string,         // The trapped status of the document
    //allRequestParams?: {[key: string]: [key: string]}, // Map list of key and value of custom parameters. Note these must start with customKey and customValue if they are non-standard
}

export async function updateMetadata(file: PdfFile, metadata: Metadata|null): Promise<PdfFile> {
    const pdfDoc = await file.getAsPdfLib();

    if (!metadata || metadata.deleteAll) {
        pdfDoc.setAuthor("");
        pdfDoc.setCreationDate(new Date(0));
        pdfDoc.setCreator("");
        pdfDoc.setKeywords([]);
        pdfDoc.setModificationDate(new Date(0));
        pdfDoc.setProducer("");
        pdfDoc.setSubject("");
        pdfDoc.setTitle("");
    }
    if (!metadata) {
        return fromPdfLib(pdfDoc, file.filename);
    }

    if (metadata.author)
        pdfDoc.setAuthor(metadata.author);
    if (metadata.creationDate)
        pdfDoc.setCreationDate(metadata.creationDate);
    if (metadata.creator)
        pdfDoc.setCreator(metadata.creator);
    if (metadata.keywords)
        pdfDoc.setKeywords(metadata.keywords.split(","));
    if (metadata.modificationDate)
        pdfDoc.setModificationDate(metadata.modificationDate);
    if (metadata.producer)
        pdfDoc.setProducer(metadata.producer);
    if (metadata.subject)
        pdfDoc.setSubject(metadata.subject);
    if (metadata.title)
        pdfDoc.setTitle(metadata.title);

    // TODO add trapped and custom metadata. May need another library

    return fromPdfLib(pdfDoc, file.filename);
}
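A small usage sketch (not from the commit): deleteAll wipes the standard fields first, and any fields passed alongside it in the same call are then re-applied.

import { PdfFile } from '../wrappers/PdfFile';
import { updateMetadata } from './updateMetadata';

// Hypothetical helper: strip all existing metadata, then set a fresh title and author.
export async function relabel(file: PdfFile, title: string, author: string): Promise<PdfFile> {
    return updateMetadata(file, { deleteAll: true, title, author });
}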
24  shared-operations/src/index.ts  Normal file
@@ -0,0 +1,24 @@
// Import injected libraries here!

import { sortPagesWithPreset, rearrangePages, selectPages, removePages, removeBlankPages } from "./functions/subDocumentFunctions";
import { impose } from "./functions/impose";
import { mergePDFs } from './functions/mergePDFs';
import { rotatePages } from './functions/rotatePages';
import { scaleContent } from './functions/scaleContent';
import { scalePage } from './functions/scalePage';
import { splitOn } from './functions/splitOn';
import { splitPDF } from './functions/splitPDF';
import { updateMetadata } from "./functions/updateMetadata";

export default {
    sortPagesWithPreset, rearrangePages, selectPages, removePages, removeBlankPages,
    impose,
    mergePDFs,
    rotatePages,
    scaleContent,
    scalePage,
    splitOn,
    splitPDF,
    updateMetadata,
}
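How a consumer might use the aggregated default export; the import paths below are assumptions that depend on how shared-operations ends up being packaged, and the helper is hypothetical.

import SharedOperations from 'shared-operations'; // hypothetical package entry point
import { PdfFile } from 'shared-operations/src/wrappers/PdfFile';

// Merge a batch of files, then rotate every page of the result by 90 degrees.
export async function mergeAndRotate(files: PdfFile[]): Promise<PdfFile> {
    const merged = await SharedOperations.mergePDFs(files);
    return SharedOperations.rotatePages(merged, 90);
}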
8  shared-operations/src/wasm/browserfs.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long
104  shared-operations/src/wasm/pdfcpu/pdfcpu-wrapper-browser.js  Normal file
@@ -0,0 +1,104 @@
// imports browserfs via index.html script-tag

let wasmLocation = "/wasm/pdfcpu/";

let fs;
let Buffer;

// TODO: This can later be deferred to load asynchronously
configureFs();
loadWasm();

function configureFs() {
    BrowserFS.configure(
        {
            fs: "InMemory",
        },
        function (e) {
            if (e) {
                // An error happened!
                throw e;
            }
            fs = BrowserFS.BFSRequire("fs");
            Buffer = BrowserFS.BFSRequire("buffer").Buffer;

            window.fs = fs;
            window.Buffer = Buffer;
        }
    );
}

function loadWasm() {
    const script = document.createElement("script");
    script.src = wasmLocation + "/wasm_exec.js";
    script.async = true;
    document.body.appendChild(script);
}

const runWasm = async (param) => {
    if (window.cachedWasmResponse === undefined) {
        const response = await fetch(wasmLocation + "/pdfcpu.wasm");
        const buffer = await response.arrayBuffer();
        window.cachedWasmResponse = buffer;
        window.go = new Go();
    }
    const { instance } = await WebAssembly.instantiate(
        window.cachedWasmResponse,
        window.go.importObject
    );
    window.go.argv = param;
    await window.go.run(instance);
    return window.go.exitCode;
};

async function loadFileAsync(data) {
    console.log(`Writing file to MemoryFS`);
    await fs.writeFile(`/input.pdf`, data);
    console.log(`Write done. Validating...`);
    let exitcode = await runWasm([
        "pdfcpu.wasm",
        "validate",
        "-c",
        "disable",
        `/input.pdf`,
    ]);

    if (exitcode !== 0)
        throw new Error("There was an error validating your PDFs");

    console.log(`File is Valid`);
}

export async function impose(snapshot, nup, format) {
    // TODO: not implemented in the browser wrapper yet
}

export async function oneToOne(wasmArray, snapshot) {
    await loadFileAsync(Buffer.from(snapshot));

    console.log("Nuping File");
    let exitcode = await runWasm(wasmArray);

    if (exitcode !== 0) {
        console.error("There was an error nuping your PDFs");
        return;
    }

    await fs.unlink("input.pdf");
    const contents = fs.readFileSync("output.pdf");
    fs.unlink("output.pdf");
    console.log("Your File is Ready!");
    return new Uint8Array(contents);
}

export async function manyToOne() {
    //TODO: Do this if necessary for some operations
}

export async function oneToMany() {
    //TODO: Do this if necessary for some operations
}

export async function manyToMany() {
    //TODO: Do this if necessary for some operations
}
145  shared-operations/src/wasm/pdfcpu/pdfcpu-wrapper-node.js  Normal file
@@ -0,0 +1,145 @@
import { WasmFs } from '@wasmer/wasmfs';
import path from "path";

let webWasmLocation = "/wasm/";
let nodeWasmLocation = "./public/wasm/";

let fs;
const wasmfs = new WasmFs();

// TODO: This can later be deferred to load asynchronously
(async () => {
    await loadWasm();
    await configureFs();
})();

async function configureFs() {
    // Can't use BrowserFS: https://github.com/jvilk/BrowserFS/issues/271
    fs = wasmfs.fs;
    global.fs = fs;

    console.log("InMemoryFs configured");
}

async function loadWasm() {
    global.crypto = (await import("crypto")).webcrypto; // wasm dependency
    await import("./wasm_exec.js");
}

const runWasm = async (param) => {
    if (global.cachedWasmResponse === undefined) {
        const buffer = (await import("fs")).readFileSync(nodeWasmLocation + "/pdfcpu.wasm");
        global.cachedWasmResponse = buffer;
        global.go = new Go();
    }
    const { instance } = await WebAssembly.instantiate(
        global.cachedWasmResponse,
        global.go.importObject
    );
    global.go.argv = param;
    await global.go.run(instance);
    return global.go.exitCode;
};

async function loadFileAsync(data) {
    console.log(`Writing file to Disk`);
    fs.writeFileSync(`input.pdf`, data);
    console.log(`Write done. Validating...`);
    let exitcode = await runWasm([
        "pdfcpu.wasm",
        "validate",
        "-c",
        "disable",
        `input.pdf`,
    ]);
    if (exitcode !== 0)
        throw new Error("There was an error validating your PDFs");

    // // Get logs of command
    // wasmfs.getStdOut().then(response => {
    //     console.log(response);
    // });

    console.log(`File is Valid`);
}

export async function oneToOne(wasmArray, snapshot) {
    await loadFileAsync(Buffer.from(snapshot));

    console.log("Nuping File");

    let exitcode = await runWasm(wasmArray);
    if (exitcode !== 0) {
        console.error("There was an error nuping your PDFs");
        return;
    }
    console.log("Nuping Done");

    await checkExistsWithTimeout("/output.pdf", 1000);
    console.log("Write started...");

    // TODO: Make this more elegant; this waits for the write to finish.
    // Maybe replace wasmfs with https://github.com/streamich/memfs
    let fileSize;
    while (true) {
        fileSize = fs.statSync("/output.pdf").size;
        await new Promise((resolve, reject) => {
            setTimeout(() => {
                resolve();
            }, 50);
        });
        if (fileSize > 0 && fileSize == fs.statSync("/output.pdf").size) // Wait until the file size stops changing
            break;
    }

    console.log("Write finished (file size is stable).");

    fs.unlinkSync("input.pdf");

    const data = fs.readFileSync("/output.pdf");
    if (data.length == 0) {
        throw Error("File size is 0; that should not happen. The write probably didn't finish in time.");
    }
    fs.unlinkSync("output.pdf");
    console.log("Your File is Ready!");
    return new Uint8Array(data);
}

export async function manyToOne() {
    //TODO: Do this if necessary for some pdfcpu operations
}

export async function oneToMany() {
    //TODO: Do this if necessary for some pdfcpu operations
}

export async function manyToMany() {
    //TODO: Do this if necessary for some pdfcpu operations
}

// THX: https://stackoverflow.com/questions/26165725/nodejs-check-file-exists-if-not-wait-till-it-exist
function checkExistsWithTimeout(filePath, timeout) {
    return new Promise(function (resolve, reject) {

        var timer = setTimeout(function () {
            watcher.close();
            reject(new Error('File did not exist and was not created during the timeout.'));
        }, timeout);

        fs.access(filePath, fs.constants.R_OK, function (err) {
            if (!err) {
                clearTimeout(timer);
                watcher.close();
                resolve();
            }
        });

        var dir = path.dirname(filePath);
        var watcher = fs.watch(dir, function (eventType, filename) {
            clearTimeout(timer);
            watcher.close();
            resolve();
        });
    });
}
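A sketch of how the Node wrapper plugs into the impose operation from earlier in this diff (the relative paths, the "A4" form value and the helper name are assumptions, not part of the commit):

import { impose } from '../../functions/impose';
import * as pdfcpuWrapper from './pdfcpu-wrapper-node.js';

// Hypothetical glue: run a 2-up imposition on a raw PDF byte snapshot.
// The wrapper writes the bytes to its in-memory FS, runs pdfcpu, and returns
// the resulting output.pdf as a Uint8Array (or undefined on failure).
export async function imposeTwoUp(snapshot: Uint8Array): Promise<Uint8Array | undefined> {
    return impose(snapshot, 2, "A4", pdfcpuWrapper);
}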
BIN  shared-operations/src/wasm/pdfcpu/pdfcpu.wasm  Normal file
Binary file not shown.
872  shared-operations/src/wasm/pdfcpu/wasm_exec.js  Normal file
@@ -0,0 +1,872 @@
|
||||
// Copyright 2018 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
(() => {
|
||||
// Map multiple JavaScript environments to a single common API,
|
||||
// preferring web standards over Node.js API.
|
||||
//
|
||||
// Environments considered:
|
||||
// - Browsers
|
||||
// - Node.js
|
||||
// - Electron
|
||||
// - Parcel
|
||||
// - Webpack
|
||||
|
||||
console.log("imported")
|
||||
if (typeof global !== "undefined") {
|
||||
// global already exists
|
||||
} else if (typeof window !== "undefined") {
|
||||
window.global = window;
|
||||
} else if (typeof self !== "undefined") {
|
||||
self.global = self;
|
||||
} else {
|
||||
throw new Error("cannot export Go (neither global, window nor self is defined)");
|
||||
}
|
||||
|
||||
let logFS = false
|
||||
var handler = {
|
||||
get: function (target, property) {
|
||||
if (property in target && target[property] instanceof Function) {
|
||||
return function () {
|
||||
if (logFS) {
|
||||
console.log(property, 'called', arguments);
|
||||
}
|
||||
// 将callback替换
|
||||
if (arguments[arguments.length - 1] instanceof Function) {
|
||||
var origCB = arguments[arguments.length - 1];
|
||||
var newCB = function () {
|
||||
if (logFS) {
|
||||
console.log('callback for', property, 'get called with args:', arguments);
|
||||
}
|
||||
return Reflect.apply(origCB, arguments.callee, arguments);
|
||||
}
|
||||
arguments[arguments.length - 1] = newCB;
|
||||
}
|
||||
return Reflect.apply(target[property], target, arguments);
|
||||
}
|
||||
} else {
|
||||
return target[property]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!global.require && typeof require !== "undefined") {
|
||||
global.require = require;
|
||||
}
|
||||
|
||||
|
||||
if (!global.fs && global.require) {
|
||||
|
||||
//const fs = require("fs");
|
||||
if (typeof fs === "object" && fs !== null && Object.keys(fs).length !== 0) {
|
||||
global.fs = fs;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
const enosys = () => {
|
||||
const err = new Error("not implemented");
|
||||
err.code = "ENOSYS";
|
||||
return err;
|
||||
};
|
||||
|
||||
if (!global.fs) {
|
||||
let outputBuf = "";
|
||||
global.fs = {
|
||||
constants: {
|
||||
O_WRONLY: -1,
|
||||
O_RDWR: -1,
|
||||
O_CREAT: -1,
|
||||
O_TRUNC: -1,
|
||||
O_APPEND: -1,
|
||||
O_EXCL: -1
|
||||
}, // unused
|
||||
writeSync(fd, buf) {
|
||||
outputBuf += decoder.decode(buf);
|
||||
const nl = outputBuf.lastIndexOf("\n");
|
||||
if (nl != -1) {
|
||||
console.log(outputBuf.substr(0, nl));
|
||||
outputBuf = outputBuf.substr(nl + 1);
|
||||
}
|
||||
return buf.length;
|
||||
},
|
||||
write(fd, buf, offset, length, position, callback) {
|
||||
if (offset !== 0 || length !== buf.length || position !== null) {
|
||||
callback(enosys());
|
||||
return;
|
||||
}
|
||||
const n = this.writeSync(fd, buf);
|
||||
callback(null, n);
|
||||
},
|
||||
chmod(path, mode, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
chown(path, uid, gid, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
close(fd, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
fchmod(fd, mode, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
fchown(fd, uid, gid, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
fstat(fd, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
fsync(fd, callback) {
|
||||
callback(null);
|
||||
},
|
||||
ftruncate(fd, length, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
lchown(path, uid, gid, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
link(path, link, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
lstat(path, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
mkdir(path, perm, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
open(path, flags, mode, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
read(fd, buffer, offset, length, position, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
readdir(path, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
readlink(path, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
rename(from, to, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
rmdir(path, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
stat(path, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
symlink(path, link, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
truncate(path, length, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
unlink(path, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
utimes(path, atime, mtime, callback) {
|
||||
callback(enosys());
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (!global.process) {
|
||||
global.process = {
|
||||
getuid() {
|
||||
return -1;
|
||||
},
|
||||
getgid() {
|
||||
return -1;
|
||||
},
|
||||
geteuid() {
|
||||
return -1;
|
||||
},
|
||||
getegid() {
|
||||
return -1;
|
||||
},
|
||||
getgroups() {
|
||||
throw enosys();
|
||||
},
|
||||
pid: -1,
|
||||
ppid: -1,
|
||||
umask() {
|
||||
throw enosys();
|
||||
},
|
||||
cwd() {
|
||||
throw enosys();
|
||||
},
|
||||
chdir() {
|
||||
throw enosys();
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if (!global.crypto && global.require) {
|
||||
const nodeCrypto = require("crypto");
|
||||
global.crypto = {
|
||||
getRandomValues(b) {
|
||||
nodeCrypto.randomFillSync(b);
|
||||
},
|
||||
};
|
||||
}
|
||||
if (!global.crypto) {
|
||||
throw new Error("global.crypto is not available, polyfill required (getRandomValues only)");
|
||||
}
|
||||
|
||||
if (!global.performance) {
|
||||
global.performance = {
|
||||
now() {
|
||||
const [sec, nsec] = process.hrtime();
|
||||
return sec * 1000 + nsec / 1000000;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (!global.TextEncoder && global.require) {
|
||||
global.TextEncoder = require("util").TextEncoder;
|
||||
}
|
||||
if (!global.TextEncoder) {
|
||||
throw new Error("global.TextEncoder is not available, polyfill required");
|
||||
}
|
||||
|
||||
if (!global.TextDecoder && global.require) {
|
||||
global.TextDecoder = require("util").TextDecoder;
|
||||
}
|
||||
if (!global.TextDecoder) {
|
||||
throw new Error("global.TextDecoder is not available, polyfill required");
|
||||
}
|
||||
|
||||
|
||||
const isNodeJS = global.process && global.process.title === "node";
|
||||
|
||||
if (!isNodeJS) {
|
||||
// console.log("ini browser fs")
|
||||
// var myfs = global.BrowserFS.BFSRequire('fs');
|
||||
// global.Buffer = global.BrowserFS.BFSRequire('buffer').Buffer;
|
||||
// global.fs = myfs;
|
||||
|
||||
global.fs.constants = {
|
||||
O_RDONLY: 0,
|
||||
O_WRONLY: 1,
|
||||
O_RDWR: 2,
|
||||
O_CREAT: 64,
|
||||
O_CREATE: 64,
|
||||
O_EXCL: 128,
|
||||
O_NOCTTY: 256,
|
||||
O_TRUNC: 512,
|
||||
O_APPEND: 1024,
|
||||
O_DIRECTORY: 65536,
|
||||
O_NOATIME: 262144,
|
||||
O_NOFOLLOW: 131072,
|
||||
O_SYNC: 1052672,
|
||||
O_DIRECT: 16384,
|
||||
O_NONBLOCK: 2048,
|
||||
};
|
||||
|
||||
let outputBuf = "";
|
||||
|
||||
global.fs.writeSyncOriginal = global.fs.writeSync
|
||||
global.fs.writeSync = function (fd, buf) {
|
||||
if (fd === 1 || fd === 2) {
|
||||
outputBuf += decoder.decode(buf);
|
||||
const nl = outputBuf.lastIndexOf("\n");
|
||||
if (nl != -1) {
|
||||
console.log(outputBuf.substr(0, nl));
|
||||
outputBuf = outputBuf.substr(nl + 1);
|
||||
}
|
||||
return buf.length;
|
||||
} else {
|
||||
return global.fs.writeSyncOriginal(...arguments);
|
||||
}
|
||||
};
|
||||
|
||||
global.fs.writeOriginal = global.fs.write
|
||||
global.fs.write = function (fd, buf, offset, length, position, callback) {
|
||||
// (corresponding to STDOUT/STDERR)
|
||||
if (fd === 1 || fd === 2) {
|
||||
if (offset !== 0 || length !== buf.length || position !== null) {
|
||||
throw new Error("not implemented");
|
||||
}
|
||||
const n = this.writeSync(fd, buf);
|
||||
callback(null, n, buf);
|
||||
} else {
|
||||
// buf: read buf first
|
||||
arguments[1] = global.Buffer.from(arguments[1]);
|
||||
return global.fs.writeOriginal(...arguments);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
|
||||
global.fs.openOriginal = global.fs.open
|
||||
global.fs.open = function (path, flags, mode, callback) {
|
||||
var myflags = 'r';
|
||||
var O = global.fs.constants;
|
||||
|
||||
// Convert numeric flags to string flags
|
||||
// FIXME: maybe wrong...
|
||||
console.log("open dir?", path, 'flag', flags, myflags)
|
||||
if (flags & O.O_WRONLY) { // 'w'
|
||||
myflags = 'w';
|
||||
if (flags & O.O_EXCL) {
|
||||
myflags = 'wx';
|
||||
}
|
||||
} else if (flags & O.O_RDWR) { // 'r+' or 'w+'
|
||||
if (flags & O.O_CREAT && flags & O.O_TRUNC) { // w+
|
||||
if (flags & O.O_EXCL) {
|
||||
myflags = 'wx+';
|
||||
} else {
|
||||
myflags = 'w+';
|
||||
}
|
||||
} else { // r+
|
||||
myflags = 'r+';
|
||||
}
|
||||
} else if (flags & O.O_APPEND) { // 'a'
|
||||
console.log("append error")
|
||||
throw new Error("Not implmented");
|
||||
} else {
|
||||
// 打开文件
|
||||
myflags = 'r+';
|
||||
console.log("open dir?", path, 'flag', flags, myflags)
|
||||
}
|
||||
|
||||
|
||||
return global.fs.openOriginal(path, myflags, mode, callback);
|
||||
};
|
||||
|
||||
global.fs.fstatOriginal = global.fs.fstat;
|
||||
global.fs.fstat = function (fd, callback) {
|
||||
return global.fs.fstatOriginal(fd, function () {
|
||||
var retStat = arguments[1];
|
||||
delete retStat['fileData'];
|
||||
retStat.atimeMs = retStat.atime.getTime();
|
||||
retStat.mtimeMs = retStat.mtime.getTime();
|
||||
retStat.ctimeMs = retStat.ctime.getTime();
|
||||
retStat.birthtimeMs = retStat.birthtime.getTime();
|
||||
return callback(arguments[0], retStat);
|
||||
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
global.fs.closeOriginal = global.fs.close;
|
||||
global.fs.close = function (fd, callback) {
|
||||
return global.fs.closeOriginal(fd, function () {
|
||||
if (typeof arguments[0] === 'undefined') arguments[0] = null;
|
||||
return callback(...arguments);
|
||||
});
|
||||
}
|
||||
|
||||
// global.fs.renameOriginal = global.fs.rename
|
||||
// global.fs.rename = function (from, to, callback) {
|
||||
// console.log("rename a0", arguments[0])
|
||||
// global.fs.renameOriginal(from, to);
|
||||
// callback(arguments[0])
|
||||
// }
|
||||
|
||||
// global.fs.renameSyncOriginal = global.fs.renameSync
|
||||
// global.fs.renameSync = function(fd, options) {
|
||||
// console.log("Sync")
|
||||
// }
|
||||
|
||||
|
||||
global.fs = new Proxy(global.fs, handler);
|
||||
}
|
||||
|
||||
// End of polyfills for common API.
|
||||
|
||||
const encoder = new TextEncoder("utf-8");
|
||||
const decoder = new TextDecoder("utf-8");
|
||||
|
||||
global.Go = class {
|
||||
constructor() {
|
||||
this.argv = ["js"];
|
||||
this.env = {};
|
||||
this.exit = (code) => {
|
||||
this.exitCode = code;
|
||||
if (code !== 0) {
|
||||
console.warn("exit code:", code);
|
||||
}
|
||||
};
|
||||
this._exitPromise = new Promise((resolve) => {
|
||||
this._resolveExitPromise = resolve;
|
||||
});
|
||||
this._pendingEvent = null;
|
||||
this._scheduledTimeouts = new Map();
|
||||
this._nextCallbackTimeoutID = 1;
|
||||
|
||||
const setInt64 = (addr, v) => {
|
||||
this.mem.setUint32(addr + 0, v, true);
|
||||
this.mem.setUint32(addr + 4, Math.floor(v / 4294967296), true);
|
||||
}
|
||||
|
||||
const getInt64 = (addr) => {
|
||||
const low = this.mem.getUint32(addr + 0, true);
|
||||
const high = this.mem.getInt32(addr + 4, true);
|
||||
return low + high * 4294967296;
|
||||
}
|
||||
|
||||
const loadValue = (addr) => {
|
||||
const f = this.mem.getFloat64(addr, true);
|
||||
if (f === 0) {
|
||||
return undefined;
|
||||
}
|
||||
if (!isNaN(f)) {
|
||||
return f;
|
||||
}
|
||||
|
||||
const id = this.mem.getUint32(addr, true);
|
||||
return this._values[id];
|
||||
}
|
||||
|
||||
const storeValue = (addr, v) => {
|
||||
const nanHead = 0x7FF80000;
|
||||
|
||||
if (typeof v === "number" && v !== 0) {
|
||||
if (isNaN(v)) {
|
||||
this.mem.setUint32(addr + 4, nanHead, true);
|
||||
this.mem.setUint32(addr, 0, true);
|
||||
return;
|
||||
}
|
||||
this.mem.setFloat64(addr, v, true);
|
||||
return;
|
||||
}
|
||||
|
||||
if (v === undefined) {
|
||||
this.mem.setFloat64(addr, 0, true);
|
||||
return;
|
||||
}
|
||||
|
||||
let id = this._ids.get(v);
|
||||
if (id === undefined) {
|
||||
id = this._idPool.pop();
|
||||
if (id === undefined) {
|
||||
id = this._values.length;
|
||||
}
|
||||
this._values[id] = v;
|
||||
this._goRefCounts[id] = 0;
|
||||
this._ids.set(v, id);
|
||||
}
|
||||
this._goRefCounts[id]++;
|
||||
let typeFlag = 0;
|
||||
switch (typeof v) {
|
||||
case "object":
|
||||
if (v !== null) {
|
||||
typeFlag = 1;
|
||||
}
|
||||
break;
|
||||
case "string":
|
||||
typeFlag = 2;
|
||||
break;
|
||||
case "symbol":
|
||||
typeFlag = 3;
|
||||
break;
|
||||
case "function":
|
||||
typeFlag = 4;
|
||||
break;
|
||||
}
|
||||
this.mem.setUint32(addr + 4, nanHead | typeFlag, true);
|
||||
this.mem.setUint32(addr, id, true);
|
||||
}
|
||||
|
||||
const loadSlice = (addr) => {
|
||||
const array = getInt64(addr + 0);
|
||||
const len = getInt64(addr + 8);
|
||||
return new Uint8Array(this._inst.exports.mem.buffer, array, len);
|
||||
}
|
||||
|
||||
const loadSliceOfValues = (addr) => {
|
||||
const array = getInt64(addr + 0);
|
||||
const len = getInt64(addr + 8);
|
||||
const a = new Array(len);
|
||||
for (let i = 0; i < len; i++) {
|
||||
a[i] = loadValue(array + i * 8);
|
||||
}
|
||||
return a;
|
||||
}
|
||||
|
||||
const loadString = (addr) => {
|
||||
const saddr = getInt64(addr + 0);
|
||||
const len = getInt64(addr + 8);
|
||||
return decoder.decode(new DataView(this._inst.exports.mem.buffer, saddr, len));
|
||||
}
|
||||
|
||||
const timeOrigin = Date.now() - performance.now();
|
||||
this.importObject = {
|
||||
go: {
|
||||
// Go's SP does not change as long as no Go code is running. Some operations (e.g. calls, getters and setters)
|
||||
// may synchronously trigger a Go event handler. This makes Go code get executed in the middle of the imported
|
||||
// function. A goroutine can switch to a new stack if the current stack is too small (see morestack function).
|
||||
// This changes the SP, thus we have to update the SP used by the imported function.
|
||||
|
||||
// func wasmExit(code int32)
|
||||
"runtime.wasmExit": (sp) => {
|
||||
sp >>>= 0;
|
||||
const code = this.mem.getInt32(sp + 8, true);
|
||||
this.exited = true;
|
||||
delete this._inst;
|
||||
delete this._values;
|
||||
delete this._goRefCounts;
|
||||
delete this._ids;
|
||||
delete this._idPool;
|
||||
this.exit(code);
|
||||
},
|
||||
|
||||
// func wasmWrite(fd uintptr, p unsafe.Pointer, n int32)
|
||||
"runtime.wasmWrite": (sp) => {
|
||||
sp >>>= 0;
|
||||
const fd = getInt64(sp + 8);
|
||||
const p = getInt64(sp + 16);
|
||||
const n = this.mem.getInt32(sp + 24, true);
|
||||
fs.writeSync(fd, new Uint8Array(this._inst.exports.mem.buffer, p, n));
|
||||
},
|
||||
|
||||
// func resetMemoryDataView()
|
||||
"runtime.resetMemoryDataView": (sp) => {
|
||||
sp >>>= 0;
|
||||
this.mem = new DataView(this._inst.exports.mem.buffer);
|
||||
},
|
||||
|
||||
// func nanotime1() int64
|
||||
"runtime.nanotime1": (sp) => {
|
||||
sp >>>= 0;
|
||||
setInt64(sp + 8, (timeOrigin + performance.now()) * 1000000);
|
||||
},
|
||||
|
||||
// func walltime1() (sec int64, nsec int32)
|
||||
"runtime.walltime1": (sp) => {
|
||||
sp >>>= 0;
|
||||
const msec = (new Date).getTime();
|
||||
setInt64(sp + 8, msec / 1000);
|
||||
this.mem.setInt32(sp + 16, (msec % 1000) * 1000000, true);
|
||||
},
|
||||
|
||||
// func scheduleTimeoutEvent(delay int64) int32
|
||||
"runtime.scheduleTimeoutEvent": (sp) => {
|
||||
sp >>>= 0;
|
||||
const id = this._nextCallbackTimeoutID;
|
||||
this._nextCallbackTimeoutID++;
|
||||
this._scheduledTimeouts.set(id, setTimeout(
|
||||
() => {
|
||||
this._resume();
|
||||
while (this._scheduledTimeouts.has(id)) {
|
||||
// for some reason Go failed to register the timeout event, log and try again
|
||||
// (temporary workaround for https://github.com/golang/go/issues/28975)
|
||||
console.warn("scheduleTimeoutEvent: missed timeout event");
|
||||
this._resume();
|
||||
}
|
||||
},
|
||||
getInt64(sp + 8) + 1, // setTimeout has been seen to fire up to 1 millisecond early
|
||||
));
|
||||
this.mem.setInt32(sp + 16, id, true);
|
||||
},
|
||||
|
||||
// func clearTimeoutEvent(id int32)
|
||||
"runtime.clearTimeoutEvent": (sp) => {
|
||||
sp >>>= 0;
|
||||
const id = this.mem.getInt32(sp + 8, true);
|
||||
clearTimeout(this._scheduledTimeouts.get(id));
|
||||
this._scheduledTimeouts.delete(id);
|
||||
},
|
||||
|
||||
// func getRandomData(r []byte)
|
||||
"runtime.getRandomData": (sp) => {
|
||||
sp >>>= 0;
|
||||
crypto.getRandomValues(loadSlice(sp + 8));
|
||||
},
|
||||
|
||||
// func finalizeRef(v ref)
|
||||
"syscall/js.finalizeRef": (sp) => {
|
||||
sp >>>= 0;
|
||||
const id = this.mem.getUint32(sp + 8, true);
|
||||
this._goRefCounts[id]--;
|
||||
if (this._goRefCounts[id] === 0) {
|
||||
const v = this._values[id];
|
||||
this._values[id] = null;
|
||||
this._ids.delete(v);
|
||||
this._idPool.push(id);
|
||||
}
|
||||
},
|
||||
|
||||
// func stringVal(value string) ref
|
||||
"syscall/js.stringVal": (sp) => {
|
||||
sp >>>= 0;
|
||||
storeValue(sp + 24, loadString(sp + 8));
|
||||
},
|
||||
|
||||
// func valueGet(v ref, p string) ref
|
||||
"syscall/js.valueGet": (sp) => {
|
||||
sp >>>= 0;
|
||||
                        const result = Reflect.get(loadValue(sp + 8), loadString(sp + 16));
                        sp = this._inst.exports.getsp() >>> 0; // see comment above
                        storeValue(sp + 32, result);
                    },

                    // func valueSet(v ref, p string, x ref)
                    "syscall/js.valueSet": (sp) => {
                        sp >>>= 0;
                        Reflect.set(loadValue(sp + 8), loadString(sp + 16), loadValue(sp + 32));
                    },

                    // func valueDelete(v ref, p string)
                    "syscall/js.valueDelete": (sp) => {
                        sp >>>= 0;
                        Reflect.deleteProperty(loadValue(sp + 8), loadString(sp + 16));
                    },

                    // func valueIndex(v ref, i int) ref
                    "syscall/js.valueIndex": (sp) => {
                        sp >>>= 0;
                        storeValue(sp + 24, Reflect.get(loadValue(sp + 8), getInt64(sp + 16)));
                    },

                    // valueSetIndex(v ref, i int, x ref)
                    "syscall/js.valueSetIndex": (sp) => {
                        sp >>>= 0;
                        Reflect.set(loadValue(sp + 8), getInt64(sp + 16), loadValue(sp + 24));
                    },

                    // func valueCall(v ref, m string, args []ref) (ref, bool)
                    "syscall/js.valueCall": (sp) => {
                        sp >>>= 0;
                        try {
                            const v = loadValue(sp + 8);
                            const m = Reflect.get(v, loadString(sp + 16));
                            const args = loadSliceOfValues(sp + 32);
                            const result = Reflect.apply(m, v, args);
                            sp = this._inst.exports.getsp() >>> 0; // see comment above
                            storeValue(sp + 56, result);
                            this.mem.setUint8(sp + 64, 1);
                        } catch (err) {
                            storeValue(sp + 56, err);
                            this.mem.setUint8(sp + 64, 0);
                        }
                    },

                    // func valueInvoke(v ref, args []ref) (ref, bool)
                    "syscall/js.valueInvoke": (sp) => {
                        sp >>>= 0;
                        try {
                            const v = loadValue(sp + 8);
                            const args = loadSliceOfValues(sp + 16);
                            const result = Reflect.apply(v, undefined, args);
                            sp = this._inst.exports.getsp() >>> 0; // see comment above
                            storeValue(sp + 40, result);
                            this.mem.setUint8(sp + 48, 1);
                        } catch (err) {
                            storeValue(sp + 40, err);
                            this.mem.setUint8(sp + 48, 0);
                        }
                    },

                    // func valueNew(v ref, args []ref) (ref, bool)
                    "syscall/js.valueNew": (sp) => {
                        sp >>>= 0;
                        try {
                            const v = loadValue(sp + 8);
                            const args = loadSliceOfValues(sp + 16);
                            const result = Reflect.construct(v, args);
                            sp = this._inst.exports.getsp() >>> 0; // see comment above
                            storeValue(sp + 40, result);
                            this.mem.setUint8(sp + 48, 1);
                        } catch (err) {
                            storeValue(sp + 40, err);
                            this.mem.setUint8(sp + 48, 0);
                        }
                    },

                    // func valueLength(v ref) int
                    "syscall/js.valueLength": (sp) => {
                        sp >>>= 0;
                        setInt64(sp + 16, parseInt(loadValue(sp + 8).length));
                    },

                    // valuePrepareString(v ref) (ref, int)
                    "syscall/js.valuePrepareString": (sp) => {
                        sp >>>= 0;
                        const str = encoder.encode(String(loadValue(sp + 8)));
                        storeValue(sp + 16, str);
                        setInt64(sp + 24, str.length);
                    },

                    // valueLoadString(v ref, b []byte)
                    "syscall/js.valueLoadString": (sp) => {
                        sp >>>= 0;
                        const str = loadValue(sp + 8);
                        loadSlice(sp + 16).set(str);
                    },

                    // func valueInstanceOf(v ref, t ref) bool
                    "syscall/js.valueInstanceOf": (sp) => {
                        sp >>>= 0;
                        this.mem.setUint8(sp + 24, (loadValue(sp + 8) instanceof loadValue(sp + 16)) ? 1 : 0);
                    },

                    // func copyBytesToGo(dst []byte, src ref) (int, bool)
                    "syscall/js.copyBytesToGo": (sp) => {
                        sp >>>= 0;
                        const dst = loadSlice(sp + 8);
                        const src = loadValue(sp + 32);
                        if (!(src instanceof Uint8Array || src instanceof Uint8ClampedArray)) {
                            this.mem.setUint8(sp + 48, 0);
                            return;
                        }
                        const toCopy = src.subarray(0, dst.length);
                        dst.set(toCopy);
                        setInt64(sp + 40, toCopy.length);
                        this.mem.setUint8(sp + 48, 1);
                    },

                    // func copyBytesToJS(dst ref, src []byte) (int, bool)
                    "syscall/js.copyBytesToJS": (sp) => {
                        sp >>>= 0;
                        const dst = loadValue(sp + 8);
                        const src = loadSlice(sp + 16);
                        if (!(dst instanceof Uint8Array || dst instanceof Uint8ClampedArray)) {
                            this.mem.setUint8(sp + 48, 0);
                            return;
                        }
                        const toCopy = src.subarray(0, dst.length);
                        dst.set(toCopy);
                        setInt64(sp + 40, toCopy.length);
                        this.mem.setUint8(sp + 48, 1);
                    },

                    "debug": (value) => {
                        console.log(value);
                    },
                }
            };
        }

        async run(instance) {
            if (!(instance instanceof WebAssembly.Instance)) {
                throw new Error("Go.run: WebAssembly.Instance expected");
            }
            this._inst = instance;
            this.mem = new DataView(this._inst.exports.mem.buffer);
            this._values = [ // JS values that Go currently has references to, indexed by reference id
                NaN,
                0,
                null,
                true,
                false,
                global,
                this,
            ];
            this._goRefCounts = new Array(this._values.length).fill(Infinity); // number of references that Go has to a JS value, indexed by reference id
            this._ids = new Map([ // mapping from JS values to reference ids
                [0, 1],
                [null, 2],
                [true, 3],
                [false, 4],
                [global, 5],
                [this, 6],
            ]);
            this._idPool = []; // unused ids that have been garbage collected
            this.exited = false; // whether the Go program has exited

            // Pass command line arguments and environment variables to WebAssembly by writing them to the linear memory.
            let offset = 4096;

            const strPtr = (str) => {
                const ptr = offset;
                const bytes = encoder.encode(str + "\0");
                new Uint8Array(this.mem.buffer, offset, bytes.length).set(bytes);
                offset += bytes.length;
                if (offset % 8 !== 0) {
                    offset += 8 - (offset % 8);
                }
                return ptr;
            };

            const argc = this.argv.length;

            const argvPtrs = [];
            this.argv.forEach((arg) => {
                argvPtrs.push(strPtr(arg));
            });
            argvPtrs.push(0);

            const keys = Object.keys(this.env).sort();
            keys.forEach((key) => {
                argvPtrs.push(strPtr(`${key}=${this.env[key]}`));
            });
            argvPtrs.push(0);

            const argv = offset;
            argvPtrs.forEach((ptr) => {
                this.mem.setUint32(offset, ptr, true);
                this.mem.setUint32(offset + 4, 0, true);
                offset += 8;
            });

            this._inst.exports.run(argc, argv);
            if (this.exited) {
                this._resolveExitPromise();
            }
            await this._exitPromise;
        }

        _resume() {
            if (this.exited) {
                throw new Error("Go program has already exited");
            }
            this._inst.exports.resume();
            if (this.exited) {
                this._resolveExitPromise();
            }
        }

        _makeFuncWrapper(id) {
            const go = this;
            return function () {
                const event = {
                    id: id,
                    this: this,
                    args: arguments
                };
                go._pendingEvent = event;
                go._resume();
                return event.result;
            };
        }
    }

    if (
        typeof module !== "undefined" &&
        global.require &&
        global.require.main === module &&
        global.process &&
        global.process.versions &&
        !global.process.versions.electron
    ) {
        if (process.argv.length < 3) {
            console.error("usage: go_js_wasm_exec [wasm binary] [arguments]");
            process.exit(1);
        }

        const go = new Go();
        go.argv = process.argv.slice(2);
        go.env = Object.assign({
            TMPDIR: require("os").tmpdir()
        }, process.env);
        go.exit = process.exit;
        WebAssembly.instantiate(fs.readFileSync(process.argv[2]), go.importObject).then((result) => {
            process.on("exit", (code) => { // Node.js exits if no event handler is pending
                if (code === 0 && !go.exited) {
                    // deadlock, make Go print error and stack traces
                    go._pendingEvent = {
                        id: 0
                    };
                    go._resume();
                }
            });
            return go.run(result.instance);
        }).catch((err) => {
            console.error(err);
            process.exit(1);
        });
    }
})();
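The wasm_exec.js shim above only defines the Go runtime glue (the Go class, its syscall/js import object, and the optional Node bootstrap); it never instantiates a module on its own. A minimal usage sketch is shown below; it is illustrative only and not part of this commit, and "go-module.wasm" is a placeholder file name.

// Illustrative only: instantiating a Go-compiled module with the shim above.
declare const Go: any; // provided globally by wasm_exec.js

const go = new Go();
WebAssembly.instantiateStreaming(fetch("go-module.wasm"), go.importObject)
    .then((result) => go.run(result.instance)); // resolves when the Go program exits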
44
shared-operations/src/workflow/organizeWaitOperations.ts
Normal file
44
shared-operations/src/workflow/organizeWaitOperations.ts
Normal file
@@ -0,0 +1,44 @@
import { Operation } from "../../declarations/Operation";

export function organizeWaitOperations(operations: Operation[]) {

    // Initialize an object to store the counts and associated "done" operations
    const waitCounts = {};
    const doneOperations = {};

    // Function to count "type: wait" operations and associate "done" operations per id
    function countWaitOperationsAndDone(operations: Operation[]) {
        for (const operation of operations) {
            if (operation.type === "wait") {
                const id = operation.values.id;
                if (id in waitCounts) {
                    waitCounts[id]++;
                } else {
                    waitCounts[id] = 1;
                }
            }
            if (operation.type === "done") {
                const id = operation.values.id;
                doneOperations[id] = operation;
            }
            if (operation.operations) {
                countWaitOperationsAndDone(operation.operations);
            }
        }
    }

    // Start counting and associating from the root operations
    countWaitOperationsAndDone(operations);

    // Combine counts and associated "done" operations
    const result = {};
    for (const id in waitCounts) {
        result[id] = {
            waitCount: waitCounts[id],
            doneOperation: doneOperations[id],
            input: []
        };
    }
    return result;
}
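For illustration, a small sketch of what organizeWaitOperations produces. The operation objects below are hypothetical and only carry the fields the function actually reads (type, values.id, operations); they are not taken from this commit.

// Hypothetical input: two branches that wait on id 0, plus the matching "done" node.
const ops = [
    { type: "wait", values: { id: 0 }, operations: [] },
    { type: "wait", values: { id: 0 }, operations: [] },
    { type: "done", values: { id: 0 }, operations: [] },
];

const waitMap = organizeWaitOperations(ops as any);
// waitMap[0] -> { waitCount: 2, doneOperation: <the "done" node>, input: [] }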
183
shared-operations/src/workflow/traverseOperations.ts
Normal file
183
shared-operations/src/workflow/traverseOperations.ts
Normal file
@@ -0,0 +1,183 @@
import { organizeWaitOperations } from "./organizeWaitOperations.js";
import { Operation } from "../../declarations/Operation.js";
import { PDF } from "../../declarations/PDF.js";

export async function * traverseOperations(operations: Operation[], input: PDF[] | PDF, Operations: AllOperations) {
    const waitOperations = organizeWaitOperations(operations);
    let results: PDF[] = [];
    yield* nextOperation(operations, input);
    return results;

    async function * nextOperation(operations: Operation[], input: PDF[] | PDF) {
        if(Array.isArray(operations) && operations.length == 0) { // isEmpty
            if(Array.isArray(input)) {
                console.log("operation done: " + input[0].fileName + (input.length > 1 ? "+" : ""));
                results = results.concat(input);
                return;
            }
            else {
                console.log("operation done: " + input.fileName);
                results.push(input);
                return;
            }
        }

        for (let i = 0; i < operations.length; i++) {
            yield* computeOperation(operations[i], structuredClone(input));
        }
    }

    async function * computeOperation(operation: Operation, input: PDF|PDF[]) {
        yield "Starting: " + operation.type;
        switch (operation.type) {
            case "done": // Skip this, because it is a valid node.
                break;
            case "wait":
                const waitOperation = waitOperations[operation.values.id];

                if(Array.isArray(input)) {
                    waitOperation.input.concat(input); // TODO: May have unexpected consequences. Needs further testing!
                }
                else {
                    waitOperation.input.push(input);
                }

                waitOperation.waitCount--;
                if(waitOperation.waitCount == 0) {
                    yield* nextOperation(waitOperation.doneOperation.operations, waitOperation.input);
                }
                break;
            case "extract":
                yield* nToN(input, operation, async (input) => {
                    input.fileName += "_extractedPages";
                    input.buffer = await Operations.extractPages(input.buffer, operation.values["pagesToExtractArray"]);
                });
                break;
            case "impose":
                yield* nToN(input, operation, async (input) => {
                    input.fileName += "_imposed";
                    input.buffer = await Operations.impose(input.buffer, operation.values["nup"], operation.values["format"]);
                });
                break;
            case "merge":
                yield* nToOne(input, operation, async (inputs) => {
                    return {
                        originalFileName: inputs.map(input => input.originalFileName).join("_and_"),
                        fileName: inputs.map(input => input.fileName).join("_and_") + "_merged",
                        buffer: await Operations.mergePDFs(inputs.map(input => input.buffer))
                    }
                });
                break;
            case "rotate":
                yield* nToN(input, operation, async (input) => {
                    input.fileName += "_turned";
                    input.buffer = await Operations.rotatePages(input.buffer, operation.values["rotation"]);
                });
                break;
            case "split":
                // TODO: A split might break the done condition, it may count multiple times. Needs further testing!
                yield* oneToN(input, operation, async (input) => {
                    const splitResult = await Operations.splitPDF(input.buffer, operation.values["pagesToSplitAfterArray"]);

                    const splits = [];
                    for (let j = 0; j < splitResult.length; j++) {
                        splits.push({
                            originalFileName: input.originalFileName,
                            fileName: input.fileName + "_split" + j,
                            buffer: splitResult[j]
                        })
                    }
                    return splits;
                });
                break;
            case "updateMetadata":
                yield* nToN(input, operation, async (input) => {
                    input.fileName += "_metadataEdited";
                    input.buffer = await Operations.updateMetadata(input.buffer, operation.values["metadata"]);
                });
                break;
            case "organizePages":
                yield* nToN(input, operation, async (input) => {
                    input.fileName += "_pagesOrganized";
                    input.buffer = await Operations.organizePages(input.buffer, operation.values["operation"], operation.values["customOrderString"]);
                });
                break;
            case "removeBlankPages":
                yield* nToN(input, operation, async (input) => {
                    input.fileName += "_removedBlanks";
                    input.buffer = await Operations.removeBlankPages(input.buffer, operation.values["whiteThreashold"]);
                });
                break;
            case "splitOn":
                yield* oneToN(input, operation, async (input) => {
                    const splitResult = await Operations.splitOn(input.buffer, operation.values["type"], operation.values["whiteThreashold"]);
                    const splits = [];
                    for (let j = 0; j < splitResult.length; j++) {
                        splits.push({
                            originalFileName: input.originalFileName,
                            fileName: input.fileName + "_split" + j,
                            buffer: splitResult[j]
                        })
                    }

                    return splits;
                });
                break;
            default:
                throw new Error(`${operation.type} not implemented yet.`);
                break;
        }
    }

    /**
     *
     * @param {PDF|PDF[]} input
     * @param {JSON} operation
     * @returns {undefined}
     */
    async function * nToOne(inputs, operation, callback) {
        inputs = Array.from(inputs); // Convert single values to array, keep arrays as is.

        inputs = await callback(inputs);
        yield* nextOperation(operation.operations, inputs);
    }

    /**
     *
     * @param {PDF|PDF[]} input
     * @param {JSON} operation
     * @returns {undefined}
     */
    async function * oneToN(input, operation, callback) {
        if(Array.isArray(input)) {
            let output = [];
            for (let i = 0; i < input.length; i++) {
                output = output.concat(await callback(input[i]));
            }
            yield* nextOperation(operation.operations, output);
        }
        else {
            input = await callback(input);
            yield* nextOperation(operation.operations, input);
        }
    }

    /**
     *
     * @param {PDF|PDF[]} input
     * @param {JSON} operation
     * @returns {undefined}
     */
    async function * nToN(input, operation, callback) {
        if(Array.isArray(input)) {
            for (let i = 0; i < input.length; i++) {
                await callback(input[i]);
            }
            yield* nextOperation(operation.operations, input);
        }
        else {
            await callback(input);
            yield* nextOperation(operation.operations, input);
        }
    }
}
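Because traverseOperations is an async generator, a caller receives progress strings as yielded values and the final PDF[] as the generator's return value. A minimal consumption sketch follows; operations, inputs and Operations are assumed to be provided by the surrounding workflow code and are not defined here.

// Illustrative only: drive the generator, log progress, and collect the results.
const traversal = traverseOperations(operations, inputs, Operations);
let step = await traversal.next();
while (!step.done) {
    console.log(step.value);   // e.g. "Starting: merge"
    step = await traversal.next();
}
const outputPdfs = step.value; // the PDF[] returned when the traversal finishes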
96
shared-operations/src/wrappers/PdfFile.ts
Normal file
96
shared-operations/src/wrappers/PdfFile.ts
Normal file
@@ -0,0 +1,96 @@
import { PDFDocument } from 'pdf-lib';
import * as PDFJS from 'pdfjs-dist';
import { PDFDocumentProxy } from 'pdfjs-dist/types/src/display/api';

export class PdfFile {
    byteArray: Uint8Array | null;
    pdfLib: PDFDocument | null;
    pdfJs: PDFDocumentProxy | null;
    filename: string;

    constructor() {
        this.byteArray = null;
        this.pdfLib = null;
        this.pdfJs = null;
        this.filename = "";
    }

    async convertToByteArrayFile(): Promise<PdfFile> {
        if (this.byteArray) return this;

        var byteArray: Uint8Array|null = null;
        if (this.pdfLib) {
            byteArray = await this.pdfLib.save();
        } else if (this.pdfJs) {
            byteArray = await this.pdfJs.getData();
        }
        return fromUint8Array(byteArray!, this.filename);
    }
    async convertToPdfLibFile(): Promise<PdfFile> {
        if (this.pdfLib) return this;

        const byteFile = await this.convertToByteArrayFile();
        const pdfLib = await PDFDocument.load(byteFile.byteArray!, {
            updateMetadata: false,
        });
        return fromPdfLib(pdfLib, this.filename);
    }
    async convertToPdfJsFile(): Promise<PdfFile> {
        if (this.pdfJs) return this;

        const byteFile = await this.convertToByteArrayFile();
        const pdfJs = await PDFJS.getDocument(byteFile.byteArray!).promise;
        return fromPdfJs(pdfJs, this.filename);
    }

    async getAsByteArray(): Promise<Uint8Array> {
        const file = await this.convertToByteArrayFile();
        return file.byteArray!;
    }
    async getAsPdfLib(): Promise<PDFDocument> {
        const file = await this.convertToPdfLibFile();
        return file.pdfLib!;
    }
    async getAsPdfJs(): Promise<PDFDocumentProxy> {
        const file = await this.convertToPdfJsFile();
        return file.pdfJs!;
    }
}

export function fromMulterFile(value: Express.Multer.File): PdfFile {
    return fromUint8Array(value.buffer, value.originalname);
}
export function fromUint8Array(value: Uint8Array, filename: string): PdfFile {
    const out = new PdfFile();
    out.byteArray = value;
    out.filename = filename;
    return out;
}
export function fromPdfLib(value: PDFDocument, filename: string): PdfFile {
    const out = new PdfFile();
    out.pdfLib = value;
    out.filename = filename;
    return out;
}
export function fromPdfJs(value: PDFDocumentProxy, filename: string): PdfFile {
    const out = new PdfFile();
    out.pdfJs = value;
    out.filename = filename;
    return out;
}

export async function convertAllToByteArrayFile(files: PdfFile[]): Promise<(PdfFile)[]> {
    const pdfPromises = files.map(s => s.convertToByteArrayFile());
    return await Promise.all(pdfPromises);
}

export async function convertAllToPdfLibFile(files: PdfFile[]): Promise<(PdfFile)[]> {
    const pdfPromises = files.map(s => s.convertToPdfLibFile());
    return await Promise.all(pdfPromises);
}

export async function convertAllToPdfJsFile(files: PdfFile[]): Promise<(PdfFile)[]> {
    const pdfPromises = files.map(s => s.convertToPdfJsFile());
    return await Promise.all(pdfPromises);
}
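A brief usage sketch of the wrapper: build a PdfFile from whichever representation is at hand and convert lazily to the form an operation needs. The byte array below is a placeholder and the snippet is illustrative, not part of this commit.

// Illustrative only: converting between representations on demand.
const bytes = new Uint8Array(/* raw PDF bytes from an upload or from disk */);
const file = fromUint8Array(bytes, "example.pdf");

const libDoc = await file.getAsPdfLib(); // pdf-lib PDFDocument, built from the bytes
const jsDoc = await file.getAsPdfJs();   // pdfjs-dist document proxy, also built from the bytes
console.log(libDoc.getPageCount(), jsDoc.numPages);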