restructured project (shared-operations)
52  shared-operations/functions/editMetadata.js  Normal file
@@ -0,0 +1,52 @@
/**
 * @typedef {Object} Metadata
 * @property {string | null | undefined} Title - The title of the document.
 * @property {string | null | undefined} Author - The author of the document.
 * @property {string | null | undefined} Subject - The subject of the document.
 * @property {string[] | null | undefined} Keywords - An array of keywords associated with the document.
 * @property {string | null | undefined} Producer - The producer of the document.
 * @property {string | null | undefined} Creator - The creator of the document.
 * @property {Date | null | undefined} CreationDate - The date when the document was created.
 * @property {Date | null | undefined} ModificationDate - The date when the document was last modified.
 */

/**
 *
 * @param {Uint16Array} snapshot
 * @param {Metadata} metadata - Set property to null or "" to clear, undefined properties will be skipped.
 * @param {import('pdf-lib')} PDFLib
 * @returns
 */
export async function editMetadata(snapshot, metadata, PDFLib) {
    // Load the original PDF file
    const pdfDoc = await PDFLib.PDFDocument.load(snapshot, {
        parseSpeed: PDFLib.ParseSpeeds.Fastest,
    });

    if(metadata.Title !== undefined)
        pdfDoc.setTitle(metadata.Title);

    if(metadata.Author !== undefined)
        pdfDoc.setAuthor(metadata.Author)

    if(metadata.Subject !== undefined)
        pdfDoc.setSubject(metadata.Subject)

    if(metadata.Keywords !== undefined)
        pdfDoc.setKeywords(metadata.Keywords)

    if(metadata.Producer !== undefined)
        pdfDoc.setProducer(metadata.Producer)

    if(metadata.Creator !== undefined)
        pdfDoc.setCreator(metadata.Creator)

    if(metadata.CreationDate !== undefined)
        pdfDoc.setCreationDate(metadata.CreationDate)

    if(metadata.ModificationDate !== undefined)
        pdfDoc.setModificationDate(metadata.ModificationDate)

    // Serialize the modified document
    return pdfDoc.save();
};
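A minimal usage sketch for editMetadata, assuming pdf-lib is installed and the snapshot bytes come from Node's fs; the file paths are hypothetical:

import * as PDFLib from 'pdf-lib';
import fs from 'fs';
import { editMetadata } from './shared-operations/functions/editMetadata.js';

const snapshot = fs.readFileSync('./input.pdf');          // hypothetical input path
const edited = await editMetadata(snapshot, {
    Title: 'Quarterly Report',   // set a new title
    Author: null,                // null or "" clears the field
    // Subject, Keywords, ... left undefined -> skipped
}, PDFLib);
fs.writeFileSync('./output.pdf', edited);                  // pdfDoc.save() resolves to a Uint8Array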
8  shared-operations/functions/extractPages.js  Normal file
@@ -0,0 +1,8 @@
import { createSubDocument } from "./shared/createSubDocument.js";

export async function extractPages(snapshot, pagesToExtractArray, PDFLib) {
    const pdfDoc = await PDFLib.PDFDocument.load(snapshot)

    // TODO: invent a better format for pagesToExtractArray and convert it.
    return createSubDocument(pdfDoc, pagesToExtractArray, PDFLib);
};
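For reference, a small sketch of how extractPages is called; page indices are zero-based because createSubDocument copies by index, and snapshot is assumed to hold the PDF bytes:

import * as PDFLib from 'pdf-lib';
import { extractPages } from './shared-operations/functions/extractPages.js';

// Keep the 1st, 3rd and 4th page of the document.
const extracted = await extractPages(snapshot, [0, 2, 3], PDFLib);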
12  shared-operations/functions/impose.js  Normal file
@@ -0,0 +1,12 @@
export async function impose(snapshot, nup, format, pdfcpuWraopper) {
    return await pdfcpuWraopper.oneToOne([
        "pdfcpu.wasm",
        "nup",
        "-c",
        "disable",
        'f:' + format,
        "/output.pdf",
        String(nup),
        "input.pdf",
    ], snapshot);
}
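A hedged usage sketch: impose only builds the argument list for pdfcpu's nup command and delegates to the injected wrapper, so the caller passes one of the wrapper modules found further down in this commit ("A4" and 4 are example values):

import * as pdfcpuWrapper from './shared-operations/wasm/pdfcpu/pdfcpu-wrapper-node.js';
import { impose } from './shared-operations/functions/impose.js';

// 4 pages per sheet on A4 paper; the wrapper writes /output.pdf and returns its bytes.
const imposed = await impose(snapshot, 4, "A4", pdfcpuWrapper);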
13  shared-operations/functions/mergePDFs.js  Normal file
@@ -0,0 +1,13 @@
export const mergePDFs = async (snapshots, PDFLib) => {

    const mergedPdf = await PDFLib.PDFDocument.create();

    for (let i = 0; i < snapshots.length; i++) {
        const pdfToMerge = await PDFLib.PDFDocument.load(snapshots[i]);

        const copiedPages = await mergedPdf.copyPages(pdfToMerge, pdfToMerge.getPageIndices());
        copiedPages.forEach((page) => mergedPdf.addPage(page));
    }

    return mergedPdf.save();
};
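A minimal sketch of merging two snapshots, assuming both are Uint8Array or Buffer values already in memory:

import * as PDFLib from 'pdf-lib';
import { mergePDFs } from './shared-operations/functions/mergePDFs.js';

// Pages of snapshotB are appended after the pages of snapshotA.
const merged = await mergePDFs([snapshotA, snapshotB], PDFLib);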
115  shared-operations/functions/organizePages.js  Normal file
@@ -0,0 +1,115 @@
/**
 * @typedef {"CUSTOM_PAGE_ORDER"|"REVERSE_ORDER"|"DUPLEX_SORT"|"BOOKLET_SORT"|"ODD_EVEN_SPLIT"|"REMOVE_FIRST"|"REMOVE_LAST"|"REMOVE_FIRST_AND_LAST"} OrderOperation
 */

/**
 *
 * @param {Uint16Array} snapshot
 * @param {OrderOperation} operation
 * @param {string} customOrderString
 * @param {import('pdf-lib')} PDFLib
 * @returns
 */
export async function organizePages(snapshot, operation, customOrderString, PDFLib) {
    const pdfDoc = await PDFLib.PDFDocument.load(snapshot);
    let subDocument = await PDFLib.PDFDocument.create();
    const copiedPages = await subDocument.copyPages(pdfDoc, pdfDoc.getPageIndices());

    const pageCount = pdfDoc.getPages().length;

    switch (operation) {
        case "CUSTOM_PAGE_ORDER":
            console.log("Custom Order");
            const pageOrderArray = parseCustomPageOrder(customOrderString, pageCount);
            console.log(pageOrderArray);

            const customOrderedPages = pageOrderArray.map((pageIndex) => copiedPages[pageIndex]);
            customOrderedPages.forEach((page) => subDocument.addPage(page));
            break;
        case "REVERSE_ORDER":
            const reversedPages = [];
            for (let i = pageCount - 1; i >= 0; i--) {
                reversedPages.push(copiedPages[i]);
            }
            reversedPages.forEach((page) => subDocument.addPage(page));
            break;
        case 'DUPLEX_SORT': //TODO: Needs to be checked by someone who knows more about duplex printing.
            const duplexPages = [];
            const half = (pageCount + 1) / 2
            for (let i = 1; i <= half; i++) {
                duplexPages.push(copiedPages[i - 1]);
                if (i <= pageCount - half) {
                    duplexPages.push(copiedPages[pageCount - i]);
                }
            }
            duplexPages.forEach((page) => subDocument.addPage(page));
            break;
        case 'BOOKLET_SORT':
            const bookletPages = [];
            for (let i = 0; i < pageCount / 2; i++) {
                bookletPages.push(copiedPages[i]);
                bookletPages.push(copiedPages[pageCount - i - 1]);
            }
            bookletPages.forEach((page) => subDocument.addPage(page));
            break;
        case 'ODD_EVEN_SPLIT':
            const oddPages = [];
            const evenPages = [];
            for (let i = 0; i < pageCount; i++) {
                if (i % 2 === 0) {
                    evenPages.push(copiedPages[i]);
                } else {
                    oddPages.push(copiedPages[i]);
                }
            }
            oddPages.forEach((page) => subDocument.addPage(page));
            evenPages.forEach((page) => subDocument.addPage(page));
            break;
        case 'REMOVE_FIRST':
            pdfDoc.removePage(0);
            subDocument = pdfDoc;
            break;
        case 'REMOVE_LAST':
            pdfDoc.removePage(pageCount - 1);
            subDocument = pdfDoc;
            break;
        case 'REMOVE_FIRST_AND_LAST':
            pdfDoc.removePage(0);
            pdfDoc.removePage(pageCount - 2);
            subDocument = pdfDoc;
            break;
        default:
            throw new Error("Operation not supported");
            break;
    }

    return subDocument.save();
};

function parseCustomPageOrder(customOrder, pageCount) {
    const pageOrderArray = [];
    const ranges = customOrder.split(',');

    ranges.forEach((range) => {
        if (range.includes('-')) {
            const [start, end] = range.split('-').map(Number);
            for (let i = start; i <= end; i++) {
                pageOrderArray.push(i - 1);
            }
        } else if (range.includes('n')) {
            const [even, odd] = range.split('n').map(Number);
            for (let i = 1; i <= pageCount; i++) {
                if (i % 2 === 0) {
                    pageOrderArray.push((i * even) - 1);
                } else {
                    pageOrderArray.push((i * odd) - 1);
                }
            }
        } else {
            pageOrderArray.push(Number(range) - 1);
        }
    });

    return pageOrderArray;
}
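To illustrate parseCustomPageOrder, a hedged sketch of the string format it accepts (the examples assume an 8-page document; the 'n' form multiplies the running index by the two numbers around the n, so it is only safe while the products stay within the page count):

import * as PDFLib from 'pdf-lib';
import { organizePages } from './shared-operations/functions/organizePages.js';

// "1-3,5" -> page indices [0, 1, 2, 4]   (1-based pages and ranges become 0-based indices)
// "4,2,1" -> page indices [3, 1, 0]      (explicit reordering)
const reordered = await organizePages(snapshot, "CUSTOM_PAGE_ORDER", "1-3,5", PDFLib);

// The other operations ignore customOrderString, e.g.:
const reversed = await organizePages(snapshot, "REVERSE_ORDER", "", PDFLib);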
17  shared-operations/functions/removeBlankPages.js  Normal file
@@ -0,0 +1,17 @@
import { detectEmptyPages } from "./shared/detectEmptyPages.js";

export async function removeBlankPages(snapshot, whiteThreashold, PDFJS, OpenCV, PDFLib) {

    const emptyPages = await detectEmptyPages(snapshot, whiteThreashold, PDFJS, OpenCV);

    console.log("Empty Pages: ", emptyPages);

    const pdfDoc = await PDFLib.PDFDocument.load(snapshot);

    // Reverse the array before looping in order to keep the indices pointing at the right pages.
    // E.g. if you delete page 5, page 7 becomes page 6; if you delete page 7 first, page 5 remains page 5.
    emptyPages.reverse().forEach(pageIndex => {
        pdfDoc.removePage(pageIndex);
    })

    return pdfDoc.save();
};
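A usage sketch with the expected library injections; the import names are assumptions (pdfjs-dist for PDFJS, opencv-wasm for OpenCV), and whiteThreashold is the grayscale mean compared against in detectEmptyPages further down:

import * as PDFJS from 'pdfjs-dist';
import * as OpenCV from 'opencv-wasm';
import * as PDFLib from 'pdf-lib';
import { removeBlankPages } from './shared-operations/functions/removeBlankPages.js';

// Pages with no text items and only images classified as blank by the threshold are dropped.
const cleaned = await removeBlankPages(snapshot, 250, PDFJS, OpenCV, PDFLib);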
16  shared-operations/functions/rotatePages.js  Normal file
@@ -0,0 +1,16 @@
export async function rotatePages (snapshot, rotation, PDFLib) {
    // Load the original PDF file
    const pdfDoc = await PDFLib.PDFDocument.load(snapshot, {
        parseSpeed: PDFLib.ParseSpeeds.Fastest,
    });

    const pages = pdfDoc.getPages();

    pages.forEach(page => {
        // Set the page rotation
        page.setRotation(PDFLib.degrees(rotation))
    });

    // Serialize the modified document
    return pdfDoc.save();
};
27  shared-operations/functions/scaleContent.js  Normal file
@@ -0,0 +1,27 @@
export async function scaleContent(snapshot, scaleFactor, PDFLib) {
    // Load the original PDF file
    const pdfDoc = await PDFLib.PDFDocument.load(snapshot, {
        parseSpeed: PDFLib.ParseSpeeds.Fastest,
    });

    const pages = pdfDoc.getPages();

    pages.forEach(page => {
        const width = page.getWidth();
        const height = page.getHeight();

        // Scale content
        page.scaleContent(scaleFactor, scaleFactor);
        const scaled_diff = {
            width: Math.round(width - scaleFactor * width),
            height: Math.round(height - scaleFactor * height),
        };

        // Center content in new page format
        page.translateContent(Math.round(scaled_diff.width / 2), Math.round(scaled_diff.height / 2));

    });

    // Serialize the modified document
    return pdfDoc.save();
};
29  shared-operations/functions/scalePage.js  Normal file
@@ -0,0 +1,29 @@
export async function scalePage(snapshot, pageSize, PDFLib) {
    // Load the original PDF file
    const pdfDoc = await PDFLib.PDFDocument.load(snapshot, {
        parseSpeed: PDFLib.ParseSpeeds.Fastest,
    });

    const new_size = pageSize;

    const pages = pdfDoc.getPages();

    pages.forEach(page => {
        // Change page size
        page.setSize(new_size.width, new_size.height);
    });

    // Serialize the modified document
    return pdfDoc.save();
};

export const PageSize = {
    a4: {
        width: 594.96,
        height: 841.92
    },
    letter: {
        width: 612,
        height: 792
    }
};
16  shared-operations/functions/shared/createSubDocument.js  Normal file
@@ -0,0 +1,16 @@
export async function createSubDocument(pdfDoc, pagesToExtractArray, PDFLib) {
    const subDocument = await PDFLib.PDFDocument.create();

    // Check that the largest requested page index is not beyond the number of pages in the PDF
    if(Math.max(...pagesToExtractArray) >= pdfDoc.getPageCount()) {
        throw new Error(`The PDF document only has ${pdfDoc.getPageCount()} pages and you tried to extract page ${Math.max(...pagesToExtractArray)}`);
    }

    const copiedPages = await subDocument.copyPages(pdfDoc, pagesToExtractArray);

    for (let i = 0; i < copiedPages.length; i++) {
        subDocument.addPage(copiedPages[i]);
    }

    return subDocument.save();
}
61  shared-operations/functions/shared/detectEmptyPages.js  Normal file
@@ -0,0 +1,61 @@
import { getImagesOnPage } from "./getImagesOnPage.js";

export async function detectEmptyPages(snapshot, whiteThreashold, PDFJS, OpenCV) {
    const pdfDoc = await PDFJS.getDocument(snapshot).promise;

    const emptyPages = [];
    for (let i = 1; i <= pdfDoc.numPages; i++) {
        const page = await pdfDoc.getPage(i);
        console.log("Checking page " + i);

        if(!await hasText(page)) {
            console.log(`Found text on Page ${i}, page is not empty`);
            continue;
        }

        if(!await areImagesBlank(page, whiteThreashold)) {
            console.log(`Found non white image on Page ${i}, page is not empty`);
            continue;
        }

        console.log(`Page ${i} is empty.`);
        emptyPages.push(i - 1);
    }
    return emptyPages;

    // Note: returns true when the page has no text items at all.
    async function hasText(page) {
        const textContent = await page.getTextContent();
        return textContent.items.length === 0;
    }

    async function areImagesBlank(page, threshold) {
        const images = await getImagesOnPage(page, PDFJS);
        for (const image of images) {
            if(!isImageBlank(image, threshold))
                return false;
        }
        return true;
    }

    function isImageBlank(image, threshold) {
        const src = new OpenCV.cv.Mat(image.width, image.height, OpenCV.cv.CV_8UC4);
        src.data.set(image.data);
        // Convert the image to grayscale
        const gray = new OpenCV.cv.Mat();
        OpenCV.cv.cvtColor(src, gray, OpenCV.cv.COLOR_RGBA2GRAY);

        // Calculate the mean value of the grayscale image
        const meanValue = OpenCV.cv.mean(gray);

        // Free memory
        src.delete();
        gray.delete();

        // Check if the mean value is below the threshold
        if (meanValue[0] <= threshold) {
            return true;
        } else {
            return false;
        }
    }
}
11  shared-operations/functions/shared/getImagesOnPage.js  Normal file
@@ -0,0 +1,11 @@
export async function getImagesOnPage(page, PDFJS) {
    const ops = await page.getOperatorList();
    const images = [];
    for (var j = 0; j < ops.fnArray.length; j++) {
        if (ops.fnArray[j] == PDFJS.OPS.paintJpegXObject || ops.fnArray[j] == PDFJS.OPS.paintImageXObject) {
            const image = page.objs.get(ops.argsArray[j][0]);
            images.push(image);
        }
    }
    return images;
}
120  shared-operations/functions/splitOn.js  Normal file
@@ -0,0 +1,120 @@
import { detectEmptyPages } from "./shared/detectEmptyPages.js";
import { getImagesOnPage } from "./shared/getImagesOnPage.js";
import { createSubDocument } from "./shared/createSubDocument.js";

/**
 * @typedef {"BAR_CODE"|"QR_CODE"|"BLANK_PAGE"} SplitType
 */

/**
 *
 * @param {Uint16Array} snapshot
 * @param {SplitType} type
 * @param {} PDFJS
 * @param {import('opencv-wasm')} OpenCV
 * @param {} PDFLib
 * @returns
 */
export async function splitOn(snapshot, type, whiteThreashold, PDFJS, OpenCV, PDFLib, jsQR) {

    let splitAtPages = [];

    switch (type) {
        case "BAR_CODE":
            // TODO: Implement
            throw new Error("This split-type has not been implemented yet");
            break;

        case "QR_CODE":
            splitAtPages = await getPagesWithQRCode(snapshot);
            break;

        case "BLANK_PAGE":
            splitAtPages = await detectEmptyPages(snapshot, whiteThreashold, PDFJS, OpenCV);
            break;

        default:
            throw new Error("An invalid split-type was provided.")
            break;
    }

    console.log("Split At Pages: ", splitAtPages);

    // Remove detected Pages & Split
    const pdfDoc = await PDFLib.PDFDocument.load(snapshot);

    const numberOfPages = pdfDoc.getPages().length;

    let pagesArray = [];
    let splitAfter = splitAtPages.shift();
    const subDocuments = [];

    for (let i = 0; i < numberOfPages; i++) {
        console.log(i);
        if(i == splitAfter) {
            if(pagesArray.length > 0) {
                subDocuments.push(await createSubDocument(pdfDoc, pagesArray, PDFLib));
                pagesArray = [];
            }
            splitAfter = splitAtPages.shift();
        }
        else { // Skip splitAtPage
            console.log("PagesArray")
            pagesArray.push(i);
        }
    }
    if(pagesArray.length > 0) {
        subDocuments.push(await createSubDocument(pdfDoc, pagesArray, PDFLib));
    }
    pagesArray = [];

    return subDocuments;

    async function getPagesWithQRCode(snapshot) {
        const pdfDoc = await PDFJS.getDocument(snapshot).promise;

        const pagesWithQR = [];
        for (let i = 0; i < pdfDoc.numPages; i++) {
            console.log("Page:", i, "/", pdfDoc.numPages);
            const page = await pdfDoc.getPage(i + 1);

            const images = await getImagesOnPage(page, PDFJS);
            console.log("images:", images);
            for (const image of images) {
                const data = await checkForQROnImage(image);
                if(data == "https://github.com/Frooodle/Stirling-PDF") {
                    pagesWithQR.push(i);
                }
            }
        }
        if(pagesWithQR.length == 0) {
            console.warn("Could not find any QR Codes in the provided PDF.")
        }
        return pagesWithQR;
    }

    async function checkForQROnImage(image) {
        // TODO: There is an issue with the jsQR package (it expects RGBA but sometimes we have RGB), and the package seems to be stale; we could create a fork and fix the issue. In the meantime we just force RGBA:
        // Check for RGB and convert to RGBA
        if(image.data.length == image.width * image.height * 3) {
            const tmpArray = new Uint8ClampedArray(image.width * image.height * 4);

            // Iterate through the original array and add an alpha channel
            for (let i = 0, j = 0; i < image.data.length; i += 3, j += 4) {
                tmpArray[j] = image.data[i]; // Red channel
                tmpArray[j + 1] = image.data[i + 1]; // Green channel
                tmpArray[j + 2] = image.data[i + 2]; // Blue channel
                tmpArray[j + 3] = 255; // Alpha channel (fully opaque)
            }

            image.data = tmpArray;
        }

        const code = jsQR(image.data, image.width, image.height);
        if(code)
            return code.data;
        else
            return null;
    }
};
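A hedged call sketch for splitOn; the jsQR default export from the jsqr package is passed in, and the QR_CODE mode only reacts to codes that encode the Stirling-PDF repository URL shown above:

import * as PDFJS from 'pdfjs-dist';
import * as OpenCV from 'opencv-wasm';
import * as PDFLib from 'pdf-lib';
import jsQR from 'jsqr';
import { splitOn } from './shared-operations/functions/splitOn.js';

// Returns an array of serialized sub-documents; the separator pages themselves are dropped.
const parts = await splitOn(snapshot, "QR_CODE", 250, PDFJS, OpenCV, PDFLib, jsQR);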
24  shared-operations/functions/splitPDF.js  Normal file
@@ -0,0 +1,24 @@
import { createSubDocument } from "./shared/createSubDocument.js";

export async function splitPDF(snapshot, splitAfterPageArray, PDFLib) {
    const pdfDoc = await PDFLib.PDFDocument.load(snapshot)

    const numberOfPages = pdfDoc.getPages().length;

    let pagesArray = [];
    let splitAfter = splitAfterPageArray.shift();
    const subDocuments = [];

    for (let i = 0; i < numberOfPages; i++) {
        if(i > splitAfter && pagesArray.length > 0) {
            subDocuments.push(await createSubDocument(pdfDoc, pagesArray, PDFLib));
            splitAfter = splitAfterPageArray.shift();
            pagesArray = [];
        }
        pagesArray.push(i);
    }
    subDocuments.push(await createSubDocument(pdfDoc, pagesArray, PDFLib));
    pagesArray = [];

    return subDocuments;
};
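A short sketch of how splitAfterPageArray maps to the produced documents (zero-based page indices; the array is consumed with shift, so pass a copy if you still need it afterwards):

import * as PDFLib from 'pdf-lib';
import { splitPDF } from './shared-operations/functions/splitPDF.js';

// For a 7-page document, [2, 4] yields three documents:
//   pages 0-2, pages 3-4, and pages 5-6.
const parts = await splitPDF(snapshot, [2, 4], PDFLib);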
43  shared-operations/organizeWaitOperations.js  Normal file
@@ -0,0 +1,43 @@
export function organizeWaitOperations(operations) {

    // Initialize an object to store the counts and associated "done" operations
    const waitCounts = {};
    const doneOperations = {};

    // Function to count "type: wait" operations and associate "done" operations per id
    function countWaitOperationsAndDone(operations) {
        for (const operation of operations) {
            if (operation.type === "wait") {
                const id = operation.values.id;
                if (id in waitCounts) {
                    waitCounts[id]++;
                } else {
                    waitCounts[id] = 1;
                }
            }
            if (operation.type === "done") {
                const id = operation.values.id;
                doneOperations[id] = operation;
            }
            if (operation.operations) {
                countWaitOperationsAndDone(operation.operations);
            }
        }
    }

    // Start counting and associating from the root operations
    countWaitOperationsAndDone(operations);

    // Combine counts and associated "done" operations
    const result = {};
    for (const id in waitCounts) {
        result[id] = {
            waitCount: waitCounts[id],
            doneOperation: doneOperations[id],
            input: []
        };
    }
    return result;
}
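A hedged example of the structure this produces, using a hypothetical operation tree in which two branches wait on the same id before a shared "done" node continues:

const operations = [
    { type: "extract", values: { pagesToExtractArray: [0] }, operations: [
        { type: "wait", values: { id: 1 } }
    ]},
    { type: "rotate", values: { rotation: 90 }, operations: [
        { type: "wait", values: { id: 1 } }
    ]},
    { type: "done", values: { id: 1 }, operations: [] }
];

organizeWaitOperations(operations);
// -> { "1": { waitCount: 2, doneOperation: <the "done" node>, input: [] } }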
207  shared-operations/traverseOperations.js  Normal file
@@ -0,0 +1,207 @@
import { organizeWaitOperations } from "./organizeWaitOperations.js";

/**
 * @typedef PDF
 * @property {string} originalFileName
 * @property {string} fileName
 * @property {Uint8Array} buffer
 */

/**
 *
 * @param {JSON} operations
 * @param {PDF|PDF[]} input
 * @param {import('./functions.js')} Functions
 * @returns {}
 */
export async function * traverseOperations(operations, input, Functions) {
    const waitOperations = organizeWaitOperations(operations);
    /** @type {PDF[]} */ let results = [];
    yield* nextOperation(operations, input);
    return results;

    /**
     *
     * @param {JSON} operations
     * @param {PDF|PDF[]} input
     * @returns {undefined}
     */
    async function * nextOperation(operations, input) {
        if(Array.isArray(operations) && operations.length == 0) { // isEmpty
            if(Array.isArray(input)) {
                console.log("operation done: " + input[0].fileName + (input.length > 1 ? "+" : ""));
                results = results.concat(input);
                return;
            }
            else {
                console.log("operation done: " + input.fileName);
                results.push(input);
                return;
            }
        }

        for (let i = 0; i < operations.length; i++) {
            yield* computeOperation(operations[i], structuredClone(input));
        }
    }

    /**
     *
     * @param {JSON} operation
     * @param {PDF|PDF[]} input
     * @returns {undefined}
     */
    async function * computeOperation(operation, input) {
        yield "Starting: " + operation.type;
        switch (operation.type) {
            case "done": // Skip this, because it is a valid node.
                break;
            case "wait":
                const waitOperation = waitOperations[operation.values.id];

                if(Array.isArray(input)) {
                    waitOperation.input = waitOperation.input.concat(input); // TODO: May have unexpected consequences. Needs further testing!
                }
                else {
                    waitOperation.input.push(input);
                }

                waitOperation.waitCount--;
                if(waitOperation.waitCount == 0) {
                    yield* nextOperation(waitOperation.doneOperation.operations, waitOperation.input);
                }
                break;
            case "extract":
                yield* nToN(input, operation, async (input) => {
                    input.fileName += "_extractedPages";
                    input.buffer = await Functions.extractPages(input.buffer, operation.values["pagesToExtractArray"]);
                });
                break;
            case "impose":
                yield* nToN(input, operation, async (input) => {
                    input.fileName += "_imposed";
                    input.buffer = await Functions.impose(input.buffer, operation.values["nup"], operation.values["format"]);
                });
                break;
            case "merge":
                yield* nToOne(input, operation, async (inputs) => {
                    return {
                        originalFileName: inputs.map(input => input.originalFileName).join("_and_"),
                        fileName: inputs.map(input => input.fileName).join("_and_") + "_merged",
                        buffer: await Functions.mergePDFs(inputs.map(input => input.buffer))
                    }
                });
                break;
            case "rotate":
                yield* nToN(input, operation, async (input) => {
                    input.fileName += "_turned";
                    input.buffer = await Functions.rotatePages(input.buffer, operation.values["rotation"]);
                });
                break;
            case "split":
                // TODO: A split might break the done condition, it may count multiple times. Needs further testing!
                yield* oneToN(input, operation, async (input) => {
                    const splitResult = await Functions.splitPDF(input.buffer, operation.values["pagesToSplitAfterArray"]);

                    const splits = [];
                    for (let j = 0; j < splitResult.length; j++) {
                        splits.push({
                            originalFileName: input.originalFileName,
                            fileName: input.fileName + "_split" + j,
                            buffer: splitResult[j]
                        })
                    }
                    return splits;
                });
                break;
            case "editMetadata":
                yield* nToN(input, operation, async (input) => {
                    input.fileName += "_metadataEdited";
                    input.buffer = await Functions.editMetadata(input.buffer, operation.values["metadata"]);
                });
                break;
            case "organizePages":
                yield* nToN(input, operation, async (input) => {
                    input.fileName += "_pagesOrganized";
                    input.buffer = await Functions.organizePages(input.buffer, operation.values["operation"], operation.values["customOrderString"]);
                });
                break;
            case "removeBlankPages":
                yield* nToN(input, operation, async (input) => {
                    input.fileName += "_removedBlanks";
                    input.buffer = await Functions.removeBlankPages(input.buffer, operation.values["whiteThreashold"]);
                });
                break;
            case "splitOn":
                yield* oneToN(input, operation, async (input) => {
                    const splitResult = await Functions.splitOn(input.buffer, operation.values["type"], operation.values["whiteThreashold"]);
                    const splits = [];
                    for (let j = 0; j < splitResult.length; j++) {
                        splits.push({
                            originalFileName: input.originalFileName,
                            fileName: input.fileName + "_split" + j,
                            buffer: splitResult[j]
                        })
                    }

                    return splits;
                });
                break;
            default:
                throw new Error(`${operation.type} not implemented yet.`);
                break;
        }
    }

    /**
     *
     * @param {PDF|PDF[]} input
     * @param {JSON} operation
     * @returns {undefined}
     */
    async function * nToOne(inputs, operation, callback) {
        inputs = Array.isArray(inputs) ? inputs : [inputs]; // Convert single values to an array, keep arrays as they are.

        inputs = await callback(inputs);
        yield* nextOperation(operation.operations, inputs);
    }

    /**
     *
     * @param {PDF|PDF[]} input
     * @param {JSON} operation
     * @returns {undefined}
     */
    async function * oneToN(input, operation, callback) {
        if(Array.isArray(input)) {
            let output = [];
            for (let i = 0; i < input.length; i++) {
                output = output.concat(await callback(input[i]));
            }
            yield* nextOperation(operation.operations, output);
        }
        else {
            input = await callback(input);
            yield* nextOperation(operation.operations, input);
        }
    }

    /**
     *
     * @param {PDF|PDF[]} input
     * @param {JSON} operation
     * @returns {undefined}
     */
    async function * nToN(input, operation, callback) {
        if(Array.isArray(input)) {
            for (let i = 0; i < input.length; i++) {
                await callback(input[i]);
            }
            yield* nextOperation(operation.operations, input);
        }
        else {
            await callback(input);
            yield* nextOperation(operation.operations, input);
        }
    }
}
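A sketch of driving the generator (hedged: Functions is assumed to be a module that re-exports the operation implementations with the PDF libraries already bound, matching the import('./functions.js') annotation above; the pipeline below is hypothetical):

import * as Functions from './shared-operations/functions.js';   // assumed aggregator module
import { traverseOperations } from './shared-operations/traverseOperations.js';

const operations = [
    { type: "rotate", values: { rotation: 90 }, operations: [] }
];
const input = { originalFileName: "doc.pdf", fileName: "doc", buffer: snapshot };

const traversal = traverseOperations(operations, input, Functions);
let step = await traversal.next();
while (!step.done) {
    console.log(step.value);           // progress strings such as "Starting: rotate"
    step = await traversal.next();
}
const finished = step.value;            // the PDF[] collected in `results`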
8  shared-operations/wasm/browserfs.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long
49  shared-operations/wasm/opencv/opencv_3_4_custom_O3.js  Normal file
File diff suppressed because one or more lines are too long
104  shared-operations/wasm/pdfcpu/pdfcpu-wrapper-browser.js  Normal file
@@ -0,0 +1,104 @@
// imports browserfs via index.html script-tag

let wasmLocation = "/wasm/pdfcpu/";

let fs;
let Buffer;

// TODO: This can later be deferred to load asynchronously
configureFs();
loadWasm();

function configureFs() {
    BrowserFS.configure(
        {
            fs: "InMemory",
        },
        function (e) {
            if (e) {
                // An error happened!
                throw e;
            }
            fs = BrowserFS.BFSRequire("fs");
            Buffer = BrowserFS.BFSRequire("buffer").Buffer;

            window.fs = fs;
            window.Buffer = Buffer;
        }
    );
}

function loadWasm() {
    const script = document.createElement("script");
    script.src = wasmLocation + "/wasm_exec.js";
    script.async = true;
    document.body.appendChild(script);
}

const runWasm = async (param) => {
    if (window.cachedWasmResponse === undefined) {
        const response = await fetch(wasmLocation + "/pdfcpu.wasm");
        const buffer = await response.arrayBuffer();
        window.cachedWasmResponse = buffer;
        window.go = new Go();
    }
    const { instance } = await WebAssembly.instantiate(
        window.cachedWasmResponse,
        window.go.importObject
    );
    window.go.argv = param;
    await window.go.run(instance);
    return window.go.exitCode;
};

async function loadFileAsync(data) {
    console.log(`Writing file to MemoryFS`);
    await fs.writeFile(`/input.pdf`, data);
    console.log(`Write done. Validating...`);
    let exitcode = await runWasm([
        "pdfcpu.wasm",
        "validate",
        "-c",
        "disable",
        `/input.pdf`,
    ]);

    if (exitcode !== 0)
        throw new Error("There was an error validating your PDFs");

    console.log(`File is valid`);
}

export async function impose(snapshot, nup, format) {

};

export async function oneToOne(wasmArray, snapshot) {
    await loadFileAsync(Buffer.from(snapshot));

    console.log("Nuping File");
    let exitcode = await runWasm(wasmArray);

    if (exitcode !== 0) {
        console.error("There was an error nuping your PDFs");
        return;
    }

    await fs.unlink("input.pdf");
    const contents = fs.readFileSync("output.pdf");
    fs.unlink("output.pdf");
    console.log("Your file is ready!");
    return new Uint8Array(contents);
}

export async function manyToOne() {
    //TODO: Do this if necessary for some operations
}

export async function oneToMany() {
    //TODO: Do this if necessary for some operations
}

export async function manyToMany() {
    //TODO: Do this if necessary for some operations
}
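For completeness, a hedged sketch of calling the wrapper directly; the argument array mirrors the one impose.js builds above, and snapshot is assumed to hold the PDF bytes:

import * as pdfcpuWrapper from "./shared-operations/wasm/pdfcpu/pdfcpu-wrapper-browser.js";

// Writes /input.pdf into the in-memory FS, runs `pdfcpu nup`, and returns output.pdf as bytes.
const out = await pdfcpuWrapper.oneToOne(
    ["pdfcpu.wasm", "nup", "-c", "disable", "f:A4", "/output.pdf", "2", "input.pdf"],
    snapshot,
);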
145  shared-operations/wasm/pdfcpu/pdfcpu-wrapper-node.js  Normal file
@@ -0,0 +1,145 @@
import { WasmFs } from '@wasmer/wasmfs';
import path from "path";

let webWasmLocation = "/wasm/";
let nodeWasmLocation = "./public/wasm/";

let fs;
const wasmfs = new WasmFs();

// TODO: This can later be deferred to load asynchronously
(async () => {
    await loadWasm();
    await configureFs();
})();

async function configureFs() {
    // Can't use BrowserFS: https://github.com/jvilk/BrowserFS/issues/271
    fs = wasmfs.fs;
    global.fs = fs;

    console.log("InMemoryFs configured");
}

async function loadWasm() {
    global.crypto = (await import("crypto")).webcrypto; // wasm dependency
    await import("./wasm_exec.js");
}

const runWasm = async (param) => {
    if (global.cachedWasmResponse === undefined) {
        const buffer = (await import("fs")).readFileSync(nodeWasmLocation + "/pdfcpu.wasm");
        global.cachedWasmResponse = buffer;
        global.go = new Go();
    }
    const { instance } = await WebAssembly.instantiate(
        global.cachedWasmResponse,
        global.go.importObject
    );
    global.go.argv = param;
    await global.go.run(instance);
    return global.go.exitCode;
};

async function loadFileAsync(data) {
    console.log(`Writing file to Disk`);
    fs.writeFileSync(`input.pdf`, data);
    console.log(`Write done. Validating...`);
    let exitcode = await runWasm([
        "pdfcpu.wasm",
        "validate",
        "-c",
        "disable",
        `input.pdf`,
    ]);
    if (exitcode !== 0)
        throw new Error("There was an error validating your PDFs");

    // // Get logs of command
    // wasmfs.getStdOut().then(response => {
    //     console.log(response);
    // });

    console.log(`File is valid`);
}

export async function oneToOne(wasmArray, snapshot) {
    await loadFileAsync(Buffer.from(snapshot));

    console.log("Nuping File");

    let exitcode = await runWasm(wasmArray);
    if (exitcode !== 0) {
        console.error("There was an error nuping your PDFs");
        return;
    }
    console.log("Nuping Done");

    await checkExistsWithTimeout("/output.pdf", 1000);
    console.log("Write started...");


    // TODO: Make this more elegant, this waits for the write to finish.
    // Maybe replace wasmfs with https://github.com/streamich/memfs
    let fileSize;
    while (true) {
        fileSize = fs.statSync("/output.pdf").size;
        await new Promise((resolve, reject) => {
            setTimeout(() => {
                resolve();
            }, 50);
        });
        if(fileSize > 0 && fileSize == fs.statSync("/output.pdf").size) // Wait until the file size stops changing.
            break;
    }

    console.log("Could be done?");

    fs.unlinkSync("input.pdf");

    const data = fs.readFileSync("/output.pdf");
    if(data.length == 0) {
        throw Error("File size 0, that should not happen. The write probably didn't finish in time.");
    }
    fs.unlinkSync("output.pdf");
    console.log("Your file is ready!");
    return new Uint8Array(data);
}

export async function manyToOne() {
    //TODO: Do this if necessary for some pdfcpu operations
}

export async function oneToMany() {
    //TODO: Do this if necessary for some pdfcpu operations
}

export async function manyToMany() {
    //TODO: Do this if necessary for some pdfcpu operations
}

// THX: https://stackoverflow.com/questions/26165725/nodejs-check-file-exists-if-not-wait-till-it-exist
function checkExistsWithTimeout(filePath, timeout) {
    return new Promise(function (resolve, reject) {

        var timer = setTimeout(function () {
            watcher.close();
            reject(new Error('File did not exist and was not created during the timeout.'));
        }, timeout);

        fs.access(filePath, fs.constants.R_OK, function (err) {
            if (!err) {
                clearTimeout(timer);
                watcher.close();
                resolve();
            }
        });

        var dir = path.dirname(filePath);
        var watcher = fs.watch(dir, function (eventType, filename) {
            clearTimeout(timer);
            watcher.close();
            resolve();
        });
    });
}
BIN  shared-operations/wasm/pdfcpu/pdfcpu.wasm  Normal file
Binary file not shown.
872  shared-operations/wasm/pdfcpu/wasm_exec.js  Normal file
@@ -0,0 +1,872 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
(() => {
	// Map multiple JavaScript environments to a single common API,
	// preferring web standards over Node.js API.
	//
	// Environments considered:
	// - Browsers
	// - Node.js
	// - Electron
	// - Parcel
	// - Webpack

	console.log("imported")
	if (typeof global !== "undefined") {
		// global already exists
	} else if (typeof window !== "undefined") {
		window.global = window;
	} else if (typeof self !== "undefined") {
		self.global = self;
	} else {
		throw new Error("cannot export Go (neither global, window nor self is defined)");
	}

	let logFS = false
	var handler = {
		get: function (target, property) {
			if (property in target && target[property] instanceof Function) {
				return function () {
					if (logFS) {
						console.log(property, 'called', arguments);
					}
					// Replace the callback
					if (arguments[arguments.length - 1] instanceof Function) {
						var origCB = arguments[arguments.length - 1];
						var newCB = function () {
							if (logFS) {
								console.log('callback for', property, 'get called with args:', arguments);
							}
							return Reflect.apply(origCB, arguments.callee, arguments);
						}
						arguments[arguments.length - 1] = newCB;
					}
					return Reflect.apply(target[property], target, arguments);
				}
			} else {
				return target[property]
			}
		}
	}

	if (!global.require && typeof require !== "undefined") {
		global.require = require;
	}


	if (!global.fs && global.require) {

		//const fs = require("fs");
		if (typeof fs === "object" && fs !== null && Object.keys(fs).length !== 0) {
			global.fs = fs;
		}

	}

	const enosys = () => {
		const err = new Error("not implemented");
		err.code = "ENOSYS";
		return err;
	};

	if (!global.fs) {
		let outputBuf = "";
		global.fs = {
			constants: {
				O_WRONLY: -1,
				O_RDWR: -1,
				O_CREAT: -1,
				O_TRUNC: -1,
				O_APPEND: -1,
				O_EXCL: -1
			}, // unused
			writeSync(fd, buf) {
				outputBuf += decoder.decode(buf);
				const nl = outputBuf.lastIndexOf("\n");
				if (nl != -1) {
					console.log(outputBuf.substr(0, nl));
					outputBuf = outputBuf.substr(nl + 1);
				}
				return buf.length;
			},
			write(fd, buf, offset, length, position, callback) {
				if (offset !== 0 || length !== buf.length || position !== null) {
					callback(enosys());
					return;
				}
				const n = this.writeSync(fd, buf);
				callback(null, n);
			},
			chmod(path, mode, callback) {
				callback(enosys());
			},
			chown(path, uid, gid, callback) {
				callback(enosys());
			},
			close(fd, callback) {
				callback(enosys());
			},
			fchmod(fd, mode, callback) {
				callback(enosys());
			},
			fchown(fd, uid, gid, callback) {
				callback(enosys());
			},
			fstat(fd, callback) {
				callback(enosys());
			},
			fsync(fd, callback) {
				callback(null);
			},
			ftruncate(fd, length, callback) {
				callback(enosys());
			},
			lchown(path, uid, gid, callback) {
				callback(enosys());
			},
			link(path, link, callback) {
				callback(enosys());
			},
			lstat(path, callback) {
				callback(enosys());
			},
			mkdir(path, perm, callback) {
				callback(enosys());
			},
			open(path, flags, mode, callback) {
				callback(enosys());
			},
			read(fd, buffer, offset, length, position, callback) {
				callback(enosys());
			},
			readdir(path, callback) {
				callback(enosys());
			},
			readlink(path, callback) {
				callback(enosys());
			},
			rename(from, to, callback) {
				callback(enosys());
			},
			rmdir(path, callback) {
				callback(enosys());
			},
			stat(path, callback) {
				callback(enosys());
			},
			symlink(path, link, callback) {
				callback(enosys());
			},
			truncate(path, length, callback) {
				callback(enosys());
			},
			unlink(path, callback) {
				callback(enosys());
			},
			utimes(path, atime, mtime, callback) {
				callback(enosys());
			},
		};
	}

	if (!global.process) {
		global.process = {
			getuid() {
				return -1;
			},
			getgid() {
				return -1;
			},
			geteuid() {
				return -1;
			},
			getegid() {
				return -1;
			},
			getgroups() {
				throw enosys();
			},
			pid: -1,
			ppid: -1,
			umask() {
				throw enosys();
			},
			cwd() {
				throw enosys();
			},
			chdir() {
				throw enosys();
			},
		}
	}

	if (!global.crypto && global.require) {
		const nodeCrypto = require("crypto");
		global.crypto = {
			getRandomValues(b) {
				nodeCrypto.randomFillSync(b);
			},
		};
	}
	if (!global.crypto) {
		throw new Error("global.crypto is not available, polyfill required (getRandomValues only)");
	}

	if (!global.performance) {
		global.performance = {
			now() {
				const [sec, nsec] = process.hrtime();
				return sec * 1000 + nsec / 1000000;
			},
		};
	}

	if (!global.TextEncoder && global.require) {
		global.TextEncoder = require("util").TextEncoder;
	}
	if (!global.TextEncoder) {
		throw new Error("global.TextEncoder is not available, polyfill required");
	}

	if (!global.TextDecoder && global.require) {
		global.TextDecoder = require("util").TextDecoder;
	}
	if (!global.TextDecoder) {
		throw new Error("global.TextDecoder is not available, polyfill required");
	}


	const isNodeJS = global.process && global.process.title === "node";

	if (!isNodeJS) {
		// console.log("ini browser fs")
		// var myfs = global.BrowserFS.BFSRequire('fs');
		// global.Buffer = global.BrowserFS.BFSRequire('buffer').Buffer;
		// global.fs = myfs;

		global.fs.constants = {
			O_RDONLY: 0,
			O_WRONLY: 1,
			O_RDWR: 2,
			O_CREAT: 64,
			O_CREATE: 64,
			O_EXCL: 128,
			O_NOCTTY: 256,
			O_TRUNC: 512,
			O_APPEND: 1024,
			O_DIRECTORY: 65536,
			O_NOATIME: 262144,
			O_NOFOLLOW: 131072,
			O_SYNC: 1052672,
			O_DIRECT: 16384,
			O_NONBLOCK: 2048,
		};

		let outputBuf = "";

		global.fs.writeSyncOriginal = global.fs.writeSync
		global.fs.writeSync = function (fd, buf) {
			if (fd === 1 || fd === 2) {
				outputBuf += decoder.decode(buf);
				const nl = outputBuf.lastIndexOf("\n");
				if (nl != -1) {
					console.log(outputBuf.substr(0, nl));
					outputBuf = outputBuf.substr(nl + 1);
				}
				return buf.length;
			} else {
				return global.fs.writeSyncOriginal(...arguments);
			}
		};

		global.fs.writeOriginal = global.fs.write
		global.fs.write = function (fd, buf, offset, length, position, callback) {
			// (corresponding to STDOUT/STDERR)
			if (fd === 1 || fd === 2) {
				if (offset !== 0 || length !== buf.length || position !== null) {
					throw new Error("not implemented");
				}
				const n = this.writeSync(fd, buf);
				callback(null, n, buf);
			} else {
				// buf: read buf first
				arguments[1] = global.Buffer.from(arguments[1]);
				return global.fs.writeOriginal(...arguments);
			}
		};



		global.fs.openOriginal = global.fs.open
		global.fs.open = function (path, flags, mode, callback) {
			var myflags = 'r';
			var O = global.fs.constants;

			// Convert numeric flags to string flags
			// FIXME: maybe wrong...
			console.log("open dir?", path, 'flag', flags, myflags)
			if (flags & O.O_WRONLY) { // 'w'
				myflags = 'w';
				if (flags & O.O_EXCL) {
					myflags = 'wx';
				}
			} else if (flags & O.O_RDWR) { // 'r+' or 'w+'
				if (flags & O.O_CREAT && flags & O.O_TRUNC) { // w+
					if (flags & O.O_EXCL) {
						myflags = 'wx+';
					} else {
						myflags = 'w+';
					}
				} else { // r+
					myflags = 'r+';
				}
			} else if (flags & O.O_APPEND) { // 'a'
				console.log("append error")
				throw new Error("Not implemented");
			} else {
				// Open the file
				myflags = 'r+';
				console.log("open dir?", path, 'flag', flags, myflags)
			}


			return global.fs.openOriginal(path, myflags, mode, callback);
		};

		global.fs.fstatOriginal = global.fs.fstat;
		global.fs.fstat = function (fd, callback) {
			return global.fs.fstatOriginal(fd, function () {
				var retStat = arguments[1];
				delete retStat['fileData'];
				retStat.atimeMs = retStat.atime.getTime();
				retStat.mtimeMs = retStat.mtime.getTime();
				retStat.ctimeMs = retStat.ctime.getTime();
				retStat.birthtimeMs = retStat.birthtime.getTime();
				return callback(arguments[0], retStat);

			});
		};



		global.fs.closeOriginal = global.fs.close;
		global.fs.close = function (fd, callback) {
			return global.fs.closeOriginal(fd, function () {
				if (typeof arguments[0] === 'undefined') arguments[0] = null;
				return callback(...arguments);
			});
		}

		// global.fs.renameOriginal = global.fs.rename
		// global.fs.rename = function (from, to, callback) {
		//     console.log("rename a0", arguments[0])
		//     global.fs.renameOriginal(from, to);
		//     callback(arguments[0])
		// }

		// global.fs.renameSyncOriginal = global.fs.renameSync
		// global.fs.renameSync = function(fd, options) {
		//     console.log("Sync")
		// }


		global.fs = new Proxy(global.fs, handler);
	}

	// End of polyfills for common API.

	const encoder = new TextEncoder("utf-8");
	const decoder = new TextDecoder("utf-8");

	global.Go = class {
		constructor() {
			this.argv = ["js"];
			this.env = {};
			this.exit = (code) => {
				this.exitCode = code;
				if (code !== 0) {
					console.warn("exit code:", code);
				}
			};
			this._exitPromise = new Promise((resolve) => {
				this._resolveExitPromise = resolve;
			});
			this._pendingEvent = null;
			this._scheduledTimeouts = new Map();
			this._nextCallbackTimeoutID = 1;

			const setInt64 = (addr, v) => {
				this.mem.setUint32(addr + 0, v, true);
				this.mem.setUint32(addr + 4, Math.floor(v / 4294967296), true);
			}

			const getInt64 = (addr) => {
				const low = this.mem.getUint32(addr + 0, true);
				const high = this.mem.getInt32(addr + 4, true);
				return low + high * 4294967296;
			}

			const loadValue = (addr) => {
				const f = this.mem.getFloat64(addr, true);
				if (f === 0) {
					return undefined;
				}
				if (!isNaN(f)) {
					return f;
				}

				const id = this.mem.getUint32(addr, true);
				return this._values[id];
			}

			const storeValue = (addr, v) => {
				const nanHead = 0x7FF80000;

				if (typeof v === "number" && v !== 0) {
					if (isNaN(v)) {
						this.mem.setUint32(addr + 4, nanHead, true);
						this.mem.setUint32(addr, 0, true);
						return;
					}
					this.mem.setFloat64(addr, v, true);
					return;
				}

				if (v === undefined) {
					this.mem.setFloat64(addr, 0, true);
					return;
				}

				let id = this._ids.get(v);
				if (id === undefined) {
					id = this._idPool.pop();
					if (id === undefined) {
						id = this._values.length;
					}
					this._values[id] = v;
					this._goRefCounts[id] = 0;
					this._ids.set(v, id);
				}
				this._goRefCounts[id]++;
				let typeFlag = 0;
				switch (typeof v) {
					case "object":
						if (v !== null) {
							typeFlag = 1;
						}
						break;
					case "string":
						typeFlag = 2;
						break;
					case "symbol":
						typeFlag = 3;
						break;
					case "function":
						typeFlag = 4;
						break;
				}
				this.mem.setUint32(addr + 4, nanHead | typeFlag, true);
				this.mem.setUint32(addr, id, true);
			}

			const loadSlice = (addr) => {
				const array = getInt64(addr + 0);
				const len = getInt64(addr + 8);
				return new Uint8Array(this._inst.exports.mem.buffer, array, len);
			}

			const loadSliceOfValues = (addr) => {
				const array = getInt64(addr + 0);
				const len = getInt64(addr + 8);
				const a = new Array(len);
				for (let i = 0; i < len; i++) {
					a[i] = loadValue(array + i * 8);
				}
				return a;
			}

			const loadString = (addr) => {
				const saddr = getInt64(addr + 0);
				const len = getInt64(addr + 8);
				return decoder.decode(new DataView(this._inst.exports.mem.buffer, saddr, len));
			}

			const timeOrigin = Date.now() - performance.now();
			this.importObject = {
				go: {
					// Go's SP does not change as long as no Go code is running. Some operations (e.g. calls, getters and setters)
					// may synchronously trigger a Go event handler. This makes Go code get executed in the middle of the imported
					// function. A goroutine can switch to a new stack if the current stack is too small (see morestack function).
					// This changes the SP, thus we have to update the SP used by the imported function.

					// func wasmExit(code int32)
					"runtime.wasmExit": (sp) => {
						sp >>>= 0;
						const code = this.mem.getInt32(sp + 8, true);
						this.exited = true;
						delete this._inst;
						delete this._values;
						delete this._goRefCounts;
						delete this._ids;
						delete this._idPool;
						this.exit(code);
					},

					// func wasmWrite(fd uintptr, p unsafe.Pointer, n int32)
					"runtime.wasmWrite": (sp) => {
						sp >>>= 0;
						const fd = getInt64(sp + 8);
						const p = getInt64(sp + 16);
						const n = this.mem.getInt32(sp + 24, true);
						fs.writeSync(fd, new Uint8Array(this._inst.exports.mem.buffer, p, n));
					},

					// func resetMemoryDataView()
					"runtime.resetMemoryDataView": (sp) => {
						sp >>>= 0;
						this.mem = new DataView(this._inst.exports.mem.buffer);
					},

					// func nanotime1() int64
					"runtime.nanotime1": (sp) => {
						sp >>>= 0;
						setInt64(sp + 8, (timeOrigin + performance.now()) * 1000000);
					},

					// func walltime1() (sec int64, nsec int32)
					"runtime.walltime1": (sp) => {
						sp >>>= 0;
						const msec = (new Date).getTime();
						setInt64(sp + 8, msec / 1000);
						this.mem.setInt32(sp + 16, (msec % 1000) * 1000000, true);
					},

					// func scheduleTimeoutEvent(delay int64) int32
					"runtime.scheduleTimeoutEvent": (sp) => {
						sp >>>= 0;
						const id = this._nextCallbackTimeoutID;
						this._nextCallbackTimeoutID++;
						this._scheduledTimeouts.set(id, setTimeout(
							() => {
								this._resume();
								while (this._scheduledTimeouts.has(id)) {
									// for some reason Go failed to register the timeout event, log and try again
									// (temporary workaround for https://github.com/golang/go/issues/28975)
									console.warn("scheduleTimeoutEvent: missed timeout event");
									this._resume();
								}
							},
							getInt64(sp + 8) + 1, // setTimeout has been seen to fire up to 1 millisecond early
						));
						this.mem.setInt32(sp + 16, id, true);
					},

					// func clearTimeoutEvent(id int32)
					"runtime.clearTimeoutEvent": (sp) => {
						sp >>>= 0;
						const id = this.mem.getInt32(sp + 8, true);
						clearTimeout(this._scheduledTimeouts.get(id));
						this._scheduledTimeouts.delete(id);
					},

					// func getRandomData(r []byte)
					"runtime.getRandomData": (sp) => {
						sp >>>= 0;
						crypto.getRandomValues(loadSlice(sp + 8));
					},

					// func finalizeRef(v ref)
					"syscall/js.finalizeRef": (sp) => {
						sp >>>= 0;
						const id = this.mem.getUint32(sp + 8, true);
						this._goRefCounts[id]--;
						if (this._goRefCounts[id] === 0) {
							const v = this._values[id];
							this._values[id] = null;
							this._ids.delete(v);
							this._idPool.push(id);
						}
					},

					// func stringVal(value string) ref
					"syscall/js.stringVal": (sp) => {
						sp >>>= 0;
						storeValue(sp + 24, loadString(sp + 8));
					},

					// func valueGet(v ref, p string) ref
					"syscall/js.valueGet": (sp) => {
						sp >>>= 0;
						const result = Reflect.get(loadValue(sp + 8), loadString(sp + 16));
						sp = this._inst.exports.getsp() >>> 0; // see comment above
						storeValue(sp + 32, result);
					},

					// func valueSet(v ref, p string, x ref)
					"syscall/js.valueSet": (sp) => {
						sp >>>= 0;
						Reflect.set(loadValue(sp + 8), loadString(sp + 16), loadValue(sp + 32));
					},

					// func valueDelete(v ref, p string)
					"syscall/js.valueDelete": (sp) => {
						sp >>>= 0;
						Reflect.deleteProperty(loadValue(sp + 8), loadString(sp + 16));
					},

					// func valueIndex(v ref, i int) ref
					"syscall/js.valueIndex": (sp) => {
						sp >>>= 0;
						storeValue(sp + 24, Reflect.get(loadValue(sp + 8), getInt64(sp + 16)));
					},

					// valueSetIndex(v ref, i int, x ref)
					"syscall/js.valueSetIndex": (sp) => {
						sp >>>= 0;
						Reflect.set(loadValue(sp + 8), getInt64(sp + 16), loadValue(sp + 24));
					},

					// func valueCall(v ref, m string, args []ref) (ref, bool)
					"syscall/js.valueCall": (sp) => {
						sp >>>= 0;
						try {
							const v = loadValue(sp + 8);
							const m = Reflect.get(v, loadString(sp + 16));
							const args = loadSliceOfValues(sp + 32);
							const result = Reflect.apply(m, v, args);
							sp = this._inst.exports.getsp() >>> 0; // see comment above
							storeValue(sp + 56, result);
							this.mem.setUint8(sp + 64, 1);
						} catch (err) {
							storeValue(sp + 56, err);
							this.mem.setUint8(sp + 64, 0);
						}
					},

					// func valueInvoke(v ref, args []ref) (ref, bool)
					"syscall/js.valueInvoke": (sp) => {
						sp >>>= 0;
						try {
							const v = loadValue(sp + 8);
							const args = loadSliceOfValues(sp + 16);
							const result = Reflect.apply(v, undefined, args);
							sp = this._inst.exports.getsp() >>> 0; // see comment above
							storeValue(sp + 40, result);
							this.mem.setUint8(sp + 48, 1);
						} catch (err) {
							storeValue(sp + 40, err);
							this.mem.setUint8(sp + 48, 0);
						}
					},

					// func valueNew(v ref, args []ref) (ref, bool)
					"syscall/js.valueNew": (sp) => {
						sp >>>= 0;
						try {
							const v = loadValue(sp + 8);
							const args = loadSliceOfValues(sp + 16);
							const result = Reflect.construct(v, args);
							sp = this._inst.exports.getsp() >>> 0; // see comment above
							storeValue(sp + 40, result);
							this.mem.setUint8(sp + 48, 1);
						} catch (err) {
							storeValue(sp + 40, err);
							this.mem.setUint8(sp + 48, 0);
						}
					},

					// func valueLength(v ref) int
					"syscall/js.valueLength": (sp) => {
						sp >>>= 0;
						setInt64(sp + 16, parseInt(loadValue(sp + 8).length));
					},

					// valuePrepareString(v ref) (ref, int)
					"syscall/js.valuePrepareString": (sp) => {
						sp >>>= 0;
						const str = encoder.encode(String(loadValue(sp + 8)));
						storeValue(sp + 16, str);
						setInt64(sp + 24, str.length);
					},

					// valueLoadString(v ref, b []byte)
					"syscall/js.valueLoadString": (sp) => {
						sp >>>= 0;
						const str = loadValue(sp + 8);
						loadSlice(sp + 16).set(str);
					},

					// func valueInstanceOf(v ref, t ref) bool
					"syscall/js.valueInstanceOf": (sp) => {
						sp >>>= 0;
						this.mem.setUint8(sp + 24, (loadValue(sp + 8) instanceof loadValue(sp + 16)) ? 1 : 0);
					},

					// func copyBytesToGo(dst []byte, src ref) (int, bool)
					"syscall/js.copyBytesToGo": (sp) => {
						sp >>>= 0;
						const dst = loadSlice(sp + 8);
						const src = loadValue(sp + 32);
						if (!(src instanceof Uint8Array || src instanceof Uint8ClampedArray)) {
							this.mem.setUint8(sp + 48, 0);
							return;
						}
						const toCopy = src.subarray(0, dst.length);
						dst.set(toCopy);
						setInt64(sp + 40, toCopy.length);
						this.mem.setUint8(sp + 48, 1);
					},

					// func copyBytesToJS(dst ref, src []byte) (int, bool)
					"syscall/js.copyBytesToJS": (sp) => {
						sp >>>= 0;
						const dst = loadValue(sp + 8);
						const src = loadSlice(sp + 16);
						if (!(dst instanceof Uint8Array || dst instanceof Uint8ClampedArray)) {
							this.mem.setUint8(sp + 48, 0);
							return;
						}
						const toCopy = src.subarray(0, dst.length);
						dst.set(toCopy);
						setInt64(sp + 40, toCopy.length);
						this.mem.setUint8(sp + 48, 1);
					},

					"debug": (value) => {
						console.log(value);
					},
				}
			};
		}

		async run(instance) {
			if (!(instance instanceof WebAssembly.Instance)) {
				throw new Error("Go.run: WebAssembly.Instance expected");
			}
			this._inst = instance;
			this.mem = new DataView(this._inst.exports.mem.buffer);
			this._values = [ // JS values that Go currently has references to, indexed by reference id
				NaN,
				0,
				null,
				true,
				false,
				global,
				this,
			];
			this._goRefCounts = new Array(this._values.length).fill(Infinity); // number of references that Go has to a JS value, indexed by reference id
			this._ids = new Map([ // mapping from JS values to reference ids
				[0, 1],
				[null, 2],
				[true, 3],
				[false, 4],
				[global, 5],
				[this, 6],
			]);
			this._idPool = []; // unused ids that have been garbage collected
			this.exited = false; // whether the Go program has exited

			// Pass command line arguments and environment variables to WebAssembly by writing them to the linear memory.
			let offset = 4096;

			const strPtr = (str) => {
				const ptr = offset;
				const bytes = encoder.encode(str + "\0");
				new Uint8Array(this.mem.buffer, offset, bytes.length).set(bytes);
				offset += bytes.length;
				if (offset % 8 !== 0) {
					offset += 8 - (offset % 8);
				}
				return ptr;
			};

			const argc = this.argv.length;

			const argvPtrs = [];
			this.argv.forEach((arg) => {
				argvPtrs.push(strPtr(arg));
			});
			argvPtrs.push(0);

			const keys = Object.keys(this.env).sort();
			keys.forEach((key) => {
				argvPtrs.push(strPtr(`${key}=${this.env[key]}`));
			});
			argvPtrs.push(0);

			const argv = offset;
			argvPtrs.forEach((ptr) => {
				this.mem.setUint32(offset, ptr, true);
				this.mem.setUint32(offset + 4, 0, true);
				offset += 8;
			});

			this._inst.exports.run(argc, argv);
			if (this.exited) {
				this._resolveExitPromise();
			}
			await this._exitPromise;
		}

		_resume() {
			if (this.exited) {
				throw new Error("Go program has already exited");
			}
			this._inst.exports.resume();
			if (this.exited) {
				this._resolveExitPromise();
			}
		}

		_makeFuncWrapper(id) {
			const go = this;
			return function () {
				const event = {
					id: id,
					this: this,
					args: arguments
				};
				go._pendingEvent = event;
				go._resume();
				return event.result;
			};
		}
	}

	if (
		typeof module !== "undefined" &&
		global.require &&
		global.require.main === module &&
		global.process &&
		global.process.versions &&
		!global.process.versions.electron
	) {
		if (process.argv.length < 3) {
			console.error("usage: go_js_wasm_exec [wasm binary] [arguments]");
			process.exit(1);
		}

		const go = new Go();
		go.argv = process.argv.slice(2);
		go.env = Object.assign({
			TMPDIR: require("os").tmpdir()
		}, process.env);
		go.exit = process.exit;
		WebAssembly.instantiate(fs.readFileSync(process.argv[2]), go.importObject).then((result) => {
			process.on("exit", (code) => { // Node.js exits if no event handler is pending
				if (code === 0 && !go.exited) {
					// deadlock, make Go print error and stack traces
					go._pendingEvent = {
						id: 0
					};
					go._resume();
				}
			});
			return go.run(result.instance);
		}).catch((err) => {
			console.error(err);
			process.exit(1);
		});
	}
})();