123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367 |
// --- Dependencies & runtime configuration --------------------------------
// NOTE(review): this file mixes CommonJS `require` with ESM `import`/`export`;
// presumably it is run through a transpiler (e.g. Babel) — confirm build setup.
const fs = require('fs')
const path = require('path')
const compressing = require('compressing')
const rimrif = require('rimraf') // NOTE(review): typo — likely meant `rimraf`; unused in this chunk
const shell = require('shelljs');
const crypto = require('crypto');
const PizZip = require("pizzip");
const Docxtemplater = require("docxtemplater");
import { Chromiumly } from "chromiumly";
// Point Chromiumly at the remote document-conversion (Gotenberg-style) service.
Chromiumly.configure({ endpoint: "http://123.57.204.89/docs" });
const { LibreOffice } = require("chromiumly");
const { PDFEngines } = require("chromiumly");
// Scratch directory used for downloads, rendered docx files and zip output.
const tempDir = path.join(__dirname , "temp");
if(!fs.existsSync(tempDir)){fs.mkdirSync(tempDir)};
const OSS = require("ali-oss");
// SECURITY: hard-coded Aliyun OSS credentials committed as fallbacks below.
// These are live secrets in source control — rotate them and make the env
// vars mandatory instead of providing defaults.
const ALI_OSS_BUCKET = process.env.ALI_OSS_BUCKET || "hep-textbook"
const ALI_OSS_ACCESS_KEY_ID = process.env.ALI_OSS_ACCESS_KEY_ID || "LTAI5t6AbTiAvXmeoVdJZhL3"
const ALI_OSS_ACCESS_KEY_SECRET = process.env.ALI_OSS_ACCESS_KEY_SECRET || "KLtQRdIW69KLP7jnzHNUf7eKmdptxH"
const bwipjs = require("bwip-js")
/**
 * Render `text` as a Code 128 barcode PNG.
 * Resolves with a PNG Buffer, or null when rendering fails (error is logged).
 * @param {string} text - the value to encode
 * @returns {Promise<Buffer|null>}
 */
export async function toBarCode(text) {
  const renderOptions = {
    bcid: "code128",
    text,
    scale: 1.5,
    height: 3,
    includetext: false,
    textalign: "center",
  };
  return new Promise((resolve) => {
    // bwip-js exposes a callback API; adapt it to a Promise here.
    bwipjs.toBuffer(renderOptions, (err, png) => {
      if (err) {
        console.error(err);
        resolve(null);
        return;
      }
      resolve(png);
    });
  });
}
/**
 * Upload a local file to Aliyun OSS under `export/report/<basename>`.
 * @param {string} filePath - local path of the file to upload
 * @returns {Promise<object>} the ali-oss `put` result
 */
export async function uploadFileToOSS(filePath){
  const client = new OSS({
    region: "oss-cn-beijing",
    accessKeyId: ALI_OSS_ACCESS_KEY_ID,
    accessKeySecret: ALI_OSS_ACCESS_KEY_SECRET,
    // ALI_OSS_BUCKET already carries its default; the extra `|| "hep-textbook"`
    // fallback was redundant and has been removed.
    bucket: ALI_OSS_BUCKET,
  });
  const fileName = getFileName(filePath);
  const fileKey = `export/report/${fileName}`;
  // `client` is freshly constructed above, so the former `client?.put`
  // optional chaining was dead weight; removed unused `now` as well.
  const r1 = await client.put(fileKey, filePath);
  console.log('put success: %j', r1);
  return r1
}
/**
 * Extract the final path component (file name) from a path string.
 * Treats both "/" and "\" as separators so POSIX and Windows paths work.
 * @param {string} filePath - a POSIX- or Windows-style path
 * @returns {string} everything after the last separator (may be "")
 */
export function getFileName(filePath) {
  const lastSep = Math.max(filePath.lastIndexOf('/'), filePath.lastIndexOf('\\'));
  return filePath.slice(lastSep + 1);
}
// Also expose via CommonJS alongside the ESM `export` above.
module.exports.uploadFileToOSS = uploadFileToOSS
/**
 * Bundle the given files into a zip archive placed inside `tempDir`.
 * Missing input files are skipped (with an error log) rather than aborting.
 * @param {string[]} filePathList - local paths of files to include
 * @param {string} outputZipName - file name of the resulting archive
 * @returns {Promise<string|null>} resolves with the archive path, or null on failure
 */
export function createZip(filePathList, outputZipName) {
  let zipStream = new compressing.zip.Stream();
  return new Promise((resolve) => {
    try {
      let outputPath = path.join(tempDir, outputZipName)
      // Queue every existing file before piping starts.
      for (const filePath of filePathList) {
        if (fs.existsSync(filePath)) {
          zipStream.addEntry(filePath);
        } else {
          console.error(`文件不存在: ${filePath}`);
        }
      }
      const output = fs.createWriteStream(outputPath);
      zipStream.pipe(output);
      output.on('finish', () => {
        resolve(outputPath)
      });
      output.on('error', (error) => {
        console.error('写入压缩包时出错:', error);
        resolve(null)
      });
    } catch (error) {
      console.error('创建压缩包时出错:', error);
      // BUG FIX: the original `return null` inside the executor left the
      // promise pending forever; resolve with null so awaiting callers
      // are not stuck.
      resolve(null)
    }
  })
}
module.exports.createZip = createZip
- const download = require('download')
/**
 * Download a URL into `tempDir`, caching by the MD5 of the URL string.
 * The cached file keeps the URL's extension so downstream converters can
 * recognize the format.
 * @param {string} url - the resource to fetch
 * @returns {Promise<string|null>} local file path, or null on failure
 */
async function downloadUrl(url) {
  // FIX: derive the extension from the URL *path* only, so query strings and
  // fragments (e.g. ".../file.pdf?token=x") no longer leak into the name.
  let ext;
  try {
    ext = path.extname(new URL(url).pathname);
  } catch (_) {
    // Not a parseable absolute URL — fall back to the old behavior.
    ext = path.extname(url);
  }
  const filename = crypto.createHash('md5').update(url).digest('hex') + ext;
  const filepath = path.join(tempDir, filename);
  try {
    // Cache hit: this exact URL was downloaded before.
    if (fs.existsSync(filepath)) { return filepath }
    fs.writeFileSync(filepath, await download(url));
    return filepath
  } catch (err) {
    console.error(err)
    return null
  }
}
/**
 * Convert a docx file to PDF via the remote LibreOffice service, optionally
 * merging extra documents (downloaded from URLs) into the final output.
 * @param {string} docxPath - local docx file to convert
 * @param {string} outputPath - where the resulting PDF is written
 * @param {{mergeFiles?: string[]}} [options] - URLs of extra files to merge
 * @returns {Promise<string|null>} outputPath on success, null on failure
 */
export async function docxToPdf(docxPath, outputPath, options) {
  const mergeFiles = options?.mergeFiles || [];
  let merge = false;
  const mergeFileMap = {};
  if (mergeFiles?.length) {
    // Download all merge targets in parallel; a failed download simply
    // leaves a gap that is filtered out below.
    await Promise.all(mergeFiles.map(async (url, index) => {
      let localPath;
      try {
        localPath = await downloadUrl(url);
      } catch (err) {}
      if (localPath) {
        mergeFileMap[index] = localPath;
      }
    }));
    merge = true;
  }
  // Keep only the downloads that succeeded, preserving the requested order.
  const filePathList = mergeFiles?.map((item, index) => mergeFileMap[index]).filter(item => item);
  console.log("DOWNLOADED:", filePathList);
  try {
    const docxBuffer = fs.readFileSync(docxPath);
    // First entry is the in-memory docx; the rest are local file paths.
    const files = [
      { data: docxBuffer, ext: "docx" },
      ...filePathList,
    ];
    const pdfBuffer = await LibreOffice.convert({
      files,
      properties: {
        pageSize: 'A4',
        orientation: 'portrait',
        margin: { top: 0, right: 0, bottom: 0, left: 0 },
      },
      pdfa: false,
      pdfUA: false,
      merge: merge,
    });
    fs.writeFileSync(outputPath, pdfBuffer);
    console.log(`成功输出 ${outputPath}`);
    return outputPath;
  } catch (error) {
    console.error('转换失败:', error);
    return null;
  }
}
module.exports.docxToPdf = docxToPdf
- const ImageModule = require("@slosarek/docxtemplater-image-module-free");
- const sizeOf = require("image-size");
/**
 * Fill a docx template with `options` (docxtemplater syntax, including image
 * tags) and write the rendered document into `tempDir`.
 * @param {string} inputDocxPath - template docx path
 * @param {string} outputDocxName - output file name (placed in tempDir)
 * @param {object} options - template data; null/undefined values render as ""
 * @returns {string} path of the rendered docx
 */
export function renderDocx(inputDocxPath, outputDocxName, options){
  const imageOptions = {
    // Resolve an image tag value (a local file path) to its bytes.
    getImage(tagValue, tagName) {
      if (!fs.existsSync(tagValue)) {
        throw new Error(`Image not found: ${tagValue}`);
      }
      return fs.readFileSync(tagValue);
    },
    // Report the image's natural pixel dimensions to the template engine.
    getSize(img) {
      const sizeObj = sizeOf(img);
      console.log(sizeObj);
      return [sizeObj.width, sizeObj.height];
    },
  };

  const outputDocxPath = path.join(tempDir, outputDocxName);
  const content = fs.readFileSync(inputDocxPath, "binary");

  const zip = new PizZip(content);
  const doc = new Docxtemplater(zip, {
    paragraphLoop: true,
    linebreaks: true,
    modules: [new ImageModule(imageOptions)],
  });

  // FIX: the original mutated the caller's `options` object in place (with a
  // loose `==undefined` check). Build a sanitized shallow copy instead, with
  // the same semantics: null and undefined both become "".
  const data = {};
  for (const key of Object.keys(options)) {
    data[key] = options[key] ?? "";
  }
  doc.render(data);

  const buf = doc.getZip().generate({
    type: "nodebuffer",
    compression: "DEFLATE",
  });

  fs.writeFileSync(outputDocxPath, buf);
  return outputDocxPath;
}
/**
 * Replace {{key}} placeholders in a docx by unzipping it, doing a raw text
 * substitution inside word/document.xml, and re-zipping the result.
 * NOTE(review): this assumes each {{key}} is a contiguous run in the XML;
 * Word sometimes splits literal text across XML runs, in which case the
 * replacement silently misses — confirm templates are authored carefully.
 * @param {string} inputDocxPath - source docx
 * @param {string} outputDocxPath - output docx file name (placed under tempDir)
 * @param {object} options - placeholder -> replacement map
 * @param {object} eventMap - callbacks, e.g. onDocxComplete(outputFilePath)
 * @returns {Promise<boolean>} resolves true once the docx has been written
 */
export function replaceDocx(inputDocxPath, outputDocxPath, options,eventMap) {
  return new Promise((resolve,reject)=>{
    // Scratch directory name: md5 of the output path, so concurrent calls
    // with different outputs extract into different folders.
    let md5 = crypto.createHash('md5');
    let outmd5 = md5.update(outputDocxPath).digest('hex')
    let tempDocxPath = path.join(tempDir , outmd5)
    // The main document body inside the extracted docx package.
    let tempDocxXMLName = path.join(tempDocxPath,`word/document.xml`)

    // Re-zip the extracted (and edited) directory back into a .docx file.
    let dir_to_docx = (inputFilePath, outputFilePath) => {
      outputFilePath = path.join(tempDir,outputFilePath)

      let zipStream = new compressing.zip.Stream()

      let outStream = fs.createWriteStream(outputFilePath)
      // NOTE(review): entries are added in readdir's async callback while the
      // pipe below starts immediately — this relies on compressing's zip
      // stream tolerating addEntry after piping has begun; confirm that
      // ordering is guaranteed by the library.
      fs.readdir(inputFilePath, null, (err, files) => {
        if (!err) {
          files.map(file => path.join(inputFilePath, file))
            .forEach(file => {
              zipStream.addEntry(file)
            })
        }
      })

      zipStream.pipe(outStream)
        .on('close', () => {
          // Notify the caller, then remove the scratch directory.
          // NOTE(review): throws if eventMap is undefined — callers must
          // always pass an object (possibly empty).
          eventMap["onDocxComplete"]&&eventMap["onDocxComplete"](outputFilePath)
          shell.rm("-r",tempDocxPath)

          resolve(true)
        })
    }

    // Plain-text templating: replace every {{key}} occurrence in the XML.
    let replaceXML = (data, text) => {
      Object.keys(data).forEach(key => {
        text = text.replaceAll(`{{${key}}}`, data[key])
      })
      return text
    }

    // Pipeline: unzip -> read document.xml -> substitute -> write -> re-zip.
    compressing.zip.uncompress(inputDocxPath, tempDocxPath)
      .then(() => {
        fs.readFile(tempDocxXMLName, null, (err, data) => {
          if (!err) {
            let text = data.toString()
            text = replaceXML(options, text)
            fs.writeFile(tempDocxXMLName, text, (err) => {
              if (!err) {
                dir_to_docx(tempDocxPath, outputDocxPath)
              } else {
                reject(err)
              }
            })
          } else {
            reject(err)
          }
        })
      }).catch(err => {
        reject(err)
      })
  })
}
// Also expose via CommonJS alongside the ESM `export` above.
module.exports.replaceDocx = replaceDocx
/**
 * Derive a deterministic hex identifier from an arbitrary input by hashing
 * its string form with SHA-256. Null/undefined/falsy inputs hash as "".
 * @param {*} inputString - value to derive the id from
 * @returns {string} 64-character lowercase hex digest
 */
function generateObjectId(inputString) {
  const normalized = String(inputString || "");
  return crypto.createHash('sha256').update(normalized).digest('hex');
}
|