// node-canvas.js — FaceAPI demo for NodeJS using the node-canvas library
  1. /**
  2. * FaceAPI Demo for NodeJS
  3. * - Uses external library [canvas](https://www.npmjs.com/package/canvas) to decode image
  4. * - Loads image from provided param
  5. * - Outputs results to console
  6. */
  7. // canvas library provides full canvas (load/draw/write) functionality for nodejs
  8. // must be installed manually as it just a demo dependency and not actual face-api dependency
  9. const canvas = require('canvas'); // eslint-disable-line node/no-missing-require
  10. const fs = require('fs');
  11. const path = require('path');
  12. const process = require('process');
  13. const log = require('@vladmandic/pilogger');
  14. const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
  15. const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
  16. // const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
  17. const modelPathRoot = '../model';
  18. const imgPathRoot = './demo'; // modify to include your sample images
  19. const minConfidence = 0.15;
  20. const maxResults = 5;
  21. let optionsSSDMobileNet;
  22. async function image(input) {
  23. const img = await canvas.loadImage(input);
  24. const c = canvas.createCanvas(img.width, img.height);
  25. const ctx = c.getContext('2d');
  26. ctx.drawImage(img, 0, 0, img.width, img.height);
  27. // const out = fs.createWriteStream('test.jpg');
  28. // const stream = c.createJPEGStream({ quality: 0.6, progressive: true, chromaSubsampling: true });
  29. // stream.pipe(out);
  30. return c;
  31. }
  32. async function detect(tensor) {
  33. const result = await faceapi
  34. .detectAllFaces(tensor, optionsSSDMobileNet)
  35. .withFaceLandmarks()
  36. .withFaceExpressions()
  37. .withFaceDescriptors()
  38. .withAgeAndGender();
  39. return result;
  40. }
  41. function print(face) {
  42. const expression = Object.entries(face.expressions).reduce((acc, val) => ((val[1] > acc[1]) ? val : acc), ['', 0]);
  43. const box = [face.alignedRect._box._x, face.alignedRect._box._y, face.alignedRect._box._width, face.alignedRect._box._height];
  44. const gender = `Gender: ${Math.round(100 * face.genderProbability)}% ${face.gender}`;
  45. log.data(`Detection confidence: ${Math.round(100 * face.detection._score)}% ${gender} Age: ${Math.round(10 * face.age) / 10} Expression: ${Math.round(100 * expression[1])}% ${expression[0]} Box: ${box.map((a) => Math.round(a))}`);
  46. }
  47. async function main() {
  48. log.header();
  49. log.info('FaceAPI single-process test');
  50. faceapi.env.monkeyPatch({ Canvas: canvas.Canvas, Image: canvas.Image, ImageData: canvas.ImageData });
  51. await faceapi.tf.setBackend('tensorflow');
  52. await faceapi.tf.ready();
  53. log.state(`Version: FaceAPI ${faceapi.version} TensorFlow/JS ${tf.version_core} Backend: ${faceapi.tf?.getBackend()}`);
  54. log.info('Loading FaceAPI models');
  55. const modelPath = path.join(__dirname, modelPathRoot);
  56. await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath);
  57. await faceapi.nets.ageGenderNet.loadFromDisk(modelPath);
  58. await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
  59. await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
  60. await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
  61. optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults });
  62. if (process.argv.length !== 3) {
  63. const t0 = process.hrtime.bigint();
  64. const dir = fs.readdirSync(imgPathRoot);
  65. let numImages = 0;
  66. for (const img of dir) {
  67. if (!img.toLocaleLowerCase().endsWith('.jpg')) continue;
  68. numImages += 1;
  69. const c = await image(path.join(imgPathRoot, img));
  70. const result = await detect(c);
  71. log.data('Image:', img, 'Detected faces:', result.length);
  72. for (const face of result) print(face);
  73. }
  74. const t1 = process.hrtime.bigint();
  75. log.info('Processed', numImages, 'images in', Math.trunc(Number((t1 - t0).toString()) / 1000 / 1000), 'ms');
  76. } else {
  77. const param = process.argv[2];
  78. if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) {
  79. const c = await image(param);
  80. const result = await detect(c);
  81. log.data('Image:', param, 'Detected faces:', result.length);
  82. for (const face of result) print(face);
  83. }
  84. }
  85. }
  86. main();