tfjs-models/handpose execution failing with TypeError: forwardFunc is not a function

Created on 4 Sep 2020  路  11Comments  路  Source: tensorflow/tfjs

TensorFlow.js v2.3.0

I am following this guide to build a simple hand pose detection pipeline. For now, I'm trying to replicate the example with an Image Tensor as input and a @tensorflow/tfjs-backend-cpu backend. Code fails with TypeError: forwardFunc is not a function.

Instructions to reproduce the bug

Here is my package.json

{
  "name": "mediapipe-gesture-recognition",
  "version": "0.0.1",
  "description": "Gesture Recognition using Google's MediaPipe",
  "private": true,
  "author": "",
  "license": "MIT",
  "dependencies": {
    "@tensorflow-models/handpose": "^0.0.6",
    "@tensorflow/tfjs-core": "^2.3.0",
    "@tensorflow/tfjs-node": "^2.3.0",
    "@tensorflow/tfjs-converter": "^2.3.0"
  }
}

Here is the index.js file which I'm trying to execute

const tf = require('@tensorflow/tfjs-node'),
    handpose = require('@tensorflow-models/handpose'),
    fs = require('fs');

require('@tensorflow/tfjs-core');
require('@tensorflow/tfjs-backend-cpu');

const detect = async (imagePath) => {

    // Load the image.
    const image = fs.readFileSync(imagePath);
    const decodedImage = tf.node.decodeImage(image, 3);

    console.log("Decoded Image : ")
    console.log(decodedImage)

    // Load the MediaPipe handpose model.
    console.log("Loading model ...")
    const model = await handpose.load();

    console.log('Detecting hand landmarks ...')
    // Pass in a video stream (or an image, canvas, or 3D tensor) to obtain a hand prediction from the MediaPipe graph.
    const hands = await model.estimateHands(decodedImage);

    if (hands.length > 0) {
        hands.forEach(hand => console.log(hand.landmarks));
    }
}

if (process.argv.length !== 3) 
    throw new Error('Usage: node index.js <image-file>')

detect(process.argv[2])

Execute code using

node index.js <image_file>

I'm using this image as the input, and node v14.9.0.

The output that I get on Ubuntu 20.04:

node-pre-gyp info This Node instance does not support builds for N-API version 6
node-pre-gyp info This Node instance does not support builds for N-API version 6
2020-09-05 02:00:46.752504: I tensorflow/core/platform/cpu_feature_guard.cc:142] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2 FMA
2020-09-05 02:00:46.785540: I tensorflow/core/platform/profile_utils/cpu_utils.cc:94] CPU Frequency: 2299965000 Hz
2020-09-05 02:00:46.785928: I tensorflow/compiler/xla/service/service.cc:168] XLA service 0x142ee50 initialized for platform Host (this does not guarantee that XLA will be used). Devices:
2020-09-05 02:00:46.785953: I tensorflow/compiler/xla/service/service.cc:176]   StreamExecutor device (0): Host, Default Version
Decoded Image : 
Tensor {
  kept: false,
  isDisposedInternal: false,
  shape: [ 346, 275, 3 ],
  dtype: 'int32',
  size: 285450,
  strides: [ 825, 3 ],
  dataId: {},
  id: 2,
  rankType: '3',
  scopeId: 0 }
Loading model ...
Detecting hand landmarks ...
(node:403530) UnhandledPromiseRejectionWarning: TypeError: forwardFunc is not a function
    at /home/sanchit/Desktop/CN/projects/mediapipe-gesture-recognition/node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js:2866:55
    at /home/sanchit/Desktop/CN/projects/mediapipe-gesture-recognition/node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js:2705:22
    at Engine.scopedRun (/home/sanchit/Desktop/CN/projects/mediapipe-gesture-recognition/node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js:2715:23)
    at Engine.tidy (/home/sanchit/Desktop/CN/projects/mediapipe-gesture-recognition/node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js:2704:21)
    at kernelFunc (/home/sanchit/Desktop/CN/projects/mediapipe-gesture-recognition/node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js:2866:29)
    at /home/sanchit/Desktop/CN/projects/mediapipe-gesture-recognition/node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js:2878:27
    at Engine.scopedRun (/home/sanchit/Desktop/CN/projects/mediapipe-gesture-recognition/node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js:2715:23)
    at Engine.runKernelFunc (/home/sanchit/Desktop/CN/projects/mediapipe-gesture-recognition/node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js:2876:14)
    at Engine.runKernel (/home/sanchit/Desktop/CN/projects/mediapipe-gesture-recognition/node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js:2773:21)
    at rotateWithOffset_ (/home/sanchit/Desktop/CN/projects/mediapipe-gesture-recognition/node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js:17877:22)
(node:403530) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). (rejection id: 1)
(node:403530) [DEP0018] DeprecationWarning: Unhandled promise rejections are deprecated. In the future, promise rejections that are not handled will terminate the Node.js process with a non-zero exit code.

Tried Solutions

I have already tried https://github.com/tensorflow/tfjs/issues/2329. Got the same output as listed above.
I tried downgrading each tfjs package that I'm using to v2.1.0. Still got the same output as listed above.

Would be great if someone could help resolve this issue. Thanks!

models builinstall

Most helpful comment

Hi @sanchit-gupta-cn - ah I think the issue is that by running the model in node, you are by default using the node tensorflow backend, but our node backend does not yet support handpose. However you can still run the model in node by explicitly setting the backend to either 'cpu' or 'wasm' (with await tf.setBackend('cpu') or await tf.setBackend('wasm')).

Then rather than passing the node decoded image to estimateHands, you could pass in a PixelData object: https://github.com/tensorflow/tfjs/blob/tfjs-v2.3.0/tfjs-core/src/types.ts#L162

Hope this helps!

All 11 comments

@sanchit-gupta-cn did you try with previous tfjs versions ?

I didn't try any previous version of @tensorflow-models/handpose package, for which I'm using v0.0.6,
but I did try v2.1.0 of @tensorflow/tfjs-core, @tensorflow/tfjs-node, and @tensorflow/tfjs-converter.
Got the same error: TypeError: forwardFunc is not a function.

Hi @sanchit-gupta-cn - sorry you're having trouble running the model. Just to make sure - are you also loading a backend, e.g. @tensorflow/tfjs-backend-webgl, or @tensorflow/tfjs-backend-wasm ? In TFJS 2.x core no longer includes a backend, so you have to make sure to install one explicitly, or use the union package @tensorflow/tfjs.

Hi @annxingyuan, there is no need to apologize! I greatly appreciate you contributing to this library, and taking out your time to help us solve issues.

Initially, I was trying to execute the code using node v14.9.0 with @tensorflow/tfjs-backend-cpu as the backend. Based on your comment, I also tried adding a @tensorflow/tfjs-backend-webgl backend, but still getting the same error.
Are you able to reproduce the problem on your end (as I mentioned in the Issue)?

Hi @sanchit-gupta-cn - ah I think the issue is that by running the model in node, you are by default using the node tensorflow backend, but our node backend does not yet support handpose. However you can still run the model in node by explicitly setting the backend to either 'cpu' or 'wasm' (with await tf.setBackend('cpu') or await tf.setBackend('wasm')).

Then rather than passing the node decoded image to estimateHands, you could pass in a PixelData object: https://github.com/tensorflow/tfjs/blob/tfjs-v2.3.0/tfjs-core/src/types.ts#L162

Hope this helps!

Hi @annxingyuan, sorry for the late reply. I tried as you suggested and it's working now! Thank you so much for your help!

Here is the working code for anyone facing the same issue:

const handpose = require('@tensorflow-models/handpose'),
    tf = require('@tensorflow/tfjs-core'),
    pixels = require('image-pixels');

require("@tensorflow/tfjs-backend-cpu");

const detect = async (imagePath) => {

    await tf.setBackend('cpu');

    const img = await pixels(imagePath);
    console.log("Image PixelData: ");
    console.log(img);

    // Load the MediaPipe handpose model.
    console.log("Loading model ...");
    const model = await handpose.load();

    console.log('Detecting hand landmarks ...');

    // Pass in a video stream (or an image, canvas, or 3D tensor) to obtain a hand prediction from the MediaPipe graph.
    const hands = await model.estimateHands(img);

    console.log(hands);
}

if (process.argv.length !== 3) 
    throw new Error('Usage: node index.js <image-file>');

detect(process.argv[2]);

Thank you closing this issue for now.

Are you satisfied with the resolution of your issue?
Yes
No

Hi @sanchit-gupta-cn - ah I think the issue is that by running the model in node, you are by default using the node tensorflow backend, but our node backend does not yet support handpose. However you can still run the model in node by explicitly setting the backend to either 'cpu' or 'wasm' (with await tf.setBackend('cpu') or await tf.setBackend('wasm')).

Then rather than passing the node decoded image to estimateHands, you could pass in a PixelData object: https://github.com/tensorflow/tfjs/blob/tfjs-v2.3.0/tfjs-core/src/types.ts#L162

Hope this helps!

This is working fine but only for 'cpu'. 'tensorflow' should work fine as far as I understand but the error is always the same:
TypeError: forwardFunc is not a function

It is the same for both @tensorflow/tfjs-node-gpu and @tensorflow/tfjs-node

Hi @cagbal - our tensorflow backend does not yet support handpose because handpose runs a custom operation 'rotateWithOffset' which we have so far only implemented in the 'cpu'|'webgl'|'wasm' backends.

Oh ok, thank you.

Was this page helpful?
0 / 5 - 0 ratings