TensorFlow React Native CLI app: TypeError: Cannot read property 'fetch' of undefined

I'm a beginner and I need help. I'm trying to download the model, but I get the following error: TypeError: Cannot read property 'fetch' of undefined. I'd like some help understanding what is going on. Here is my code:
package.json:

{
  "name": "Piece",
  "version": "0.0.1",
  "private": true,
  "scripts": {
    "android": "react-native run-android",
    "ios": "react-native run-ios",
    "lint": "eslint .",
    "start": "react-native start",
    "test": "jest"
  },
  "dependencies": {
    "@gorhom/bottom-sheet": "^4.6.1",
    "@gorhom/portal": "^1.0.14",
    "@react-native-community/blur": "^4.4.0",
    "@react-navigation/bottom-tabs": "^6.5.20",
    "@react-navigation/native": "^6.1.17",
    "@react-navigation/native-stack": "^6.9.26",
    "@tensorflow-models/coco-ssd": "^2.2.3",
    "@tensorflow-models/mobilenet": "^2.1.1",
    "@tensorflow/tfjs": "^4.20.0",
    "@tensorflow/tfjs-react-native": "^1.0.0",
    "axios": "^1.6.8",
    "i18next": "^23.11.2",
    "mobx": "^6.12.3",
    "mobx-persist-store": "^1.1.5",
    "mobx-react-lite": "^4.0.7",
    "moment": "^2.30.1",
    "react": "18.2.0",
    "react-i18next": "^14.1.1",
    "react-native": "0.72.0",
    "react-native-app-intro-slider": "^4.0.4",
    "react-native-device-info": "^10.13.2",
    "react-native-fs": "^2.20.0",
    "react-native-gesture-handler": "^2.16.0",
    "react-native-image-crop-picker": "^0.41.0",
    "react-native-image-picker": "^7.1.2",
    "react-native-inappbrowser-reborn": "^3.7.0",
    "react-native-keyboard-aware-scroll-view": "^0.9.5",
    "react-native-linear-gradient": "^2.8.3",
    "react-native-mmkv": "^2.12.2",
    "react-native-modal": "^13.0.1",
    "react-native-parsed-text": "^0.0.22",
    "react-native-reanimated": "^3.3.0",
    "react-native-responsive-screen": "^1.4.2",
    "react-native-safe-area-context": "^4.10.1",
    "react-native-screens": "^3.31.1",
    "react-native-spinkit": "^1.5.1",
    "react-native-svg": "^15.2.0",
    "react-native-vision-camera": "^2.15.2"
  },
  "devDependencies": {
    "@babel/core": "^7.20.0",
    "@babel/preset-env": "^7.20.0",
    "@babel/runtime": "^7.20.0",
    "@react-native/eslint-config": "^0.72.2",
    "@react-native/metro-config": "^0.72.6",
    "@tsconfig/react-native": "^3.0.0",
    "@types/metro-config": "^0.76.3",
    "@types/react": "^18.0.24",
    "@types/react-test-renderer": "^18.0.0",
    "babel-jest": "^29.2.1",
    "babel-plugin-module-resolver": "^5.0.2",
    "eslint": "^8.19.0",
    "jest": "^29.2.1",
    "metro-react-native-babel-preset": "0.76.5",
    "prettier": "^2.4.1",
    "react-test-renderer": "18.2.0",
    "typescript": "4.8.4"
  },
  "engines": {
    "node": ">=16"
  }
}

metro.config.js:


const { getDefaultConfig, mergeConfig } = require('@react-native/metro-config');

module.exports = (async () => {

    const defaultConfig = await getDefaultConfig(__dirname);
    const customConfig = {
        transformer: {
            getTransformOptions: async () => ({
                transform: {
                    experimentalImportSupport: false,
                    inlineRequires: true,
                },
            }),
        },
        resolver: {
            assetExts: [...defaultConfig.resolver.assetExts, 'bin'],
            sourceExts: [...defaultConfig.resolver.sourceExts, 'jsx', 'js', 'ts', 'tsx', 'json'],
        },
    };
    return mergeConfig(defaultConfig, customConfig);
})();

Code of the screen:

import React, { useState, useEffect, useRef } from "react";
import * as tf from '@tensorflow/tfjs';
import '@tensorflow/tfjs-react-native';
import * as mobilenet from '@tensorflow-models/mobilenet';
import {widthPercentageToDP as wp, heightPercentageToDP as hp} from 'react-native-responsive-screen';
import Svg, { Path, G, Defs, LinearGradient, Stop, Circle } from 'react-native-svg';
import { Camera, useCameraDevices } from 'react-native-vision-camera'
import {PermissionsAndroid} from 'react-native';
import RNFS from 'react-native-fs';
import {
  StyleSheet,
  View,
  Text,
  Image,
  TextInput,
  ScrollView,
  TouchableOpacity,useWindowDimensions,
  Alert,ImageBackground
} from 'react-native';

import {
  AppText,
  Container,
} from '../Components'


import { isAndroid } from '../Utils'
import { useNavigation } from '@react-navigation/native';
import { useTranslation } from 'react-i18next'
import { appStore } from '../Stores';
import { LeftRightSvg } from '../Assets/Svg';

const Photo_search = () => {
const navigation = useNavigation();
const { t } = useTranslation()
const size = 25
const sizeAlert = 42
const devices =  useCameraDevices()

const device = devices.back;
const camera = useRef(null)
const layout = useWindowDimensions();
const [capturedPhoto, setCapturedPhoto] = useState('');
const [showPreview, setShowPreview] = useState(false);
const [cameraPermission, setCameraPermission] = useState(null);

const [isTfReady, setIsTfReady] = useState(false);
const [model, setModel] = useState(null);
const [prediction, setPrediction] = useState(null);

useEffect(() => {
   checkCameraPermission()
}, []);


useEffect(() => {
    const loadModel = async () => {
      try {
        // Initialize the TensorFlow.js environment for React Native
        await tf.ready();
        const model = await tf.loadGraphModel('https://tfhub.dev/tensorflow/ssd_mobilenet_v2/2/default/1',{ fromTFHub: true });
        setModel(model);
        setIsTfReady(true); // note: there is no setIsModelLoaded state in this component
      } catch (error) {
        console.error('Error loading the model:', error);
      }
    };
    loadModel();
}, []);


const checkCameraPermission = async () => {
    const status = await Camera.getCameraPermissionStatus();
    if (status === 'granted') {
      setCameraPermission(true);
    } else if (status === 'notDetermined') {
      const permission = await Camera.requestCameraPermission();
      setCameraPermission(permission === 'authorized');
    } else {
      setCameraPermission(false);
    }
  };

if (device == null) {
  return(
    <View>
      <Text> fqfafsafsdad</Text>
    </View>
  )
}


const reTakePhoto = ()=>{
  setCapturedPhoto('');
  setShowPreview(false);
}

const confirmPhoto = async ()=>{
    const imageTensor = await imageToTensor(capturedPhoto);
    const prediction = model.predict(imageTensor);
    setPrediction(prediction);
}

const imageToTensor = async (uri) => {
    const response = await fetch(uri, {}, { isBinary: true });
    const imageData = await response.arrayBuffer();
    const imageTensor = tf.browser.fromPixels(new Uint8Array(imageData)).resizeNearestNeighbor([224, 224]).toFloat().expandDims();
    return imageTensor;
  };


const takePhoto = async () => {
  try {
    // Vérifie si la référence de la caméra est définie
    if (camera.current) {
      const photo = await camera.current.takePhoto();
      if (photo) {
        setCapturedPhoto(`file://${photo.path}`);
        setShowPreview(true);
      } else {
        console.error('Photo captured is undefined or empty.');
      }
    } else {
      console.error('Camera reference is not available.');
    }
  } catch (error) {
    console.error('Error capturing photo:', error);
  }
};

return (
    <View style={{flex:1, alignItems:'center'}}>
      <Camera
          ref={camera}
          style={StyleSheet.absoluteFill}
          device={device}
          isActive={true}
          photo={true}
        />
       

      {showPreview && capturedPhoto ? (
        <View style={{ flex: 1, justifyContent: 'center', alignItems: 'center' ,backgroundColor:'#fff', padding:10}}>
          <Image
            resizeMode="cover"
            source={{ uri: capturedPhoto }} // Assuming the photo is a valid URI
            style={{ width: hp('80%'), aspectRatio: 1 }}
          />
          <View style={{backgroundColor:'#fff', padding:10, position:'absolute', flexDirection: 'row',  
          borderRadius:10, width:wp('30%'), opacity:.8, alignSelf:'center', bottom:hp('14%'), justifyContent:'space-between'}}>
            <TouchableOpacity
              onPress={()=> reTakePhoto()}
              hitSlop={20}
              style={styles.saveLoginBtn}
            >
              <View >
                <Svg
                    width={35}
                    height={35}
                    viewBox="0 0 24 24"
                    xmlns="http://www.w3.org/2000/svg"
                  >
                    <G data-name="Layer 2">
                      <Path
                        fill='#ff0000'
                        d="m13.41 12 4.3-4.29a1 1 0 1 0-1.42-1.42L12 10.59l-4.29-4.3a1 1 0 0 0-1.42 1.42l4.3 4.29-4.3 4.29a1 1 0 0 0 0 1.42 1 1 0 0 0 1.42 0l4.29-4.3 4.29 4.3a1 1 0 0 0 1.42 0 1 1 0 0 0 0-1.42z"
                        data-name="close"
                      />
                    </G>
                  </Svg>
              </View>
            </TouchableOpacity>

            <TouchableOpacity
              onPress={()=> confirmPhoto()}
              hitSlop={20}
              style={{fontSize:hp('2.5')}}
            >
              <View >
                <Svg
                  width={35}
                  height={35}
                  viewBox="0 0 24 24"
                >
                  <Path
                    d="M6.753 11.841a1 1 0 0 0-1.505 1.317l3.5 4a1 1 0 0 0 1.48.028l8.5-9a1 1 0 0 0-1.455-1.373l-7.745 8.2Z"
                    fill='#000'
                  />
                </Svg>
              </View>
            </TouchableOpacity>
          </View>
        </View>
      ) : (
        <View style={{alignItems:'center',position:'absolute',bottom:hp('12%')}}>
        <TouchableOpacity onPress={()=> takePhoto ()}>
          <View style={{alignItems:'center'}}>
            <Svg width={50} height={50} viewBox="0 0 24 24" fill="none">
              <Path
                fillRule="evenodd"
                clipRule="evenodd"
                d="M9.77778 21H14.2222C17.3433 21 18.9038 21 20.0248 20.2646C20.51 19.9462 20.9267 19.5371 21.251 19.0607C22 17.9601 22 16.4279 22 13.3636C22 10.2994 22 8.76721 21.251 7.6666C20.9267 7.19014 20.51 6.78104 20.0248 6.46268C19.3044 5.99013 18.4027 5.82123 17.022 5.76086C16.3631 5.76086 15.7959 5.27068 15.6667 4.63636C15.4728 3.68489 14.6219 3 13.6337 3H10.3663C9.37805 3 8.52715 3.68489 8.33333 4.63636C8.20412 5.27068 7.63685 5.76086 6.978 5.76086C5.59733 5.82123 4.69555 5.99013 3.97524 6.46268C3.48995 6.78104 3.07328 7.19014 2.74902 7.6666C2 8.76721 2 10.2994 2 13.3636C2 16.4279 2 17.9601 2.74902 19.0607C3.07328 19.5371 3.48995 19.9462 3.97524 20.2646C5.09624 21 6.65675 21 9.77778 21ZM12 9.27273C9.69881 9.27273 7.83333 11.1043 7.83333 13.3636C7.83333 15.623 9.69881 17.4545 12 17.4545C14.3012 17.4545 16.1667 15.623 16.1667 13.3636C16.1667 11.1043 14.3012 9.27273 12 9.27273ZM12 10.9091C10.6193 10.9091 9.5 12.008 9.5 13.3636C9.5 14.7192 10.6193 15.8182 12 15.8182C13.3807 15.8182 14.5 14.7192 14.5 13.3636C14.5 12.008 13.3807 10.9091 12 10.9091ZM16.7222 10.0909C16.7222 9.63904 17.0953 9.27273 17.5556 9.27273H18.6667C19.1269 9.27273 19.5 9.63904 19.5 10.0909C19.5 10.5428 19.1269 10.9091 18.6667 10.9091H17.5556C17.0953 10.9091 16.7222 10.5428 16.7222 10.0909Z"
                fill="#fff"
              />
            </Svg>
            </View>
          <Text style={{color:'#fff'}}>prendre une photo</Text>
        </TouchableOpacity>
      </View>
  )}
    </View>
  );
};

Thanks!

I don’t think most of the info there is necessary:

const imageToTensor = async (uri) => {
    const response = await fetch(uri, {}, { isBinary: true });  <--
    // ...
  };

The fetch here is being passed a uri of undefined. A URI is supposed to be a uniform resource identifier, i.e. something like a file path.

As a side note, I'd write it this way, using a named function instead of an anonymous one; then the stack trace would probably tell you where the error happens.

async function imageToTensor(uri) {
    // ...
    return imageTensor;
}

So you need to sort out why uri is undefined.
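For what it's worth, here is a minimal sketch of how I'd structure it: a named function with a guard on uri, using the fetch and decodeJpeg helpers exported by @tensorflow/tfjs-react-native. The 224×224 resize and the file:// path coming from takePhoto are just assumptions carried over from your code, not something I've tested in your project:

import * as tf from '@tensorflow/tfjs';
import { fetch, decodeJpeg } from '@tensorflow/tfjs-react-native';

// Named function: a stack trace will show "imageToTensor" instead of <anonymous>.
async function imageToTensor(uri) {
  if (!uri) {
    throw new Error('imageToTensor called with an undefined uri');
  }
  // fetch from tfjs-react-native supports binary responses via the isBinary option.
  const response = await fetch(uri, {}, { isBinary: true });
  const rawImageData = new Uint8Array(await response.arrayBuffer());
  // decodeJpeg turns the JPEG bytes into an int32 tensor of shape [height, width, 3].
  return decodeJpeg(rawImageData)
    .resizeNearestNeighbor([224, 224])
    .toFloat()
    .expandDims();
}

With a guard like that, an undefined uri fails loudly at the call site instead of surfacing as an obscure fetch error.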

Thanks for the reply.
My problem is not in imageToTensor, though; it's here:

useEffect(() => {
    const loadModel = async () => {
      try {
        await tf.ready();
        const model = await tf.loadGraphModel('https://tfhub.dev/tensorflow/ssd_mobilenet_v2/2/default/1',{ fromTFHub: true });
        setModel(model);
      } catch (error) {
        console.error('Error loading the model:', error);
      }
    };

    loadModel();
}, []);

Normally, after the model loads, console.log('MODEL ', model) should print it, but all I get is this error: TypeError: Cannot read property 'fetch' of undefined. This is what I want to understand. I've tried several things but I still get this error. Thanks.

Maybe this is the reason:

TensorFlow Hub has moved to Kaggle Models. Starting November 15th, 2023, links to tfhub.dev redirect to their counterparts on Kaggle Models.

If you try the link that you have, it redirects to Kaggle.

More info in this forum thread:

http://discuss.ai.google.dev/t/tensorflow-hub-is-moving-to-kaggle-models/20770
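Given that, one option (just a sketch, not something I've run against your project) is to drop the hard-coded tfhub.dev URL and use the @tensorflow-models/coco-ssd package that is already in your package.json, which knows how to fetch its own model files:

import * as tf from '@tensorflow/tfjs';
import '@tensorflow/tfjs-react-native'; // registers the React Native platform for tfjs
import * as cocoSsd from '@tensorflow-models/coco-ssd';

// Sketch: load the packaged COCO-SSD detector instead of a tfhub.dev graph-model URL.
async function loadDetector() {
  await tf.ready();                   // wait for the TF.js backend to be ready
  const model = await cocoSsd.load(); // the package resolves and downloads its own weights
  return model;
}

Note that coco-ssd exposes model.detect(imageTensor) rather than model.predict(...), so the prediction code would need a small adjustment too.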

OK, thanks for the details.

My new code:

import React, { useState, useEffect, useRef } from "react";
import * as tf from "@tensorflow/tfjs";
import * as cocoSsd from '@tensorflow-models/coco-ssd';
import {fetch, decodeJpeg} from '@tensorflow/tfjs-react-native';
useEffect(() => {
    const loadModel = async () => {
      try {
        // Initialize the TensorFlow.js environment for React Native
        await tf.ready();
        console.log('FETCH ',fetch);  
        const model = await cocoSsd.load();
          // console.log(model);  
        setModel(model);
        setIsTfReady(true)
        setLoading(false);
        // checkCameraPermission()
        // console.log('Model loaded successfully:', model);
      } catch (error) {
        console.error('Error loading the model:', error);
      }finally {
        setLoading(false);
      }
    };

    loadModel();
}, []);

It only works if I keep the console.log('FETCH ', fetch); — why? If I remove that console.log, I get the same error again: TypeError: Cannot read property 'fetch' of undefined. I don't understand, please help.

Why do you need fetch? Isn't the model loaded by the line:

const model = await cocoSsd.load();

???

Sorry for not answering quickly, I was sick, my apologies.
I don't actually need fetch, but the documentation recommends importing {fetch, decodeJpeg} from '@tensorflow/tfjs-react-native', so that's the reason it's there.
What I still don't understand is why I get the error if I remove the console.log(fetch).