{ type: 'INVALID_PARAMETERS', reason: 'you must pass a <video> element to start video capture for chromium browser, android browser, and chrome without SharedArrayBuffer support.' }

I am getting this error when I try to start the video again with zoomStream.startVideo(). zoomStream is my own variable holding the media stream from the SDK. zoomStream.stopVideo() works, but startVideo() does not.

      <canvas id="rightSideVideo" width="1920" height="1080"></canvas>

I tried converting it to a video tag, but when I do, the video does not render.

    const joinVideoRoom = (roomName, token, name, passcode) => {
      return new Promise((resolve, reject) => {
        var ZoomVid = ZoomVideo.createClient();
        // var stream;

        // Check system requirements before proceeding (Krunal made some changes below)
        const sysRequirements = ZoomVideo.checkSystemRequirements();
        if (sysRequirements.video && sysRequirements.audio) {
          // Initialize Zoom SDK
          ZoomVid.init('en-US', 'Global', { patchJsMedia: true }).then(() => {
            // Join the Zoom room
            ZoomVid.join(roomName, token, name, passcode).then(() => {
              console.log("Successfully joined the Zoom room");

              // Get the media stream
              zoomStream = ZoomVid.getMediaStream();
              console.log("zoomStream object:", zoomStream);

              // Start video and audio (temporary: this should move to a separate function)
              if (zoomStream.isRenderSelfViewWithVideoElement()) {
                // If rendering with a video element
                zoomStream.startVideo({ videoElement: document.querySelector('#my-self-view-video') }).then(() => {
                  // Video successfully started and rendered
                  console.log("Video started successfully");
                  // Add a white border once the video has started
                  $('#my-self-view-video').css({
                    'border': '2px solid white'
                  });
                  // stream.startAudio();
                  zoomStream.startAudio();
                  ZoomVid.on('user-added', (payload) => {
                    console.log(payload[0].userId + ' joined the session');
                    // updateUserJoinedUI(payload[0].userId);
                  });

                  ZoomVid.on('active-speaker', (payload) => {
                    console.log('Active speaker, use for CSS visuals', payload);
                  });

                  // Resolve the promise when all Zoom SDK operations are completed
                  resolve({ ZoomVid, zoomStream });
                }).catch((error) => {
                  console.log("Error starting video:", error);
                  reject(error);
                });
              }

              // Adding the peer video state change event handler
              ZoomVid.on('peer-video-state-change', (payload) => {
                if (payload.action === 'Start') {
                  // Start rendering peer video
                  console.log("Peer videoe is on");
                  zoomStream.renderVideo(document.querySelector('#rightSideVideo'), payload.userId, 1920, 1080, 0, 0, 3);
                } else if (payload.action === 'Stop') {
                  // Stop rendering peer video
                  zoomStream.stopRenderVideo(document.querySelector('#rightSideVideo'), payload.userId);
                }
              });

              // Retrieve all users after joining (this seems to be in a separate function)
              ZoomVid.getAllUser().forEach((user) => {
                if (user.bVideoOn) {
                  zoomStream.renderVideo(document.querySelector('#rightSideVideo'), user.userId, 1920, 1080, 0, 0, 3);
                }
              });
            }).catch(error => {
              console.error("Error joining Zoom room:", error);
              reject(error);
            });
          }).catch(error => {
            console.error("Error initializing Zoom SDK:", error);
            reject(error);
          });
        } else {
          console.error("System requirements not met for video and audio");
          reject(new Error("System requirements not met for video and audio"));
        }
      });
    };

Thank you for posting in the Zoom Developer Forum, @anjalitejasvi! Can you share a screenshot of the error you are encountering?


I tried changing the canvas element to a video element, but the video was not rendering.

This is the code I use to render the remote video: zoomStream.renderVideo(document.querySelector('#rightSideVideo'), payload.userId, 1920, 1080, 0, 0, 3);

    const turnOnCamera = () => {
      console.log("---turnoncamera-------")
      // document.querySelector('.participantClientSide').classList.remove("hideclientCamera");
      const url = window.location.href
      let segments = url.split('/');
      let jobId = segments[segments.length - 2];
      let candidateId = segments[segments.length - 1];
      var sharedproUserRoom = "${Client}"
      if (sharedproUserRoom == "Client") {
        stompClient.send('/app/application', {}, JSON.stringify({ candidateId: candidateId, jobId: jobId, camera: "false", activity: "hideClientCamera" }))
      }
      // zoomStream.startVideo()

      if (!zoomStream) {
        console.error("zoomStream is not initialized.");
        return;
      }

      zoomStream.startVideo().then(() => {
        console.log("Camera turned on.");
        renderToCanvas();
      }).catch(error => {
        console.error("Error turning on camera:", error);
      });

      // zoomStream.renderVideo(document.querySelector('#rightSideVideo'), 0, 1920, 1080, 0, 0, 3);
      $("#turnOnCamera").hide();
      $("#turnOffCamera").css("display", "flex");
    }
    const turnOffCamera = () => {
      console.log("-------turnoffcamera------")
      // document.querySelector('.participantClientSide').classList.add("hideclientCamera");
      const url = window.location.href
      let segments = url.split('/');
      let jobId = segments[segments.length - 2];
      let candidateId = segments[segments.length - 1];
      var sharedproUserRoom = "${Client}"
      if (sharedproUserRoom == "Client") {
        stompClient.send('/app/application', {}, JSON.stringify({ candidateId: candidateId, jobId: jobId, camera: "true", activity: "showClientCamera" }))
      }
      // zoomStream.stopVideo()

      if (!zoomStream) {
        console.error("zoomStream is not initialized.");
        return;
      }

      zoomStream.stopVideo().then(() => {
        console.log("Camera turned off.");
      }).catch(error => {
        console.error("Error turning off camera:", error);
      });

      // zoomStream.stopRenderVideo(document.querySelector('#rightSideVideo'), 0);
      $("#turnOffCamera").hide();
      $("#turnOnCamera").css("display", "flex");
    }
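
For reference, the error at the top of the thread applies to this startVideo() call too: in Chromium-based browsers without SharedArrayBuffer the SDK has to be handed the video element. A rough sketch of how the call inside turnOnCamera could branch, reusing the #my-self-view-video element from the join code (the helper name restartSelfView is illustrative):

    // Sketch only: restart the camera on whichever self-view target the browser supports.
    const restartSelfView = () => {
      if (!zoomStream) {
        return Promise.reject(new Error("zoomStream is not initialized."));
      }

      if (zoomStream.isRenderSelfViewWithVideoElement()) {
        // No SharedArrayBuffer: capture directly into the <video> element.
        return zoomStream.startVideo({
          videoElement: document.querySelector('#my-self-view-video')
        });
      }

      // SharedArrayBuffer available: startVideo() alone is fine and the self view
      // is then drawn onto a canvas (e.g. by the existing renderToCanvas()).
      return zoomStream.startVideo();
    };

    // Inside turnOnCamera:
    //   restartSelfView()
    //     .then(() => console.log("Camera turned on."))
    //     .catch((error) => console.error("Error turning on camera:", error));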

Any idea why this is happening?

Sorry, I did not get back to you sooner. Does this issue occur exclusively when you convert it to a video tag? The error seems to suggest that this is related to SharedArrayBuffer. Could you confirm whether it’s enabled or not?

In the meantime, here is our documentation on this topic:
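
Since the link is not reproduced above: SharedArrayBuffer is only exposed on cross-origin isolated pages, which comes down to two response headers on the document. A minimal sketch, written as Express middleware purely for illustration (the server stack is an assumption):

    // Sketch only: serve the page cross-origin isolated so SharedArrayBuffer is available.
    const express = require('express');
    const app = express();

    app.use((req, res, next) => {
      res.setHeader('Cross-Origin-Opener-Policy', 'same-origin');
      res.setHeader('Cross-Origin-Embedder-Policy', 'require-corp');
      next();
    });

    app.use(express.static('public')); // whatever serves the meeting page
    app.listen(3000);

    // In the browser console you can then check:
    //   console.log(crossOriginIsolated, typeof SharedArrayBuffer !== 'undefined');

Note that require-corp also blocks cross-origin subresources (for example CDN-hosted jQuery) unless they are served with CORS/CORP headers or loaded with crossorigin="anonymous", which is the usual reason other scripts stop working once this is enabled.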

It’s not enabled. When I enable it, my other scripts, like jQuery, stop working.

Then I enabled SharedArrayBuffer, but now I am getting this error.

You are going to want to cover both cases, Canvas and Video. See the support documentation, which includes code snippets on this:
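
Since the snippet from that documentation is not shown here, this is roughly the canvas half of "both cases" for the self view (the video half is the startVideo({ videoElement }) call already in joinVideoRoom). The #my-self-view-canvas id is an assumption, not something from your markup:

    // Sketch only: the canvas path for the self view, used when
    // isRenderSelfViewWithVideoElement() returns false.
    const renderSelfViewToCanvas = async (client, stream) => {
      await stream.startVideo();
      await stream.renderVideo(
        document.querySelector('#my-self-view-canvas'), // assumed element id
        client.getCurrentUserInfo().userId,             // current user's id for the self view
        1920, 1080, 0, 0, 3
      );
    };

    // In joinVideoRoom this would be the else branch of the existing check:
    //   if (zoomStream.isRenderSelfViewWithVideoElement()) { ...current code... }
    //   else { renderSelfViewToCanvas(ZoomVid, zoomStream).then(...).catch(reject); }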

We are already using it. It works fine for 2 users, but now I want to add more than 2 users, so I enabled SharedArrayBuffer; with it enabled, the rest of the scripts stop working.
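
For the more-than-two-users case, the usual pattern with a single canvas like #rightSideVideo is to give each renderVideo call its own size and offset. A rough sketch, assuming a simple 2x2 grid on the existing 1920x1080 canvas (the grid layout itself is an assumption, not something discussed in this thread):

    // Sketch only: lay remote videos out in a 2x2 grid on the 1920x1080 canvas.
    const gridCanvas = document.querySelector('#rightSideVideo');
    const cellWidth = 960;   // 1920 / 2
    const cellHeight = 540;  // 1080 / 2
    const selfId = ZoomVid.getCurrentUserInfo().userId;

    ZoomVid.getAllUser()
      .filter((user) => user.bVideoOn && user.userId !== selfId)
      .slice(0, 4) // at most four tiles in this layout
      .forEach((user, index) => {
        const x = (index % 2) * cellWidth;
        const y = Math.floor(index / 2) * cellHeight;
        zoomStream.renderVideo(gridCanvas, user.userId, cellWidth, cellHeight, x, y, 2);
      });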


I checked in the console and it returns true, but the rest of the scripts still do not work in that case.
