/**
 * Send a question payload to the Flowise prediction endpoint.
 *
 * @param {Object} data - Request payload, e.g. { question, socketIOClientId }.
 * @returns {Promise<Object>} The parsed JSON prediction result.
 * @throws {Error} If the HTTP response status is not OK.
 */
async function query(data) {
  const response = await fetch(
    "http://localhost:3000/api/v1/prediction/<chatflow-id>",
    {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      // The payload must be serialized: passing the raw object to fetch
      // would coerce it to the string "[object Object]".
      body: JSON.stringify(data),
    },
  );
  if (!response.ok) {
    throw new Error(`Prediction request failed with status ${response.status}`);
  }
  return response.json();
}

query({ question: "Hey, how are you?", socketIOClientId })
  .then((response) => {
    console.log(response);
  })
  .catch((err) => {
    // Surface network/HTTP failures instead of leaving the Promise floating.
    console.error(err);
  });
Listen to the token stream
// NOTE: these handlers were collapsed onto a single line, where the first
// `//` comment swallowed all following code — each listener must be on its
// own lines for the registrations to actually run.

// When the LLM starts streaming
socket.on('start', () => {
  console.log('start');
});

// The delta token/word emitted while streaming
socket.on('token', (token) => {
  console.log('token:', token);
});

// Source documents returned from the chatflow
socket.on('sourceDocuments', (sourceDocuments) => {
  console.log('sourceDocuments:', sourceDocuments);
});

// Tools used during execution
socket.on('usedTools', (usedTools) => {
  console.log('usedTools:', usedTools);
});

// When the LLM has finished streaming
socket.on('end', () => {
  console.log('end');
});

// ------------------- For Multi Agents ----------------------

// The next agent in line
socket.on('nextAgent', (nextAgent) => {
  console.log('nextAgent:', nextAgent);
});

// The whole multi-agent thoughts, reasoning and output
socket.on('agentReasoning', (agentReasoning) => {
  console.log('agentReasoning:', agentReasoning);
});

// When execution is aborted/interrupted
socket.on('abort', () => {
  console.log('abort');
});