How To Transcribe Audio To Text In Dialogflow-ES - dialogflow-es

I have a chatbot in Dialogflow, connected to WhatsApp through Landbot. Right now, when a user sends an audio message, Landbot sends the audio to Dialogflow (literally, as a URL to the .oga file), but I need to transcribe it so that Dialogflow can understand it. For example, this is the intent:
{
"id": "ffdc721f-8e94-4a9e-9ec8-a45b9b7f21be-37284719",
"fulfillmentText": "🙁 Pronunciaste mal. \n La palabra era: Dog",
"language_code": "en",
"queryText": "https://media.eu-1.smooch.io/apps/5d2370ef6667cd00102fb9c2/conversations/31f2d6d5440d03fde4066b35/hJ9ob-c2Ogho4NTDLEuyFMg_/5zKf3-SgGMuS3RxcBUw5B6dj.oga",
"webhookPayload": {},
"intentDetectionConfidence": 0.3,
"action": "",
"webhookSource": "",
"parameters": {
"pronunciacion": "https://media.eu-1.smooch.io/apps/5d2370ef6667cd00102fb9c2/conversations/31f2d6d5440d03fde4066b35/hJ9ob-c2Ogho4NTDLEuyFMg_/5zKf3-SgGMuS3RxcBUw5B6dj.oga",
"palabra": "Dog"
},
"fulfillmentMessages": [
{
"text": {
"text": [
"🙁 Pronunciaste mal. \n La palabra era: Dog"
]
}
}
],
"diagnosticInfo": {
"webhook_latency_ms": "1871.0"
},
"webhookStatus": {
"webhookStatus": {
"message": "Webhook execution successful"
},
"webhookUsed": true
},
"intent": {
"isFallback": false,
"displayName": "Pronunciar",
"id": "4dd12af2-94a6-486b-a1cd-daa2c65d6671"
}
}
I found that I can do this with Speech-to-Text, but I don't know how to integrate it with Dialogflow.
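One way to wire that up is in the fulfillment webhook: download the audio from the URL that arrives as queryText, transcribe it with the Cloud Speech-to-Text API, and then feed the transcript back into Dialogflow. Below is a minimal sketch, assuming a Node.js webhook with the @google-cloud/speech and axios packages; the function name, sample rate, and language code are illustrative and would need to match the real audio.

// Sketch: transcribe the .oga voice note that Landbot/Smooch passes in as a URL,
// using Cloud Speech-to-Text (assumed dependencies: @google-cloud/speech, axios).
const axios = require('axios');
const speech = require('@google-cloud/speech');

const speechClient = new speech.SpeechClient();

async function transcribeAudioUrl(audioUrl) {
  // Download the audio file referenced by queryText / the "pronunciacion" parameter.
  const { data } = await axios.get(audioUrl, { responseType: 'arraybuffer' });

  const [response] = await speechClient.recognize({
    audio: { content: Buffer.from(data).toString('base64') },
    config: {
      encoding: 'OGG_OPUS',    // WhatsApp voice notes are Opus audio in an Ogg container
      sampleRateHertz: 16000,  // adjust to the actual sample rate of the file
      languageCode: 'en-US',   // or 'es-ES', depending on the word being practiced
    },
  });

  // Join the transcribed segments into a single string.
  return response.results
    .map(result => result.alternatives[0].transcript)
    .join(' ');
}

The resulting transcript can then be sent back to the agent with a detectIntent call (for example via the @google-cloud/dialogflow client), so intent matching runs on the spoken words instead of on the audio URL.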

Related

Dialogflow webhook failure

I just integrated Dialogflow with WhatsApp, but some of the webhooks are failing to send responses. I need a way around this problem. The Telegram integration works well, but the WhatsApp version fails terribly.
{
"responseId": "1d7878da-1909-4189-aa12-edd87bcbdbfd-21554733",
"queryResult": {
"queryText": "07089456758",
"action": "input.welcome",
"parameters": {
"phonenumber": "07089456758"
},
"allRequiredParamsPresent": true,
"fulfillmentMessages": [
{
"payload": {
"telegram": {
"text": "Hi! �� Welcome to Sendme. What would you like to order today?",
"parse_mode": "html"
}
},
"platform": "TELEGRAM"
},
{
"text": {
"text": [
""
]
}
}
],
"outputContexts": [
{
"name": "projects/sendme-bot-vrgl/locations/global/agent/sessions/c5173fc0-613e-cf88-ea69-e5615b38c1dc/contexts/auth",
"lifespanCount": 5,
"parameters": {
"phonenumber.original": "07089456758",
"phonenumber": "07089456758"
}
}
],
"intent": {
"name": "projects/sendme-bot-vrgl/locations/global/agent/intents/4bf9dd88-e6dd-4ff2-9c4b-8d49bf45ef77",
"displayName": "welcome"
},
"intentDetectionConfidence": 1,
"diagnosticInfo": {
"webhook_latency_ms": 4312
},
"languageCode": "en",
"sentimentAnalysisResult": {
"queryTextSentiment": {
"score": 0.4,
"magnitude": 0.4
}
}
},
"webhookStatus": {
"code": 14,
"message": "Webhook call failed. Error: UNAVAILABLE, State: URL_UNREACHABLE, Reason: UNREACHABLE_5xx, HTTP status code: 500."
},
"agentId": "6b1be60b-e9f3-4ae4-a983-5c4d7c9734f8",
"agentSettings": {
"enableAgentWideKnowledgeConnector": true
}
}
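The webhookStatus above shows the webhook itself answered with HTTP 500 (UNREACHABLE_5xx), so Dialogflow had no response to forward to WhatsApp. As a general mitigation, and not a confirmed fix for this particular bot, the handler can be wrapped so it always returns 200 with at least a fulfillmentText; a minimal Express-style sketch, where handleIntent stands in for the bot's own logic:

// Sketch: always return a valid fulfillment response, even when the business
// logic throws, so Dialogflow does not see an UNREACHABLE_5xx error.
const express = require('express');
const app = express();
app.use(express.json());

app.post('/webhook', async (req, res) => {
  try {
    const reply = await handleIntent(req.body.queryResult); // hypothetical business logic
    res.json({ fulfillmentText: reply });
  } catch (err) {
    console.error('Webhook error:', err);
    // Degrade gracefully instead of letting the framework return a 500.
    res.json({ fulfillmentText: 'Sorry, something went wrong. Please try again.' });
  }
});

app.listen(process.env.PORT || 8080);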

Malformed Response: Failed to parse Dialogflow response into AppResponse because of empty speech response

I am using a Firebase function for the webhook fulfillment in Dialogflow. The fulfillment status reports the webhook as successful, but it is not working. I am using version 1. When I test it in the Google Assistant simulator, it says "App is not responding".
Firebase function
const functions = require('firebase-functions');
exports.webhook = functions.https.onRequest((request, response) => {
response.send({
"google":{
"richResponse":{
"items":[
{
"simpleResponse":{
"textToSpeech":"Hey! Good to see you."
}
},
{
"mediaResponse":{
"mediaType":"AUDIO",
"mediaObjects":[
{
"name":"Exercises",
"description":"ex",
"largeImage":{
"url":"http://res.freestockphotos.biz/pictures/17/17903-balloons-pv.jpg",
"accessibilityText":"..."
},
"contentUrl":"https://theislam360.me:8080/hbd.mp3"
}
]
}
}
],
"suggestions":[
{
"title":"chips"
}
]
}
}
}
)
});
When I copy-paste the response, from {google... to the end, into the custom payload manually via the GUI, it works. With the webhook, it does not.
RAW API RESPONSE
{
"id": "eaf627ed-26b5-4965-b0b0-bc77144e144b",
"timestamp": "2019-04-15T11:54:18.948Z",
"lang": "en",
"result": {
"source": "agent",
"resolvedQuery": "play hbd",
"action": "",
"actionIncomplete": false,
"parameters": {
"any": "hbd"
},
"contexts": [],
"metadata": {
"isFallbackIntent": "false",
"webhookResponseTime": 34,
"intentName": "play",
"intentId": "e60071cd-ce31-4ef9-ae9b-cc370c3362b3",
"webhookUsed": "true",
"webhookForSlotFillingUsed": "false"
},
"fulfillment": {
"messages": []
},
"score": 1
},
"status": {
"code": 200,
"errorType": "success"
},
"sessionId": "e91bd62f-766b-b19d-d37b-2917ac20caa6"
}
FULFILLMENT REQUEST
{
"id": "eaf627ed-26b5-4965-b0b0-bc77144e144b",
"timestamp": "2019-04-15T11:54:18.948Z",
"lang": "en",
"result": {
"source": "agent",
"resolvedQuery": "play hbd",
"speech": "",
"action": "",
"actionIncomplete": false,
"parameters": {
"any": "hbd"
},
"contexts": [],
"metadata": {
"intentId": "e60071cd-ce31-4ef9-ae9b-cc370c3362b3",
"webhookUsed": "true",
"webhookForSlotFillingUsed": "false",
"isFallbackIntent": "false",
"intentName": "play"
},
"fulfillment": {
"speech": "",
"messages": []
},
"score": 1
},
"status": {
"code": 200,
"errorType": "success"
},
"sessionId": "e91bd62f-766b-b19d-d37b-2917ac20caa6"
}
FULFILLMENT RESPONSE
{
"google": {
"richResponse": {
"items": [
{
"simpleResponse": {
"textToSpeech": "Hey! Good to see you."
}
},
{
"mediaResponse": {
"mediaType": "AUDIO",
"mediaObjects": [
{
"name": "Exercises",
"description": "ex",
"largeImage": {
"url": "http://res.freestockphotos.biz/pictures/17/17903-balloons-pv.jpg",
"accessibilityText": "..."
},
"contentUrl": "https://theislam360.me:8080/hbd.mp3"
}
]
}
}
],
"suggestions": [
{
"title": "chips"
}
]
}
}
}
FULFILLMENT STATUS
Webhook execution successful
Firebase Logs
Google Assistant Simulator Logs
You're not using the correct JSON in the response. When you put it in the "custom payload" section of the GUI, Dialogflow builds the larger JSON response for you. The google object needs to be under the data object for Dialogflow v1 or under payload for Dialogflow v2. (And if you haven't switched to v2, you should do so immediately, since v1 will be switched off in about a month.)
So what you're returning should look more like:
{
"payload": {
"google": {
...
}
}
}
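Applied to the Firebase function in the question, that means nesting the existing google object one level deeper. A sketch of the v2 shape, reusing the simpleResponse from the question (the mediaResponse and suggestions would move across unchanged):

// Sketch: the same Firebase function, but with the google object nested under
// "payload" as Dialogflow v2 expects (it would go under "data" for legacy v1).
const functions = require('firebase-functions');

exports.webhook = functions.https.onRequest((request, response) => {
  response.send({
    payload: {
      google: {
        richResponse: {
          items: [
            { simpleResponse: { textToSpeech: 'Hey! Good to see you.' } }
            // ...the mediaResponse item from the original code goes here unchanged
          ]
          // ...the suggestions from the original code stay alongside items, unchanged
        }
      }
    }
  });
});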

dialogflow webhook not considering payload using fulfillmentText, fulfillmentMessages

FULFILLMENT REQUEST
{
"responseId": "4955f972-058c-44c2-a9c6-fe2c1d846fcd",
"queryResult": {
"queryText": "dsnaf",
"action": "intentNotMatched",
"parameters": {},
"allRequiredParamsPresent": true,
"fulfillmentText": "I think I may have misunderstood your last statement.",
"fulfillmentMessages": [
{
"text": {
"text": [
"I'm afraid I don't understand."
]
}
}
],
"outputContexts": [
{
"name": "****",
"lifespanCount": 1
}
],
"intent": {
"name": "****",
"displayName": "Default Fallback Intent",
"isFallback": true
},
"intentDetectionConfidence": 1,
"languageCode": "en"
},
"originalDetectIntentRequest": {
"payload": {}
},
"session": "****"
}
FULFILLMENT RESPONSE
{
"payload": {
"google": {
"expectUserResponse": true,
"richResponse": {
"items": [
{
"simpleResponse": {
"textToSpeech": "I'm sorry. I didn't quite grasp what you just said."
}
}
]
},
"userStorage": "{\"data\":{}}"
}
},
"outputContexts": [
{
"name": "***",
"lifespanCount": 99,
"parameters": {
"data": "{}"
}
}
]
}
RAW API RESPONSE
{
"responseId": "4955f972-058c-44c2-a9c6-fe2c1d846fcd",
"queryResult": {
"queryText": "dsnaf",
"action": "intentNotMatched",
"parameters": {},
"allRequiredParamsPresent": true,
"fulfillmentMessages": [
{
"text": {
"text": [
"I'm afraid I don't understand."
]
}
}
],
"webhookPayload": {
"google": {
"userStorage": "{\"data\":{}}",
"richResponse": {
"items": [
{
"simpleResponse": {
"textToSpeech": "I'm sorry. I didn't quite grasp what you just said."
}
}
]
},
"expectUserResponse": true
}
},
"outputContexts": [
{
"name": "*****",
"lifespanCount": 99,
"parameters": {
"data": "{}"
}
},
{
"name": "******",
"lifespanCount": 1
}
],
"intent": {
"name": "****",
"displayName": "Default Fallback Intent",
"isFallback": true
},
"intentDetectionConfidence": 1,
"diagnosticInfo": {
"webhook_latency_ms": 286
},
"languageCode": "en"
},
"webhookStatus": {
"message": "Webhook execution successful"
}
}
Google Assistant response
USER SAYS dsnaf
DEFAULT RESPONSE I'm afraid I don't understand.
CONTEXTS
_actions_on_google,initial_chat
INTENT Default Fallback Intent
In the Google Assistant response, the default fulfillmentText is returned instead of the payload google richResponse.
Where are you testing this? If you're in the test console, it's always going to show you the simple text response. You'll need to specifically select Google Assistant in the test console to see the rich response for that platform.

Dialogflow v2 API - cards not shown in the simulator

I have a webhook for fulfillment.
Below is the code that sends the response:
let result_obj = {
"fulfillmentText": "This is a text response",
"fulfillmentMessages": [
{
"text": {
"text": [
"this is test"
]
}
},
{
"card": {
"title": "card title",
"subtitle": "card text",
"imageUri": "https://assistant.google.com/static/images/molecule/Molecule-Formation-stop.png",
"buttons": [
{
"text": "button text",
"postback": "https://assistant.google.com/"
}
]
}
}
]
}
Below is the result from the Dialogflow GUI.
Below is what I get when I run it from the simulator or from the Google Assistant application on an Android phone.
Neither the simulator nor the phone shows the cards. Am I missing something obvious here?
For rich responses like cards to show up on Google Assistant, you have to use the payload part of the response JSON. Here is an example:
{
"fulfillmentText": "This is a text response",
"fulfillmentMessages": [],
"source": "example.com",
"payload": {
"google": {
"expectUserResponse": true,
"richResponse": {
"items": [
{
"simpleResponse": {
"textToSpeech": "This is a Basic Card:"
}
},
{
"basicCard": {
"title": "card title",
"image": {
"url": "https://www.google.com/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png",
"accessibilityText": "Google Logo"
},
"buttons": [
{
"title": "Button Title",
"openUrlAction": {
"url": "https://www.google.com"
}
}
],
"imageDisplayOptions": "WHITE"
}
}
]
}
}
},
"outputContexts": [],
"followupEventInput": {}
}
Check out this GitHub repo for all rich responses' JSON formats.

How to determine the type of the platform in Dialogflow (API.AI)

How do I identify which platform the message came from?
I want to support different platforms like Telegram and Facebook Messenger. When my webhook receives a message, I want to reply according to the platform the message came from.
For example, if the message came from Telegram I want to return a text message, but if it came from Messenger I want to return a card.
There is a source property in the originalRequest object; see the fulfillment docs.
{
"lang": "en",
"status": {
"errorType": "success",
"code": 200
},
"timestamp": "2017-02-09T16:06:01.908Z",
"sessionId": "1486656220806",
"result": {
"parameters": {
"city": "Rome",
"name": "Ana"
},
"contexts": [],
"resolvedQuery": "my name is Ana and I live in Rome",
"source": "agent",
"score": 1.0,
"speech": "",
"fulfillment": {
"messages": [
{
"speech": "Hi Ana! Nice to meet you!",
"type": 0
}
],
"speech": "Hi Ana! Nice to meet you!"
},
"actionIncomplete": false,
"action": "greetings",
"metadata": {
"intentId": "9f41ef7c-82fa-42a7-9a30-49a93e2c14d0",
"webhookForSlotFillingUsed": "false",
"intentName": "greetings",
"webhookUsed": "true"
}
},
"id": "ab30d214-f4bb-4cdd-ae36-31caac7a6693",
"originalRequest": {
"source": "google",
"data": {
"inputs": [
{
"raw_inputs": [
{
"query": "my name is Ana and I live in Rome",
"input_type": 2
}
],
"intent": "assistant.intent.action.TEXT",
"arguments": [
{
"text_value": "my name is Ana and I live in Rome",
"raw_text": "my name is Ana and I live in Rome",
"name": "text"
}
]
}
],
"user": {
"user_id": "PuQndWs1OMjUYwVJMYqwJv0/KT8satJHAUQGiGPDQ7A="
},
"conversation": {
"conversation_id": "1486656220806",
"type": 2,
"conversation_token": "[]"
}
}
} }
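For a v1 webhook, branching on that source value could look roughly like the sketch below (Express-style; the Telegram and Facebook reply shapes are illustrative, with the Messenger card expressed as a generic template under the v1 data payload):

// Sketch: reply differently depending on originalRequest.source in a v1 webhook.
// Values such as 'telegram' and 'facebook' are set by the respective integrations.
const express = require('express');
const app = express();
app.use(express.json());

app.post('/webhook', (req, res) => {
  const source = (req.body.originalRequest && req.body.originalRequest.source) || 'agent';

  if (source === 'telegram') {
    // Plain text reply for Telegram.
    res.json({ speech: 'Hello from Telegram!', displayText: 'Hello from Telegram!' });
  } else if (source === 'facebook') {
    // Card-style reply for Facebook Messenger via the platform-specific data payload.
    res.json({
      data: {
        facebook: {
          attachment: {
            type: 'template',
            payload: {
              template_type: 'generic',
              elements: [{ title: 'Card title', subtitle: 'Card subtitle' }]
            }
          }
        }
      }
    });
  } else {
    // Fallback for any other platform, including the Dialogflow test console.
    res.json({ speech: 'Hello!', displayText: 'Hello!' });
  }
});

app.listen(process.env.PORT || 8080);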
