
I'm trying to build my first app with actions-on-google / google-assistant-sdk. I wanted to start with 3 intents: the MAIN intent, a response to TEXT input, and a HELP intent that the user can call at any time:

The action.json is:

{
  "actions": [
    {
      "description": "Default Welcome Intent",
      "name": "MAIN",
      "fulfillment": {
        "conversationName": "conversation_1"
      },
      "intent": {
        "name": "actions.intent.MAIN"
      }
    },

    {
      "description": "Help Intent",
      "name": "Help",
      "fulfillment": {
        "conversationName": "conversation_1"
      },
      "intent": {
        "name": "app.StandardIntents.HELP",
        "trigger": {
           "queryPatterns": [
            "Help",
            "HELP",
            "help"
        ]
      }
    }
    }

  ],
  "conversations": {
    "conversation_1": {
      "name": "conversation_1",
      "url": "https://us-central1-sillytest-16570.cloudfunctions.net/sayNumber",
      "fulfillmentApiVersion": 2      
    }
  }
}

The index.js:

'use strict';

process.env.DEBUG = 'actions-on-google:*';

const ActionsSdkApp = require('actions-on-google').ActionsSdkApp;
const functions = require('firebase-functions');

const NO_INPUTS = [
  'I didn\'t hear that.',
  'If you\'re still there, say that again.',
  'We can stop here. See you soon.'
];

exports.sayNumber = functions.https.onRequest((request, response) => {
  const app = new ActionsSdkApp({request, response});

  function mainIntent (app) {
    console.log('mainIntent');
    let inputPrompt = app.buildInputPrompt(true, '<speak>Hi! <break time="1"/> ' +
      'I can read out an ordinal like ' +
      '<say-as interpret-as="ordinal">123</say-as>. Say a number.</speak>', NO_INPUTS);
    app.ask(inputPrompt);
  }

  function rawInput (app) {
    console.log('rawInput');
    if (app.getRawInput() === 'bye') {
      app.tell('Goodbye!');
    } else {
      let inputPrompt = app.buildInputPrompt(true, '<speak>You said, <say-as interpret-as="ordinal">' +
        app.getRawInput() + '</say-as></speak>', NO_INPUTS);
      app.ask(inputPrompt);
    }
  }

  function helpHandler (app) {
    console.log('helpHandler');
    app.ask('<speak>What kind of help do you need?</speak>');
  }

  let actionMap = new Map();
  actionMap.set(app.StandardIntents.MAIN, mainIntent);
  actionMap.set(app.StandardIntents.TEXT, rawInput);
  actionMap.set(app.StandardIntents.HELP, helpHandler);

  app.handleRequest(actionMap);
});

I deployed the Firebase function with:

firebase deploy --only functions

And pushed the action package with:

gactions update --action_package action.json --project <YOUR_PROJECT_ID>

While testing the assistant in the simulator, it started well: it repeated the number I entered, waited for another number, and so on. But when I entered "help", the conversation terminated and stopped responding!

UPDATE

I tried the following, but it did not work:

actionMap.set("app.StandardIntents.HELP", helpHandler);

I expect the app to reply "What kind of help do you need?" when I enter/say "Help", but it just reads the word back, the same way it does with any other number.


Hasan A Yousef

2 Answers


Non-built-in Intents are only supported for the first message in a conversation. After that, while you can use them for speech biasing, you will only get a built-in one such as the TEXT Intent.
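
In practice that means your "Help" utterance will reach the webhook as the built-in TEXT intent, so one way to handle it is to branch on the raw input inside the existing rawInput handler. A minimal sketch against the index.js above (the lowercase keyword check and the prompt wording are my own assumptions, not part of the original code):

function rawInput (app) {
  const input = app.getRawInput();
  if (input === 'bye') {
    app.tell('Goodbye!');
  } else if (input.toLowerCase() === 'help') {
    // Mid-conversation, "help" arrives as actions.intent.TEXT, so catch it here.
    app.ask('<speak>What kind of help do you need?</speak>');
  } else {
    let inputPrompt = app.buildInputPrompt(true, '<speak>You said, <say-as interpret-as="ordinal">' +
      input + '</say-as></speak>', NO_INPUTS);
    app.ask(inputPrompt);
  }
}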

Prisoner

Your actionMap is looking for app.StandardIntents.HELP but it doesn't exist. You can view all of the standard intents in the GitHub repo.

app.StandardIntents.MAIN returns another string, which corresponds to 'actions.intent.MAIN'. It does not read your action.json and generate new intents. Thus, app.StandardIntents.HELP actually returns undefined and is never called.
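
You can verify this by logging the constants from inside the request handler; a quick check, assuming the same ActionsSdkApp instance as in the question's index.js:

// Inside exports.sayNumber, right after creating the app:
console.log(app.StandardIntents.MAIN); // 'actions.intent.MAIN'
console.log(app.StandardIntents.TEXT); // 'actions.intent.TEXT'
console.log(app.StandardIntents.HELP); // undefined, because no such constant exists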

Your map should use a string for your help intent since it is not available as a constant in the app object.

actionMap.set("app.StandardIntents.HELP", helpHandler);

This should resolve your issue. Let me know if it does not.

Nick Felker