Integrate the Unity 3D Client

Version 2.0.0 - Created and tested with Unity 3D 2018.1.3f1 - ChangeLog

Installation

Download the .unitypackage and drag it into your Unity3D scene. Import at least the Cognigy and the Plugins folder and all their subfolders. Or you can just open the Unity Project located in the src folder of the repository.

📘

API Compatibility Level

You need to set the API Compatibility Level to .NET 2.0 (Edit/Project Settings/Player/Other Settings).

Requirements

Credentials


If you want to use the COGNIGY.AI service within your project you need to have a valid Login for the COGNIGY.AI platform. For the different speech to text and text to speech services you'll need to acquire the necessary credentials from the given provider.

Socket Endpoint


Within COGNIGY.AI you have to create and configure a Socket Endpoint. You can find instructions on how to create a Socket Endpoint in our Documentation. After creating a Socket Endpoint and assigning a Flow to the Endpoint (see Documentation), you have to get the following two strings from the Endpoint configuration page (see Figure 1):

  • Endpoint URL
  • URL Token
Figure 1: COGNIGY.AI Socket Endpoint Configuration

Figure 1: COGNIGY.AI Socket Endpoint Configuration

This information is used within the CognigyUnity3DClient for connecting to the specified Socket Endpoint.

Service Setups

Cognigy AI


To use the Cognigy AI service you'll have to attach the CognigyAI component (Component/Cognigy/CognigyAI) to a gameObject. There are two possible ways to achieve that:

  1. Click on Window/Cognigy/COGNIGY.AI and you'll see an editor window where you can fill all the necessary and optional fields for the Cognigy AI service (see Figure 2). Next you have the option to just create the AI options asset or to directly attach the Cognigy AI component to a (in the scene) selected gameObject which places the just filled AI options on the component.
Figure 2: Socket Endpoint Options Window in the Unity 3D EditorFigure 2: Socket Endpoint Options Window in the Unity 3D Editor

Figure 2: Socket Endpoint Options Window in the Unity 3D Editor

  2. Do a right-click in the project view and choose Create/Cognigy/Cognigy Socket Options to create the socket options (see Figure 3). Now you are able to fill the necessary and optional fields in the inspector view. When you've finished filling the fields you can place the CognigyAI component on the gameObject of your choice. Next drag the Socket Endpoint Options asset onto the empty Socket Endpoint Options field on the Cognigy AI component.
Figure 3: Socket Endpoint Options Context Menu in the Unity 3D EditorFigure 3: Socket Endpoint Options Context Menu in the Unity 3D Editor

Figure 3: Socket Endpoint Options Context Menu in the Unity 3D Editor

Now the CognigyAI component is all set up and ready to use.

Speech To Text


If you want to use one of the speech to text services you'll have to attach the Speech To Text component (Component/Speech To Text) to a gameObject. There are two possible ways to achieve that:

  1. Click on Window/Cognigy/Speech To Text and you'll see an editor window where you can select the speech to text service provider of your choice (see Figure 4). Select a service provider and fill all the given fields. Next you have the option to just create the options asset or to directly attach the Speech To Text component to a (in the scene) selected gameObject which places the just filled options on the component.
Figure 4: Speech To Text Options Window in the Unity 3D EditorFigure 4: Speech To Text Options Window in the Unity 3D Editor

Figure 4: Speech To Text Options Window in the Unity 3D Editor

  2. Do a right-click in the project view and choose Create/Speech To Text/. Within this menu you have the choice to create an options asset for different providers. Select one of the service provider options and then you are able to fill the necessary fields in the inspector view. When you've finished filling the fields you can place the Speech To Text component on the gameObject of your choice. Next drag the just created options asset onto the empty Speech To Text Options field on the Speech To Text component.

Text to Speech


The setup for the text to speech services works similar to the setup workflow for the speech to text services but with the text to speech menus.

Usage

Cognigy AI


using  Cognigy;
using  Newtonsoft.Json.Linq;
using  UnityEngine;

/// <summary>
/// Example MonoBehaviour showing how to connect to a Cognigy AI Socket Endpoint,
/// send text and structured data, and receive Flow output.
/// Expects a CognigyAI component on the same gameObject.
/// </summary>
public class CharacterHandler : MonoBehaviour
{
  private CognigyAI cognigyAI; // Cognigy AI component on this gameObject
  private bool initDone;       // Guards so the demo messages are sent only once

  private void Awake()
  {
    cognigyAI = GetComponent<CognigyAI>();
    cognigyAI.ConnectAIClient(); // Connects the AI client with the server
    cognigyAI.OnOutput += OnOutput; // Provides the response from the Cognigy AI
  }

  private void Update()
  {
    // HasAI becomes true once the client is connected and ready.
    if (cognigyAI.HasAI && !initDone)
    {
      initDone = true; // Now we're able to send and receive messages from the Cognigy AI

      UnityDataForCognigyAI unityData = new UnityDataForCognigyAI
      {
        Position = gameObject.transform.position
      };

      cognigyAI.AISendMessage("Hi"); // We can send simple text
      cognigyAI.AISendMessage(unityData); // Or data to the Cognigy AI
    }
  }

  private void OnOutput(object sender, OutputEventArgs args)
  {
    Debug.Log("OUTPUT:\n" + args.FlowOutput.text); // text from the Flow Output

    if (args.Output.data != null)
      Debug.Log("DATA:\n" + args.Output.data.ToString()); // data from the Flow Output
  }

  private void OnDestroy()
  {
    // Unsubscribe so the destroyed handler is not kept alive by the event.
    if (cognigyAI != null)
      cognigyAI.OnOutput -= OnOutput;
  }
}

Speech To Text


using  UnityEngine;

/// <summary>
/// Example MonoBehaviour showing both usage modes of the Speech To Text component:
/// one-shot recognition of an AudioClip (non-streaming services) and
/// toggling a streaming recognition session with the R key.
/// Expects a SpeechToText component on the same gameObject.
/// </summary>
public class SpeechToTextHandler : MonoBehaviour
{
  [SerializeField]
  private AudioClip microphoneInput; // Recorded speech clip fed to a non-streaming STT service (assign in the Inspector)

  private SpeechToText speechToText; // Speech to Text component
  private bool streaming;            // Tracks whether the streaming service is currently enabled

  private void Awake()
  {
    speechToText = GetComponent<SpeechToText>();
  }

  private void Start()
  {
    speechToText.STTResult += OnSTTResult; // provides STT result
    speechToText.ProcessAudioToText(microphoneInput); // Takes a speech as AudioClip (non streaming service)
  }

  private void Update()
  {
    // Press R to toggle the streaming speech to text service on/off.
    if (Input.GetKeyDown(KeyCode.R))
    {
      if (streaming)
      {
        Debug.Log("Streaming STT Off");
        speechToText.DisableSpeechToText(); // Disables the speech to text service (streaming service)
        streaming = false;
      }
      else
      {
        Debug.Log("Streaming STT On");
        speechToText.EnableSpeechToText(); // Enables the speech to text service (streaming service)
        streaming = true;
      }
    }
  }

  private void OnSTTResult(object sender, SpeechToTextResultEventArgs args)
  {
    Debug.Log(args.STTResult); // STT result as a string
  }

  private void OnDestroy()
  {
    // Unsubscribe so the destroyed handler is not kept alive by the event.
    if (speechToText != null)
      speechToText.STTResult -= OnSTTResult;
  }
}

Text to Speech


using  UnityEngine;

/// <summary>
/// Example MonoBehaviour that requests speech synthesis from the Text To Speech
/// component and plays the resulting AudioClip through an AudioSource.
/// Expects both components on the same gameObject.
/// </summary>
public class TextToSpeechHandler : MonoBehaviour
{
  private TextToSpeech tts;     // Text to Speech component on this gameObject
  private AudioSource speaker;  // AudioSource used to play back the synthesized clip

  private void Awake()
  {
    tts = GetComponent<TextToSpeech>();
    speaker = GetComponent<AudioSource>();
  }

  private void Start()
  {
    tts.TTSResult += OnTTSResult; // Raised when the TTS service returns a clip
    tts.ProcessTextToAudio("Hi");
  }

  private void OnTTSResult(object sender, TextToSpeechResultEventArgs args)
  {
    // Play the synthesized speech once without interrupting other clips.
    speaker.PlayOneShot(args.TTSResult);
  }

  private void OnDisable()
  {
    tts.TTSResult -= OnTTSResult; // Stop listening while this component is disabled
  }
}

Did this page help you?