Skip to main content

Usage Example

This comprehensive guide covers all aspects of using the UG Labs Unity SDK, from initialization to advanced conversation management.

Complete Implementation

Initialize the SDK

Initialize the SDK once at application startup:

using UnityEngine;
using UGSDK;

public class UGManager : MonoBehaviour
{
    // Access token used to authenticate against the UG Labs backend.
    [SerializeField] private string accessToken;

    private void Awake()
    {
        // Keep this object alive across scene loads so the SDK is
        // initialized exactly once for the whole application.
        DontDestroyOnLoad(gameObject);
    }

    private async void Start()
    {
        try
        {
            await UGSDK.Instance.Initialize(accessToken);
            Debug.Log("UG SDK initialized successfully");
        }
        catch (System.Exception ex)
        {
            // Initialization failures (bad token, no network) surface here.
            Debug.LogError($"Failed to initialize SDK: {ex.Message}");
        }
    }
}

Create Conversation Configuration

Configure your conversation with a prompt and optional utilities:

using System.Collections.Generic; // required for Dictionary<string, Utility>
using UGSDK;

/// <summary>
/// Factory helpers that build ready-to-use conversation configurations.
/// </summary>
public class ConversationSetup
{
    /// <summary>Prompt-only configuration with no utilities attached.</summary>
    public static ConversationConfiguration GetBasicConfig()
    {
        return new ConversationConfiguration
        {
            Prompt = "You are a friendly NPC in a fantasy RPG. You run a shop that sells potions and magical items.",
            Utilities = null
        };
    }

    /// <summary>
    /// Configuration with two utilities attached: a "classify" utility that
    /// buckets player intent, and an "extract" utility that derives a quest
    /// difficulty from the conversation.
    /// </summary>
    public static ConversationConfiguration GetAdvancedConfig()
    {
        return new ConversationConfiguration
        {
            Prompt = "You are a quest giver in an RPG. Analyze player intent and provide quests.",
            Utilities = new Dictionary<string, Utility>
            {
                ["player_intent"] = new Utility
                {
                    Type = "classify",
                    ClassificationQuestion = "What does the player want to do?",
                    Answers = new[] { "get_quest", "ask_question", "turn_in_quest", "shop" }
                },
                ["quest_difficulty"] = new Utility
                {
                    Type = "extract",
                    ExtractPrompt = "Determine the difficulty level for this quest (easy, medium, hard)"
                }
            }
        };
    }
}

Start a Conversation

Start a conversation and subscribe to events:

using UnityEngine;
using UGSDK;

public class NPCConversation : MonoBehaviour
{
    private Conversation conversation;
    private bool isInitialized = false;

    /// <summary>
    /// Opens the conversation (once) and wires up every SDK callback.
    /// Safe to call repeatedly — subsequent calls are no-ops.
    /// </summary>
    public async void StartConversation()
    {
        if (isInitialized) return;

        try
        {
            // Get configuration
            var config = ConversationSetup.GetBasicConfig();

            // Start conversation
            conversation = await UGSDK.Instance.StartConversation(config);

            // Subscribe to every event the conversation can raise.
            conversation.OnInteractionStarted += OnInteractionStarted;
            conversation.OnTextReceived += OnTextReceived;
            conversation.OnAudioReceived += OnAudioReceived;
            conversation.OnUtilityResult += OnUtilityResult;
            conversation.OnInteractionComplete += OnInteractionComplete;
            conversation.OnError += OnError;

            isInitialized = true;
            Debug.Log("Conversation started");
        }
        catch (System.Exception ex)
        {
            Debug.LogError($"Failed to start conversation: {ex.Message}");
        }
    }

    // --- SDK event handlers -------------------------------------------------

    // Fired when the AI begins processing — show a spinner, lock input, etc.
    private void OnInteractionStarted() => Debug.Log("Interaction started");

    // Streamed text chunks from the AI; append to the dialog UI.
    private void OnTextReceived(string text) => Debug.Log($"NPC says: {text}");

    // Audio chunks; playback is handled automatically by the SDK.
    private void OnAudioReceived(byte[] audioData) =>
        Debug.Log($"Received {audioData.Length} bytes of audio");

    // Utility execution results (classifications, extractions, etc.).
    private void OnUtilityResult(string utilityName, object result) =>
        Debug.Log($"Utility '{utilityName}' result: {result}");

    // Fired when the AI finishes responding — hide spinner, re-enable input.
    private void OnInteractionComplete() => Debug.Log("Interaction complete");

    // Surface conversation-level errors to the player.
    private void OnError(string error) => Debug.LogError($"Conversation error: {error}");

    private void OnDestroy()
    {
        // Release the underlying connection when this object goes away.
        if (conversation != null)
        {
            conversation.Dispose();
        }
    }
}

Send Text Messages

Send text messages to the AI:

using UnityEngine;
using UnityEngine.UI;
using UGSDK;

/// <summary>
/// Wires a text InputField and send Button to an active Conversation.
/// </summary>
public class ChatInput : MonoBehaviour
{
    [SerializeField] private InputField inputField;
    [SerializeField] private Button sendButton;
    private Conversation conversation;

    private void Start()
    {
        sendButton.onClick.AddListener(SendChatMessage);
        inputField.onEndEdit.AddListener(OnEndEdit);
    }

    private void OnEndEdit(string text)
    {
        // Only submit when editing ended because the user pressed Enter.
        if (Input.GetKeyDown(KeyCode.Return) || Input.GetKeyDown(KeyCode.KeypadEnter))
        {
            SendChatMessage();
        }
    }

    // Named SendChatMessage (not SendMessage) so it does not hide the
    // inherited UnityEngine.Component.SendMessage(string) member.
    private async void SendChatMessage()
    {
        // Guard: not usable until SetConversation has been called.
        if (conversation == null) return;

        string message = inputField.text.Trim();
        if (string.IsNullOrEmpty(message)) return;

        try
        {
            // Clear input immediately
            inputField.text = "";

            // Disable input while processing
            SetInputEnabled(false);

            // Send message
            await conversation.SendMessage(message);
        }
        catch (System.Exception e)
        {
            Debug.LogError($"Failed to send message: {e.Message}");
            // Re-enable input so the user can retry after a failure.
            SetInputEnabled(true);
        }
    }

    private void SetInputEnabled(bool enabled)
    {
        inputField.interactable = enabled;
        sendButton.interactable = enabled;
    }

    /// <summary>Attaches this input widget to an active conversation.</summary>
    public void SetConversation(Conversation conv)
    {
        conversation = conv;
        // Re-enable input whenever the AI finishes responding.
        conv.OnInteractionComplete += () => SetInputEnabled(true);
    }
}

Voice Recording

Record and send audio input:

using UnityEngine;
using UnityEngine.UI;
using UGSDK;

public class VoiceInput : MonoBehaviour
{
    [SerializeField] private Button recordButton;
    [SerializeField] private Image recordIndicator;
    [SerializeField] private Text recordingTime;

    private Conversation conversation;
    private bool isRecording = false;
    private float recordingStartTime;
    // Guards against overlapping start/stop calls while an async toggle is
    // still awaiting the SDK (isRecording only flips after the await).
    private bool isToggling = false;

    private void Start()
    {
        recordButton.onClick.AddListener(ToggleRecording);
        recordIndicator.enabled = false;
    }

    private void Update()
    {
        if (isRecording)
        {
            // Live elapsed-time readout.
            float elapsed = Time.time - recordingStartTime;
            recordingTime.text = $"{elapsed:F1}s";

            // Pulse animation on the red record indicator.
            float alpha = Mathf.PingPong(Time.time * 2, 1);
            recordIndicator.color = new Color(1, 0, 0, alpha);
        }
    }

    private async void ToggleRecording()
    {
        // Ignore clicks before SetConversation, and ignore re-entrant clicks
        // while a previous toggle is still in flight.
        if (conversation == null || isToggling) return;

        isToggling = true;
        try
        {
            if (!isRecording)
            {
                await StartRecording();
            }
            else
            {
                await StopRecording();
            }
        }
        finally
        {
            isToggling = false;
        }
    }

    // Begins streaming microphone audio to the SDK and updates the UI.
    private async System.Threading.Tasks.Task StartRecording()
    {
        try
        {
            await conversation.StartRecording();

            isRecording = true;
            recordingStartTime = Time.time;
            recordIndicator.enabled = true;
            recordButton.GetComponentInChildren<Text>().text = "Stop";

            Debug.Log("Recording started");
        }
        catch (System.Exception e)
        {
            Debug.LogError($"Failed to start recording: {e.Message}");
        }
    }

    // Stops the microphone stream and resets the UI.
    private async System.Threading.Tasks.Task StopRecording()
    {
        try
        {
            await conversation.StopRecording();

            isRecording = false;
            recordIndicator.enabled = false;
            recordingTime.text = "0.0s";
            recordButton.GetComponentInChildren<Text>().text = "Record";

            Debug.Log("Recording stopped");
        }
        catch (System.Exception e)
        {
            Debug.LogError($"Failed to stop recording: {e.Message}");
        }
    }

    /// <summary>Attaches this voice widget to an active conversation.</summary>
    public void SetConversation(Conversation conv)
    {
        conversation = conv;
    }
}

Handle Utility Results

Process utility execution results:

using UnityEngine;
using UGSDK;

public class UtilityHandler : MonoBehaviour
{
    // Routes raw utility results from the SDK to the matching handler.
    // Results arrive as object; all three utilities here produce strings,
    // so a single `as string` cast (null if not a string) is shared.
    private void OnUtilityResult(string utilityName, object result)
    {
        var value = result as string;

        if (utilityName == "player_intent")
        {
            HandlePlayerIntent(value);
        }
        else if (utilityName == "quest_difficulty")
        {
            HandleQuestDifficulty(value);
        }
        else if (utilityName == "sentiment")
        {
            HandleSentiment(value);
        }
    }

    // Reacts to the classified player intent (quest UI, shop, dialog).
    private void HandlePlayerIntent(string intent)
    {
        Debug.Log($"Player intent: {intent}");

        if (intent == "get_quest")
        {
            // Show quest UI
        }
        else if (intent == "shop")
        {
            // Open shop interface
        }
        else if (intent == "ask_question")
        {
            // Continue conversation
        }
    }

    // Maps the extracted difficulty onto a reward tier.
    private void HandleQuestDifficulty(string difficulty)
    {
        Debug.Log($"Quest difficulty: {difficulty}");

        if (difficulty == "easy")
        {
            // Set low-tier rewards
        }
        else if (difficulty == "medium")
        {
            // Set mid-tier rewards
        }
        else if (difficulty == "hard")
        {
            // Set high-tier rewards
        }
    }

    // Adjusts NPC tone based on detected player sentiment.
    private void HandleSentiment(string sentiment)
    {
        Debug.Log($"Player sentiment: {sentiment}");

        if (sentiment == "positive")
        {
            // Friendly response
        }
        else if (sentiment == "negative")
        {
            // Concerned response
        }
        else if (sentiment == "neutral")
        {
            // Standard response
        }
    }
}

Complete Example: NPC Dialog System

Here's a complete example tying everything together:

using UnityEngine;
using UnityEngine.UI;
using UGSDK;
using System.Collections.Generic;

/// <summary>
/// Complete NPC dialog controller: initializes a conversation, handles text
/// and voice input, and streams AI responses into the dialog UI.
/// </summary>
public class NPCDialogSystem : MonoBehaviour
{
    [Header("UI References")]
    [SerializeField] private Text npcNameText;
    [SerializeField] private Text dialogText;
    [SerializeField] private InputField playerInput;
    [SerializeField] private Button sendButton;
    [SerializeField] private Button recordButton;
    [SerializeField] private Image recordIndicator;
    [SerializeField] private GameObject loadingIndicator;

    [Header("NPC Configuration")]
    [SerializeField] private string npcName = "Merchant";
    [SerializeField] private string npcPrompt = "You are a friendly merchant in a fantasy RPG.";

    private Conversation conversation;
    private bool isRecording = false;
    // Buffers streamed text chunks so they can be revealed gradually.
    private Queue<string> textQueue = new Queue<string>();
    private bool isDisplayingText = false;

    private async void Start()
    {
        // Initialize UI
        npcNameText.text = npcName;
        dialogText.text = "";
        loadingIndicator.SetActive(false);
        recordIndicator.enabled = false;

        // Setup button listeners
        sendButton.onClick.AddListener(SendPlayerMessage);
        recordButton.onClick.AddListener(ToggleRecording);

        // Initialize conversation
        await InitializeConversation();
    }

    // Opens the conversation with this NPC's prompt and subscribes to events.
    // On failure, `conversation` stays null and the input handlers no-op.
    private async System.Threading.Tasks.Task InitializeConversation()
    {
        try
        {
            var config = new ConversationConfiguration
            {
                Prompt = npcPrompt,
                Utilities = null
            };

            conversation = await UGSDK.Instance.StartConversation(config);

            // Subscribe to events
            conversation.OnInteractionStarted += OnInteractionStarted;
            conversation.OnTextReceived += OnTextReceived;
            conversation.OnInteractionComplete += OnInteractionComplete;
            conversation.OnError += OnError;

            Debug.Log($"{npcName} is ready to talk!");
        }
        catch (System.Exception e)
        {
            Debug.LogError($"Failed to initialize conversation: {e.Message}");
        }
    }

    private async void SendPlayerMessage()
    {
        // Guard: initialization may have failed or still be in progress.
        if (conversation == null) return;

        string message = playerInput.text.Trim();
        if (string.IsNullOrEmpty(message)) return;

        playerInput.text = "";
        SetInputEnabled(false);

        try
        {
            await conversation.SendMessage(message);
        }
        catch (System.Exception e)
        {
            Debug.LogError($"Failed to send message: {e.Message}");
            SetInputEnabled(true);
        }
    }

    private async void ToggleRecording()
    {
        // Guard: initialization may have failed or still be in progress.
        if (conversation == null) return;

        // async void swallows exceptions, so wrap the SDK calls the same way
        // the other handlers do; otherwise a failure here is unobservable.
        try
        {
            if (!isRecording)
            {
                await conversation.StartRecording();
                isRecording = true;
                recordIndicator.enabled = true;
                recordButton.GetComponentInChildren<Text>().text = "Stop";
            }
            else
            {
                await conversation.StopRecording();
                isRecording = false;
                recordIndicator.enabled = false;
                recordButton.GetComponentInChildren<Text>().text = "Record";
            }
        }
        catch (System.Exception e)
        {
            Debug.LogError($"Failed to toggle recording: {e.Message}");
        }
    }

    private void OnInteractionStarted()
    {
        // New AI turn: show the spinner and clear the previous response.
        loadingIndicator.SetActive(true);
        dialogText.text = "";
    }

    private void OnTextReceived(string text)
    {
        // Queue the chunk; start the reveal coroutine if it isn't running.
        textQueue.Enqueue(text);
        if (!isDisplayingText)
        {
            StartCoroutine(DisplayTextCoroutine());
        }
    }

    // Reveals queued chunks one at a time for a typewriter effect.
    private System.Collections.IEnumerator DisplayTextCoroutine()
    {
        isDisplayingText = true;

        while (textQueue.Count > 0)
        {
            string text = textQueue.Dequeue();
            dialogText.text += text;
            yield return new WaitForSeconds(0.05f);
        }

        isDisplayingText = false;
    }

    private void OnInteractionComplete()
    {
        loadingIndicator.SetActive(false);
        SetInputEnabled(true);
    }

    private void OnError(string error)
    {
        Debug.LogError($"Error: {error}");
        dialogText.text = "Sorry, I didn't catch that. Could you try again?";
        SetInputEnabled(true);
    }

    private void SetInputEnabled(bool enabled)
    {
        playerInput.interactable = enabled;
        sendButton.interactable = enabled;
        recordButton.interactable = enabled;
    }

    private void OnDestroy()
    {
        // Release the underlying connection when this object goes away.
        conversation?.Dispose();
    }
}

Context Variables

Pass dynamic context to utilities:

var contextVars = new Dictionary<string, string>
{
["player_level"] = "15",
["player_class"] = "warrior",
["current_location"] = "marketplace"
};

await conversation.SendMessage("What quests do you have?", contextVars);

Use in utility prompts with {{variable_name}}:

new Utility
{
Type = "extract",
ExtractPrompt = "Generate a quest suitable for a level {{player_level}} {{player_class}} in the {{current_location}}"
}

You can also use ReturnType for structured JSON output from extraction utilities:

new Utility
{
Type = "extract",
ExtractPrompt = "Extract the player's name and level from: {{user_input}}",
ReturnType = new ReturnTypeSpec
{
Format = "json",
JsonSchema = new Dictionary<string, object>
{
["type"] = "object",
["properties"] = new Dictionary<string, object>
{
["name"] = new Dictionary<string, object> { ["type"] = "string" },
["level"] = new Dictionary<string, object> { ["type"] = "integer" }
},
["required"] = new[] { "name", "level" }
}
}
}

Conversation Lifecycle

The SDK provides three methods for managing conversation state, each with a different scope of what gets cleared:

| Method                 | Clears                                          | Keeps                       |
| ---------------------- | ----------------------------------------------- | --------------------------- |
| `PauseConversation()`  | audio, voice, captions                          | context, config, connection |
| `StopConversation()`   | context, state                                  | connection, config          |
| `ClearConversation()`  | everything (state, config, context, connection) | nothing                     |
  • PauseConversation() — Use when you want to temporarily silence output (e.g., a cutscene or menu) but keep the conversation context intact so you can resume seamlessly.
  • StopConversation() — Use when the current conversation is done but you plan to start a new one on the same connection (e.g., the player moves to a different NPC).
  • ClearConversation() — Use for a full teardown, such as when leaving a scene or shutting down. You'll need to re-initialize everything to start again.
// Pause during a cutscene, then resume
conversation.PauseConversation();
// ... cutscene plays ...
// Resume — context is still there

// End the current conversation, start fresh on same connection
conversation.StopConversation();

// Full cleanup — tears down everything
conversation.ClearConversation();

Microphone Mute / Unmute

To mute and unmute the microphone, control audio streaming independently from the conversation:

  • Mute: Stop sending audio and call ClearAudio() to discard the server-side buffer.
  • Unmute: Resume sending audio chunks. Do not call StartConversation() — that triggers the AI to respond and the avatar to speak.
// Mute the microphone
conversation.ClearAudio();

// Unmute — just resume audio capture, no StartConversation()
// The SDK will resume sending add_audio chunks automatically

// The AI only responds when the user finishes speaking
// and an interact request is sent
> **Warning:** Calling StartConversation() to unmute is a common mistake. It creates a new interaction, which makes the avatar speak. Unmuting should only resume audio capture.

This is distinct from PauseConversation(), which pauses AI output (audio playback and text streaming) while keeping context intact. Mic muting controls user input only.

Best Practices

1. Error Handling

Always wrap SDK calls in try-catch:

try
{
await conversation.SendMessage(text);
}
catch (System.Exception e)
{
Debug.LogError($"Error: {e.Message}");
// Show user-friendly error message
}

2. Resource Cleanup

Dispose conversations when done:

private void OnDestroy()
{
conversation?.Dispose();
}

3. Input Validation

Validate user input before sending:

// Returns true for a non-blank message under the 1000-character limit.
// Note: !string.IsNullOrWhiteSpace already implies Length > 0, so the
// original redundant length check is dropped.
private bool IsValidMessage(string message)
{
    return !string.IsNullOrWhiteSpace(message) && message.Length < 1000;
}

4. Microphone Permissions

Check permissions before recording:

#if UNITY_IOS || UNITY_ANDROID
// Checks (and, if necessary, requests) microphone permission.
// Application.RequestUserAuthorization returns an AsyncOperation — not an
// awaitable Task<bool> — so we poll isDone and then re-query
// HasUserAuthorization for the final answer.
private async System.Threading.Tasks.Task<bool> CheckMicrophonePermission()
{
    if (Application.HasUserAuthorization(UserAuthorization.Microphone))
    {
        return true;
    }

    var request = Application.RequestUserAuthorization(UserAuthorization.Microphone);
    while (!request.isDone)
    {
        await System.Threading.Tasks.Task.Yield();
    }
    return Application.HasUserAuthorization(UserAuthorization.Microphone);
}
#endif

5. Singleton Pattern for SDK

Keep one SDK instance across scenes:

public class UGManager : MonoBehaviour
{
    // Single surviving instance across scene loads.
    private static UGManager instance;

    private void Awake()
    {
        if (instance != null)
        {
            // A manager already exists — discard this duplicate.
            Destroy(gameObject);
            return;
        }

        instance = this;
        DontDestroyOnLoad(gameObject);
    }
}

Next Steps