PATCH /assistants/v1/assistants/{id}
Update Assistant
curl --request PATCH \
  --url https://api-prod.interactly.ai/assistants/v1/assistants/{id} \
  --header 'Authorization: Bearer <token>' \
  --header 'Content-Type: application/json' \
  --data '
{
  "name": "My Assistant",
  "welcomeMessage": "Hello! How can I help you?",
  "welcomeMessageMode": "assistant-speaks-first",
  "welcomeMessageInterruptionsEnabled": false,
  "assistantProvider": "openai",
  "assistantModel": "gpt-4o-mini",
  "assistantLLMUrl": "<string>",
  "assistantSystemPrompt": "You are AI assistant to help patients with their health care needs.",
  "assistantTemperature": 0,
  "assistantMaxTokens": 256,
  "assistantResponseSplitter": ",",
  "config": {
    "speech": {
      "stt": {
        "vendor": "microsoft",
        "languages": "en-US"
      },
      "ttsData": [
        {
          "vendor": "eleven-labs",
          "language": "en-US",
          "voice": "ZeK6O9RfGNGj0cJT2HoJ"
        }
      ]
    }
  },
  "hints": [
    "<string>"
  ],
  "backgroundSound": "enable",
  "backgroundSoundVolume": 50,
  "assistantBackchannelingEnabled": false,
  "dtmfInputEnabled": false,
  "maxCallDuration": 900,
  "idleTimeout": 20,
  "maxIdleMessagesInSequence": 3,
  "startSpeakingOptions": {
    "waitSeconds": 0,
    "smartEndpointing": "Interactly",
    "onPunctuationSeconds": 0,
    "onNoPunctuationSeconds": 0,
    "onNumberSeconds": 0,
    "LiveKitBaseValue": 100,
    "LiveKitScaleValue": 1000
  },
  "stopSpeakingOptions": {
    "numberOfWords": 3,
    "voiceSeconds": 0
  },
  "assistantToolIds": [],
  "assistantPredefinedTools": {
    "knowledgeBase": false,
    "endCall": false,
    "appointment": false,
    "volumeControl": false,
    "waitList": false,
    "callForward": false
  },
  "assistantKnowledgeBaseIds": [],
  "endCallMessage": "Goodbye!",
  "endCallToolDescription": "Trigger the end call only when the user is done with the conversation.",
  "endCallPhrases": [
    "goodbye",
    "bye"
  ],
  "callForwardData": {
    "phoneNumber": "+1234567890",
    "extension": "",
    "name": "call-forward-name"
  },
  "assistantAnalysis": {
    "summary": {
      "enabled": false,
      "prompt": "Generate a summary of the call.",
      "timeoutSeconds": 30
    },
    "successEvaluation": {
      "enabled": false,
      "prompt": "Evaluate the success of the call.",
      "rubric": "NumericScale",
      "timeoutSeconds": 30
    },
    "structuredData": {
      "enabled": false,
      "prompt": "Extract structured data from the call.",
      "timeoutSeconds": 30,
      "schema": {
        "type": "object",
        "properties": {
          "name": {
            "type": "string",
            "description": "This is the name of the user.",
            "example": "John Doe"
          },
          "dob": {
            "type": "string",
            "format": "date",
            "description": "This is the date of birth of the user.",
            "example": "1990-03-08"
          }
        }
      }
    }
  },
  "assistantOverrides": {
    "welcomeMessage": "Hello, how can I help you today?",
    "welcomeMessageMode": "assistant-speaks-first",
    "welcomeMessageInterruptionsEnabled": false,
    "recordingEnabled": true,
    "recordingPath": "/recordings",
    "dynamicVariables": {
      "user_name": "John Doe"
    }
  },
  "assistantServer": {
    "url": "https://api.example.com/v1/getUserDetails",
    "timeoutSeconds": 20,
    "secret": "my-secret",
    "headers": {},
    "enabled": false,
    "messages": [
      "status-update",
      "conversation-update",
      "hang",
      "end-of-call-report"
    ]
  }
}
'
{
  "name": "My Assistant",
  "welcomeMessage": "Hello! How can I help you?",
  "welcomeMessageMode": "assistant-speaks-first",
  "welcomeMessageInterruptionsEnabled": false,
  "assistantProvider": "openai",
  "assistantModel": "gpt-4o-mini",
  "assistantLLMUrl": "<string>",
  "assistantSystemPrompt": "You are AI assistant to help patients with their health care needs.",
  "assistantTemperature": 0,
  "assistantMaxTokens": 256,
  "assistantResponseSplitter": ",",
  "config": {
    "speech": {
      "stt": {
        "vendor": "microsoft",
        "languages": "en-US"
      },
      "ttsData": [
        {
          "vendor": "eleven-labs",
          "language": "en-US",
          "voice": "ZeK6O9RfGNGj0cJT2HoJ"
        }
      ]
    }
  },
  "hints": [
    "<string>"
  ],
  "backgroundSound": "enable",
  "backgroundSoundVolume": 50,
  "assistantBackchannelingEnabled": false,
  "dtmfInputEnabled": false,
  "maxCallDuration": 900,
  "idleTimeout": 20,
  "maxIdleMessagesInSequence": 3,
  "startSpeakingOptions": {
    "waitSeconds": 0,
    "smartEndpointing": "Interactly",
    "onPunctuationSeconds": 0,
    "onNoPunctuationSeconds": 0,
    "onNumberSeconds": 0,
    "LiveKitBaseValue": 100,
    "LiveKitScaleValue": 1000
  },
  "stopSpeakingOptions": {
    "numberOfWords": 3,
    "voiceSeconds": 0
  },
  "assistantToolIds": [],
  "assistantPredefinedTools": {
    "knowledgeBase": false,
    "endCall": false,
    "appointment": false,
    "volumeControl": false,
    "waitList": false,
    "callForward": false
  },
  "assistantKnowledgeBaseIds": [],
  "endCallMessage": "Goodbye!",
  "endCallToolDescription": "Trigger the end call only when the user is done with the conversation.",
  "endCallPhrases": [
    "goodbye",
    "bye"
  ],
  "callForwardData": {
    "phoneNumber": "+1234567890",
    "extension": "",
    "name": "call-forward-name"
  },
  "assistantAnalysis": {
    "summary": {
      "enabled": false,
      "prompt": "Generate a summary of the call.",
      "timeoutSeconds": 30
    },
    "successEvaluation": {
      "enabled": false,
      "prompt": "Evaluate the success of the call.",
      "rubric": "NumericScale",
      "timeoutSeconds": 30
    },
    "structuredData": {
      "enabled": false,
      "prompt": "Extract structured data from the call.",
      "timeoutSeconds": 30,
      "schema": {
        "type": "object",
        "properties": {
          "name": {
            "type": "string",
            "description": "This is the name of the user.",
            "example": "John Doe"
          },
          "dob": {
            "type": "string",
            "format": "date",
            "description": "This is the date of birth of the user.",
            "example": "1990-03-08"
          }
        }
      }
    }
  },
  "assistantOverrides": {
    "welcomeMessage": "Hello, how can I help you today?",
    "welcomeMessageMode": "assistant-speaks-first",
    "welcomeMessageInterruptionsEnabled": false,
    "recordingEnabled": true,
    "recordingPath": "/recordings",
    "dynamicVariables": {
      "user_name": "John Doe"
    }
  },
  "assistantServer": {
    "url": "https://api.example.com/v1/getUserDetails",
    "timeoutSeconds": 20,
    "secret": "my-secret",
    "headers": {},
    "enabled": false,
    "messages": [
      "status-update",
      "conversation-update",
      "hang",
      "end-of-call-report"
    ]
  },
  "_id": "5f7b1b1b1b1b1b1b1b1b1b1b",
  "teamId": "1f7b1b1b1b1b1b1b1b1b1b1b",
  "createdAt": "2020-10-05T00:00:00.000Z",
  "updatedAt": "2020-10-05T00:00:00.000Z"
}
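
Because this is a PATCH endpoint, you typically send only the fields you want to change. A minimal sketch, assuming the endpoint accepts a partial body (standard PATCH semantics), with a placeholder assistant ID and token:

# Sketch: assumes a partial body is accepted; only the fields being updated are sent
curl --request PATCH \
  --url https://api-prod.interactly.ai/assistants/v1/assistants/{id} \
  --header 'Authorization: Bearer <token>' \
  --header 'Content-Type: application/json' \
  --data '
{
  "name": "My Assistant",
  "welcomeMessage": "Hello! How can I help you?"
}
'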

Authorizations

Authorization
string
header
required

Retrieve your API key from the Dashboard API Keys section.

Path Parameters

id
string
required

Assistant ID

Body

application/json
name
string
required

This is the name of the assistant.

Example:

"My Assistant"

welcomeMessage
string
default:hi

This is the welcome message of the assistant.

Example:

"Hello! How can I help you?"

welcomeMessageMode
string
default:automatic

This is the mode of the welcome message. It can be one of the following: 'assistant-speaks-first', 'assistant-waits-for-user', 'automatic'.

Example:

"assistant-speaks-first"

welcomeMessageInterruptionsEnabled
boolean
default:false

This is a boolean that controls whether interruptions are enabled during the welcome message. If set to false, the user cannot interrupt the welcome message.

Example:

false

assistantProvider
enum<string>
default:azure

This is the provider of the assistant.

Available options:
openai,
azure,
gemini,
deepseek,
bedrock,
custom-llm
Example:

"openai"

assistantModel
string
default:gpt-4o-mini

The type of model used for the assistant depends on the provider.

For openai - Available Options: gpt-4, gpt-4o, gpt-4o-mini, gpt-3.5-turbo.

For azure - Available Options: gpt-4, gpt-4o, gpt-4o-mini.

For gemini - Available Options: gemini-1.5-flash-latest, gemini-1.5-pro-latest, gemini-1.5-flash.

For deepseek - Available Options: V3.

For bedrock - Available Options: anthropic.claude-3-5-sonnet, anthropic.claude-3-5-haiku, meta.llama3-1-8b-instruct.

Example:

"gpt-4o-mini"

assistantLLMUrl
string

Provide your LLM base URL here when assistantProvider=custom-llm.

Ex: https://your-server.com/custom-llm.

Please note that /chat/completions is appended to your base URL before the LLM endpoint is called. The LLM URL must be accessible from the Interactly server. If you are using a local server, you can use ngrok to expose it to the internet.
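
For example, using the example base URL above, the resolved endpoint would be:

Base URL:        https://your-server.com/custom-llm
Endpoint called: https://your-server.com/custom-llm/chat/completions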

assistantSystemPrompt
string

This system prompt guides the assistant's operations.

Example:

"You are AI assistant to help patients with their health care needs."

assistantTemperature
number
default:0

This is the temperature of the assistant.

Example:

0

assistantMaxTokens
integer
default:256

This is the maximum number of tokens that the assistant can generate.

Example:

256

assistantResponseSplitter
string

Use this delimiter to split the AI responses into separate lines.

Example:

","

config
object

This is the STT and TTS configuration of the assistant. You can add one STT and multiple TTS configurations.
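
A sketch of a config object with one STT entry and two TTS entries (the second voice ID and language are placeholders for illustration):

{
  "speech": {
    "stt": {
      "vendor": "microsoft",
      "languages": "en-US"
    },
    "ttsData": [
      {
        "vendor": "eleven-labs",
        "language": "en-US",
        "voice": "ZeK6O9RfGNGj0cJT2HoJ"
      },
      {
        "vendor": "eleven-labs",
        "language": "<another-language>",
        "voice": "<voice-id>"
      }
    ]
  }
}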

hints
string[]

Provide keyword hints to help the assistant better recognize and transcribe important words or phrases.
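
For example, hints matching the sample healthcare prompt above (hypothetical values):

"hints": ["deductible", "prior authorization", "copay"]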

backgroundSound

This is the background sound in the call. Default is 'disable'. You can also provide a custom sound by passing a URL to an audio file; the URL must be publicly accessible and must start with https://.

Note: custom sounds are currently not supported with the Twilio phone vendor.

Available options:
enable,
disable
Maximum string length: 1000
Example:

"enable"

backgroundSoundVolume
number
default:50

This is the volume of the background sound. It is a number between 1 and 100.

Example:

50

assistantBackchannelingEnabled
boolean

This controls whether backchanneling is enabled for the assistant.

Example:

false

dtmfInputEnabled
boolean

This is a boolean that controls whether the DTMF input is enabled for the assistant.

Example:

false

maxCallDuration
integer

This is the maximum call duration (in minutes) of the assistant.

Required range: 1 <= x <= 120
Example:

900

idleTimeout
integer
default:30

How long the assistant should wait in silence before confirming the user's presence and playing an idle message.

Required range: 5 <= x <= 600
Example:

20

maxIdleMessagesInSequence
integer

Maximum number of times to repeat the idle message in sequence.

Required range: 1 <= x <= 5
Example:

3

startSpeakingOptions
object

Configuration for when the assistant should start talking.

stopSpeakingOptions
object

Configuration for detecting when the assistant should stop talking on customer interruption.

assistantToolIds
string[]

This is the list of tool IDs of the assistant.

Example:
[]
assistantPredefinedTools
object

Enable or disable specific tools that the assistant can use to improve its functionality.

assistantKnowledgeBaseIds
string[]

This is the list of knowledge base IDs of the assistant. Provide this only when assistantPredefinedTools.knowledgeBase is enabled.

Example:
[]
endCallMessage
string

This is the message that the assistant will say if it ends the call. Provide this only when assistantPredefinedTools.endCall is enabled.

Example:

"Goodbye!"

endCallToolDescription
string

This is the description of the tool that the assistant will use to end the call. Provide this only when assistantPredefinedTools.endCall is enabled.

Example:

"Trigger the end call only when the user is done with the conversation."

endCallPhrases
string[]

List of phrases that the assistant listens for to end the call. Provide these only when assistantPredefinedTools.endCall is enabled.

Example:
["goodbye", "bye"]
callForwardData
object[]

This is the call-forwarding data of the assistant.

Example:
{
"phoneNumber": "+1234567890",
"extension": "",
"name": "call-forward-name"
}
assistantAnalysis
object

This is the post-call analysis configuration of the assistant: call summary, success evaluation, and structured data extraction.

assistantOverrides
object

This is where you can override the default behavior of the assistant.

assistantServer
object

This is where Interactly will send webhooks. You can find all available webhooks along with their shape in the ServerMessage schema.
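
A sketch of enabling webhooks for a subset of events, reusing the example values above (whether omitted keys such as timeoutSeconds and headers fall back to defaults is an assumption):

"assistantServer": {
  "url": "https://api.example.com/v1/getUserDetails",
  "secret": "my-secret",
  "enabled": true,
  "messages": ["status-update", "end-of-call-report"]
}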

Response

200 - application/json

Successful response

name
string

This is the name of the assistant.

Example:

"My Assistant"

welcomeMessage
string
default:hi

This is the welcome message of the assistant.

Example:

"Hello! How can I help you?"

welcomeMessageMode
string
default:automatic

This is the mode of the welcome message. It can be one of the following: 'assistant-speaks-first', 'assistant-waits-for-user', 'automatic'.

Example:

"assistant-speaks-first"

welcomeMessageInterruptionsEnabled
boolean
default:false

This is a boolean that controls whether interruptions are enabled during the welcome message. If set to false, the user cannot interrupt the welcome message.

Example:

false

assistantProvider
enum<string>
default:azure

This is the provider of the assistant.

Available options:
openai,
azure,
gemini,
deepseek,
bedrock,
custom-llm
Example:

"openai"

assistantModel
string
default:gpt-4o-mini

The type of model used for the assistant depends on the provider.

For openai - Available Options: gpt-4, gpt-4o, gpt-4o-mini, gpt-3.5-turbo.

For azure - Available Options: gpt-4, gpt-4o, gpt-4o-mini.

For gemini - Available Options: gemini-1.5-flash-latest, gemini-1.5-pro-latest, gemini-1.5-flash.

For deepseek - Available Options: V3.

For bedrock - Available Options: anthropic.claude-3-5-sonnet, anthropic.claude-3-5-haiku, meta.llama3-1-8b-instruct.

Example:

"gpt-4o-mini"

assistantLLMUrl
string

Provide your LLM base URL here when assistantProvider=custom-llm.

Ex: https://your-server.com/custom-llm.

Please note that /chat/completions is appended to your base URL before the LLM endpoint is called. The LLM URL must be accessible from the Interactly server. If you are using a local server, you can use ngrok to expose it to the internet.

assistantSystemPrompt
string

This system prompt guides the assistant's operations.

Example:

"You are AI assistant to help patients with their health care needs."

assistantTemperature
number
default:0

This is the temperature of the assistant.

Example:

0

assistantMaxTokens
integer
default:256

This is the maximum number of tokens that the assistant can generate.

Example:

256

assistantResponseSplitter
string

Use this delimiter to split the AI responses into separate lines.

Example:

","

config
object

This is the STT and TTS configuration of the assistant. You can add one STT and multiple TTS configurations.

hints
string[]

Provide keyword hints to help the assistant better recognize and transcribe important words or phrases.

backgroundSound

This is the background sound in the call. Default is 'disable'. You can also provide a custom sound by passing a URL to an audio file; the URL must be publicly accessible and must start with https://.

Note: custom sounds are currently not supported with the Twilio phone vendor.

Available options:
enable,
disable
Maximum string length: 1000
Example:

"enable"

backgroundSoundVolume
number
default:50

This is the volume of the background sound. It is a number between 1 and 100.

Example:

50

assistantBackchannelingEnabled
boolean

This controls whether backchanneling is enabled for the assistant.

Example:

false

dtmfInputEnabled
boolean

This is a boolean that controls whether the DTMF input is enabled for the assistant.

Example:

false

maxCallDuration
integer

This is the maximum call duration (in minutes) of the assistant.

Required range: 1 <= x <= 120
Example:

900

idleTimeout
integer
default:30

How long the assistant should wait in silence before confirming the user's presence and playing an idle message.

Required range: 5 <= x <= 600
Example:

20

maxIdleMessagesInSequence
integer

Maximum number of times to repeat the idle message in sequence.

Required range: 1 <= x <= 5
Example:

3

startSpeakingOptions
object

Configuration for when the assistant should start talking.

stopSpeakingOptions
object

Configuration for detecting when the assistant should stop talking on customer interruption.

assistantToolIds
string[]

This is the list of tool IDs of the assistant.

Example:
[]
assistantPredefinedTools
object

Enable or disable specific tools that the assistant can use to improve its functionality.

assistantKnowledgeBaseIds
string[]

This is the list of knowledge base IDs of the assistant. Provide this only when assistantPredefinedTools.knowledgeBase is enabled.

Example:
[]
endCallMessage
string

This is the message that the assistant will say if it ends the call. Provide this only when assistantPredefinedTools.endCall is enabled.

Example:

"Goodbye!"

endCallToolDescription
string

This is the description of the tool that the assistant will use to end the call. Provide this only when assistantPredefinedTools.endCall is enabled.

Example:

"Trigger the end call only when the user is done with the conversation."

endCallPhrases
string[]

List of phrases that the assistant listens for to end the call. Provide these only when assistantPredefinedTools.endCall is enabled.

Example:
["goodbye", "bye"]
callForwardData
object[]

This is the call-forwarding data of the assistant.

Example:
{
"phoneNumber": "+1234567890",
"extension": "",
"name": "call-forward-name"
}
assistantAnalysis
object

This is the post-call analysis configuration of the assistant: call summary, success evaluation, and structured data extraction.

assistantOverrides
object

This is where you can override the default behavior of the assistant.

assistantServer
object

This is where Interactly will send webhooks. You can find all available webhooks along with their shape in the ServerMessage schema.

_id
string

This is the unique identifier of the assistant.

Example:

"5f7b1b1b1b1b1b1b1b1b1b1b"

teamId
string

This is the unique identifier of the team that the assistant belongs to.

Example:

"1f7b1b1b1b1b1b1b1b1b1b1b"

createdAt
string<date-time>

This is the ISO 8601 date-time string of when the record was created.

Example:

"2020-10-05T00:00:00.000Z"

updatedAt
string<date-time>

This is the ISO 8601 date-time string of when the record was last updated.

Example:

"2020-10-05T00:00:00.000Z"