Send Question to Perplexity

Function Template
by Nicolas Arcay Bermejo
Send a question to Perplexity's AI models. This Function supports the pplx-7b-online and pplx-70b-online models.

Function Code Snippet


export default async function main(args) {
  const { inputVars } = args;
  let { prompt, model, apiKey, maxTokens } = inputVars;

  // Remove extra spaces (guard against missing values)
  prompt = prompt?.trim();
  model = model?.trim();
  apiKey = apiKey?.trim();

  // Get a proper value for max tokens, defaulting to 200
  if (maxTokens === undefined || isNaN(parseFloat(maxTokens))) {
    maxTokens = 200;
  } else {
    maxTokens = Math.round(parseFloat(maxTokens));
  }

  try {
    // Get model value or default to '7b'
    if (!model || model.toLowerCase() !== '70b') {
      model = '7b';
    }

    // Check for API key
    if (!apiKey) {
      return {
        outputVars: {
          error: `Please provide your Perplexity API key`
        },
        next: {
          path: 'error'
        },
        trace: [
          {
            type: 'debug',
            payload: {
              message: `No Perplexity API key provided`
            }
          }
        ],
      };
    }

    // Check for prompt
    if (!prompt) {
      return {
        outputVars: {
          error: `No prompt`
        },
        next: {
          path: 'no_prompt'
        },
        trace: [
          {
            type: 'debug',
            payload: {
              message: `No prompt value`
            }
          }
        ],
      };
    }

    // Call the Perplexity chat completions endpoint
    // with the pplx-7b-online or pplx-70b-online model
    const url = 'https://api.perplexity.ai/chat/completions';
    const options = {
      method: 'POST',
      headers: {
        accept: 'application/json',
        'content-type': 'application/json',
        authorization: `Bearer ${apiKey}`
      },
      body: JSON.stringify({
        model: `pplx-${model}-online`,
        max_tokens: maxTokens,
        temperature: 0.6,
        messages: [
          { role: 'system', content: `Be precise and concise. Your answer shouldn't use more than ${maxTokens} tokens.` },
          { role: 'user', content: prompt }
        ]
      })
    };

    // Pass the fetch response to the result variable
    const result = await fetch(url, options);

    // Check the response status code; a code in the 200-299 range is a success
    if (result?.ok) {

      // Parse the JSON response body
      const responseBody = await result.json();

      // Check if we have content (an answer)
      if (responseBody?.choices?.[0]?.message?.content) {
        let answer = responseBody.choices[0].message.content;

        // Add a '.' to the answer if missing (it can happen with the 7B model)
        if (!answer.endsWith('.')) {
          answer += '.';
        }

        // Take the 'success' path and generate a Text step with the answer
        return {
          outputVars: {
            answer: answer
          },
          next: {
            path: 'success'
          },
          trace: [
            {
              type: 'text',
              payload: {
                message: answer,
              },
            },
          ],
        };
      } else {

        // No answer, take the 'no_answer' path and log a message
        return {
          outputVars: {
            error: `Unable to get an answer`
          },
          next: {
            path: 'no_answer'
          },
          trace: [
            {
              type: 'debug',
              payload: {
                message: `No answer returned`
              }
            }
          ],
        };
      }
    } else {

      // API error, take the 'error' path and log the error
      return {
        outputVars: {
          error: `API response error. ${result.status}: ${result.statusText}`
        },
        next: {
          path: 'error'
        },
        trace: [
          {
            type: 'debug',
            payload: {
              message: `${result.status}: ${result.statusText}`,
            },
          },
        ],
      };
    }
  } catch (error) {

    // Unknown error, take the 'error' path and log the error
    return {
      outputVars: {
        error: `${error}`
      },
      next: {
        path: 'error'
      },
      trace: [
        {
          type: 'debug',
          payload: {
            message: `Unexpected error: ${error}`
          }
        }
      ],
    };
  }
}
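
Example Usage

A minimal sketch of how you might exercise the function outside Voiceflow for testing. It assumes Node.js 18+ (so fetch is available globally) and that the snippet above is saved as sendQuestion.mjs; the file name, the PPLX_API_KEY environment variable, and the sample inputs are illustrative, not part of the template.

// Local test harness (a sketch, not part of the template).
// Assumes Node.js 18+ for global fetch, and that the function above
// is saved next to this file as sendQuestion.mjs (hypothetical name).
import main from './sendQuestion.mjs';

const result = await main({
  inputVars: {
    prompt: 'What is the tallest building in the world?',
    model: '70b',                      // '7b' or '70b'; anything else falls back to '7b'
    apiKey: process.env.PPLX_API_KEY,  // hypothetical env var holding your Perplexity API key
    maxTokens: '150'
  }
});

console.log(result.next.path);   // 'success', 'no_answer', 'no_prompt', or 'error'
console.log(result.outputVars);  // { answer: '...' } on success, { error: '...' } otherwise

In Voiceflow, the same inputs are mapped on the Function step, and the returned path determines which port the flow continues from.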
