Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 23 additions & 0 deletions packages/components/credentials/TelnyxApi.credential.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import { INodeParams, INodeCredential } from '../src/Interface'

/**
 * Credential definition for the Telnyx API.
 * Registers a single masked input holding the Telnyx API key, which the
 * Telnyx chat/embeddings nodes read via the `apiKey` credential parameter.
 */
class TelnyxApi implements INodeCredential {
    label: string
    name: string
    version: number
    inputs: INodeParams[]

    constructor() {
        this.label = 'Telnyx API'
        this.name = 'telnyxApi'
        this.version = 1.0
        // One secret field; 'password' type keeps the key masked in the UI
        this.inputs = [{ label: 'API Key', name: 'apiKey', type: 'password' }]
    }
}

module.exports = { credClass: TelnyxApi }
136 changes: 136 additions & 0 deletions packages/components/nodes/chatmodels/ChatTelnyx/ChatTelnyx.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
import { ChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
import { BaseCache } from '@langchain/core/caches'
import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { secureFetch } from '../../../src/httpSecurity'

// Telnyx exposes an OpenAI-compatible surface under this base path.
const TELNYX_OPENAI_BASE = 'https://api.telnyx.com/v2/ai/openai'
const TELNYX_CHAT_MODELS_URL = 'https://api.telnyx.com/v2/ai/openai/models'

/**
 * List the chat models available to the given Telnyx API key.
 * Resolves to the raw `data` array from the Telnyx response (empty array
 * when the field is absent); throws on a non-OK HTTP status.
 */
const fetchTelnyxModels = async (apiKey: string) => {
    const res = await secureFetch(TELNYX_CHAT_MODELS_URL, {
        headers: { Authorization: `Bearer ${apiKey}`, 'Content-Type': 'application/json' }
    })

    if (!res.ok) {
        throw new Error(`Failed to fetch Telnyx models: ${res.status} ${res.statusText}`)
    }

    const payload = await res.json()
    return payload.data || []
}

class ChatTelnyx_ChatModels implements INode {
label: string
name: string
version: number
type: string
icon: string
category: string
description: string
baseClasses: string[]
credential: INodeParams
inputs: INodeParams[]

constructor() {
this.label = 'Telnyx Chat'
this.name = 'chatTelnyx'
this.version = 1.1
this.type = 'ChatTelnyx'
this.icon = 'telnyx.png'
this.category = 'Chat Models'
this.description = 'Use Telnyx OpenAI-compatible chat completions as a native Flowise chat model'
this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]
this.credential = {
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['telnyxApi'],
refresh: true
}
this.inputs = [
{ label: 'Cache', name: 'cache', type: 'BaseCache', optional: true },
{ label: 'Model Name', name: 'modelName', type: 'asyncOptions', loadMethod: 'listModels', default: 'openai/gpt-4o', refresh: true },
{ label: 'Temperature', name: 'temperature', type: 'number', step: 0.1, default: 0.9, optional: true },
{ label: 'Streaming', name: 'streaming', type: 'boolean', default: true, optional: true, additionalParams: true },
{ label: 'Max Tokens', name: 'maxTokens', type: 'number', step: 1, optional: true, additionalParams: true },
{ label: 'Top Probability', name: 'topP', type: 'number', step: 0.1, optional: true, additionalParams: true },
{ label: 'Frequency Penalty', name: 'frequencyPenalty', type: 'number', step: 0.1, optional: true, additionalParams: true },
{ label: 'Presence Penalty', name: 'presencePenalty', type: 'number', step: 0.1, optional: true, additionalParams: true },
{ label: 'Timeout', name: 'timeout', type: 'number', step: 1, optional: true, additionalParams: true }
]
}

//@ts-ignore
loadMethods = {
async listModels(nodeData: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> {
const credentialId = nodeData.credential || nodeData.inputs?.credentialId
if (!credentialId) {
return [{ label: 'Select a Telnyx API credential to load models', name: 'openai/gpt-4o' }]
}

try {
const credentialData = await getCredentialData(credentialId as string, options)
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
const models = await fetchTelnyxModels(apiKey)

return models
.map((model: any) => ({
label: model.id,
name: model.id,
description: [model.task, model.context_length ? `context ${model.context_length}` : '', model.tier || '']
.filter(Boolean)
.join(' • ')
}))
} catch (error) {
console.warn('Falling back to static Telnyx chat model list:', error)
return [{ label: 'openai/gpt-4o', name: 'openai/gpt-4o' }]
}
}
}

async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const temperature = nodeData.inputs?.temperature as string
const modelName = nodeData.inputs?.modelName as string
const maxTokens = nodeData.inputs?.maxTokens as string
const topP = nodeData.inputs?.topP as string
const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
const presencePenalty = nodeData.inputs?.presencePenalty as string
const timeout = nodeData.inputs?.timeout as string
const streaming = nodeData.inputs?.streaming as boolean
const cache = nodeData.inputs?.cache as BaseCache

const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)

const parsedTemperature = temperature ? parseFloat(temperature) : 0.9
if (Number.isNaN(parsedTemperature)) {
throw new Error('Temperature must be a valid number')
}

const obj: ChatOpenAIFields = {
temperature: parsedTemperature,
modelName,
openAIApiKey: apiKey,
apiKey,
streaming: streaming ?? true,
configuration: {
baseURL: TELNYX_OPENAI_BASE
}
}
Comment thread
gbattistel marked this conversation as resolved.

if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
if (topP) obj.topP = parseFloat(topP)
if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty)
if (presencePenalty) obj.presencePenalty = parseFloat(presencePenalty)
if (timeout) obj.timeout = parseInt(timeout, 10)
if (cache) obj.cache = cache

return new ChatOpenAI(obj)
}
}

module.exports = { nodeClass: ChatTelnyx_ChatModels }
13 changes: 13 additions & 0 deletions packages/components/nodes/chatmodels/ChatTelnyx/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Telnyx Chat Model

Telnyx Chat Model integration for Flowise

## 🌱 Env Variables

| Variable | Description | Type | Default |
| --------------- | ----------------------------------------------------- | ------ | ------- |
| TELNYX_API_KEY | Default `credential.apiKey` for the Telnyx API | String | |

## License

Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md).
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
13 changes: 13 additions & 0 deletions packages/components/nodes/embeddings/TelnyxEmbedding/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Telnyx Embedding Model

Telnyx Embedding Model integration for Flowise

## 🌱 Env Variables

| Variable | Description | Type | Default |
| --------------- | ----------------------------------------------------- | ------ | ------- |
| TELNYX_API_KEY | Default `credential.apiKey` for the Telnyx API | String | |

## License

Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md).
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
import { ClientOptions, OpenAIEmbeddings, OpenAIEmbeddingsParams } from '@langchain/openai'
import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { secureFetch } from '../../../src/httpSecurity'

// Telnyx exposes an OpenAI-compatible surface under this base path.
const TELNYX_OPENAI_BASE = 'https://api.telnyx.com/v2/ai/openai'
const TELNYX_EMBEDDINGS_MODELS_URL = 'https://api.telnyx.com/v2/ai/embeddings/models'

/**
 * List the embedding models available to the given Telnyx API key.
 * Resolves to the raw `data` array from the Telnyx response (empty array
 * when the field is absent); throws on a non-OK HTTP status.
 */
const fetchTelnyxModels = async (apiKey: string) => {
    const res = await secureFetch(TELNYX_EMBEDDINGS_MODELS_URL, {
        headers: { Authorization: `Bearer ${apiKey}`, 'Content-Type': 'application/json' }
    })

    if (!res.ok) {
        throw new Error(`Failed to fetch Telnyx models: ${res.status} ${res.statusText}`)
    }

    const payload = await res.json()
    return payload.data || []
}

/**
 * Flowise embeddings node backed by Telnyx's OpenAI-compatible embeddings API.
 * Wraps LangChain's OpenAIEmbeddings and points it at the Telnyx base URL.
 */
class TelnyxEmbedding_Embeddings implements INode {
    label: string
    name: string
    version: number
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    credential: INodeParams
    inputs: INodeParams[]

    constructor() {
        this.label = 'Telnyx Embeddings'
        this.name = 'telnyxEmbeddings'
        this.version = 1.1
        this.type = 'TelnyxEmbeddings'
        this.icon = 'telnyx.png'
        this.category = 'Embeddings'
        this.description = 'Use Telnyx OpenAI-compatible embeddings as a native Flowise embeddings node'
        this.baseClasses = [this.type, ...getBaseClasses(OpenAIEmbeddings)]
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['telnyxApi'],
            refresh: true
        }
        this.inputs = [
            { label: 'Model Name', name: 'modelName', type: 'asyncOptions', loadMethod: 'listModels', default: 'text-embedding-3-small', refresh: true },
            { label: 'Strip New Lines', name: 'stripNewLines', type: 'boolean', optional: true, additionalParams: true },
            { label: 'Batch Size', name: 'batchSize', type: 'number', optional: true, additionalParams: true },
            { label: 'Timeout', name: 'timeout', type: 'number', optional: true, additionalParams: true },
            { label: 'Dimensions', name: 'dimensions', type: 'number', optional: true, additionalParams: true },
            { label: 'Encoding Format', name: 'encodingFormat', type: 'options', options: [{ label: 'float', name: 'float' }, { label: 'base64', name: 'base64' }], optional: true, additionalParams: true }
        ]
    }

    //@ts-ignore
    loadMethods = {
        /**
         * Populate the Model Name dropdown from the Telnyx embeddings models
         * endpoint. Falls back to a single static entry when no credential is
         * selected or the remote call fails.
         */
        async listModels(nodeData: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> {
            const credentialId = nodeData.credential || nodeData.inputs?.credentialId
            if (!credentialId) {
                return [{ label: 'Select a Telnyx API credential to load models', name: 'text-embedding-3-small' }]
            }

            try {
                const credentialData = await getCredentialData(credentialId as string, options)
                const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
                const models = await fetchTelnyxModels(apiKey)

                return models.map((model: any) => ({
                    label: model.id,
                    name: model.id,
                    // Compact human-readable summary of whatever metadata Telnyx returns
                    description: [model.task, model.context_length ? `context ${model.context_length}` : '', model.tier || '']
                        .filter(Boolean)
                        .join(' • ')
                }))
            } catch (error) {
                console.warn('Falling back to static Telnyx embeddings model list:', error)
                return [{ label: 'text-embedding-3-small', name: 'text-embedding-3-small' }]
            }
        }
    }

    /**
     * Build the OpenAIEmbeddings instance pointed at the Telnyx endpoint.
     * Fixes two issues in the original: an explicit `stripNewLines: false`
     * was silently dropped by a truthiness check (making it impossible to
     * disable stripping), and numeric inputs were not NaN-checked.
     */
    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const stripNewLines = nodeData.inputs?.stripNewLines as boolean
        const batchSize = nodeData.inputs?.batchSize as string
        const timeout = nodeData.inputs?.timeout as string
        const modelName = nodeData.inputs?.modelName as string
        const dimensions = nodeData.inputs?.dimensions as string
        const encodingFormat = nodeData.inputs?.encodingFormat as 'float' | 'base64' | undefined

        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const apiKey = getCredentialParam('apiKey', credentialData, nodeData)

        // NaN guard: UI inputs are strings, so reject non-numeric values
        // instead of passing NaN through to the embeddings client.
        const parseIntInput = (value: string, label: string): number => {
            const parsed = parseInt(value, 10)
            if (Number.isNaN(parsed)) {
                throw new Error(`${label} must be a valid number`)
            }
            return parsed
        }

        const obj: Partial<OpenAIEmbeddingsParams> & { openAIApiKey?: string; configuration?: ClientOptions } = {
            openAIApiKey: apiKey,
            modelName,
            configuration: {
                baseURL: TELNYX_OPENAI_BASE
            }
        }

        // Explicit check (not truthiness) so the user can set `false` to
        // override the library's default of stripping newlines.
        if (stripNewLines !== undefined) obj.stripNewLines = stripNewLines
        if (batchSize) obj.batchSize = parseIntInput(batchSize, 'Batch Size')
        if (timeout) obj.timeout = parseIntInput(timeout, 'Timeout')
        if (dimensions) obj.dimensions = parseIntInput(dimensions, 'Dimensions')
        if (encodingFormat) obj.encodingFormat = encodingFormat

        return new OpenAIEmbeddings(obj)
    }
}

module.exports = { nodeClass: TelnyxEmbedding_Embeddings }
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading