Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 25 additions & 0 deletions packages/components/credentials/FuturMixApi.credential.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import { INodeParams, INodeCredential } from '../src/Interface'

/**
 * Flowise credential definition for the FuturMix AI Gateway.
 * Registers a single masked API-key field under the credential name
 * 'futurmixApi', which chat-model nodes reference via `credentialNames`.
 */
class FuturMixAPIAuth implements INodeCredential {
    label: string
    name: string
    version: number
    description: string
    inputs: INodeParams[]

    constructor() {
        this.label = 'FuturMix API Key'
        this.name = 'futurmixApi'
        this.version = 1.0
        // Fix: `description` was declared but never assigned, leaving it
        // undefined at runtime (and a definite-assignment error under strict).
        this.description = 'API Key from https://futurmix.ai'
        this.inputs = [
            {
                label: 'FuturMix API Key',
                name: 'futurmixApiKey',
                // 'password' masks the value in the Flowise credential UI
                type: 'password',
                description: 'API Key from https://futurmix.ai'
            }
        ]
    }
}

module.exports = { credClass: FuturMixAPIAuth }
194 changes: 194 additions & 0 deletions packages/components/nodes/chatmodels/ChatFuturMix/ChatFuturMix.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,194 @@
import { ChatOpenAI as LangchainChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
import { BaseCache } from '@langchain/core/caches'
import { ICommonObject, IMultiModalOption, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { ChatFuturMix } from './FlowiseChatFuturMix'

class ChatFuturMix_ChatModels implements INode {
label: string
name: string
version: number
type: string
icon: string
category: string
description: string
baseClasses: string[]
credential: INodeParams
inputs: INodeParams[]

constructor() {
this.label = 'FuturMix'
this.name = 'chatFuturMix'
this.version = 1.0
this.type = 'ChatFuturMix'
this.icon = 'futurmix.svg'
this.category = 'Chat Models'
this.description = 'Wrapper around FuturMix AI Gateway - Access 22+ models with a single API key'
this.baseClasses = [this.type, ...getBaseClasses(LangchainChatOpenAI)]
this.credential = {
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['futurmixApi'],
optional: true
}
this.inputs = [
{
label: 'Cache',
name: 'cache',
type: 'BaseCache',
optional: true
},
{
label: 'Model Name',
name: 'modelName',
type: 'string',
placeholder: 'claude-sonnet-4-20250514'
},
{
label: 'Temperature',
name: 'temperature',
type: 'number',
step: 0.1,
default: 0.9,
optional: true
},
{
label: 'Streaming',
name: 'streaming',
type: 'boolean',
default: true,
optional: true,
additionalParams: true
},
{
label: 'Allow Image Uploads',
name: 'allowImageUploads',
type: 'boolean',
description:
'Allow image input. Refer to the <a href="https://docs.flowiseai.com/using-flowise/uploads#image" target="_blank">docs</a> for more details.',
default: false,
optional: true
},
{
label: 'Max Tokens',
name: 'maxTokens',
type: 'number',
step: 1,
optional: true,
additionalParams: true
},
{
label: 'Top Probability',
name: 'topP',
type: 'number',
step: 0.1,
optional: true,
additionalParams: true
},
{
label: 'Frequency Penalty',
name: 'frequencyPenalty',
type: 'number',
step: 0.1,
optional: true,
additionalParams: true
},
{
label: 'Presence Penalty',
name: 'presencePenalty',
type: 'number',
step: 0.1,
optional: true,
additionalParams: true
},
{
label: 'Timeout',
name: 'timeout',
type: 'number',
step: 1,
optional: true,
additionalParams: true
},
{
label: 'Base Path',
name: 'basepath',
type: 'string',
optional: true,
default: 'https://futurmix.ai/v1',
description: 'Override the default base URL for the API.',
additionalParams: true
},
{
label: 'Base Options',
name: 'baseOptions',
type: 'json',
optional: true,
description: 'Default headers to include with every request to the API.',
additionalParams: true
}
]
}

async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const temperature = nodeData.inputs?.temperature as string
const modelName = nodeData.inputs?.modelName as string
const maxTokens = nodeData.inputs?.maxTokens as string
const topP = nodeData.inputs?.topP as string
const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
const presencePenalty = nodeData.inputs?.presencePenalty as string
const timeout = nodeData.inputs?.timeout as string
const streaming = nodeData.inputs?.streaming as boolean
const basePath = (nodeData.inputs?.basepath as string) || 'https://futurmix.ai/v1'
const baseOptions = nodeData.inputs?.baseOptions
const cache = nodeData.inputs?.cache as BaseCache
const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean

const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const futurmixApiKey = getCredentialParam('futurmixApiKey', credentialData, nodeData)

const obj: ChatOpenAIFields = {
modelName,
openAIApiKey: futurmixApiKey,
apiKey: futurmixApiKey,
streaming: streaming ?? true
}
Comment on lines +150 to +155
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

The temperature parameter is currently assigned directly using parseFloat(temperature). If temperature is undefined or an empty string, this will result in NaN, which can cause issues in the underlying model request. It should be handled conditionally, consistent with how maxTokens, topP, and other optional parameters are processed later in the function.

Suggested change
const obj: ChatOpenAIFields = {
temperature: parseFloat(temperature),
modelName,
openAIApiKey: futurmixApiKey,
apiKey: futurmixApiKey,
streaming: streaming ?? true
}
const obj: ChatOpenAIFields = {
modelName,
openAIApiKey: futurmixApiKey,
apiKey: futurmixApiKey,
streaming: streaming ?? true
}
if (temperature) obj.temperature = parseFloat(temperature)


if (temperature) obj.temperature = parseFloat(temperature)
if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
if (topP) obj.topP = parseFloat(topP)
if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty)
if (presencePenalty) obj.presencePenalty = parseFloat(presencePenalty)
if (timeout) obj.timeout = parseInt(timeout, 10)
if (cache) obj.cache = cache

let parsedBaseOptions: any | undefined = undefined

if (baseOptions) {
try {
parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
} catch (exception) {
throw new Error("Invalid JSON in the ChatFuturMix's BaseOptions: " + exception)
}
}

if (basePath || parsedBaseOptions) {
obj.configuration = {
baseURL: basePath,
defaultHeaders: parsedBaseOptions
}
}

const multiModalOption: IMultiModalOption = {
image: {
allowImageUploads: allowImageUploads ?? false
}
}

const model = new ChatFuturMix(nodeData.id, obj)
model.setMultiModalOption(multiModalOption)
return model
}
}

module.exports = { nodeClass: ChatFuturMix_ChatModels }
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
import { ChatOpenAI as LangchainChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
import { IMultiModalOption, IVisionChatModal } from '../../../src'

export class ChatFuturMix extends LangchainChatOpenAI implements IVisionChatModal {
configuredModel: string
configuredMaxToken?: number
multiModalOption: IMultiModalOption
id: string

constructor(id: string, fields?: ChatOpenAIFields) {
super(fields)
this.id = id
this.configuredModel = fields?.modelName ?? ''
this.configuredMaxToken = fields?.maxTokens
}

setMultiModalOption(multiModalOption: IMultiModalOption): void {
this.multiModalOption = multiModalOption
}
}
10 changes: 10 additions & 0 deletions packages/components/nodes/chatmodels/ChatFuturMix/futurmix.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.