add featherless provider (access to 7900+ open source models) #1138
Conversation
This PR adds support for the Featherless AI provider, giving access to 7900+ open source models. The implementation follows the existing provider pattern and is well-structured. I have a few suggestions to improve the implementation.
src/providers/featherless-ai/api.ts
Outdated
        return '';
    }
  },
};
🛠️ Code Refactor
Issue: The API implementation is missing a newline at the end of the file.
Fix: Add a newline at the end of the file to follow best practices.
Impact: Improves code consistency and prevents potential issues with some tools that expect files to end with a newline.
    chatComplete: true,
    complete: true,
  }),
};
🛠️ Code Refactor
Issue: The file is missing a newline at the end of the file.
Fix: Add a newline at the end of the file to follow best practices.
Impact: Improves code consistency and prevents potential issues with some tools that expect files to end with a newline.
src/providers/featherless-ai/api.ts
Outdated
import { ProviderAPIConfig } from '../types';

export const featherlessAIAPIConfig: ProviderAPIConfig = {
  getBaseURL: () => 'https://api.featherless.ai/v1',
  headers({ providerOptions }) {
    const { apiKey } = providerOptions;
    return { Authorization: `Bearer ${apiKey}` };
  },
  getEndpoint({ fn }) {
    switch (fn) {
      case 'chatComplete':
        return `/chat/completions`;
      case 'complete':
        return '/completions';
      default:
        return '';
    }
  },
};
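For context, a minimal sketch (not part of this diff) of the raw request this config produces for a chatComplete call; only the URL and Authorization header come from the code above, while the request body and environment variable name are illustrative assumptions:

// Sketch only: what ends up being sent for chatComplete with this config.
// getBaseURL() + getEndpoint({ fn: 'chatComplete' }) => https://api.featherless.ai/v1/chat/completions
const response = await fetch('https://api.featherless.ai/v1/chat/completions', {
  method: 'POST',
  headers: {
    Authorization: `Bearer ${process.env.FEATHERLESS_API_KEY}`, // from headers({ providerOptions })
    'Content-Type': 'application/json',
  },
  body: JSON.stringify({
    // illustrative OpenAI-style payload; model matches the default set in index.ts
    model: 'mistralai/Magistral-Small-2506',
    messages: [{ role: 'user', content: 'Hello from the gateway!' }],
  }),
});
console.log(await response.json());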
💡 Optional Recommendation
Issue: Error handling for API failures is not explicitly implemented.
Fix: Consider adding error handling for API failures.
Impact: Improves robustness of the application by properly handling API errors.
Suggested change:

import { ProviderAPIConfig } from '../types';

export const featherlessAIAPIConfig: ProviderAPIConfig = {
  getBaseURL: () => 'https://api.featherless.ai/v1',
  headers({ providerOptions }) {
    const { apiKey } = providerOptions;
    if (!apiKey) {
      throw new Error('Featherless AI API key is required');
    }
    return { Authorization: `Bearer ${apiKey}` };
  },
  getEndpoint({ fn }) {
    switch (fn) {
      case 'chatComplete':
        return `/chat/completions`;
      case 'complete':
        return '/completions';
      default:
        return '';
    }
  },
};
import { FEATHERLESS_AI } from '../../globals';
import {
  chatCompleteParams,
  completeParams,
  responseTransformers,
} from '../open-ai-base';
import { ProviderConfigs } from '../types';
import { featherlessAIAPIConfig } from './api';

export const FeatherlessAIConfig: ProviderConfigs = {
  chatComplete: chatCompleteParams([], {
    model: 'mistralai/Magistral-Small-2506',
  }),
  complete: completeParams([], { model: 'mistralai/Magistral-Small-2506' }),
  api: featherlessAIAPIConfig,
  responseTransforms: responseTransformers(FEATHERLESS_AI, {
    chatComplete: true,
    complete: true,
  }),
};
💡 Optional Recommendation
Issue: The default model is hardcoded without any documentation about available models.
Fix: Add a comment explaining the default model choice and possibly other available models.
Impact: Improves code documentation and helps users understand model options.
Suggested change:

import { FEATHERLESS_AI } from '../../globals';
import {
  chatCompleteParams,
  completeParams,
  responseTransformers,
} from '../open-ai-base';
import { ProviderConfigs } from '../types';
import { featherlessAIAPIConfig } from './api';

// Featherless AI provides access to 7900+ open source models
// Default model is set to Magistral-Small-2506, but many others are available
// See https://featherless.ai for the full list of supported models
export const FeatherlessAIConfig: ProviderConfigs = {
  chatComplete: chatCompleteParams([], {
    model: 'mistralai/Magistral-Small-2506',
  }),
  complete: completeParams([], { model: 'mistralai/Magistral-Small-2506' }),
  api: featherlessAIAPIConfig,
  responseTransforms: responseTransformers(FEATHERLESS_AI, {
    chatComplete: true,
    complete: true,
  }),
};
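Since the implementation follows the existing provider pattern, a rough sketch of how the config would typically be wired into the gateway is shown below; the registry file, the shape of the provider map, and the value of the FEATHERLESS_AI constant are assumptions and not part of this diff:

// src/globals.ts (assumed): the provider identifier implied by the import above
export const FEATHERLESS_AI = 'featherless-ai';

// src/providers/index.ts (assumed): expose the new config alongside existing providers
import { FEATHERLESS_AI } from '../globals';
import { FeatherlessAIConfig } from './featherless-ai';

const Providers = {
  // ...existing provider configs
  [FEATHERLESS_AI]: FeatherlessAIConfig,
};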
Important: PR Review Skipped. PR review skipped as per the configuration setting. Run a manual review by commenting /matter review. 💡 Tips to use Matter AI: Command List
LGTM 🚀🚀
Hi @DarinVerheijke, can you please fix formatting with
@VisargD done, thank you!
Description
Adds Featherless.ai as an LLM provider, giving access to over 7,900 models (and counting) on Hugging Face.
Motivation
Featherless.ai was recently announced as Hugging Face's largest LLM inference provider, and this gives Portkey-AI users access to most LLMs on Hugging Face.
Type of Change
How Has This Been Tested?
Screenshots (if applicable)
Checklist
Related Issues