Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

✨ feat: Support Cloudflare Workers AI #2966

Closed
wants to merge 48 commits into from
Closed
Show file tree
Hide file tree
Changes from 27 commits
Commits
Show all changes
48 commits
Select commit Hold shift + click to select a range
f2996eb
Delete .nvmrc
sxjeru May 31, 2024
6de6000
Merge branch 'lobehub:main' into cf
sxjeru Jun 21, 2024
4b1d4c6
feat: Add Cloudflare as a model provider
sxjeru Jun 21, 2024
b774549
fix
sxjeru Jun 21, 2024
1336aae
fix
sxjeru Jun 21, 2024
ff5361e
fix
sxjeru Jun 21, 2024
6d658bd
fix
sxjeru Jun 21, 2024
5a0a4da
fix
sxjeru Jun 21, 2024
3608659
fix
sxjeru Jun 21, 2024
161af46
fix
sxjeru Jun 21, 2024
aa609af
fix icon
sxjeru Jun 21, 2024
0972e11
fix
sxjeru Jun 21, 2024
8ad1100
Create .nvmrc
sxjeru Jun 21, 2024
ed2f3c0
Delete src/config/modelProviders/.nvmrc
sxjeru Jun 21, 2024
e47aee5
CF -> CLOUDFLARE
sxjeru Jun 21, 2024
1909a89
Merge branch 'cf' of https://github.com/sxjeru/lobe-chat into cf
sxjeru Jun 21, 2024
5a1180c
revert
sxjeru Jun 21, 2024
7648bde
chore: Update agentRuntime.ts and auth.ts to support Cloudflare accou…
sxjeru Jun 21, 2024
9d036ee
Add provider setting
sxjeru Jun 21, 2024
7fe9401
fix
sxjeru Jun 21, 2024
fa23ba4
Update cloudflare.ts
sxjeru Jun 21, 2024
4414320
fix
sxjeru Jun 24, 2024
8d1f973
Update cloudflare.ts
sxjeru Jun 24, 2024
3b57709
Merge branch 'main' into cf
sxjeru Jun 24, 2024
7efaab9
accountID
sxjeru Jul 1, 2024
87f0721
fix
sxjeru Jul 1, 2024
7844a5b
Merge branch 'main' into cf
sxjeru Jul 1, 2024
26de0f1
i18n
sxjeru Jul 1, 2024
65463e0
Merge branch 'main' into cf
sxjeru Jul 10, 2024
7fe207a
Merge branch 'main' into cf
sxjeru Jul 25, 2024
e0f541a
Update index.ts
sxjeru Jul 27, 2024
bc26fd8
Update baichuan.ts
sxjeru Jul 27, 2024
0f5462f
Merge branch 'main' into cf
sxjeru Jul 27, 2024
bb02954
Update cloudflare.ts
sxjeru Jul 27, 2024
85021aa
save changes
BrandonStudio Jul 31, 2024
cb7dd1c
commit check
BrandonStudio Jul 31, 2024
ac8d4f2
disable function calling for now
BrandonStudio Jul 31, 2024
eefacf5
does not catch errors when fetching models
BrandonStudio Jul 31, 2024
5fc4c81
ready to add base url
BrandonStudio Jul 31, 2024
52ff9d1
commit check
BrandonStudio Jul 31, 2024
b8492e2
revert change
BrandonStudio Aug 1, 2024
b452d30
revert string boolean check
BrandonStudio Aug 1, 2024
b46c642
fix type error on Vercel.
BrandonStudio Aug 1, 2024
2dca07d
i18n by groq/llama-3.1-8b-instant
BrandonStudio Aug 1, 2024
0f40d15
rename env var
BrandonStudio Aug 1, 2024
8469931
Merge branch 'cf' into pr/BrandonStudio/38
sxjeru Aug 1, 2024
b3351d8
Merge branch 'main' into cf
sxjeru Aug 1, 2024
65c0bd2
Merge branch 'main' into cf
sxjeru Aug 4, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,12 @@ OPENAI_API_KEY=sk-xxxxxxxxx

# QWEN_API_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx

### Cloudflare Workers AI ###

# CLOUDFLARE_ACCOUNT_ID=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
# CLOUDFLARE_API_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxx


########################################
############ Market Service ############
########################################
Expand Down
2 changes: 1 addition & 1 deletion src/app/(main)/chat/(workspace)/_layout/Desktop/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@ import { Flexbox } from 'react-layout-kit';

import { LayoutProps } from '../type';
import ChatHeader from './ChatHeader';
import Inspector from './Portal';
import HotKeys from './HotKeys';
import Inspector from './Portal';
import TopicPanel from './TopicPanel';

const Layout = ({ children, topic, conversation, portal }: LayoutProps) => {
Expand Down
46 changes: 46 additions & 0 deletions src/app/(main)/settings/llm/ProviderList/Cloudflare/index.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
'use client';

import { Input } from 'antd';
import { useTranslation } from 'react-i18next';

import { CloudflareProviderCard } from '@/config/modelProviders';
import { GlobalLLMProviderKey } from '@/types/user/settings';

import { KeyVaultsConfigKey } from '../../const';
import { ProviderItem } from '../../type';
import { CloudflareBrand } from '../providers';

const providerKey: GlobalLLMProviderKey = 'cloudflare';

/**
 * Builds the settings-page provider item for Cloudflare Workers AI.
 *
 * Cloudflare needs two secrets instead of the usual single key: an API key
 * plus the account ID the Workers AI endpoint belongs to, so two password
 * inputs are rendered and stored under the provider's key-vault entry.
 */
export const useCloudflareProvider = (): ProviderItem => {
  const { t } = useTranslation('modelProvider');

  // Both secrets share the same input shape; only the field name differs.
  const secretItem = (field: 'apiKey' | 'accountID') => ({
    children: (
      <Input.Password
        autoComplete={'new-password'}
        placeholder={t(`${providerKey}.${field}.placeholder`)}
      />
    ),
    desc: t(`${providerKey}.${field}.desc`),
    label: t(`${providerKey}.${field}.title`),
    name: [KeyVaultsConfigKey, providerKey, field],
  });

  return {
    ...CloudflareProviderCard,
    apiKeyItems: [secretItem('apiKey'), secretItem('accountID')],
    title: <CloudflareBrand />,
  };
};
20 changes: 18 additions & 2 deletions src/app/(main)/settings/llm/ProviderList/providers.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import {
Anthropic,
Baichuan,
Claude,
Cloudflare,
DeepSeek,
Gemini,
Google,
Expand All @@ -14,6 +15,7 @@ import {
Stepfun,
Together,
Tongyi,
WorkersAI,
ZeroOne,
Zhipu,
} from '@lobehub/icons';
Expand Down Expand Up @@ -44,6 +46,7 @@ import {
import { ProviderItem } from '../type';
import { useAzureProvider } from './Azure';
import { useBedrockProvider } from './Bedrock';
import { useCloudflareProvider } from './Cloudflare';
import { useOllamaProvider } from './Ollama';
import { useOpenAIProvider } from './OpenAI';

Expand Down Expand Up @@ -78,11 +81,20 @@ const GoogleBrand = () => (
</Flexbox>
);

/**
 * Brand title for the Cloudflare provider: the Cloudflare logo and the
 * Workers AI logo side by side, separated by a thin vertical divider.
 */
export const CloudflareBrand = () => {
  const logoSize = 22;

  return (
    <Flexbox align={'center'} gap={8} horizontal>
      <Cloudflare.Combine size={logoSize} type={'color'} />
      <Divider style={{ margin: '0 4px' }} type={'vertical'} />
      <WorkersAI.Combine size={logoSize} type={'color'} />
    </Flexbox>
  );
};

export const useProviderList = (): ProviderItem[] => {
const azureProvider = useAzureProvider();
const ollamaProvider = useOllamaProvider();
const openAIProvider = useOpenAIProvider();
const bedrockProvider = useBedrockProvider();
const cloudflareProvider = useCloudflareProvider();

return useMemo(
() => [
Expand Down Expand Up @@ -175,9 +187,13 @@ export const useProviderList = (): ProviderItem[] => {
{
...BaichuanProviderCard,
docUrl: urlJoin(BASE_DOC_URL, 'baichuan'),
title: <Baichuan.Combine size={ 20 } type={ 'color' } />,
title: <Baichuan.Combine size={20} type={'color'} />,
},
{
...cloudflareProvider,
docUrl: urlJoin(BASE_DOC_URL, 'cloudflare'),
},
],
[azureProvider, ollamaProvider, ollamaProvider, bedrockProvider],
[azureProvider, ollamaProvider, ollamaProvider, bedrockProvider, cloudflareProvider],
);
};
11 changes: 11 additions & 0 deletions src/app/api/chat/agentRuntime.ts
Original file line number Diff line number Diff line change
Expand Up @@ -179,6 +179,17 @@ const getLlmOptionsFromPayload = (provider: string, payload: JWTPayload) => {

return { apiKey };
}
case ModelProvider.Cloudflare: {
const { CLOUDFLARE_API_KEY, CLOUDFLARE_ACCOUNT_ID } = getLLMConfig();

const apiKey = apiKeyManager.pick(payload?.apiKey || CLOUDFLARE_API_KEY);
const accountID =
payload.apiKey && payload.cloudflareAccountID
? payload.cloudflareAccountID
: CLOUDFLARE_ACCOUNT_ID;

return { accountID, apiKey };
}
}
};

Expand Down
5 changes: 5 additions & 0 deletions src/components/ModelProviderIcon/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ import {
Azure,
Baichuan,
Bedrock,
Cloudflare,
DeepSeek,
Google,
Groq,
Expand Down Expand Up @@ -119,6 +120,10 @@ const ModelProviderIcon = memo<ModelProviderIconProps>(({ provider }) => {
return <Baichuan size={20} />;
}

case ModelProvider.Cloudflare: {
return <Cloudflare size={20} />;
}

default: {
return null;
}
Expand Down
8 changes: 8 additions & 0 deletions src/config/llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,10 @@ export const getLLMConfig = () => {

ENABLED_BAICHUAN: z.boolean(),
BAICHUAN_API_KEY: z.string().optional(),

ENABLED_CLOUDFLARE: z.boolean(),
CLOUDFLARE_API_KEY: z.string().optional(),
CLOUDFLARE_ACCOUNT_ID: z.string().optional(),
},
runtimeEnv: {
API_KEY_SELECT_MODE: process.env.API_KEY_SELECT_MODE,
Expand Down Expand Up @@ -161,6 +165,10 @@ export const getLLMConfig = () => {

ENABLED_BAICHUAN: !!process.env.BAICHUAN_API_KEY,
BAICHUAN_API_KEY: process.env.BAICHUAN_API_KEY,

ENABLED_CLOUDFLARE: !!process.env.CLOUDFLARE_API_KEY && !!process.env.CLOUDFLARE_ACCOUNT_ID,
CLOUDFLARE_API_KEY: process.env.CLOUDFLARE_API_KEY,
CLOUDFLARE_ACCOUNT_ID: process.env.CLOUDFLARE_ACCOUNT_ID,
},
});
};
Expand Down
15 changes: 10 additions & 5 deletions src/config/modelProviders/baichuan.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@ import { ModelProviderCard } from '@/types/llm';
const Baichuan: ModelProviderCard = {
chatModels: [
{
description: '模型能力国内第一,在知识百科、长文本、生成创作等中文任务上超越国外主流模型。还具备行业领先的多模态能力,多项权威评测基准表现优异。',
description:
'模型能力国内第一,在知识百科、长文本、生成创作等中文任务上超越国外主流模型。还具备行业领先的多模态能力,多项权威评测基准表现优异。',
displayName: 'Baichuan 4',
enabled: true,
functionCall: true,
Expand All @@ -13,7 +14,8 @@ const Baichuan: ModelProviderCard = {
tokens: 32_768,
},
{
description: '针对企业高频场景优化,效果大幅提升,高性价比。相对于Baichuan2模型,内容创作提升20%,知识问答提升17%, 角色扮演能力提升40%。整体效果比GPT3.5更优。',
description:
'针对企业高频场景优化,效果大幅提升,高性价比。相对于Baichuan2模型,内容创作提升20%,知识问答提升17%, 角色扮演能力提升40%。整体效果比GPT3.5更优。',
displayName: 'Baichuan 3 Turbo',
enabled: true,
functionCall: true,
Expand All @@ -22,23 +24,26 @@ const Baichuan: ModelProviderCard = {
tokens: 32_768,
},
{
description: '具备 128K 超长上下文窗口,针对企业高频场景优化,效果大幅提升,高性价比。相对于Baichuan2模型,内容创作提升20%,知识问答提升17%, 角色扮演能力提升40%。整体效果比GPT3.5更优。',
description:
'具备 128K 超长上下文窗口,针对企业高频场景优化,效果大幅提升,高性价比。相对于Baichuan2模型,内容创作提升20%,知识问答提升17%, 角色扮演能力提升40%。整体效果比GPT3.5更优。',
displayName: 'Baichuan 3 Turbo 128k',
enabled: true,
id: 'Baichuan3-Turbo-128k',
maxOutput: 4096,
tokens: 128_000,
},
{
description: '采用搜索增强技术实现大模型与领域知识、全网知识的全面链接。支持PDF、Word等多种文档上传及网址输入,信息获取及时、全面,输出结果准确、专业。',
description:
'采用搜索增强技术实现大模型与领域知识、全网知识的全面链接。支持PDF、Word等多种文档上传及网址输入,信息获取及时、全面,输出结果准确、专业。',
displayName: 'Baichuan 2 Turbo',
enabled: true,
id: 'Baichuan2-Turbo',
maxOutput: 8192,
tokens: 32_768,
},
{
description: '具备 192K 超长上下文窗口,采用搜索增强技术实现大模型与领域知识、全网知识的全面链接。支持PDF、Word等多种文档上传及网址输入,信息获取及时、全面,输出结果准确、专业。',
description:
'具备 192K 超长上下文窗口,采用搜索增强技术实现大模型与领域知识、全网知识的全面链接。支持PDF、Word等多种文档上传及网址输入,信息获取及时、全面,输出结果准确、专业。',
displayName: 'Baichuan 2 Turbo 192k',
enabled: true,
id: 'Baichuan2-Turbo-192k',
Expand Down
83 changes: 83 additions & 0 deletions src/config/modelProviders/cloudflare.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
import { ModelProviderCard } from '@/types/llm';

// Cloudflare Workers AI model catalog.
// Model list: https://developers.cloudflare.com/workers-ai/models/#text-generation
// OpenAI-compatible API: https://developers.cloudflare.com/workers-ai/configuration/open-ai-compatibility
const Cloudflare: ModelProviderCard = {
  // NOTE(review): `tokens` is presumably each model's context window size and
  // `enabled: true` presumably surfaces the model by default — confirm both
  // against the Cloudflare docs above and `filterEnabledModels`.
  chatModels: [
    {
      displayName: 'deepseek-coder-6.7b-instruct-awq',
      enabled: true,
      id: '@hf/thebloke/deepseek-coder-6.7b-instruct-awq',
      tokens: 16_384,
    },
    {
      displayName: 'deepseek-math-7b-instruct',
      enabled: true,
      id: '@hf/thebloke/deepseek-math-7b-instruct',
      tokens: 4096,
    },
    {
      displayName: 'gemma-7b-it',
      enabled: true,
      id: '@hf/google/gemma-7b-it',
      tokens: 2048,
    },
    {
      displayName: 'hermes-2-pro-mistral-7b',
      enabled: true,
      // functionCall: true, // function calling deliberately disabled for now
      id: '@hf/nousresearch/hermes-2-pro-mistral-7b',
      tokens: 4096,
    },
    {
      displayName: 'llama-3-8b-instruct-awq',
      id: '@cf/meta/llama-3-8b-instruct-awq',
      tokens: 8192,
    },
    {
      displayName: 'mistral-7b-instruct-v0.2',
      id: '@hf/mistral/mistral-7b-instruct-v0.2',
      tokens: 4096,
    },
    {
      displayName: 'neural-chat-7b-v3-1-awq',
      enabled: true,
      id: '@hf/thebloke/neural-chat-7b-v3-1-awq',
      tokens: 32_768,
    },
    {
      displayName: 'openchat-3.5-0106',
      id: '@cf/openchat/openchat-3.5-0106',
      tokens: 8192,
    },
    {
      displayName: 'openhermes-2.5-mistral-7b-awq',
      enabled: true,
      id: '@hf/thebloke/openhermes-2.5-mistral-7b-awq',
      tokens: 32_768,
    },
    {
      displayName: 'qwen1.5-14b-chat-awq',
      enabled: true,
      id: '@cf/qwen/qwen1.5-14b-chat-awq',
      tokens: 32_768,
    },
    {
      displayName: 'starling-lm-7b-beta',
      enabled: true,
      id: '@hf/nexusflow/starling-lm-7b-beta',
      tokens: 4096,
    },
    {
      displayName: 'zephyr-7b-beta-awq',
      enabled: true,
      id: '@hf/thebloke/zephyr-7b-beta-awq',
      tokens: 32_768,
    },
  ],
  // Model used for the provider connectivity check in settings.
  checkModel: '@hf/thebloke/deepseek-coder-6.7b-instruct-awq',
  id: 'cloudflare',
  name: 'Cloudflare Workers AI',
};

export default Cloudflare;
11 changes: 7 additions & 4 deletions src/config/modelProviders/google.ts
Original file line number Diff line number Diff line change
Expand Up @@ -43,22 +43,25 @@ const Google: ModelProviderCard = {
vision: true,
},
{
description: 'The best model for scaling across a wide range of tasks. This is the latest model.',
description:
'The best model for scaling across a wide range of tasks. This is the latest model.',
displayName: 'Gemini 1.0 Pro',
id: 'gemini-1.0-pro-latest',
maxOutput: 2048,
tokens: 30_720 + 2048,
},
{
description: 'The best model for scaling across a wide range of tasks. This is a stable model that supports tuning.',
description:
'The best model for scaling across a wide range of tasks. This is a stable model that supports tuning.',
displayName: 'Gemini 1.0 Pro 001 (Tuning)',
functionCall: true,
id: 'gemini-1.0-pro-001',
maxOutput: 2048,
tokens: 30_720 + 2048,
},
{
description: 'The best model for scaling across a wide range of tasks. Released April 9, 2024.',
description:
'The best model for scaling across a wide range of tasks. Released April 9, 2024.',
displayName: 'Gemini 1.0 Pro 002 (Tuning)',
id: 'gemini-1.0-pro-002',
maxOutput: 2048,
Expand All @@ -78,7 +81,7 @@ const Google: ModelProviderCard = {
legacy: true,
maxOutput: 1024,
// tokens: 4096 + 1024, // none tokens test
}
},
],
checkModel: 'gemini-1.5-flash',
id: 'google',
Expand Down
4 changes: 4 additions & 0 deletions src/config/modelProviders/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import AnthropicProvider from './anthropic';
import AzureProvider from './azure';
import BaichuanProvider from './baichuan';
import BedrockProvider from './bedrock';
import CloudflareProvider from './cloudflare';
import DeepSeekProvider from './deepseek';
import GoogleProvider from './google';
import GroqProvider from './groq';
Expand Down Expand Up @@ -39,6 +40,7 @@ export const LOBE_DEFAULT_MODEL_LIST: ChatModelCard[] = [
ZeroOneProvider.chatModels,
StepfunProvider.chatModels,
BaichuanProvider.chatModels,
CloudflareProvider.chatModels,
].flat();

export const DEFAULT_MODEL_PROVIDER_LIST = [
Expand All @@ -61,6 +63,7 @@ export const DEFAULT_MODEL_PROVIDER_LIST = [
ZhiPuProvider,
StepfunProvider,
BaichuanProvider,
CloudflareProvider,
];

export const filterEnabledModels = (provider: ModelProviderCard) => {
Expand All @@ -71,6 +74,7 @@ export { default as AnthropicProviderCard } from './anthropic';
export { default as AzureProviderCard } from './azure';
export { default as BaichuanProviderCard } from './baichuan';
export { default as BedrockProviderCard } from './bedrock';
export { default as CloudflareProviderCard } from './cloudflare';
export { default as DeepSeekProviderCard } from './deepseek';
export { default as GoogleProviderCard } from './google';
export { default as GroqProviderCard } from './groq';
Expand Down
Loading