From 1dfc26f8e7c8e0aaa5fee02f81943ae7ef58cd3c Mon Sep 17 00:00:00 2001 From: Ansgar Dietrichs Date: Thu, 15 Jun 2023 09:38:56 +0200 Subject: [PATCH] feat: show available models list in settings (#83) * create utils/models * add availableModels prop * fetch models on startup and key change * remove default model list * consider model fetch for loading spinner * clearer description * remove accidental debug delay * Update src/components/App.tsx * Update src/components/App.tsx * Update src/components/App.tsx * distinguish isAnythingSaving and isAnythingLoading --------- Co-authored-by: t11s --- src/components/App.tsx | 52 ++++++++++++++++++++++++- src/components/modals/SettingsModal.tsx | 6 ++- src/utils/constants.ts | 2 - src/utils/models.ts | 28 +++++++++++++ 4 files changed, 82 insertions(+), 6 deletions(-) create mode 100644 src/utils/models.ts diff --git a/src/components/App.tsx b/src/components/App.tsx index 6e7e40d..5512ceb 100644 --- a/src/components/App.tsx +++ b/src/components/App.tsx @@ -44,6 +44,7 @@ import { } from "../utils/fluxNode"; import { useLocalStorage } from "../utils/lstore"; import { mod } from "../utils/mod"; +import { getAvailableChatModels } from "../utils/models"; import { generateNodeId, generateStreamId } from "../utils/nodeId"; import { messagesFromLineage, promptFromLineage } from "../utils/prompt"; import { getQueryParam, resetURL } from "../utils/qparams"; @@ -864,11 +865,57 @@ function App() { const [apiKey, setApiKey] = useLocalStorage(API_KEY_LOCAL_STORAGE_KEY); - const isAnythingLoading = isSavingReactFlow || isSavingSettings; + const [availableModels, setAvailableModels] = useState<string[] | null>(null); + + // modelsLoadCounter lets us discard the results of the requests if a concurrent newer one was made. 
+ const modelsLoadCounter = useRef(0); + useEffect(() => { + if (isValidAPIKey(apiKey)) { + const modelsLoadIndex = modelsLoadCounter.current + 1; + modelsLoadCounter.current = modelsLoadIndex; + + setAvailableModels(null); + + (async () => { + let modelList: string[] = []; + try { + modelList = await getAvailableChatModels(apiKey!); + } catch (e) { + toast({ + title: "Failed to load model list!", + status: "error", + ...TOAST_CONFIG, + }); + } + if (modelsLoadIndex !== modelsLoadCounter.current) return; + + if (modelList.length === 0) modelList.push(settings.model); + + setAvailableModels(modelList); + + if (!modelList.includes(settings.model)) { + const oldModel = settings.model; + const newModel = modelList.includes(DEFAULT_SETTINGS.model) ? DEFAULT_SETTINGS.model : modelList[0]; + + setSettings((settings) => ({ ...settings, model: newModel })); + + toast({ + title: `Model "${oldModel}" no longer available!`, + description: `Switched to "${newModel}"`, + status: "warning", + ...TOAST_CONFIG, + }); + } + })(); + } + }, [apiKey]); + + const isAnythingSaving = isSavingReactFlow || isSavingSettings; + const isAnythingLoading = isAnythingSaving || (availableModels === null); useBeforeunload((event: BeforeUnloadEvent) => { // Prevent leaving the page before saving. 
- if (isAnythingLoading) event.preventDefault(); + if (isAnythingSaving) event.preventDefault(); }); /*////////////////////////////////////////////////////////////// @@ -1000,6 +1047,7 @@ function App() { onClose={onCloseSettingsModal} apiKey={apiKey} setApiKey={setApiKey} + availableModels={availableModels} /> void; @@ -33,6 +34,7 @@ export const SettingsModal = memo(function SettingsModal({ setSettings: (settings: Settings) => void; apiKey: string | null; setApiKey: (apiKey: string) => void; + availableModels: string[] | null; }) { const reset = () => { if ( @@ -78,7 +80,7 @@ export const SettingsModal = memo(function SettingsModal({ { setSettings({ ...settings, model: v }); diff --git a/src/utils/constants.ts b/src/utils/constants.ts index 3e5b842..2355c43 100644 --- a/src/utils/constants.ts +++ b/src/utils/constants.ts @@ -15,8 +15,6 @@ export const REACT_FLOW_NODE_TYPES: Record< LabelUpdater: LabelUpdaterNode, }; -export const SUPPORTED_MODELS = ["gpt-3.5-turbo", "gpt-4", "gpt-4-32k"]; - export const DEFAULT_SETTINGS: Settings = { temp: 1.2, n: 3, diff --git a/src/utils/models.ts b/src/utils/models.ts new file mode 100644 index 0000000..434fc0d --- /dev/null +++ b/src/utils/models.ts @@ -0,0 +1,28 @@ +export function getAvailableModels(apiKey: string): Promise<string[]> { + return new Promise<string[]>(async (resolve, reject) => { + try { + const response = await fetch("https://api.openai.com/v1/models", { + method: "GET", + headers: { + Authorization: `Bearer ${apiKey}`, + }, + }) + const data = await response.json(); + resolve(data.data.map((model: any) => model.id).sort()); + } catch (err) { + reject(err); + } + }); +}; + +export function getAvailableChatModels(apiKey: string): Promise<string[]> { + return new Promise<string[]>((resolve, reject) => { + getAvailableModels(apiKey) + .then((models) => { + resolve(models.filter((model) => model.startsWith("gpt-"))); + }) + .catch((err) => { + reject(err); + }); + }); +};