Compare commits

4 Commits

Author SHA1 Message Date
Ventz Petkov
37ae484fbc 🚅 docs: Updated Example for LiteLLM ports and Volume mount (#2941)
* Added necessary "ports" section for it to work by default
* Added a commented-out example GCP Vertex volume mount for the auth config, plus the corresponding ENV variable.
2024-06-01 08:51:18 -04:00
Danny Avila
8939d8af37 🦙 feat: Add Llama 3 System Context Length (#2938) 2024-05-31 12:16:08 -04:00
Danny Avila
f9a0166352 🔄 refactor(EditPresetDialog): Update Model on Endpoint Change (#2936)
* refactor(EditPresetDialog): dynamically update current editable preset model on endpoint change

* feat: Add null check for models in EditPresetDialog

* chore(AlertDialogPortal): typing

* fix(EditPresetDialog): prevent Unknown endpoint edge case for custom endpoints
2024-05-31 11:43:14 -04:00
Danny Avila
248dfb8b5b 🐛 fix: Resolve Preset Button Disappearing in Mobile View (#2935)
* refactor: Update import paths for ExportAndShareMenu component and add localization

* fix: mobile view for export/share button
2024-05-31 08:46:09 -04:00
15 changed files with 138 additions and 60 deletions

View File

@@ -44,7 +44,6 @@
"agenda": "^5.0.0",
"axios": "^1.3.4",
"bcryptjs": "^2.4.3",
"bottleneck": "^2.19.5",
"cheerio": "^1.0.0-rc.12",
"cohere-ai": "^7.9.1",
"connect-redis": "^7.1.0",

View File

@@ -1,6 +1,5 @@
const multer = require('multer');
const express = require('express');
const Bottleneck = require('bottleneck');
const { CacheKeys } = require('librechat-data-provider');
const { getVoices, streamAudio, textToSpeech } = require('~/server/services/Files/Audio');
const { getLogStores } = require('~/cache');
@@ -9,25 +8,8 @@ const { logger } = require('~/config');
const router = express.Router();
const upload = multer();
// todo: can add Redis support for limiter
const limiter = new Bottleneck({
minTime: 240, // Minimum time between requests (240ms per request = 250 requests per minute)
maxConcurrent: 100, // Maximum number of concurrent requests
reservoir: 250, // Initial number of available requests
reservoirRefreshAmount: 250, // Number of requests replenished in each interval
reservoirRefreshInterval: 60 * 1000, // Reservoir refresh interval (60 seconds)
});
const limitedStreamAudio = limiter.wrap(streamAudio);
const limitedTextToSpeech = limiter.wrap(textToSpeech);
router.post('/manual', upload.none(), async (req, res) => {
try {
await limitedTextToSpeech(req, res);
} catch (error) {
logger.error(`[textToSpeech] user: ${req.user.id} | Failed to process textToSpeech: ${error}`);
res.status(500).json({ error: 'Failed to process textToSpeech' });
}
await textToSpeech(req, res);
});
const logDebugMessage = (req, message) =>
@@ -44,7 +26,7 @@ router.post('/', async (req, res) => {
return res.status(401).json({ error: 'Audio stream already running' });
}
audioRunsCache.set(req.body.runId, true);
await limitedStreamAudio(req, res);
await streamAudio(req, res);
logDebugMessage(req, 'end stream audio');
res.status(200).end();
} catch (error) {
@@ -53,7 +35,6 @@ router.post('/', async (req, res) => {
}
});
// todo: cache voices
router.get('/voices', async (req, res) => {
await getVoices(req, res);
});
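
This hunk deletes the in-process Bottleneck limiter (and the try/catch in the /manual route) in favor of calling textToSpeech and streamAudio directly; the removed "// todo: can add Redis support for limiter" comment points at the limiter's real shortcoming: its reservoir lived in a single process's memory. As a hedged sketch only (not part of this diff), Bottleneck's documented clustering mode can share one reservoir across workers via Redis; it assumes bottleneck and ioredis are installed, and the limiter id and connection values are illustrative:

    // Sketch: the deleted limiter settings, backed by Bottleneck's ioredis
    // datastore so every worker draws from one shared reservoir.
    import Bottleneck from 'bottleneck';

    const limiter = new Bottleneck({
      id: 'tts-limiter', // hypothetical shared key; all workers with this id share limits
      datastore: 'ioredis', // keep limiter state in Redis, not process memory
      clearDatastore: false,
      clientOptions: { host: process.env.REDIS_HOST ?? 'localhost', port: 6379 },
      minTime: 240, // 240ms per request = 250 requests per minute
      maxConcurrent: 100,
      reservoir: 250,
      reservoirRefreshAmount: 250,
      reservoirRefreshInterval: 60 * 1000, // refresh the reservoir every 60s
    });

    // Usage would mirror the removed code:
    // const limitedTextToSpeech = limiter.wrap(textToSpeech);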

View File

@@ -59,6 +59,8 @@ const openAIModels = {
'gpt-3.5-turbo-1106': 16375, // -10 from max
'gpt-3.5-turbo-0125': 16375, // -10 from max
'mistral-': 31990, // -10 from max
llama3: 8187, // -5 from max
'llama-3': 8187, // -5 from max
};
const cohereModels = {

View File

@@ -20,6 +20,18 @@ describe('getModelMaxTokens', () => {
);
});
test('should return correct tokens for LLama 3 models', () => {
expect(getModelMaxTokens('meta-llama/llama-3-8b')).toBe(
maxTokensMap[EModelEndpoint.openAI]['llama-3'],
);
expect(getModelMaxTokens('meta-llama/llama-3-8b')).toBe(
maxTokensMap[EModelEndpoint.openAI]['llama3'],
);
expect(getModelMaxTokens('llama-3-500b')).toBe(maxTokensMap[EModelEndpoint.openAI]['llama-3']);
expect(getModelMaxTokens('llama3-70b')).toBe(maxTokensMap[EModelEndpoint.openAI]['llama3']);
expect(getModelMaxTokens('llama3:latest')).toBe(maxTokensMap[EModelEndpoint.openAI]['llama3']);
});
test('should return undefined for no match', () => {
expect(getModelMaxTokens('unknown-model')).toBeUndefined();
});
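
These tests pass model names that are not exact keys of maxTokensMap, so the lookup must do partial matching. Below is a minimal, self-contained sketch of that idea, assuming a substring fallback; the real getModelMaxTokens is more involved, and this is illustrative only:

    // Partial-key matching sketch (not LibreChat's actual implementation):
    // 'meta-llama/llama-3-8b' resolves to the 'llama-3' entry because that
    // key appears inside the model name.
    const openAIModels: Record<string, number> = {
      'mistral-': 31990,
      llama3: 8187,
      'llama-3': 8187,
    };

    function getModelMaxTokens(modelName: string): number | undefined {
      if (openAIModels[modelName] !== undefined) {
        return openAIModels[modelName]; // exact key wins
      }
      // Otherwise fall back to the first map key contained in the name.
      const key = Object.keys(openAIModels).find((k) => modelName.includes(k));
      return key ? openAIModels[key] : undefined;
    }

    console.log(getModelMaxTokens('meta-llama/llama-3-8b')); // 8187
    console.log(getModelMaxTokens('llama3:latest')); // 8187
    console.log(getModelMaxTokens('unknown-model')); // undefined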

View File

@@ -196,6 +196,7 @@ export type TEditPresetProps = {
title?: string;
};
export type TSetOptions = (options: Record<string, unknown>) => void;
export type TSetOptionsPayload = {
setOption: TSetOption;
setExample: TSetExample;
@@ -205,6 +206,7 @@ export type TSetOptionsPayload = {
// getConversation: () => TConversation | TPreset | null;
checkPluginSelection: (value: string) => boolean;
setTools: (newValue: string, remove?: boolean) => void;
setOptions?: TSetOptions;
};
export type TPresetItemProps = {

View File

@@ -1,26 +1,18 @@
import { useState } from 'react';
import { Upload } from 'lucide-react';
import { useRecoilValue } from 'recoil';
import { useLocation } from 'react-router-dom';
import type { TConversation } from 'librechat-data-provider';
import DropDownMenu from '../Conversations/DropDownMenu';
import ShareButton from '../Conversations/ShareButton';
import HoverToggle from '../Conversations/HoverToggle';
import DropDownMenu from '~/components/Conversations/DropDownMenu';
import ShareButton from '~/components/Conversations/ShareButton';
import HoverToggle from '~/components/Conversations/HoverToggle';
import useLocalize from '~/hooks/useLocalize';
import ExportButton from './ExportButton';
import store from '~/store';
export default function ExportAndShareMenu() {
const location = useLocation();
export default function ExportAndShareMenu({ className = '' }: { className?: string }) {
const localize = useLocalize();
const activeConvo = useRecoilValue(store.conversationByIndex(0));
const globalConvo = useRecoilValue(store.conversation) ?? ({} as TConversation);
const conversation = useRecoilValue(store.conversationByIndex(0));
const [isPopoverActive, setIsPopoverActive] = useState(false);
let conversation: TConversation | null | undefined;
if (location.state?.from?.pathname.includes('/chat')) {
conversation = globalConvo;
} else {
conversation = activeConvo;
}
const exportable =
conversation &&
@@ -29,7 +21,7 @@ export default function ExportAndShareMenu() {
conversation.conversationId !== 'search';
if (!exportable) {
return <></>;
return null;
}
const isActiveConvo = exportable;
@@ -39,10 +31,11 @@ export default function ExportAndShareMenu() {
isActiveConvo={!!isActiveConvo}
isPopoverActive={isPopoverActive}
setIsPopoverActive={setIsPopoverActive}
className={className}
>
<DropDownMenu
icon={<Upload />}
tooltip="Export/Share"
tooltip={localize('com_endpoint_export_share')}
className="pointer-cursor relative z-50 flex h-[40px] min-w-4 flex-none flex-col items-center justify-center rounded-md border border-gray-100 bg-white px-3 text-left hover:bg-gray-50 focus:outline-none focus:ring-0 focus:ring-offset-0 radix-state-open:bg-gray-50 dark:border-gray-700 dark:bg-gray-800 dark:hover:bg-gray-700 dark:radix-state-open:bg-gray-700 sm:text-sm"
>
{conversation && conversation.conversationId && (

View File

@@ -6,6 +6,7 @@ import type { ContextType } from '~/common';
import { EndpointsMenu, ModelSpecsMenu, PresetsMenu, HeaderNewChat } from './Menus';
import ExportAndShareMenu from './ExportAndShareMenu';
import HeaderOptions from './Input/HeaderOptions';
import { useMediaQuery } from '~/hooks';
const defaultInterface = getConfigDefaults().interface;
@@ -18,6 +19,8 @@ export default function Header() {
[startupConfig],
);
const isSmallScreen = useMediaQuery('(max-width: 768px)');
return (
<div className="sticky top-0 z-10 flex h-14 w-full items-center justify-between bg-white p-2 font-semibold dark:bg-gray-800 dark:text-white">
<div className="hide-scrollbar flex w-full items-center justify-between gap-2 overflow-x-auto">
@@ -27,8 +30,9 @@ export default function Header() {
{modelSpecs?.length > 0 && <ModelSpecsMenu modelSpecs={modelSpecs} />}
{<HeaderOptions interfaceConfig={interfaceConfig} />}
{interfaceConfig.presets && <PresetsMenu />}
{isSmallScreen && <ExportAndShareMenu className="pl-0" />}
</div>
<ExportAndShareMenu />
{!isSmallScreen && <ExportAndShareMenu />}
</div>
{/* Empty div for spacing */}
<div />
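
With isSmallScreen, the header now renders the export/share menu inside the scrollable button row (with className="pl-0") on viewports up to 768px, and in its original slot otherwise, which is what keeps the button visible on mobile. For context, here is a hedged sketch of what a useMediaQuery hook conventionally looks like; the actual hook exported from ~/hooks may differ:

    // Conventional matchMedia-based hook (sketch, not LibreChat's exact code):
    import { useEffect, useState } from 'react';

    export function useMediaQuery(query: string): boolean {
      const [matches, setMatches] = useState(
        () => typeof window !== 'undefined' && window.matchMedia(query).matches,
      );

      useEffect(() => {
        const mediaQuery = window.matchMedia(query);
        const onChange = (event: MediaQueryListEvent) => setMatches(event.matches);
        mediaQuery.addEventListener('change', onChange);
        setMatches(mediaQuery.matches); // re-sync if the query string changed
        return () => mediaQuery.removeEventListener('change', onChange);
      }, [query]);

      return matches;
    }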

View File

@@ -1,10 +1,20 @@
import { useRecoilState } from 'recoil';
import { useCallback, useEffect } from 'react';
import { useQueryClient } from '@tanstack/react-query';
import { QueryKeys } from 'librechat-data-provider';
import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
import { cn, defaultTextProps, removeFocusOutlines, mapEndpoints } from '~/utils';
import { Input, Label, Dropdown, Dialog, DialogClose, DialogButton } from '~/components/';
import type { TModelsConfig, TEndpointsConfig } from 'librechat-data-provider';
import {
cn,
defaultTextProps,
removeFocusOutlines,
mapEndpoints,
getConvoSwitchLogic,
} from '~/utils';
import { Input, Label, Dropdown, Dialog, DialogClose, DialogButton } from '~/components';
import { useSetIndexOptions, useLocalize, useDebouncedInput } from '~/hooks';
import PopoverButtons from '~/components/Chat/Input/PopoverButtons';
import DialogTemplate from '~/components/ui/DialogTemplate';
import { useSetIndexOptions, useLocalize, useDebouncedInput } from '~/hooks';
import { EndpointSettings } from '~/components/Endpoints';
import { useChatContext } from '~/Providers';
import store from '~/store';
@@ -17,8 +27,9 @@ const EditPresetDialog = ({
submitPreset: () => void;
}) => {
const localize = useLocalize();
const queryClient = useQueryClient();
const { preset, setPreset } = useChatContext();
const { setOption } = useSetIndexOptions(preset);
const { setOption, setOptions, setAgentOption } = useSetIndexOptions(preset);
const [onTitleChange, title] = useDebouncedInput({
setOption,
optionKey: 'title',
@@ -30,6 +41,67 @@ const EditPresetDialog = ({
select: mapEndpoints,
});
useEffect(() => {
if (!preset) {
return;
}
if (!preset.endpoint) {
return;
}
const modelsConfig = queryClient.getQueryData<TModelsConfig>([QueryKeys.models]);
if (!modelsConfig) {
return;
}
const models = modelsConfig[preset.endpoint];
if (!models) {
return;
}
if (!models.length) {
return;
}
if (preset.model === models[0]) {
return;
}
if (!models.includes(preset.model ?? '')) {
console.log('setting model', models[0]);
setOption('model')(models[0]);
}
if (preset.agentOptions?.model === models[0]) {
return;
}
if (preset.agentOptions?.model && !models.includes(preset.agentOptions.model)) {
console.log('setting agent model', models[0]);
setAgentOption('model')(models[0]);
}
}, [preset, queryClient, setOption, setAgentOption]);
const switchEndpoint = useCallback(
(newEndpoint: string) => {
if (!setOptions) {
return console.warn('setOptions is not defined');
}
const { newEndpointType } = getConvoSwitchLogic({
newEndpoint,
modularChat: true,
conversation: null,
endpointsConfig: queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]) ?? {},
});
setOptions({
endpoint: newEndpoint,
endpointType: newEndpointType,
});
},
[queryClient, setOptions],
);
const { endpoint, endpointType, model } = preset || {};
if (!endpoint) {
return null;
@@ -76,7 +148,7 @@ const EditPresetDialog = ({
</Label>
<Dropdown
value={endpoint || ''}
onChange={(value) => setOption('endpoint')(value)}
onChange={switchEndpoint}
options={availableEndpoints}
/>
</div>
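
Net effect of this file's changes: the new effect falls back to the first model offered by the selected endpoint whenever the preset's current model (or agentOptions.model) is not in that endpoint's list, and the endpoint Dropdown now routes through switchEndpoint, which batches endpoint and endpointType in a single setOptions call so the two fields never render out of sync (a sketch of that batching pattern follows the usePresetIndexOptions diff below).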

View File

@@ -7,23 +7,26 @@ const HoverToggle = ({
isActiveConvo,
isPopoverActive,
setIsPopoverActive,
className = 'absolute bottom-0 right-0 top-0',
}: {
children: React.ReactNode;
isActiveConvo: boolean;
isPopoverActive: boolean;
setIsPopoverActive: (isActive: boolean) => void;
className?: string;
}) => {
const setPopoverActive = (value: boolean) => setIsPopoverActive(value);
return (
<ToggleContext.Provider value={{ isPopoverActive, setPopoverActive }}>
<div
className={cn(
'peer absolute bottom-0 right-0 top-0 items-center gap-1.5 rounded-r-lg from-gray-500 from-gray-900 pl-2 pr-2 dark:text-white',
'peer items-center gap-1.5 rounded-r-lg from-gray-500 from-gray-900 pl-2 pr-2 dark:text-white',
isPopoverActive || isActiveConvo ? 'flex' : 'hidden group-hover:flex',
isActiveConvo
? 'from-gray-50 from-85% to-transparent group-hover:bg-gradient-to-l group-hover:from-gray-200 dark:from-gray-800 dark:group-hover:from-gray-800'
: 'z-50 from-gray-200 from-gray-50 from-0% to-transparent hover:bg-gradient-to-l hover:from-gray-200 dark:from-gray-750 dark:from-gray-800 dark:hover:from-gray-800',
isPopoverActive && !isActiveConvo ? 'from-gray-50 dark:from-gray-800' : '',
className,
)}
>
{children}

View File

@@ -7,12 +7,10 @@ const AlertDialog = AlertDialogPrimitive.Root;
const AlertDialogTrigger = AlertDialogPrimitive.Trigger;
const AlertDialogPortal = ({
className = '',
children,
...props
}: AlertDialogPrimitive.AlertDialogPortalProps) => (
<AlertDialogPrimitive.Portal className={cn(className)} {...props}>
type AlertPortalProps = AlertDialogPrimitive.AlertDialogPortalProps & { className?: string };
const AlertDialogPortal = ({ className = '', children, ...props }: AlertPortalProps) => (
<AlertDialogPrimitive.Portal className={cn(className)} {...(props as AlertPortalProps)}>
<div className="fixed inset-0 z-50 flex items-end justify-center sm:items-center">
{children}
</div>
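
For context on the typing chore: the rewrite suggests newer @radix-ui Portal prop types no longer include className, so AlertPortalProps re-adds it via an intersection type and the cast keeps the spread well-typed; that reading is inferred from the diff rather than stated in the commit.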

View File

@@ -1,6 +1,6 @@
import { useRecoilValue, useSetRecoilState } from 'recoil';
import type { TPreset, TPlugin } from 'librechat-data-provider';
import type { TSetOptionsPayload, TSetExample, TSetOption } from '~/common';
import type { TSetOptionsPayload, TSetExample, TSetOption, TSetOptions } from '~/common';
import { useChatContext } from '~/Providers/ChatContext';
import { cleanupPreset } from '~/utils';
import store from '~/store';
@@ -17,6 +17,18 @@ const usePresetIndexOptions: TUsePresetOptions = (_preset) => {
}
const getConversation: () => TPreset | null = () => preset;
const setOptions: TSetOptions = (options) => {
const update = { ...options };
setPreset((prevState) =>
cleanupPreset({
preset: {
...prevState,
...update,
},
}),
);
};
const setOption: TSetOption = (param) => (newValue) => {
const update = {};
update[param] = newValue;
@@ -155,6 +167,7 @@ const usePresetIndexOptions: TUsePresetOptions = (_preset) => {
setOption,
setExample,
addExample,
setOptions,
removeExample,
getConversation,
checkPluginSelection,
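
As promised above, a self-contained sketch of the batching pattern behind the new setOptions, with a toy object standing in for the Recoil preset state (names and shape here are illustrative, not LibreChat's):

    type Preset = { endpoint?: string; endpointType?: string; model?: string };

    let preset: Preset = { endpoint: 'azureOpenAI', endpointType: 'custom' };
    const render = () => console.log('render:', preset);

    // Batched setter, as added in this diff: one merge, one update, so
    // endpoint and endpointType can never disagree between renders.
    const setOptions = (options: Partial<Preset>) => {
      preset = { ...preset, ...options };
      render();
    };

    // Per-key setter, the pre-existing pattern: one update per field.
    const setOption = (param: keyof Preset) => (newValue?: string) => {
      const update: Partial<Preset> = { [param]: newValue };
      preset = { ...preset, ...update };
      render();
    };

    setOptions({ endpoint: 'openAI', endpointType: undefined }); // one consistent render
    setOption('endpoint')('litellm'); // renders with the old endpointType still in place...
    setOption('endpointType')('custom'); // ...and a second render fixes the pairing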

View File

@@ -157,13 +157,13 @@ const useSetIndexOptions: TUseSetOptions = (preset = false) => {
};
return {
setTools,
setOption,
setExample,
addExample,
removeExample,
setAgentOption,
checkPluginSelection,
setTools,
};
};

View File

@@ -424,6 +424,7 @@ export default {
com_endpoint_agent: 'Agent',
com_endpoint_show_what_settings: 'Show {0} Settings',
com_endpoint_export: 'Export',
com_endpoint_export_share: 'Export/Share',
com_endpoint_assistant: 'Assistant',
com_endpoint_use_active_assistant: 'Use Active Assistant',
com_endpoint_assistant_model: 'Assistant Model',

View File

@@ -120,9 +120,13 @@ version: '3.4'
# image: ghcr.io/berriai/litellm:main-latest
# volumes:
# - ./litellm/litellm-config.yaml:/app/config.yaml
# - ./litellm/application_default_credentials.json:/app/application_default_credentials.json # only if using Google Vertex
# ports:
# - "4000:8000"
# command: [ "--config", "/app/config.yaml", "--port", "8000", "--num_workers", "8" ]
# environment:
# OPENAI_API_KEY: none ## needs to be set if ollama's openai api compatibility is used
# GOOGLE_APPLICATION_CREDENTIALS: /app/application_default_credentials.json ## only if using Google Vertex
# REDIS_HOST: redis
# REDIS_PORT: 6379
# REDIS_PASSWORD: RedisChangeMe
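
The added ports entry publishes the proxy's container port 8000 (set by "--port 8000" in the command) on host port 4000, so other compose services reach it at http://litellm:8000 while the host uses http://localhost:4000. A hedged smoke test, assuming LiteLLM's OpenAI-compatible /v1/models route, no proxy master key, and Node 18+ for built-in fetch:

    // Run from the host after `docker compose up litellm` (sketch; add an
    // Authorization header if your LiteLLM config sets a master key).
    const res = await fetch('http://localhost:4000/v1/models');
    console.log(res.status, await res.json());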

package-lock.json generated (6 lines changed)
View File

@@ -52,7 +52,6 @@
"agenda": "^5.0.0",
"axios": "^1.3.4",
"bcryptjs": "^2.4.3",
"bottleneck": "^2.19.5",
"cheerio": "^1.0.0-rc.12",
"cohere-ai": "^7.9.1",
"connect-redis": "^7.1.0",
@@ -11701,11 +11700,6 @@
"integrity": "sha512-ma2q0Tc760dW54CdOyJjhrg/a54317o1zYADQJFgperNGKIKgAUGIcKnuMiff8z57+yGlrGNEt4lPgZfCgTJgA==",
"dev": true
},
"node_modules/bottleneck": {
"version": "2.19.5",
"resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz",
"integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw=="
},
"node_modules/bowser": {
"version": "2.11.0",
"resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz",