Skip to content

Commit

Permalink
update(console): support client serving and optimize ui (#3065)
Browse files · Browse the repository at this point in the history
  • Branch information
waynelwz authored on Dec 6, 2023
1 parent f0f0d89 commit 24d55d8
Show file tree
Hide file tree
Showing 12 changed files with 464 additions and 90 deletions.
21 changes: 13 additions & 8 deletions console/client/App.tsx
Original file line number Diff line number Diff line change
@@ -1,19 +1,24 @@
import React from 'react'
import { Provider as StyletronProvider } from 'styletron-react'
import { BaseProvider } from 'baseui'
import { BaseProvider, LocaleProvider } from 'baseui'
import DeepTheme from '@starwhale/ui/theme'
import { Client as Styletron } from 'styletron-engine-atomic'
import { QueryClient, QueryClientProvider } from 'react-query'
import ServingPage from './ServingPage'
import { initI18n } from '@/i18n'

initI18n()

export default function App(): any {
return (
<StyletronProvider value={new Styletron()}>
<BaseProvider theme={DeepTheme}>
<QueryClientProvider client={new QueryClient()}>
<ServingPage />
</QueryClientProvider>
</BaseProvider>
</StyletronProvider>
<QueryClientProvider client={new QueryClient()}>
<StyletronProvider value={new Styletron()}>
<BaseProvider theme={DeepTheme}>
<LocaleProvider locale={{}}>
<ServingPage />
</LocaleProvider>
</BaseProvider>
</StyletronProvider>
</QueryClientProvider>
)
}
19 changes: 17 additions & 2 deletions console/client/ServingPage.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,9 @@ import axios from 'axios'
import { useQuery } from 'react-query'
import { Select } from '@starwhale/ui'
import _ from 'lodash'
import LLMChat from './pages/llm/LLMChat'
import { IApiSchema, InferenceType, ISpecSchema } from './schemas/api'
import { useChatStore } from '@starwhale/ui/Serving/store/chat'
import ChatGroup from './components/ChatGroup'

const fetchSpec = async () => {
const { data } = await axios.get<ISpecSchema>('/api/spec')
Expand All @@ -13,6 +14,7 @@ const fetchSpec = async () => {

export default function ServingPage() {
const useFetchSpec = useQuery('spec', fetchSpec)
const chatStore = useChatStore()

const [spec, setSpec] = React.useState<ISpecSchema>()
const [currentApi, setCurrentApi] = React.useState<IApiSchema>()
Expand All @@ -21,8 +23,20 @@ export default function ServingPage() {
if (!useFetchSpec.data) {
return
}
const apiSpec = useFetchSpec.data.apis[0]
chatStore.newOrUpdateSession({
job: { id: 'client' } as any,
type: apiSpec?.inference_type,
exposedLink: {
link: '',
type: 'WEB_HANDLER',
name: 'llm_chat',
},
apiSpec: useFetchSpec.data.apis[0],
} as any)
setSpec(useFetchSpec.data)
setCurrentApi(useFetchSpec.data.apis[0])
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [useFetchSpec.data])

return (
Expand All @@ -44,7 +58,8 @@ export default function ServingPage() {
}}
/>
)}
{currentApi?.inference_type === InferenceType.LLM_CHAT && <LLMChat api={currentApi} />}
{/* {currentApi?.inference_type === InferenceType.LLM_CHAT && <LLMChat api={currentApi} />} */}
{currentApi?.inference_type === InferenceType.LLM_CHAT && <ChatGroup useStore={useChatStore} />}
</div>
)
}
Loading

0 comments on commit 24d55d8

Please sign in to comment.