diff --git a/src/lib/config.ts b/src/lib/config.ts
index ed1998ae..09ece7fa 100644
--- a/src/lib/config.ts
+++ b/src/lib/config.ts
@@ -24,8 +24,11 @@ export const SOF_HOSTS = [
// },
{
id: "epic",
- name: "Epic Demo",
- url: "https://fhir.epic.com/interconnect-fhir-oauth/api/FHIR/R4",
+ //name: "Epic Demo",
+ //url: "https://fhir.epic.com/interconnect-fhir-oauth/api/FHIR/R4",
+ name: "Epic, UW Medicine Epic (production)",
+ // per https://open.epic.com/MyApps/Endpoints :
+ url: "https://fhir.epic.medical.washington.edu/FHIR-Proxy/api/FHIR/R4",
clientId: import.meta.env.VITE_EPIC_CLIENT_ID,
note: "Test patient credentials "
},
diff --git a/src/lib/utils/llmChat.js b/src/lib/utils/llmChat.js
new file mode 100644
index 00000000..364f54b9
--- /dev/null
+++ b/src/lib/utils/llmChat.js
@@ -0,0 +1,109 @@
+let fhirResources = null;
+let messages = [];
+
+function initLLMChat(resources) {
+ fhirResources = resources;
+ const llmChatContent = document.getElementById('llm-chat-content');
+ const chatInput = document.getElementById('chat-input');
+ const sendMessageButton = document.getElementById('send-message');
+
+ sendMessageButton.addEventListener('click', sendMessage);
+ chatInput.addEventListener('keypress', (e) => {
+ if (e.key === 'Enter') sendMessage();
+ });
+}
+
+function insertMessageIntoUi(role, userMessage) {
+ const chatMessages = document.getElementById('chat-messages');
+
+ // Create a new table row for the user message
+ const row = document.createElement('tr');
+
+ // Create cells for the request
+ const requestCell = document.createElement('td');
+ requestCell.textContent = userMessage;
+
+  // Create empty cells for the response, token counts, and cost
+ const responseCell = document.createElement('td');
+ const promptTokensCell = document.createElement('td');
+ const completionTokensCell = document.createElement('td');
+ const costCell = document.createElement('td');
+
+ // Append cells to the row
+ row.appendChild(requestCell);
+ row.appendChild(responseCell);
+ row.appendChild(promptTokensCell);
+ row.appendChild(completionTokensCell);
+ row.appendChild(costCell);
+
+ // Append the row to the chat messages table
+ chatMessages.appendChild(row);
+
+ // Return the row for later updates
+ return row;
+}
+
+// Send the user's message (prefixing the FHIR resources as context on the first send) to the LLM service and render the response
+async function sendMessage() {
+ const chatInput = document.getElementById('chat-input');
+ const userMessage = chatInput.value.trim();
+ if (userMessage.length === 0) return;
+
+ // Append the FHIR resources as the first message
+ if (messages.length === 0) {
+ messages.push({
+ role: "user",
+ content: [{ type: "text", text: JSON.stringify(fhirResources) }]
+ });
+ }
+
+ // Append the user message
+ messages.push({
+ role: "user",
+ content: [{ type: "text", text: userMessage }]
+ });
+
+ // Insert the user message into the UI and get the row reference
+ const row = insertMessageIntoUi('user', userMessage);
+
+ chatInput.value = '';
+
+ try {
+    // FIXME: move this hard-coded service URL into src/lib/config.ts (alongside SOF_HOSTS) so it can vary per environment
+ const response = await fetch('https://llm-service.fl.mcjustin.dev.cirg.uw.edu/api/chat', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify({ messages: messages }), // Send the messages array
+ });
+
+ if (!response.ok) {
+ throw new Error('Failed to get LLM response');
+ }
+
+ const data = await response.json();
+ // Append the assistant's response
+ messages.push({
+ role: "assistant",
+ content: [{ type: "text", text: data.content }]
+ });
+
+ const promptTokens = data.prompt_tokens;
+ const completionTokens = data.completion_tokens;
+ const costInput = parseInt(promptTokens) * 0.15 / 1000000;
+ const costOutput = parseInt(completionTokens) * 0.6 / 1000000;
+ const cost = costInput + costOutput;
+
+ // Update the existing row with the response and token counts
+ row.cells[1].textContent = data.content; // Response
+ row.cells[2].textContent = promptTokens; // Prompt Tokens
+ row.cells[3].textContent = completionTokens; // Completion Tokens
+ row.cells[4].textContent = costInput.toString().substring(0,7) + " + " + costOutput.toString().substring(0,7) + " = " + cost.toString().substring(0,7);
+ } catch (error) {
+ console.error('Error sending message to LLM:', error);
+ row.cells[1].textContent = 'Failed to get a response. Please try again.'; // Update response cell with error message
+ }
+}
+
+export { initLLMChat };
diff --git a/src/lib/utils/retreiveIPS.js b/src/lib/utils/retreiveIPS.js
index 75d7dceb..9933f442 100644
--- a/src/lib/utils/retreiveIPS.js
+++ b/src/lib/utils/retreiveIPS.js
@@ -71,6 +71,7 @@ async function retrieve(){
$("#ips-loader").hide();
return;
}
+  // TODO: verify this decode path is still reached now that the ips page calls verify() itself; if not, remove it and prepareSHLContents
const decoded = await Promise.all(retrieveResult.shcs.map(verify));
const data = decoded.map((e) => e.fhirBundle);
prepareSHLContents(data);
diff --git a/src/routes/(viewer)/ips/+page.svelte b/src/routes/(viewer)/ips/+page.svelte
index dcfed60e..f4a94e36 100644
--- a/src/routes/(viewer)/ips/+page.svelte
+++ b/src/routes/(viewer)/ips/+page.svelte
@@ -21,6 +21,8 @@
import { SHOW_VIEWER_DEMO } from "$lib/config";
+ import { initLLMChat } from '$lib/utils/llmChat.js';
+
let shlContents: Bundle[] = [];
let loading: boolean;
@@ -54,6 +56,8 @@
}
}
+ let showLlmChat = false;
+
let displayModeText:string;
$: {
if ($displayMode) {
@@ -150,6 +154,9 @@
if (retrieveResult.shcs) {
const decoded = await Promise.all(retrieveResult.shcs.map(verify));
const data = decoded.map((e) => e.fhirBundle);
+
+ data.forEach(ipsBundle => initLLMChat(ipsBundle)); // Call initLLMChat for each bundle
+
shlContents = data;
}
}
@@ -186,6 +193,28 @@
+
+
+
+
+
+
+ Your request |
+ LLM Chat Response |
+ Prompt Tokens |
+ Response Tokens |
+ Cost in US$ (prompt + response = total) |
+
+
+
+
+
+
+
+
+
+
+
Displaying FHIR Resources Using:
@@ -245,6 +274,33 @@
{/if}
+
+
+
+
\ No newline at end of file
+