diff --git a/frontend/__snapshots__/components-activitylog--insight-activity--dark.png b/frontend/__snapshots__/components-activitylog--insight-activity--dark.png
index 9fa6256d0c217..b4d37292ee9a8 100644
Binary files a/frontend/__snapshots__/components-activitylog--insight-activity--dark.png and b/frontend/__snapshots__/components-activitylog--insight-activity--dark.png differ
diff --git a/frontend/__snapshots__/components-activitylog--insight-activity--light.png b/frontend/__snapshots__/components-activitylog--insight-activity--light.png
index bc40839af4c33..12512e9a25861 100644
Binary files a/frontend/__snapshots__/components-activitylog--insight-activity--light.png and b/frontend/__snapshots__/components-activitylog--insight-activity--light.png differ
diff --git a/frontend/__snapshots__/components-property-key-info--property-key-info--dark.png b/frontend/__snapshots__/components-property-key-info--property-key-info--dark.png
index 0f7a15872752d..c7ceacfd4fd49 100644
Binary files a/frontend/__snapshots__/components-property-key-info--property-key-info--dark.png and b/frontend/__snapshots__/components-property-key-info--property-key-info--dark.png differ
diff --git a/frontend/__snapshots__/components-property-key-info--property-key-info--light.png b/frontend/__snapshots__/components-property-key-info--property-key-info--light.png
index b9cd979649fc2..649dba618ee27 100644
Binary files a/frontend/__snapshots__/components-property-key-info--property-key-info--light.png and b/frontend/__snapshots__/components-property-key-info--property-key-info--light.png differ
diff --git a/frontend/src/lib/components/PropertyKeyInfo.tsx b/frontend/src/lib/components/PropertyKeyInfo.tsx
index d7efdeb051c65..e2e006915252e 100644
--- a/frontend/src/lib/components/PropertyKeyInfo.tsx
+++ b/frontend/src/lib/components/PropertyKeyInfo.tsx
@@ -38,11 +38,8 @@ export const PropertyKeyInfo = React.forwardRef
         (empty string) : valueDisplayText
-    const recognizedSource: 'posthog' | 'langfuse' | null = coreDefinition
-        ? 'posthog'
-        : value.startsWith('langfuse ')
-          ? 'langfuse'
-          : null
+    const recognizedSource: 'posthog' | 'langfuse' | null =
+        coreDefinition || value.startsWith('$') ? 'posthog' : value.startsWith('langfuse ') ? 'langfuse' : null
     const innerContent = (
-                {!!coreDefinition && }
+                {!!coreDefinition && (
+
+                )}
                 {coreDefinition.label}
                 {coreDefinition.description || coreDefinition.examples ? (
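The PropertyKeyInfo hunk above broadens which keys count as a recognized PostHog property: a `$`-prefixed key now qualifies even when it has no core definition yet (such as the new `$ai_*` keys added below). A minimal sketch of that rule as a standalone function; `recognizeSource` and its parameters are illustrative names, the component computes this inline from `coreDefinition` and `value`:

```ts
type RecognizedSource = 'posthog' | 'langfuse' | null

// Mirrors the new ternary: a core definition or a `$` prefix means PostHog,
// a `langfuse ` prefix means Langfuse, anything else is unrecognized.
function recognizeSource(value: string, hasCoreDefinition: boolean): RecognizedSource {
    if (hasCoreDefinition || value.startsWith('$')) {
        return 'posthog'
    }
    return value.startsWith('langfuse ') ? 'langfuse' : null
}

// recognizeSource('$ai_trace_id', false)        -> 'posthog' (newly recognized)
// recognizeSource('langfuse generation', false) -> 'langfuse'
// recognizeSource('utm_source', false)          -> null
```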
diff --git a/frontend/src/lib/taxonomy.tsx b/frontend/src/lib/taxonomy.tsx
index 5ee7691128d42..f339332a36456 100644
--- a/frontend/src/lib/taxonomy.tsx
+++ b/frontend/src/lib/taxonomy.tsx
@@ -177,7 +177,12 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = {
         },
         $ai_generation: {
             label: 'AI Generation',
-            description: 'A call to a generative AI model, e.g. an LLM',
+            description: 'A call to a generative AI model (LLM)',
+        },
+        $ai_trace: {
+            label: 'AI Trace',
+            description:
+                'A generative AI trace. Usually a trace tracks a single user interaction and contains one or more AI generation calls',
         },
         $ai_metric: {
             label: 'AI Metric',
@@ -1378,9 +1383,9 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = {
             description: 'The number of tokens in the input prmopt that was sent to the LLM API',
             examples: [23],
         },
-        $ai_output: {
+        $ai_output_choices: {
             label: 'AI Output (LLM)',
-            description: 'The output JSON that was received from the LLM API',
+            description: 'The output message choices JSON that was received from the LLM API',
             examples: [
                 '{"choices": [{"text": "Quantum computing is a type of computing that harnesses the power of quantum mechanics to perform operations on data."}]}',
             ],
@@ -1420,6 +1425,29 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = {
             description: 'The parameters used to configure the model in the LLM API, in JSON',
             examples: ['{"temperature": 0.5, "max_tokens": 50}'],
         },
+        $ai_stream: {
+            label: 'AI Stream (LLM)',
+            description: 'Whether the response from the LLM API was streamed',
+            examples: ['true', 'false'],
+        },
+        $ai_temperature: {
+            label: 'AI Temperature (LLM)',
+            description: 'The temperature parameter used in the request to the LLM API',
+            examples: [0.7, 1.0],
+        },
+        $ai_input_state: {
+            label: 'AI Input State (LLM)',
+            description: 'Input state of the LLM agent',
+        },
+        $ai_output_state: {
+            label: 'AI Output State (LLM)',
+            description: 'Output state of the LLM agent',
+        },
+        $ai_trace_name: {
+            label: 'AI Trace Name (LLM)',
+            description: 'The name given to this trace of LLM API calls',
+            examples: ['LangGraph'],
+        },
         $ai_provider: {
             label: 'AI Provider (LLM)',
             description: 'The provider of the AI model used to generate the output from the LLM API',
diff --git a/frontend/src/queries/nodes/HogQLX/render.tsx b/frontend/src/queries/nodes/HogQLX/render.tsx
index 7b9ab93a1a53a..684f80026e4b8 100644
--- a/frontend/src/queries/nodes/HogQLX/render.tsx
+++ b/frontend/src/queries/nodes/HogQLX/render.tsx
@@ -59,7 +59,7 @@ export function renderHogQLX(value: any): JSX.Element {
             const { href, children, source, target } = rest
             return (
-
+
                     {children ?? source ? renderHogQLX(children ?? source) : href}
diff --git a/products/llm_observability/frontend/ConversationDisplay/ConversationMessagesDisplay.tsx b/products/llm_observability/frontend/ConversationDisplay/ConversationMessagesDisplay.tsx
index c9093181da741..98e966049c7b6 100644
--- a/products/llm_observability/frontend/ConversationDisplay/ConversationMessagesDisplay.tsx
+++ b/products/llm_observability/frontend/ConversationDisplay/ConversationMessagesDisplay.tsx
@@ -67,11 +67,11 @@ export function LLMMessageDisplay({ message, isOutput }: { message: CompatMessag
                 'rounded border text-default',
                 isOutput
                     ? 'bg-[var(--bg-fill-success-tertiary)]'
-                    : role === 'system'
-                      ? 'bg-[var(--bg-fill-tertiary)]'
                     : role === 'user'
-                      ? 'bg-[var(--bg-fill-primary)]'
-                      : 'bg-[var(--bg-fill-info-tertiary)]'
+                      ? 'bg-[var(--bg-fill-tertiary)]'
+                      : role === 'assistant'
+                        ? 'bg-[var(--bg-fill-info-tertiary)]'
+                        : null // e.g. system
             )}
         >
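The ConversationMessagesDisplay hunk above reorders the role-based backgrounds: user and assistant messages get explicit fills, and everything else (e.g. system) now falls through to no background class. A sketch of the same mapping as a plain function, assuming the same CSS variables; the component builds this inline with clsx:

```ts
// Background class per message role after the change above.
function messageBackground(isOutput: boolean, role: string): string | null {
    if (isOutput) {
        return 'bg-[var(--bg-fill-success-tertiary)]'
    }
    if (role === 'user') {
        return 'bg-[var(--bg-fill-tertiary)]'
    }
    if (role === 'assistant') {
        return 'bg-[var(--bg-fill-info-tertiary)]'
    }
    return null // e.g. system messages keep the default background
}
```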
diff --git a/products/llm_observability/frontend/LLMObservabilityTracesScene.tsx b/products/llm_observability/frontend/LLMObservabilityTracesScene.tsx
index 76f672b71a426..29cbcc764993c 100644
--- a/products/llm_observability/frontend/LLMObservabilityTracesScene.tsx
+++ b/products/llm_observability/frontend/LLMObservabilityTracesScene.tsx
@@ -62,12 +62,14 @@ export function LLMObservabilityTraces(): JSX.Element {
 const IDColumn: QueryContextColumnComponent = ({ record }) => {
     const row = record as LLMTrace
     return (
-
-            {row.id}
-
+
+
+                {row.id.slice(0, 4)}...{row.id.slice(-4)}
+
+
     )
 }
diff --git a/products/llm_observability/frontend/llmObservabilityLogic.tsx b/products/llm_observability/frontend/llmObservabilityLogic.tsx
index 5f338322e2f58..e001c698469fe 100644
--- a/products/llm_observability/frontend/llmObservabilityLogic.tsx
+++ b/products/llm_observability/frontend/llmObservabilityLogic.tsx
@@ -13,6 +13,7 @@ import {
     ChartDisplayType,
     EventDefinitionType,
     HogQLMathType,
+    PropertyFilterType,
     PropertyMathType,
 } from '~/types'
@@ -130,7 +131,10 @@ export const llmObservabilityLogic = kea([
                         },
                     ],
                     dateRange: { date_from: dateFilter.dateFrom, date_to: dateFilter.dateTo },
-                    properties: propertyFilters,
+                    properties: propertyFilters.concat({
+                        type: PropertyFilterType.HogQL,
+                        key: 'distinct_id != properties.$ai_trace_id',
+                    }),
                     filterTestAccounts: shouldFilterTestAccounts,
                 },
             },
@@ -183,7 +187,10 @@ export const llmObservabilityLogic = kea([
                         decimalPlaces: 2,
                     },
                     dateRange: { date_from: dateFilter.dateFrom, date_to: dateFilter.dateTo },
-                    properties: propertyFilters,
+                    properties: propertyFilters.concat({
+                        type: PropertyFilterType.HogQL,
+                        key: 'distinct_id != properties.$ai_trace_id',
+                    }),
                     filterTestAccounts: shouldFilterTestAccounts,
                 },
             },
@@ -249,8 +256,7 @@ export const llmObservabilityLogic = kea([
                     },
                     trendsFilter: {
                         aggregationAxisPostfix: ' s',
-                        decimalPlaces: 3,
-                        yAxisScaleType: 'log10',
+                        decimalPlaces: 2,
                     },
                     dateRange: { date_from: dateFilter.dateFrom, date_to: dateFilter.dateTo },
                     properties: propertyFilters,
@@ -328,14 +334,17 @@ export const llmObservabilityLogic = kea([
                 kind: NodeKind.EventsQuery,
                 select: [
                     '*',
+                    `
+                        {f'{left(toString(uuid), 4)}...{right(toString(uuid), 4)}'}
+                    -- ID`,
+                    `
+                        {f'{left(properties.$ai_trace_id, 4)}...{right(properties.$ai_trace_id, 4)}'}
+                    -- Trace ID`,
                     'person',
-                    // The f-string wrapping below seems pointless, but it actually disables special rendering
-                    // of the property keys, which would otherwise show property names overly verbose here
-                    "f'{properties.$ai_trace_id}' -- Trace ID",
                     "f'{properties.$ai_model}' -- Model",
-                    "f'${round(toFloat(properties.$ai_total_cost_usd), 6)}' -- Total cost",
+                    "f'{round(properties.$ai_latency, 2)} s' -- Latency",
                     "f'{properties.$ai_input_tokens} → {properties.$ai_output_tokens} (∑ {properties.$ai_input_tokens + properties.$ai_output_tokens})' -- Token usage",
-                    "f'{properties.$ai_latency} s' -- Latency",
+                    "f'${round(toFloat(properties.$ai_total_cost_usd), 6)}' -- Total cost",
                     'timestamp',
                 ],
                 orderBy: ['timestamp DESC'],
@@ -358,6 +367,7 @@ export const llmObservabilityLogic = kea([
                         TaxonomicFilterGroupType.HogQLExpression,
                     ],
                     showExport: true,
+                    showActions: false,
                 }),
             ],
         }),
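The llmObservabilityLogic hunks above append the same HogQL expression to the user's property filters in two of the trend queries, so events whose `distinct_id` equals their `$ai_trace_id` are excluded. A sketch of that as a helper; `withTraceIdGuard` is an illustrative name, and the `AnyPropertyFilter` / `PropertyFilterType` imports assume the types exposed by `~/types` as used elsewhere in this file:

```ts
import { AnyPropertyFilter, PropertyFilterType } from '~/types'

// Adds a HogQL filter on top of whatever property filters the user has set.
function withTraceIdGuard(propertyFilters: AnyPropertyFilter[]): AnyPropertyFilter[] {
    return propertyFilters.concat({
        type: PropertyFilterType.HogQL,
        key: 'distinct_id != properties.$ai_trace_id',
    })
}
```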
diff --git a/products/llm_observability/frontend/llmObservabilityTraceLogic.ts b/products/llm_observability/frontend/llmObservabilityTraceLogic.ts
index 075a302a73728..0a8f449f9fa98 100644
--- a/products/llm_observability/frontend/llmObservabilityTraceLogic.ts
+++ b/products/llm_observability/frontend/llmObservabilityTraceLogic.ts
@@ -101,6 +101,11 @@ export const llmObservabilityTraceLogic = kea([
             (s) => [s.traceId],
             (traceId): Breadcrumb[] => {
                 return [
+                    {
+                        key: 'LLMObservability',
+                        name: 'LLM observability',
+                        path: urls.llmObservability('dashboard'),
+                    },
                     {
                         key: 'LLMObservability',
                         name: 'Traces',
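Finally, the llmObservabilityTraceLogic hunk prepends a parent crumb so the trace view links back to the LLM observability dashboard. A minimal sketch of the added entry, assuming the `Breadcrumb` type from `~/types` and the `urls` helper from `scenes/urls` that the surrounding code already uses:

```ts
import { Breadcrumb } from '~/types'
import { urls } from 'scenes/urls'

// New first entry of the breadcrumbs selector; the existing 'Traces' and
// per-trace crumbs follow it unchanged.
const llmObservabilityCrumb: Breadcrumb = {
    key: 'LLMObservability',
    name: 'LLM observability',
    path: urls.llmObservability('dashboard'),
}
```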