Skip to content

Commit

Permalink
chore(llm-observability): UI tweaks (#27870)
Browse files Browse the repository at this point in the history
Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
  • Loading branch information
Twixes and github-actions[bot] authored Jan 24, 2025
1 parent 2265e87 commit 8b13d86
Show file tree
Hide file tree
Showing 11 changed files with 73 additions and 29 deletions.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
11 changes: 5 additions & 6 deletions frontend/src/lib/components/PropertyKeyInfo.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -38,11 +38,8 @@ export const PropertyKeyInfo = React.forwardRef<HTMLSpanElement, PropertyKeyInfo
const valueDisplayText = (coreDefinition ? coreDefinition.label : value)?.trim() ?? ''
const valueDisplayElement = valueDisplayText === '' ? <i>(empty string)</i> : valueDisplayText

const recognizedSource: 'posthog' | 'langfuse' | null = coreDefinition
? 'posthog'
: value.startsWith('langfuse ')
? 'langfuse'
: null
const recognizedSource: 'posthog' | 'langfuse' | null =
coreDefinition || value.startsWith('$') ? 'posthog' : value.startsWith('langfuse ') ? 'langfuse' : null

const innerContent = (
<span
Expand All @@ -68,7 +65,9 @@ export const PropertyKeyInfo = React.forwardRef<HTMLSpanElement, PropertyKeyInfo
overlay={
<div className="PropertyKeyInfo__overlay">
<div className="PropertyKeyInfo__header">
{!!coreDefinition && <span className="PropertyKeyInfo__logo" />}
{!!coreDefinition && (
<span className={`PropertyKeyInfo__logo PropertyKeyInfo__logo--${recognizedSource}`} />
)}
{coreDefinition.label}
</div>
{coreDefinition.description || coreDefinition.examples ? (
Expand Down
34 changes: 31 additions & 3 deletions frontend/src/lib/taxonomy.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,12 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = {
},
$ai_generation: {
label: 'AI Generation',
description: 'A call to a generative AI model, e.g. an LLM',
description: 'A call to a generative AI model (LLM)',
},
$ai_trace: {
label: 'AI Trace',
description:
'A generative AI trace. Usually a trace tracks a single user interaction and contains one or more AI generation calls',
},
$ai_metric: {
label: 'AI Metric',
Expand Down Expand Up @@ -1378,9 +1383,9 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = {
description: 'The number of tokens in the input prompt that was sent to the LLM API',
examples: [23],
},
$ai_output: {
$ai_output_choices: {
label: 'AI Output (LLM)',
description: 'The output JSON that was received from the LLM API',
description: 'The output message choices JSON that was received from the LLM API',
examples: [
'{"choices": [{"text": "Quantum computing is a type of computing that harnesses the power of quantum mechanics to perform operations on data."}]}',
],
Expand Down Expand Up @@ -1420,6 +1425,29 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = {
description: 'The parameters used to configure the model in the LLM API, in JSON',
examples: ['{"temperature": 0.5, "max_tokens": 50}'],
},
$ai_stream: {
label: 'AI Stream (LLM)',
description: 'Whether the response from the LLM API was streamed',
examples: ['true', 'false'],
},
$ai_temperature: {
label: 'AI Temperature (LLM)',
description: 'The temperature parameter used in the request to the LLM API',
examples: [0.7, 1.0],
},
$ai_input_state: {
label: 'AI Input State (LLM)',
description: 'Input state of the LLM agent',
},
$ai_output_state: {
label: 'AI Output State (LLM)',
description: 'Output state of the LLM agent',
},
$ai_trace_name: {
label: 'AI Trace Name (LLM)',
description: 'The name given to this trace of LLM API calls',
examples: ['LangGraph'],
},
$ai_provider: {
label: 'AI Provider (LLM)',
description: 'The provider of the AI model used to generate the output from the LLM API',
Expand Down
2 changes: 1 addition & 1 deletion frontend/src/queries/nodes/HogQLX/render.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ export function renderHogQLX(value: any): JSX.Element {
const { href, children, source, target } = rest
return (
<ErrorBoundary>
<Link to={href} target={target ?? '_self'}>
<Link to={href} target={target ?? undefined}>
{children ?? source ? renderHogQLX(children ?? source) : href}
</Link>
</ErrorBoundary>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -67,11 +67,11 @@ export function LLMMessageDisplay({ message, isOutput }: { message: CompatMessag
'rounded border text-default',
isOutput
? 'bg-[var(--bg-fill-success-tertiary)]'
: role === 'system'
? 'bg-[var(--bg-fill-tertiary)]'
: role === 'user'
? 'bg-[var(--bg-fill-primary)]'
: 'bg-[var(--bg-fill-info-tertiary)]'
? 'bg-[var(--bg-fill-tertiary)]'
: role === 'assistant'
? 'bg-[var(--bg-fill-info-tertiary)]'
: null // e.g. system
)}
>
<div className="flex items-center gap-1 w-full px-2 h-6 text-xs font-medium">
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,12 +62,14 @@ export function LLMObservabilityTraces(): JSX.Element {
const IDColumn: QueryContextColumnComponent = ({ record }) => {
const row = record as LLMTrace
return (
<Link
className="ph-no-capture font-semibold"
to={urls.llmObservabilityTrace(row.id, { timestamp: removeMilliseconds(row.createdAt) })}
>
{row.id}
</Link>
<strong>
<Link
className="ph-no-capture"
to={urls.llmObservabilityTrace(row.id, { timestamp: removeMilliseconds(row.createdAt) })}
>
{row.id.slice(0, 4)}...{row.id.slice(-4)}
</Link>
</strong>
)
}

Expand Down
28 changes: 19 additions & 9 deletions products/llm_observability/frontend/llmObservabilityLogic.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import {
ChartDisplayType,
EventDefinitionType,
HogQLMathType,
PropertyFilterType,
PropertyMathType,
} from '~/types'

Expand Down Expand Up @@ -130,7 +131,10 @@ export const llmObservabilityLogic = kea<llmObservabilityLogicType>([
},
],
dateRange: { date_from: dateFilter.dateFrom, date_to: dateFilter.dateTo },
properties: propertyFilters,
properties: propertyFilters.concat({
type: PropertyFilterType.HogQL,
key: 'distinct_id != properties.$ai_trace_id',
}),
filterTestAccounts: shouldFilterTestAccounts,
},
},
Expand Down Expand Up @@ -183,7 +187,10 @@ export const llmObservabilityLogic = kea<llmObservabilityLogicType>([
decimalPlaces: 2,
},
dateRange: { date_from: dateFilter.dateFrom, date_to: dateFilter.dateTo },
properties: propertyFilters,
properties: propertyFilters.concat({
type: PropertyFilterType.HogQL,
key: 'distinct_id != properties.$ai_trace_id',
}),
filterTestAccounts: shouldFilterTestAccounts,
},
},
Expand Down Expand Up @@ -249,8 +256,7 @@ export const llmObservabilityLogic = kea<llmObservabilityLogicType>([
},
trendsFilter: {
aggregationAxisPostfix: ' s',
decimalPlaces: 3,
yAxisScaleType: 'log10',
decimalPlaces: 2,
},
dateRange: { date_from: dateFilter.dateFrom, date_to: dateFilter.dateTo },
properties: propertyFilters,
Expand Down Expand Up @@ -328,14 +334,17 @@ export const llmObservabilityLogic = kea<llmObservabilityLogicType>([
kind: NodeKind.EventsQuery,
select: [
'*',
`<strong><a href=f'/llm-observability/traces/{properties.$ai_trace_id}?event={uuid}'>
{f'{left(toString(uuid), 4)}...{right(toString(uuid), 4)}'}
</a></strong> -- ID`,
`<a href=f'/llm-observability/traces/{properties.$ai_trace_id}'>
{f'{left(properties.$ai_trace_id, 4)}...{right(properties.$ai_trace_id, 4)}'}
</a> -- Trace ID`,
'person',
// The f-string wrapping below seems pointless, but it actually disables special rendering
// of the property keys, which would otherwise show property names overly verbose here
"f'{properties.$ai_trace_id}' -- Trace ID",
"f'{properties.$ai_model}' -- Model",
"f'${round(toFloat(properties.$ai_total_cost_usd), 6)}' -- Total cost",
"f'{round(properties.$ai_latency, 2)} s' -- Latency",
"f'{properties.$ai_input_tokens} → {properties.$ai_output_tokens} (∑ {properties.$ai_input_tokens + properties.$ai_output_tokens})' -- Token usage",
"f'{properties.$ai_latency} s' -- Latency",
"f'${round(toFloat(properties.$ai_total_cost_usd), 6)}' -- Total cost",
'timestamp',
],
orderBy: ['timestamp DESC'],
Expand All @@ -358,6 +367,7 @@ export const llmObservabilityLogic = kea<llmObservabilityLogicType>([
TaxonomicFilterGroupType.HogQLExpression,
],
showExport: true,
showActions: false,
}),
],
}),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,11 @@ export const llmObservabilityTraceLogic = kea<llmObservabilityTraceLogicType>([
(s) => [s.traceId],
(traceId): Breadcrumb[] => {
return [
{
key: 'LLMObservability',
name: 'LLM observability',
path: urls.llmObservability('dashboard'),
},
{
key: 'LLMObservability',
name: 'Traces',
Expand Down

0 comments on commit 8b13d86

Please sign in to comment.