diff --git a/frontend/src/scenes/settings/organization/VerifiedDomains/SSOSelect.tsx b/frontend/src/scenes/settings/organization/VerifiedDomains/SSOSelect.tsx
index 8b00924441155..d106adc4f8289 100644
--- a/frontend/src/scenes/settings/organization/VerifiedDomains/SSOSelect.tsx
+++ b/frontend/src/scenes/settings/organization/VerifiedDomains/SSOSelect.tsx
@@ -11,9 +11,16 @@ export interface SSOSelectInterface {
loading: boolean
onChange: (value: SSOProvider | '') => void
samlAvailable: boolean
+ disabledReason?: string | null
}
-export function SSOSelect({ value, loading, onChange, samlAvailable }: SSOSelectInterface): JSX.Element | null {
+export function SSOSelect({
+ value,
+ loading,
+ onChange,
+ samlAvailable,
+ disabledReason,
+}: SSOSelectInterface): JSX.Element | null {
const { preflight } = useValues(preflightLogic)
if (!preflight) {
@@ -46,7 +53,7 @@ export function SSOSelect({ value, loading, onChange, samlAvailable }: SSOSelect
value={value}
options={options}
loading={loading}
- disabledReason={loading ? 'Cannot change while loading' : undefined}
+ disabledReason={loading ? 'Cannot change while loading' : disabledReason}
fullWidth
onChange={onChange}
/>
diff --git a/frontend/src/scenes/settings/organization/VerifiedDomains/VerifiedDomains.tsx b/frontend/src/scenes/settings/organization/VerifiedDomains/VerifiedDomains.tsx
index aeedc82526592..f4c516a924ace 100644
--- a/frontend/src/scenes/settings/organization/VerifiedDomains/VerifiedDomains.tsx
+++ b/frontend/src/scenes/settings/organization/VerifiedDomains/VerifiedDomains.tsx
@@ -1,6 +1,9 @@
import { IconCheckCircle, IconInfo, IconLock, IconTrash, IconWarning } from '@posthog/icons'
import { useActions, useValues } from 'kea'
import { PayGateMini } from 'lib/components/PayGateMini/PayGateMini'
+import { RestrictionScope } from 'lib/components/RestrictedArea'
+import { useRestrictedArea } from 'lib/components/RestrictedArea'
+import { OrganizationMembershipLevel } from 'lib/constants'
import { IconExclamation, IconOffline } from 'lib/lemon-ui/icons'
import { LemonButton } from 'lib/lemon-ui/LemonButton'
import { More } from 'lib/lemon-ui/LemonButton/More'
@@ -27,6 +30,11 @@ export function VerifiedDomains(): JSX.Element {
const { verifiedDomainsLoading, updatingDomainLoading } = useValues(verifiedDomainsLogic)
const { setAddModalShown } = useActions(verifiedDomainsLogic)
+ const restrictionReason = useRestrictedArea({
+ minimumAccessLevel: OrganizationMembershipLevel.Admin,
+ scope: RestrictionScope.Organization,
+ })
+
return (
@@ -39,7 +47,7 @@ export function VerifiedDomains(): JSX.Element {
type="primary"
onClick={() => setAddModalShown(true)}
className="mt-4"
- disabledReason={verifiedDomainsLoading || updatingDomainLoading ? 'loading...' : null}
+ disabledReason={verifiedDomainsLoading || updatingDomainLoading ? 'loading...' : restrictionReason}
>
Add domain
@@ -60,6 +68,11 @@ function VerifiedDomainsTable(): JSX.Element {
useActions(verifiedDomainsLogic)
const { preflight } = useValues(preflightLogic)
+ const restrictionReason = useRestrictedArea({
+ minimumAccessLevel: OrganizationMembershipLevel.Admin,
+ scope: RestrictionScope.Organization,
+ })
+
const columns: LemonTableColumns = [
{
key: 'domain',
@@ -119,6 +132,7 @@ function VerifiedDomainsTable(): JSX.Element {
updateDomain({ id, jit_provisioning_enabled: checked })}
label={
{jit_provisioning_enabled ? 'Enabled' : 'Disabled'}
@@ -157,6 +171,7 @@ function VerifiedDomainsTable(): JSX.Element {
loading={updatingDomainLoading}
onChange={(val) => updateDomain({ id, sso_enforcement: val })}
samlAvailable={has_saml}
+ disabledReason={restrictionReason}
/>
) : (
Verify domain to enable
@@ -205,7 +220,7 @@ function VerifiedDomainsTable(): JSX.Element {
return is_verified ? (
<></>
) : (
- setVerifyModal(id)}>
+ setVerifyModal(id)} disabledReason={restrictionReason}>
Verify
)
@@ -224,8 +239,10 @@ function VerifiedDomainsTable(): JSX.Element {
setConfigureSAMLModalId(id)}
fullWidth
- disabled={!isSAMLAvailable}
- title={isSAMLAvailable ? undefined : 'Upgrade to enable SAML'}
+ disabledReason={
+ restrictionReason ||
+ (!isSAMLAvailable ? 'Upgrade to enable SAML' : undefined)
+ }
>
Configure SAML
@@ -249,6 +266,7 @@ function VerifiedDomainsTable(): JSX.Element {
}
fullWidth
icon={}
+ disabledReason={restrictionReason}
>
Remove domain
diff --git a/frontend/src/scenes/settings/user/UpdateEmailPreferences.tsx b/frontend/src/scenes/settings/user/UpdateEmailPreferences.tsx
index eac79ab2f51af..d6e6974fb1f39 100644
--- a/frontend/src/scenes/settings/user/UpdateEmailPreferences.tsx
+++ b/frontend/src/scenes/settings/user/UpdateEmailPreferences.tsx
@@ -1,13 +1,15 @@
-import { LemonSwitch } from '@posthog/lemon-ui'
+import { LemonSwitch, LemonTag } from '@posthog/lemon-ui'
import { useActions, useValues } from 'kea'
+import { organizationLogic } from 'scenes/organizationLogic'
import { userLogic } from 'scenes/userLogic'
export function UpdateEmailPreferences(): JSX.Element {
const { user, userLoading } = useValues(userLogic)
const { updateUser } = useActions(userLogic)
+ const { currentOrganization } = useValues(organizationLogic)
return (
-
+
+
+
Weekly digest
+
+ The weekly digest keeps you up to date with everything that's happening in your PostHog organizations.
+
+
+
{
+ user?.notification_settings &&
+ updateUser({
+ notification_settings: {
+ ...user?.notification_settings,
+ all_weekly_digest_disabled: !user?.notification_settings.all_weekly_digest_disabled,
+ },
+ })
+ }}
+ checked={!user?.notification_settings.all_weekly_digest_disabled}
+ disabled={userLoading}
+ bordered
+ label="Receive the weekly digests"
+ tooltip="This option applies to all organizations you belong to. If you want to opt out of digests for a specific organization, disable all the individual project settings below."
+ />
+ {!user?.notification_settings.all_weekly_digest_disabled ? (
+ <>
+ Individual project settings:
+
+ {currentOrganization?.teams?.map((team) => (
+ -
+
+ {
+ user?.notification_settings &&
+ updateUser({
+ notification_settings: {
+ ...user?.notification_settings,
+ project_weekly_digest_disabled: {
+ ...user.notification_settings
+ .project_weekly_digest_disabled,
+ [team.id]:
+ !user.notification_settings
+ .project_weekly_digest_disabled?.[team.id],
+ },
+ },
+ })
+ }}
+ checked={
+ !user?.notification_settings.project_weekly_digest_disabled?.[team.id]
+ }
+ disabled={
+ userLoading || user?.notification_settings.all_weekly_digest_disabled
+ }
+ bordered
+ label={
+ <>
+ {team.name}
+
+ id: {team.id.toString()}
+
+ </>
+ }
+ fullWidth
+ />
+
+
+ ))}
+
+ </>
+ ) : null}
+
)
}
diff --git a/frontend/src/scenes/surveys/SurveyAppearancePreview.tsx b/frontend/src/scenes/surveys/SurveyAppearancePreview.tsx
index 68a5e6f43c825..bb7d29b8c35a0 100644
--- a/frontend/src/scenes/surveys/SurveyAppearancePreview.tsx
+++ b/frontend/src/scenes/surveys/SurveyAppearancePreview.tsx
@@ -7,13 +7,13 @@ import { Survey } from '~/types'
import { NewSurvey } from './constants'
import { surveysLogic } from './surveysLogic'
-export function SurveyAppearancePreview({
- survey,
- previewPageIndex,
-}: {
+interface Props {
survey: Survey | NewSurvey
previewPageIndex: number
-}): JSX.Element {
+ onPreviewSubmit?: (res: string | string[] | number | null) => void
+}
+
+export function SurveyAppearancePreview({ survey, previewPageIndex, onPreviewSubmit = () => {} }: Props): JSX.Element {
const surveyPreviewRef = useRef<HTMLDivElement>(null)
const feedbackWidgetPreviewRef = useRef(null)
@@ -26,6 +26,7 @@ export function SurveyAppearancePreview({
parentElement: surveyPreviewRef.current,
previewPageIndex,
forceDisableHtml: !surveysHTMLAvailable,
+ onPreviewSubmit,
})
}
@@ -36,11 +37,10 @@ export function SurveyAppearancePreview({
forceDisableHtml: !surveysHTMLAvailable,
})
}
- }, [survey, previewPageIndex, surveysHTMLAvailable])
+ }, [survey, previewPageIndex, surveysHTMLAvailable, onPreviewSubmit])
return (
<>
-
</>
)
}
diff --git a/frontend/src/scenes/surveys/SurveyEdit.tsx b/frontend/src/scenes/surveys/SurveyEdit.tsx
index 776a7e25c1409..216df4368250d 100644
--- a/frontend/src/scenes/surveys/SurveyEdit.tsx
+++ b/frontend/src/scenes/surveys/SurveyEdit.tsx
@@ -112,7 +112,9 @@ export default function SurveyEdit(): JSX.Element {
},
]
- if (featureFlags[FEATURE_FLAGS.SURVEYS_ADAPTIVE_LIMITS]) {
+ const adaptiveLimitFFEnabled = featureFlags[FEATURE_FLAGS.SURVEYS_ADAPTIVE_LIMITS]
+
+ if (adaptiveLimitFFEnabled) {
surveyLimitOptions.push({
value: 'until_adaptive_limit',
label: 'Collect a certain number of surveys per day, week or month',
@@ -120,15 +122,16 @@ export default function SurveyEdit(): JSX.Element {
disabledReason: surveysAdaptiveLimitsDisabledReason,
} as unknown as LemonRadioOption<'until_stopped' | 'until_limit' | 'until_adaptive_limit'>)
}
+
useMemo(() => {
if (surveyUsesLimit) {
setDataCollectionType('until_limit')
- } else if (surveyUsesAdaptiveLimit) {
+ } else if (surveyUsesAdaptiveLimit && adaptiveLimitFFEnabled) {
setDataCollectionType('until_adaptive_limit')
} else {
setDataCollectionType('until_stopped')
}
- }, [surveyUsesLimit, surveyUsesAdaptiveLimit, setDataCollectionType])
+ }, [surveyUsesLimit, surveyUsesAdaptiveLimit, adaptiveLimitFFEnabled, setDataCollectionType])
if (survey.iteration_count && survey.iteration_count > 0) {
setSchedule('recurring')
diff --git a/frontend/src/scenes/surveys/SurveyFormAppearance.tsx b/frontend/src/scenes/surveys/SurveyFormAppearance.tsx
index 8eb1f9502a67f..32943b208aaea 100644
--- a/frontend/src/scenes/surveys/SurveyFormAppearance.tsx
+++ b/frontend/src/scenes/surveys/SurveyFormAppearance.tsx
@@ -20,7 +20,11 @@ export function SurveyFormAppearance({
}: SurveyFormAppearanceProps): JSX.Element {
return survey.type !== SurveyType.API ? (
-
+
handleSetSelectedPageIndex(previewPageIndex + 1)}
+ />
handleSetSelectedPageIndex(pageIndex)}
className="mt-4 whitespace-nowrap"
diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts
index 38647a1e9389b..cace92f08b5fb 100644
--- a/frontend/src/scenes/urls.ts
+++ b/frontend/src/scenes/urls.ts
@@ -108,7 +108,9 @@ export const urls = {
`/insights/${id}/subscriptions/${subscriptionId}`,
insightSharing: (id: InsightShortId): string => `/insights/${id}/sharing`,
savedInsights: (tab?: string): string => `/insights${tab ? `?tab=${tab}` : ''}`,
+
webAnalytics: (): string => `/web`,
+ webAnalyticsCoreWebVitals: (): string => `/web/core-web-vitals`,
replay: (
tab?: ReplayTabs,
@@ -157,8 +159,8 @@ export const urls = {
cohorts: (): string => '/cohorts',
experiment: (id: string | number): string => `/experiments/${id}`,
experiments: (): string => '/experiments',
- experimentsSavedMetrics: (): string => '/experiments/saved-metrics',
- experimentsSavedMetric: (id: string | number): string => `/experiments/saved-metrics/${id}`,
+ experimentsSharedMetrics: (): string => '/experiments/shared-metrics',
+ experimentsSharedMetric: (id: string | number): string => `/experiments/shared-metrics/${id}`,
featureFlags: (tab?: string): string => `/feature_flags${tab ? `?tab=${tab}` : ''}`,
featureFlag: (id: string | number): string => `/feature_flags/${id}`,
featureManagement: (id?: string | number): string => `/features${id ? `/${id}` : ''}`,
diff --git a/frontend/src/scenes/web-analytics/WebAnalyticsLiveUserCount.tsx b/frontend/src/scenes/web-analytics/WebAnalyticsLiveUserCount.tsx
index 12b9b64c86448..a74305754377d 100644
--- a/frontend/src/scenes/web-analytics/WebAnalyticsLiveUserCount.tsx
+++ b/frontend/src/scenes/web-analytics/WebAnalyticsLiveUserCount.tsx
@@ -9,8 +9,8 @@ export const WebAnalyticsLiveUserCount = (): JSX.Element | null => {
const { liveUserCount, liveUserUpdatedSecondsAgo } = useValues(liveEventsTableLogic)
const { currentTeam } = useValues(teamLogic)
+ // No data yet, or feature flag disabled
if (liveUserCount == null) {
- // No data yet, or feature flag disabled
return null
}
@@ -27,13 +27,10 @@ export const WebAnalyticsLiveUserCount = (): JSX.Element | null => {
const tooltip = `${usersOnlineString}${inTeamString}${updatedAgoString}`
return (
-
-
-
- {humanFriendlyLargeNumber(liveUserCount)} currently online
-
-
-
-
+
+
+ {humanFriendlyLargeNumber(liveUserCount)} currently online
+
+
)
}
diff --git a/frontend/src/scenes/web-analytics/WebAnalyticsModal.tsx b/frontend/src/scenes/web-analytics/WebAnalyticsModal.tsx
index 19aa3535d690b..ffc15ab87c651 100644
--- a/frontend/src/scenes/web-analytics/WebAnalyticsModal.tsx
+++ b/frontend/src/scenes/web-analytics/WebAnalyticsModal.tsx
@@ -43,7 +43,7 @@ export const WebAnalyticsModal = (): JSX.Element | null => {
query={modal.query}
insightProps={modal.insightProps}
showIntervalSelect={modal.showIntervalSelect}
- showPathCleaningControls={modal.showPathCleaningControls}
+ control={modal.control}
/>
diff --git a/frontend/src/scenes/web-analytics/WebDashboard.tsx b/frontend/src/scenes/web-analytics/WebDashboard.tsx
index 710577cfcf5f9..dc847404ea4a1 100644
--- a/frontend/src/scenes/web-analytics/WebDashboard.tsx
+++ b/frontend/src/scenes/web-analytics/WebDashboard.tsx
@@ -5,6 +5,7 @@ import { CompareFilter } from 'lib/components/CompareFilter/CompareFilter'
import { DateFilter } from 'lib/components/DateFilter/DateFilter'
import { VersionCheckerBanner } from 'lib/components/VersionChecker/VersionCheckerBanner'
import { FEATURE_FLAGS } from 'lib/constants'
+import { useWindowSize } from 'lib/hooks/useWindowSize'
import { IconOpenInNew } from 'lib/lemon-ui/icons'
import { LemonButton } from 'lib/lemon-ui/LemonButton'
import { LemonSegmentedSelect } from 'lib/lemon-ui/LemonSegmentedSelect/LemonSegmentedSelect'
@@ -18,6 +19,7 @@ import { WebAnalyticsRecordingsTile } from 'scenes/web-analytics/tiles/WebAnalyt
import { WebQuery } from 'scenes/web-analytics/tiles/WebAnalyticsTile'
import { WebAnalyticsHealthCheck } from 'scenes/web-analytics/WebAnalyticsHealthCheck'
import {
+ ProductTab,
QueryTile,
TabsTile,
TileId,
@@ -48,24 +50,27 @@ const Filters = (): JSX.Element => {
return (
-
+
+
{featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_PERIOD_COMPARISON] ? (
) : null}
+
+
+
+
+
-
-
-
)
}
@@ -92,7 +97,7 @@ const Tiles = (): JSX.Element => {
}
const QueryTileItem = ({ tile }: { tile: QueryTile }): JSX.Element => {
- const { query, title, layout, insightProps, showPathCleaningControls, showIntervalSelect, docs } = tile
+ const { query, title, layout, insightProps, control, showIntervalSelect, docs } = tile
const { openModal } = useActions(webAnalyticsLogic)
const { getNewInsightUrl } = useValues(webAnalyticsLogic)
@@ -142,7 +147,7 @@ const QueryTileItem = ({ tile }: { tile: QueryTile }): JSX.Element => {
{buttonsRow.length > 0 ?
{buttonsRow}
: null}
@@ -174,7 +179,7 @@ const TabsTileItem = ({ tile }: { tile: TabsTile }): JSX.Element => {
key={tab.id}
query={tab.query}
showIntervalSelect={tab.showIntervalSelect}
- showPathCleaningControls={tab.showPathCleaningControls}
+ control={tab.control}
insightProps={tab.insightProps}
/>
),
@@ -329,15 +334,42 @@ export const LearnMorePopover = ({ url, title, description }: LearnMorePopoverPr
}
export const WebAnalyticsDashboard = (): JSX.Element => {
+ const { isWindowLessThan } = useWindowSize()
+ const isMobile = isWindowLessThan('sm')
+
+ const { productTab } = useValues(webAnalyticsLogic)
+ const { setProductTab } = useActions(webAnalyticsLogic)
+
+ const { featureFlags } = useValues(featureFlagLogic)
+
return (
-
+
+
+
+ {featureFlags[FEATURE_FLAGS.CORE_WEB_VITALS] && (
+
+ )}
+
+
+
diff --git a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx
index b5b04d6b419b5..3106de96c0946 100644
--- a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx
+++ b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx
@@ -1,5 +1,5 @@
-import { IconGear, IconTrending } from '@posthog/icons'
-import { Link, Tooltip } from '@posthog/lemon-ui'
+import { IconTrending } from '@posthog/icons'
+import { Tooltip } from '@posthog/lemon-ui'
import clsx from 'clsx'
import { useActions, useValues } from 'kea'
import { getColorVar } from 'lib/colors'
@@ -127,6 +127,8 @@ const BreakdownValueTitle: QueryContextColumnTitleComponent = (props) => {
return <>End Path</>
case WebStatsBreakdown.ExitClick:
return <>Exit Click</>
+ case WebStatsBreakdown.ScreenName:
+ return <>Screen Name</>
case WebStatsBreakdown.InitialChannelType:
return <>Initial Channel Type</>
case WebStatsBreakdown.InitialReferringDomain:
@@ -258,6 +260,8 @@ export const webStatsBreakdownToPropertyName = (
return { key: '$end_pathname', type: PropertyFilterType.Session }
case WebStatsBreakdown.ExitClick:
return { key: '$last_external_click_url', type: PropertyFilterType.Session }
+ case WebStatsBreakdown.ScreenName:
+ return { key: '$screen_name', type: PropertyFilterType.Event }
case WebStatsBreakdown.InitialChannelType:
return { key: '$channel_type', type: PropertyFilterType.Session }
case WebStatsBreakdown.InitialReferringDomain:
@@ -481,14 +485,14 @@ export const WebStatsTableTile = ({
query,
breakdownBy,
insightProps,
- showPathCleaningControls,
+ control,
}: {
query: DataTableNode
breakdownBy: WebStatsBreakdown
insightProps: InsightLogicProps
- showPathCleaningControls?: boolean
+ control?: JSX.Element
}): JSX.Element => {
- const { togglePropertyFilter, setIsPathCleaningEnabled } = useActions(webAnalyticsLogic)
+ const { togglePropertyFilter } = useActions(webAnalyticsLogic)
const { isPathCleaningEnabled } = useValues(webAnalyticsLogic)
const { key, type } = webStatsBreakdownToPropertyName(breakdownBy) || {}
@@ -536,47 +540,9 @@ export const WebStatsTableTile = ({
}
}, [onClick, insightProps])
- const pathCleaningSettingsUrl = urls.settings('project-product-analytics', 'path-cleaning')
return (
- {showPathCleaningControls && (
-
-
-
-
- Check{' '}
-
- our path cleaning rules documentation
- {' '}
- to learn more about path cleaning
- >
- }
- interactive
- >
- Enable path cleaning
-
- }
- type="tertiary"
- status="alt"
- size="small"
- noPadding={true}
- tooltip="Edit path cleaning settings"
- to={pathCleaningSettingsUrl}
- />
-
- }
- checked={!!isPathCleaningEnabled}
- onChange={setIsPathCleaningEnabled}
- className="h-full"
- />
-
-
- )}
+ {control != null && {control}
}
)
@@ -692,12 +658,12 @@ export const WebExternalClicksTile = ({
export const WebQuery = ({
query,
showIntervalSelect,
- showPathCleaningControls,
+ control,
insightProps,
}: {
query: QuerySchema
showIntervalSelect?: boolean
- showPathCleaningControls?: boolean
+ control?: JSX.Element
insightProps: InsightLogicProps
}): JSX.Element => {
if (query.kind === NodeKind.DataTableNode && query.source.kind === NodeKind.WebStatsTableQuery) {
@@ -706,7 +672,7 @@ export const WebQuery = ({
query={query}
breakdownBy={query.source.breakdownBy}
insightProps={insightProps}
- showPathCleaningControls={showPathCleaningControls}
+ control={control}
/>
)
}
diff --git a/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx b/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx
index 65b372d40a4b9..d2a503ea0776e 100644
--- a/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx
+++ b/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx
@@ -1,3 +1,4 @@
+import { IconGear } from '@posthog/icons'
import { actions, afterMount, BreakPointFunction, connect, kea, listeners, path, reducers, selectors } from 'kea'
import { loaders } from 'kea-loaders'
import { actionToUrl, urlToAction } from 'kea-router'
@@ -5,10 +6,14 @@ import { windowValues } from 'kea-window-values'
import api from 'lib/api'
import { FEATURE_FLAGS, RETENTION_FIRST_TIME, STALE_EVENT_SECONDS } from 'lib/constants'
import { dayjs } from 'lib/dayjs'
+import { LemonButton } from 'lib/lemon-ui/LemonButton'
+import { LemonSwitch } from 'lib/lemon-ui/LemonSwitch'
import { Link, PostHogComDocsURL } from 'lib/lemon-ui/Link/Link'
+import { Tooltip } from 'lib/lemon-ui/Tooltip'
import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
import { getDefaultInterval, isNotNil, objectsEqual, updateDatesWithInterval } from 'lib/utils'
import { errorTrackingQuery } from 'scenes/error-tracking/queries'
+import { Scene } from 'scenes/sceneTypes'
import { urls } from 'scenes/urls'
import { hogqlQuery } from '~/queries/query'
@@ -31,6 +36,7 @@ import {
import { isWebAnalyticsPropertyFilters } from '~/queries/schema-guards'
import {
BaseMathType,
+ Breadcrumb,
ChartDisplayType,
EventDefinition,
EventDefinitionType,
@@ -73,6 +79,11 @@ export enum TileId {
GOALS = 'GOALS',
}
+export enum ProductTab {
+ ANALYTICS = 'analytics',
+ CORE_WEB_VITALS = 'core-web-vitals',
+}
+
const loadPriorityMap: Record<TileId, number> = {
[TileId.OVERVIEW]: 1,
[TileId.GRAPHS]: 2,
@@ -86,7 +97,7 @@ const loadPriorityMap: Record<TileId, number> = {
[TileId.GOALS]: 10,
}
-interface BaseTile {
+export interface BaseTile {
tileId: TileId
layout: WebTileLayout
docs?: Docs
@@ -97,12 +108,13 @@ export interface Docs {
title: string
description: string | JSX.Element
}
+
export interface QueryTile extends BaseTile {
kind: 'query'
title?: string
query: QuerySchema
showIntervalSelect?: boolean
- showPathCleaningControls?: boolean
+ control?: JSX.Element
insightProps: InsightLogicProps
canOpenModal: boolean
canOpenInsight?: boolean
@@ -114,7 +126,7 @@ export interface TabsTileTab {
linkText: string
query: QuerySchema
showIntervalSelect?: boolean
- showPathCleaningControls?: boolean
+ control?: JSX.Element
insightProps: InsightLogicProps
canOpenModal?: boolean
canOpenInsight?: boolean
@@ -146,7 +158,7 @@ export interface WebDashboardModalQuery {
query: QuerySchema
insightProps: InsightLogicProps
showIntervalSelect?: boolean
- showPathCleaningControls?: boolean
+ control?: JSX.Element
canOpenInsight?: boolean
}
@@ -182,6 +194,7 @@ export enum PathTab {
INITIAL_PATH = 'INITIAL_PATH',
END_PATH = 'END_PATH',
EXIT_CLICK = 'EXIT_CLICK',
+ SCREEN_NAME = 'SCREEN_NAME',
}
export enum GeographyTab {
@@ -193,27 +206,27 @@ export enum GeographyTab {
LANGUAGES = 'LANGUAGES',
}
+export enum ConversionGoalWarning {
+ CustomEventWithNoSessionId = 'CustomEventWithNoSessionId',
+}
+
export interface WebAnalyticsStatusCheck {
isSendingPageViews: boolean
isSendingPageLeaves: boolean
isSendingPageLeavesScroll: boolean
}
-export enum ConversionGoalWarning {
- CustomEventWithNoSessionId = 'CustomEventWithNoSessionId',
-}
-
-export const GEOIP_PLUGIN_URLS = [
+const GEOIP_PLUGIN_URLS = [
'https://github.com/PostHog/posthog-plugin-geoip',
'https://www.npmjs.com/package/@posthog/geoip-plugin',
]
export const WEB_ANALYTICS_DATA_COLLECTION_NODE_ID = 'web-analytics'
-export const initialWebAnalyticsFilter = [] as WebAnalyticsPropertyFilters
-const initialDateFrom = '-7d' as string | null
-const initialDateTo = null as string | null
-const initialInterval = getDefaultInterval(initialDateFrom, initialDateTo)
+const INITIAL_WEB_ANALYTICS_FILTER = [] as WebAnalyticsPropertyFilters
+const INITIAL_DATE_FROM = '-7d' as string | null
+const INITIAL_DATE_TO = null as string | null
+const INITIAL_INTERVAL = getDefaultInterval(INITIAL_DATE_FROM, INITIAL_DATE_TO)
const getDashboardItemId = (section: TileId, tab: string | undefined, isModal?: boolean): `new-${string}` => {
// pretend to be a new-AdHoc to get the correct behaviour elsewhere
@@ -240,24 +253,11 @@ export const webAnalyticsLogic = kea([
pathTab?: string
geographyTab?: string
}
- ) => ({
- type,
- key,
- value,
- tabChange,
- }),
- setGraphsTab: (tab: string) => ({
- tab,
- }),
- setSourceTab: (tab: string) => ({
- tab,
- }),
- setDeviceTab: (tab: string) => ({
- tab,
- }),
- setPathTab: (tab: string) => ({
- tab,
- }),
+ ) => ({ type, key, value, tabChange }),
+ setGraphsTab: (tab: string) => ({ tab }),
+ setSourceTab: (tab: string) => ({ tab }),
+ setDeviceTab: (tab: string) => ({ tab }),
+ setPathTab: (tab: string) => ({ tab }),
setGeographyTab: (tab: string) => ({ tab }),
setDates: (dateFrom: string | null, dateTo: string | null) => ({ dateFrom, dateTo }),
setInterval: (interval: IntervalType) => ({ interval }),
@@ -267,26 +267,19 @@ export const webAnalyticsLogic = kea([
interval,
}),
setIsPathCleaningEnabled: (isPathCleaningEnabled: boolean) => ({ isPathCleaningEnabled }),
- setShouldFilterTestAccounts: (shouldFilterTestAccounts: boolean) => ({
- shouldFilterTestAccounts,
- }),
- setShouldStripQueryParams: (shouldStripQueryParams: boolean) => ({
- shouldStripQueryParams,
- }),
+ setShouldFilterTestAccounts: (shouldFilterTestAccounts: boolean) => ({ shouldFilterTestAccounts }),
+ setShouldStripQueryParams: (shouldStripQueryParams: boolean) => ({ shouldStripQueryParams }),
setConversionGoal: (conversionGoal: WebAnalyticsConversionGoal | null) => ({ conversionGoal }),
- openModal: (tileId: TileId, tabId?: string) => {
- return { tileId, tabId }
- },
+ openModal: (tileId: TileId, tabId?: string) => ({ tileId, tabId }),
closeModal: () => true,
- openAsNewInsight: (tileId: TileId, tabId?: string) => {
- return { tileId, tabId }
- },
+ openAsNewInsight: (tileId: TileId, tabId?: string) => ({ tileId, tabId }),
setConversionGoalWarning: (warning: ConversionGoalWarning | null) => ({ warning }),
setCompareFilter: (compareFilter: CompareFilter) => ({ compareFilter }),
+ setProductTab: (tab: ProductTab) => ({ tab }),
}),
reducers({
webAnalyticsFilters: [
- initialWebAnalyticsFilter,
+ INITIAL_WEB_ANALYTICS_FILTER,
persistConfig,
{
setWebAnalyticsFilters: (_, { webAnalyticsFilters }) => webAnalyticsFilters,
@@ -325,7 +318,7 @@ export const webAnalyticsLogic = kea([
return f
}
const oldValue = (Array.isArray(f.value) ? f.value : [f.value]).filter(isNotNil)
- let newValue: (string | number)[]
+ let newValue: (string | number | bigint)[]
if (oldValue.includes(value)) {
// If there are multiple values for this filter, reduce that to just the one being clicked
if (oldValue.length > 1) {
@@ -416,9 +409,9 @@ export const webAnalyticsLogic = kea([
],
dateFilter: [
{
- dateFrom: initialDateFrom,
- dateTo: initialDateTo,
- interval: initialInterval,
+ dateFrom: INITIAL_DATE_FROM,
+ dateTo: INITIAL_DATE_TO,
+ interval: INITIAL_INTERVAL,
},
persistConfig,
{
@@ -437,8 +430,8 @@ export const webAnalyticsLogic = kea([
},
setDatesAndInterval: (_, { dateTo, dateFrom, interval }) => {
if (!dateFrom && !dateTo) {
- dateFrom = initialDateFrom
- dateTo = initialDateTo
+ dateFrom = INITIAL_DATE_FROM
+ dateTo = INITIAL_DATE_TO
}
return {
dateTo,
@@ -482,8 +475,36 @@ export const webAnalyticsLogic = kea([
setCompareFilter: (_, { compareFilter }) => compareFilter,
},
],
+ productTab: [
+ ProductTab.ANALYTICS as ProductTab,
+ {
+ setProductTab: (_, { tab }) => tab,
+ },
+ ],
}),
selectors(({ actions, values }) => ({
+ breadcrumbs: [
+ (s) => [s.productTab],
+ (productTab: ProductTab): Breadcrumb[] => {
+ const breadcrumbs: Breadcrumb[] = [
+ {
+ key: Scene.WebAnalytics,
+ name: `Web Analytics`,
+ path: urls.webAnalytics(),
+ },
+ ]
+
+ if (productTab === ProductTab.CORE_WEB_VITALS) {
+ breadcrumbs.push({
+ key: Scene.WebAnalyticsCoreWebVitals,
+ name: `Core Web Vitals`,
+ path: urls.webAnalyticsCoreWebVitals(),
+ })
+ }
+
+ return breadcrumbs
+ },
+ ],
graphsTab: [(s) => [s._graphsTab], (graphsTab: string | null) => graphsTab || GraphsTab.UNIQUE_USERS],
sourceTab: [(s) => [s._sourceTab], (sourceTab: string | null) => sourceTab || SourceTab.CHANNEL],
deviceTab: [(s) => [s._deviceTab], (deviceTab: string | null) => deviceTab || DeviceTab.DEVICE_TYPE],
@@ -526,8 +547,16 @@ export const webAnalyticsLogic = kea([
}),
],
tiles: [
- (s) => [s.tabs, s.controls, s.filters, () => values.featureFlags, () => values.isGreaterThanMd],
+ (s) => [
+ s.productTab,
+ s.tabs,
+ s.controls,
+ s.filters,
+ () => values.featureFlags,
+ () => values.isGreaterThanMd,
+ ],
(
+ productTab,
{ graphsTab, sourceTab, deviceTab, pathTab, geographyTab, shouldShowGeographyTile },
{ isPathCleaningEnabled, filterTestAccounts, shouldStripQueryParams },
{
@@ -540,41 +569,29 @@ export const webAnalyticsLogic = kea([
featureFlags,
isGreaterThanMd
): WebDashboardTile[] => {
- const dateRange = {
- date_from: dateFrom,
- date_to: dateTo,
- }
-
- const sampling = {
- enabled: false,
- forceSamplingRate: { numerator: 1, denominator: 10 },
- }
-
- const createInsightProps = (tile: TileId, tab?: string): InsightLogicProps => {
- return {
- dashboardItemId: getDashboardItemId(tile, tab, false),
- loadPriority: loadPriorityMap[tile],
- dataNodeCollectionId: WEB_ANALYTICS_DATA_COLLECTION_NODE_ID,
- }
- }
+ const dateRange = { date_from: dateFrom, date_to: dateTo }
+ const sampling = { enabled: false, forceSamplingRate: { numerator: 1, denominator: 10 } }
const uniqueUserSeries: EventsNode = {
- event: '$pageview',
+ event: featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_FOR_MOBILE] ? '$screen' : '$pageview',
kind: NodeKind.EventsNode,
math: BaseMathType.UniqueUsers,
name: 'Pageview',
custom_name: 'Unique visitors',
}
+
const pageViewsSeries = {
...uniqueUserSeries,
math: BaseMathType.TotalCount,
- custom_name: 'Page views',
+ custom_name: featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_FOR_MOBILE] ? 'Screen Views' : 'Page views',
}
+
const sessionsSeries = {
...uniqueUserSeries,
math: BaseMathType.UniqueSessions,
custom_name: 'Sessions',
}
+
const uniqueConversionsSeries: ActionsNode | EventsNode | undefined = !conversionGoal
? undefined
: 'actionId' in conversionGoal
@@ -601,6 +618,14 @@ export const webAnalyticsLogic = kea([
}
: undefined
+ const createInsightProps = (tile: TileId, tab?: string): InsightLogicProps => {
+ return {
+ dashboardItemId: getDashboardItemId(tile, tab, false),
+ loadPriority: loadPriorityMap[tile],
+ dataNodeCollectionId: WEB_ANALYTICS_DATA_COLLECTION_NODE_ID,
+ }
+ }
+
const createGraphsTrendsTab = (
id: GraphsTab,
title: string,
@@ -681,6 +706,67 @@ export const webAnalyticsLogic = kea([
}
}
+ // TODO: Use actual web vitals tab, this is just a placeholder
+ if (featureFlags[FEATURE_FLAGS.CORE_WEB_VITALS] && productTab === ProductTab.CORE_WEB_VITALS) {
+ return [
+ {
+ kind: 'query',
+ tileId: TileId.OVERVIEW,
+ layout: {
+ colSpanClassName: 'md:col-span-full',
+ orderWhenLargeClassName: 'xxl:order-0',
+ },
+ query: {
+ kind: NodeKind.WebOverviewQuery,
+ properties: webAnalyticsFilters,
+ dateRange,
+ sampling,
+ compareFilter,
+ filterTestAccounts,
+ conversionGoal,
+ includeLCPScore: true,
+ },
+ insightProps: createInsightProps(TileId.OVERVIEW),
+ canOpenModal: false,
+ },
+ ]
+ }
+
+ const pathCleaningControl = (
+
+
+ Check{' '}
+
+ our path cleaning rules documentation
+ {' '}
+ to learn more about path cleaning
+ >
+ }
+ interactive
+ >
+ Enable path cleaning
+
+ }
+ type="tertiary"
+ status="alt"
+ size="small"
+ noPadding={true}
+ tooltip="Edit path cleaning settings"
+ to={urls.settings('project-product-analytics', 'path-cleaning')}
+ />
+
+ }
+ checked={!!isPathCleaningEnabled}
+ onChange={(value) => actions.setIsPathCleaningEnabled(value)}
+ className="h-full"
+ />
+ )
+
const allTiles: (WebDashboardTile | null)[] = [
{
kind: 'query',
@@ -766,150 +852,163 @@ export const webAnalyticsLogic = kea([
},
activeTabId: pathTab,
setTabId: actions.setPathTab,
- tabs: (
- [
- createTableTab(
- TileId.PATHS,
- PathTab.PATH,
- 'Paths',
- 'Path',
- WebStatsBreakdown.Page,
- {
- includeScrollDepth: false, // TODO needs some perf work before it can be enabled
- includeBounceRate: true,
- doPathCleaning: !!isPathCleaningEnabled,
- },
- {
- showPathCleaningControls: true,
- docs: {
- url: 'https://posthog.com/docs/web-analytics/dashboard#paths',
- title: 'Paths',
- description: (
-
-
- In this view you can validate all of the paths that were
- accessed in your application, regardless of when they were
- accessed through the lifetime of a user session.
-
- {conversionGoal ? (
-
- The conversion rate is the percentage of users who completed
- the conversion goal in this specific path.
-
- ) : (
-
- The{' '}
-
- bounce rate
- {' '}
- indicates the percentage of users who left your page
- immediately after visiting without capturing any event.
-
- )}
-
- ),
- },
- }
- ),
- createTableTab(
- TileId.PATHS,
- PathTab.INITIAL_PATH,
- 'Entry paths',
- 'Entry path',
- WebStatsBreakdown.InitialPage,
- {
- includeBounceRate: true,
- includeScrollDepth: false,
- doPathCleaning: !!isPathCleaningEnabled,
- },
- {
- showPathCleaningControls: true,
- docs: {
- url: 'https://posthog.com/docs/web-analytics/dashboard#paths',
- title: 'Entry Path',
- description: (
-
-
- Entry paths are the paths a user session started, i.e. the first
- path they saw when they opened your website.
-
- {conversionGoal && (
-
- The conversion rate is the percentage of users who completed
- the conversion goal after the first path in their session
- being this path.
-
- )}
-
- ),
- },
- }
- ),
- createTableTab(
- TileId.PATHS,
- PathTab.END_PATH,
- 'End paths',
- 'End path',
- WebStatsBreakdown.ExitPage,
- {
- includeBounceRate: false,
- includeScrollDepth: false,
- doPathCleaning: !!isPathCleaningEnabled,
- },
- {
- showPathCleaningControls: true,
- docs: {
- url: 'https://posthog.com/docs/web-analytics/dashboard#paths',
- title: 'End Path',
- description: (
-
- End paths are the last path a user visited before their session
- ended, i.e. the last path they saw before leaving your
- website/closing the browser/turning their computer off.
-
- ),
- },
- }
- ),
- {
- id: PathTab.EXIT_CLICK,
- title: 'Outbound link clicks',
- linkText: 'Outbound clicks',
- query: {
- full: true,
- kind: NodeKind.DataTableNode,
- source: {
- kind: NodeKind.WebExternalClicksTableQuery,
- properties: webAnalyticsFilters,
- dateRange,
- compareFilter,
- sampling,
- limit: 10,
- filterTestAccounts,
- conversionGoal: featureFlags[
- FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOAL_FILTERS
- ]
- ? conversionGoal
- : undefined,
- stripQueryParams: shouldStripQueryParams,
- },
- embedded: false,
- columns: ['url', 'visitors', 'clicks'],
- },
- insightProps: createInsightProps(TileId.PATHS, PathTab.END_PATH),
- canOpenModal: true,
- docs: {
- title: 'Outbound Clicks',
- description: (
-
- You'll be able to verify when someone leaves your website by clicking an
- outbound link (to a separate domain)
-
- ),
- },
- },
- ] as (TabsTileTab | undefined)[]
- ).filter(isNotNil),
+ tabs: featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_FOR_MOBILE]
+ ? [
+ createTableTab(
+ TileId.PATHS,
+ PathTab.SCREEN_NAME,
+ 'Screens',
+ 'Screen',
+ WebStatsBreakdown.ScreenName,
+ {},
+ {}
+ ),
+ ]
+ : (
+ [
+ createTableTab(
+ TileId.PATHS,
+ PathTab.PATH,
+ 'Paths',
+ 'Path',
+ WebStatsBreakdown.Page,
+ {
+ includeScrollDepth: false, // TODO needs some perf work before it can be enabled
+ includeBounceRate: true,
+ doPathCleaning: !!isPathCleaningEnabled,
+ },
+ {
+ control: pathCleaningControl,
+ docs: {
+ url: 'https://posthog.com/docs/web-analytics/dashboard#paths',
+ title: 'Paths',
+ description: (
+
+
+ In this view you can validate all of the paths that were
+ accessed in your application, regardless of when they were
+ accessed through the lifetime of a user session.
+
+ {conversionGoal ? (
+
+ The conversion rate is the percentage of users who
+ completed the conversion goal in this specific path.
+
+ ) : (
+
+ The{' '}
+
+ bounce rate
+ {' '}
+ indicates the percentage of users who left your page
+ immediately after visiting without capturing any
+ event.
+
+ )}
+
+ ),
+ },
+ }
+ ),
+ createTableTab(
+ TileId.PATHS,
+ PathTab.INITIAL_PATH,
+ 'Entry paths',
+ 'Entry path',
+ WebStatsBreakdown.InitialPage,
+ {
+ includeBounceRate: true,
+ includeScrollDepth: false,
+ doPathCleaning: !!isPathCleaningEnabled,
+ },
+ {
+ control: pathCleaningControl,
+ docs: {
+ url: 'https://posthog.com/docs/web-analytics/dashboard#paths',
+ title: 'Entry Path',
+ description: (
+
+
+ Entry paths are the paths a user session started, i.e. the
+ first path they saw when they opened your website.
+
+ {conversionGoal && (
+
+ The conversion rate is the percentage of users who
+ completed the conversion goal after the first path in
+ their session being this path.
+
+ )}
+
+ ),
+ },
+ }
+ ),
+ createTableTab(
+ TileId.PATHS,
+ PathTab.END_PATH,
+ 'End paths',
+ 'End path',
+ WebStatsBreakdown.ExitPage,
+ {
+ includeBounceRate: false,
+ includeScrollDepth: false,
+ doPathCleaning: !!isPathCleaningEnabled,
+ },
+ {
+ control: pathCleaningControl,
+ docs: {
+ url: 'https://posthog.com/docs/web-analytics/dashboard#paths',
+ title: 'End Path',
+ description: (
+
+ End paths are the last path a user visited before their
+ session ended, i.e. the last path they saw before leaving your
+ website/closing the browser/turning their computer off.
+
+ ),
+ },
+ }
+ ),
+ {
+ id: PathTab.EXIT_CLICK,
+ title: 'Outbound link clicks',
+ linkText: 'Outbound clicks',
+ query: {
+ full: true,
+ kind: NodeKind.DataTableNode,
+ source: {
+ kind: NodeKind.WebExternalClicksTableQuery,
+ properties: webAnalyticsFilters,
+ dateRange,
+ compareFilter,
+ sampling,
+ limit: 10,
+ filterTestAccounts,
+ conversionGoal: featureFlags[
+ FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOAL_FILTERS
+ ]
+ ? conversionGoal
+ : undefined,
+ stripQueryParams: shouldStripQueryParams,
+ },
+ embedded: false,
+ columns: ['url', 'visitors', 'clicks'],
+ },
+ insightProps: createInsightProps(TileId.PATHS, PathTab.END_PATH),
+ canOpenModal: true,
+ docs: {
+ title: 'Outbound Clicks',
+ description: (
+
+ You'll be able to verify when someone leaves your website by
+ clicking an outbound link (to a separate domain)
+
+ ),
+ },
+ },
+ ] as (TabsTileTab | undefined)[]
+ ).filter(isNotNil),
},
{
kind: 'tabs',
@@ -929,6 +1028,20 @@ export const webAnalyticsLogic = kea([
WebStatsBreakdown.InitialChannelType,
{},
{
+ control: (
+
+ Customize channel types
+ }
+ type="tertiary"
+ status="alt"
+ size="small"
+ noPadding={true}
+ tooltip="Customize channel types"
+ to={urls.settings('environment-web-analytics', 'channel-type')}
+ />
+
+ ),
docs: {
url: 'https://posthog.com/docs/data/channel-type',
title: 'Channels',
@@ -1263,8 +1376,8 @@ export const webAnalyticsLogic = kea([
users who return to perform any event in the following weeks.
- You want the numbers numbers to be the highest possible, suggesting
- that people that come to your page continue coming to your page - and
+ You want the numbers to be the highest possible, suggesting that
+ people that come to your page continue coming to your page - and
performing an actions. Also, the further down the table the higher the
numbers should be (or at least as high), which would indicate that
you're either increasing or keeping your retention at the same level.
@@ -1386,7 +1499,7 @@ export const webAnalyticsLogic = kea([
return null
}
const { tileId, tabId } = modalTileAndTab
- const tile = tiles.find((tile) => tile.tileId === tileId)
+ const tile: WebDashboardTile | undefined = tiles.find((tile) => tile.tileId === tileId)
if (!tile) {
return null
}
@@ -1419,7 +1532,7 @@ export const webAnalyticsLogic = kea([
tabId,
title: tab.title,
showIntervalSelect: tab.showIntervalSelect,
- showPathCleaningControls: tab.showPathCleaningControls,
+ control: tab.control,
insightProps: {
dashboardItemId: getDashboardItemId(tileId, tabId, true),
loadPriority: 0,
@@ -1434,7 +1547,7 @@ export const webAnalyticsLogic = kea([
tileId,
title: tile.title,
showIntervalSelect: tile.showIntervalSelect,
- showPathCleaningControls: tile.showPathCleaningControls,
+ control: tile.control,
insightProps: {
dashboardItemId: getDashboardItemId(tileId, undefined, true),
loadPriority: 0,
@@ -1536,10 +1649,11 @@ export const webAnalyticsLogic = kea([
return query
}
- const tile = tiles.find((tile) => tile.tileId === tileId)
+ const tile: WebDashboardTile | undefined = tiles.find((tile) => tile.tileId === tileId)
if (!tile) {
return undefined
}
+
if (tile.kind === 'tabs') {
const tab = tile.tabs.find((tab) => tab.id === tabId)
if (!tab) {
@@ -1664,6 +1778,7 @@ export const webAnalyticsLogic = kea([
isPathCleaningEnabled,
shouldFilterTestAccounts,
compareFilter,
+ productTab,
} = values
const urlParams = new URLSearchParams()
@@ -1677,7 +1792,7 @@ export const webAnalyticsLogic = kea([
urlParams.set('conversionGoal.customEventName', conversionGoal.customEventName)
}
}
- if (dateFrom !== initialDateFrom || dateTo !== initialDateTo || interval !== initialInterval) {
+ if (dateFrom !== INITIAL_DATE_FROM || dateTo !== INITIAL_DATE_TO || interval !== INITIAL_INTERVAL) {
urlParams.set('date_from', dateFrom ?? '')
urlParams.set('date_to', dateTo ?? '')
urlParams.set('interval', interval ?? '')
@@ -1706,7 +1821,12 @@ export const webAnalyticsLogic = kea([
if (compareFilter) {
urlParams.set('compare_filter', JSON.stringify(compareFilter))
}
- return `/web?${urlParams.toString()}`
+ if (productTab !== ProductTab.ANALYTICS) {
+ urlParams.set('product_tab', productTab)
+ }
+
+ const basePath = productTab === ProductTab.CORE_WEB_VITALS ? '/web/core-web-vitals' : '/web'
+ return `${basePath}${urlParams.toString() ? '?' + urlParams.toString() : ''}`
}
return {
@@ -1721,12 +1841,13 @@ export const webAnalyticsLogic = kea([
setPathTab: stateToUrl,
setGeographyTab: stateToUrl,
setCompareFilter: stateToUrl,
+ setProductTab: stateToUrl,
}
}),
- urlToAction(({ actions, values }) => ({
- '/web': (
- _,
+ urlToAction(({ actions, values }) => {
+ const toAction = (
+ { productTab = ProductTab.ANALYTICS }: { productTab?: ProductTab },
{
filters,
'conversionGoal.actionId': conversionGoalActionId,
@@ -1742,8 +1863,8 @@ export const webAnalyticsLogic = kea([
path_cleaning,
filter_test_accounts,
compare_filter,
- }
- ) => {
+ }: Record
+ ): void => {
const parsedFilters = isWebAnalyticsPropertyFilters(filters) ? filters : undefined
if (parsedFilters && !objectsEqual(parsedFilters, values.webAnalyticsFilters)) {
@@ -1791,8 +1912,14 @@ export const webAnalyticsLogic = kea([
if (compare_filter && !objectsEqual(compare_filter, values.compareFilter)) {
actions.setCompareFilter(compare_filter)
}
- },
- })),
+ if (productTab && productTab !== values.productTab) {
+ actions.setProductTab(productTab)
+ }
+ }
+
+ return { '/web': toAction, '/web/:productTab': toAction }
+ }),
+
listeners(({ values, actions }) => {
const checkGraphsTabIsCompatibleWithConversionGoal = (
tab: string,
diff --git a/frontend/src/toolbar/ToolbarApp.tsx b/frontend/src/toolbar/ToolbarApp.tsx
index 64e863b2a01cd..80e43b38868a2 100644
--- a/frontend/src/toolbar/ToolbarApp.tsx
+++ b/frontend/src/toolbar/ToolbarApp.tsx
@@ -4,7 +4,6 @@ import { useRef, useState } from 'react'
import root from 'react-shadow'
import { Slide, ToastContainer } from 'react-toastify'
-import { themeLogic } from '~/layout/navigation-3000/themeLogic'
import { toolbarConfigLogic } from '~/toolbar/toolbarConfigLogic'
import { ToolbarContainer } from '~/toolbar/ToolbarContainer'
import { ToolbarProps } from '~/types'
@@ -15,7 +14,7 @@ type HTMLElementWithShadowRoot = HTMLElement & { shadowRoot: ShadowRoot }
export function ToolbarApp(props: ToolbarProps = {}): JSX.Element {
const { apiURL } = useValues(toolbarConfigLogic(props))
- const { isDarkModeOn } = useValues(themeLogic)
+
const shadowRef = useRef(null)
const [didLoadStyles, setDidLoadStyles] = useState(false)
@@ -62,7 +61,6 @@ export function ToolbarApp(props: ToolbarProps = {}): JSX.Element {
closeOnClick={false}
draggable={false}
position="bottom-center"
- theme={isDarkModeOn ? 'dark' : 'light'}
/>
</>
diff --git a/frontend/src/types.ts b/frontend/src/types.ts
index 91062d3976ab5..e638db249c396 100644
--- a/frontend/src/types.ts
+++ b/frontend/src/types.ts
@@ -267,7 +267,11 @@ export type UserTheme = 'light' | 'dark' | 'system'
/** Full User model. */
export interface UserType extends UserBaseType {
date_joined: string
- notification_settings: NotificationSettings
+ notification_settings: {
+ plugin_disabled: boolean
+ project_weekly_digest_disabled: Record<number, boolean>
+ all_weekly_digest_disabled: boolean
+ }
events_column_config: ColumnConfig
anonymize_data: boolean
toolbar_mode: 'disabled' | 'toolbar'
@@ -323,6 +327,8 @@ export interface HedgehogConfig extends MinimalHedgehogConfig {
export interface NotificationSettings {
plugin_disabled: boolean
+ project_weekly_digest_disabled: Record<number, boolean>
+ all_weekly_digest_disabled: boolean
}
export interface PluginAccess {
@@ -657,7 +663,7 @@ export interface ToolbarProps extends ToolbarParams {
export type PathCleaningFilter = { alias?: string; regex?: string }
-export type PropertyFilterValue = string | number | (string | number)[] | null
+export type PropertyFilterValue = string | number | bigint | (string | number | bigint)[] | null
/** Sync with plugin-server/src/types.ts */
export enum PropertyOperator {
@@ -703,7 +709,7 @@ export enum ExperimentsTabs {
Yours = 'yours',
Archived = 'archived',
Holdouts = 'holdouts',
- SavedMetrics = 'saved-metrics',
+ SharedMetrics = 'shared-metrics',
}
export enum ActivityTab {
@@ -2395,7 +2401,8 @@ export interface RetentionEntity {
export interface RetentionFilterType extends FilterType {
retention_type?: RetentionType
- retention_reference?: 'total' | 'previous' // retention wrt cohort size or previous period
+ /** Whether retention is with regard to initial cohort size, or that of the previous period. */
+ retention_reference?: 'total' | 'previous'
/**
* @asType integer
*/
@@ -3332,6 +3339,9 @@ export interface Experiment {
updated_at: string | null
holdout_id?: number | null
holdout?: Holdout
+ stats_config?: {
+ version?: number
+ }
}
export interface FunnelExperimentVariant {
@@ -4236,6 +4246,7 @@ export type BatchExportServiceS3 = {
kms_key_id: string | null
endpoint_url: string | null
file_format: string
+ max_file_size_mb: number | null
}
}
diff --git a/package.json b/package.json
index c636f327cb26f..7e224207b40c3 100644
--- a/package.json
+++ b/package.json
@@ -161,7 +161,7 @@
"pmtiles": "^2.11.0",
"postcss": "^8.4.31",
"postcss-preset-env": "^9.3.0",
- "posthog-js": "1.203.3",
+ "posthog-js": "1.205.0",
"posthog-js-lite": "3.0.0",
"prettier": "^2.8.8",
"prop-types": "^15.7.2",
diff --git a/plugin-server/package.json b/plugin-server/package.json
index 5f2a9dfdac165..ea8fd8da6c548 100644
--- a/plugin-server/package.json
+++ b/plugin-server/package.json
@@ -135,7 +135,7 @@
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^6.1.1",
"eslint-plugin-simple-import-sort": "^7.0.0",
- "jest": "^28.1.1",
+ "jest": "^29.7.0",
"nodemon": "^2.0.22",
"parse-prometheus-text-format": "^1.1.1",
"pino-pretty": "^9.1.0",
diff --git a/plugin-server/pnpm-lock.yaml b/plugin-server/pnpm-lock.yaml
index e23910979edfc..a22cbd90266e9 100644
--- a/plugin-server/pnpm-lock.yaml
+++ b/plugin-server/pnpm-lock.yaml
@@ -1,4 +1,4 @@
-lockfileVersion: '6.0'
+lockfileVersion: '6.1'
settings:
autoInstallPeers: true
@@ -286,8 +286,8 @@ devDependencies:
specifier: ^7.0.0
version: 7.0.0(eslint@8.53.0)
jest:
- specifier: ^28.1.1
- version: 28.1.3(@types/node@16.18.25)(ts-node@10.9.1)
+ specifier: ^29.7.0
+ version: 29.7.0(@types/node@16.18.25)(ts-node@10.9.1)
nodemon:
specifier: ^2.0.22
version: 2.0.22
@@ -1034,10 +1034,24 @@ packages:
dependencies:
'@babel/highlight': 7.18.6
+ /@babel/code-frame@7.26.2:
+ resolution: {integrity: sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/helper-validator-identifier': 7.25.9
+ js-tokens: 4.0.0
+ picocolors: 1.0.0
+ dev: true
+
/@babel/compat-data@7.21.4:
resolution: {integrity: sha512-/DYyDpeCfaVinT40FPGdkkb+lYSKvsVuMjDAG7jPOWWiM1ibOaB9CXJAlc4d1QpP/U2q2P9jbrSlClKSErd55g==}
engines: {node: '>=6.9.0'}
+ /@babel/compat-data@7.26.3:
+ resolution: {integrity: sha512-nHIxvKPniQXpmQLb0vhY3VaFb3S0YrTAwpOWJZh1wn3oJPjJk9Asva204PsBdmAE8vpzfHudT8DB0scYvy9q0g==}
+ engines: {node: '>=6.9.0'}
+ dev: true
+
/@babel/core@7.21.4:
resolution: {integrity: sha512-qt/YV149Jman/6AfmlxJ04LMIu8bMoyl3RB91yTFrxQmgbrSvQMy7cI8Q62FHx1t8wJ8B5fu0UDoLwHAhUo1QA==}
engines: {node: '>=6.9.0'}
@@ -1060,6 +1074,29 @@ packages:
transitivePeerDependencies:
- supports-color
+ /@babel/core@7.26.0:
+ resolution: {integrity: sha512-i1SLeK+DzNnQ3LL/CswPCa/E5u4lh1k6IAEphON8F+cXt0t9euTshDru0q7/IqMa1PMPz5RnHuHscF8/ZJsStg==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@ampproject/remapping': 2.2.1
+ '@babel/code-frame': 7.26.2
+ '@babel/generator': 7.26.3
+ '@babel/helper-compilation-targets': 7.25.9
+ '@babel/helper-module-transforms': 7.26.0(@babel/core@7.26.0)
+ '@babel/helpers': 7.26.0
+ '@babel/parser': 7.26.3
+ '@babel/template': 7.25.9
+ '@babel/traverse': 7.26.4
+ '@babel/types': 7.26.3
+ convert-source-map: 2.0.0
+ debug: 4.3.4
+ gensync: 1.0.0-beta.2
+ json5: 2.2.3
+ semver: 6.3.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
/@babel/generator@7.21.4:
resolution: {integrity: sha512-NieM3pVIYW2SwGzKoqfPrQsf4xGs9M9AIG3ThppsSRmO+m7eQhmI6amajKMUeIO37wFfsvnvcxQFx6x6iqxDnA==}
engines: {node: '>=6.9.0'}
@@ -1069,6 +1106,17 @@ packages:
'@jridgewell/trace-mapping': 0.3.18
jsesc: 2.5.2
+ /@babel/generator@7.26.3:
+ resolution: {integrity: sha512-6FF/urZvD0sTeO7k6/B15pMLC4CHUv1426lzr3N01aHJTl046uCAh9LXW/fzeXXjPNCJ6iABW5XaWOsIZB93aQ==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/parser': 7.26.3
+ '@babel/types': 7.26.3
+ '@jridgewell/gen-mapping': 0.3.8
+ '@jridgewell/trace-mapping': 0.3.25
+ jsesc: 3.1.0
+ dev: true
+
/@babel/helper-annotate-as-pure@7.18.6:
resolution: {integrity: sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA==}
engines: {node: '>=6.9.0'}
@@ -1097,6 +1145,17 @@ packages:
lru-cache: 5.1.1
semver: 6.3.0
+ /@babel/helper-compilation-targets@7.25.9:
+ resolution: {integrity: sha512-j9Db8Suy6yV/VHa4qzrj9yZfZxhLWQdVnRlXxmKLYlhWUVB1sB2G5sxuWYXk/whHD9iW76PmNzxZ4UCnTQTVEQ==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/compat-data': 7.26.3
+ '@babel/helper-validator-option': 7.25.9
+ browserslist: 4.24.3
+ lru-cache: 5.1.1
+ semver: 6.3.1
+ dev: true
+
/@babel/helper-create-class-features-plugin@7.21.4(@babel/core@7.21.4):
resolution: {integrity: sha512-46QrX2CQlaFRF4TkwfTt6nJD7IHq8539cCL7SDpqWSDeJKY1xylKKY5F/33mJhLZ3mFvKv2gGrVS6NkyF6qs+Q==}
engines: {node: '>=6.9.0'}
@@ -1180,6 +1239,16 @@ packages:
dependencies:
'@babel/types': 7.21.4
+ /@babel/helper-module-imports@7.25.9:
+ resolution: {integrity: sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/traverse': 7.26.4
+ '@babel/types': 7.26.3
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
/@babel/helper-module-transforms@7.21.2:
resolution: {integrity: sha512-79yj2AR4U/Oqq/WOV7Lx6hUjau1Zfo4cI+JLAVYeMV5XIlbOhmjEk5ulbTc9fMpmlojzZHkUUxAiK+UKn+hNQQ==}
engines: {node: '>=6.9.0'}
@@ -1195,6 +1264,20 @@ packages:
transitivePeerDependencies:
- supports-color
+ /@babel/helper-module-transforms@7.26.0(@babel/core@7.26.0):
+ resolution: {integrity: sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==}
+ engines: {node: '>=6.9.0'}
+ peerDependencies:
+ '@babel/core': ^7.0.0
+ dependencies:
+ '@babel/core': 7.26.0
+ '@babel/helper-module-imports': 7.25.9
+ '@babel/helper-validator-identifier': 7.25.9
+ '@babel/traverse': 7.26.4
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
/@babel/helper-optimise-call-expression@7.18.6:
resolution: {integrity: sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA==}
engines: {node: '>=6.9.0'}
@@ -1258,14 +1341,29 @@ packages:
resolution: {integrity: sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==}
engines: {node: '>=6.9.0'}
+ /@babel/helper-string-parser@7.25.9:
+ resolution: {integrity: sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==}
+ engines: {node: '>=6.9.0'}
+ dev: true
+
/@babel/helper-validator-identifier@7.19.1:
resolution: {integrity: sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==}
engines: {node: '>=6.9.0'}
+ /@babel/helper-validator-identifier@7.25.9:
+ resolution: {integrity: sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==}
+ engines: {node: '>=6.9.0'}
+ dev: true
+
/@babel/helper-validator-option@7.21.0:
resolution: {integrity: sha512-rmL/B8/f0mKS2baE9ZpyTcTavvEuWhTTW8amjzXNvYG4AwBsqTLikfXsEofsJEfKHf+HQVQbFOHy6o+4cnC/fQ==}
engines: {node: '>=6.9.0'}
+ /@babel/helper-validator-option@7.25.9:
+ resolution: {integrity: sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==}
+ engines: {node: '>=6.9.0'}
+ dev: true
+
/@babel/helper-wrap-function@7.20.5:
resolution: {integrity: sha512-bYMxIWK5mh+TgXGVqAtnu5Yn1un+v8DDZtqyzKRLUzrh70Eal2O3aZ7aPYiMADO4uKlkzOiRiZ6GX5q3qxvW9Q==}
engines: {node: '>=6.9.0'}
@@ -1288,6 +1386,14 @@ packages:
transitivePeerDependencies:
- supports-color
+ /@babel/helpers@7.26.0:
+ resolution: {integrity: sha512-tbhNuIxNcVb21pInl3ZSjksLCvgdZy9KwJ8brv993QtIVKJBBkYXz4q4ZbAv31GdnC+R90np23L5FbEBlthAEw==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/template': 7.25.9
+ '@babel/types': 7.26.3
+ dev: true
+
/@babel/highlight@7.18.6:
resolution: {integrity: sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==}
engines: {node: '>=6.9.0'}
@@ -1303,6 +1409,14 @@ packages:
dependencies:
'@babel/types': 7.21.4
+ /@babel/parser@7.26.3:
+ resolution: {integrity: sha512-WJ/CvmY8Mea8iDXo6a7RK2wbmJITT5fN3BEkRuFlxVyNx8jOKIIhmC4fSkTcPcf8JyavbBwIe6OpiCOBXt/IcA==}
+ engines: {node: '>=6.0.0'}
+ hasBin: true
+ dependencies:
+ '@babel/types': 7.26.3
+ dev: true
+
/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.18.6(@babel/core@7.21.4):
resolution: {integrity: sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ==}
engines: {node: '>=6.9.0'}
@@ -1597,7 +1711,6 @@ packages:
dependencies:
'@babel/core': 7.21.4
'@babel/helper-plugin-utils': 7.20.2
- dev: false
/@babel/plugin-syntax-logical-assignment-operators@7.10.4(@babel/core@7.21.4):
resolution: {integrity: sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==}
@@ -2204,6 +2317,15 @@ packages:
'@babel/parser': 7.21.4
'@babel/types': 7.21.4
+ /@babel/template@7.25.9:
+ resolution: {integrity: sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/code-frame': 7.26.2
+ '@babel/parser': 7.26.3
+ '@babel/types': 7.26.3
+ dev: true
+
/@babel/traverse@7.21.4:
resolution: {integrity: sha512-eyKrRHKdyZxqDm+fV1iqL9UAHMoIg0nDaGqfIOd8rKH17m5snv7Gn4qgjBoFfLz9APvjFU/ICT00NVCv1Epp8Q==}
engines: {node: '>=6.9.0'}
@@ -2221,6 +2343,21 @@ packages:
transitivePeerDependencies:
- supports-color
+ /@babel/traverse@7.26.4:
+ resolution: {integrity: sha512-fH+b7Y4p3yqvApJALCPJcwb0/XaOSgtK4pzV6WVjPR5GLFQBRI7pfoX2V2iM48NXvX07NUxxm1Vw98YjqTcU5w==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/code-frame': 7.26.2
+ '@babel/generator': 7.26.3
+ '@babel/parser': 7.26.3
+ '@babel/template': 7.25.9
+ '@babel/types': 7.26.3
+ debug: 4.3.4
+ globals: 11.12.0
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
/@babel/types@7.21.4:
resolution: {integrity: sha512-rU2oY501qDxE8Pyo7i/Orqma4ziCOrby0/9mvbDUGEfvZjb279Nk9k19e2fiCxHbRRpY2ZyrgW1eq22mvmOIzA==}
engines: {node: '>=6.9.0'}
@@ -2229,6 +2366,14 @@ packages:
'@babel/helper-validator-identifier': 7.19.1
to-fast-properties: 2.0.0
+ /@babel/types@7.26.3:
+ resolution: {integrity: sha512-vN5p+1kl59GVKMvTHt55NzzmYVxprfJD+ql7U9NFIfKCBkYE55LYtS+WtPlaYOyzydrKI8Nezd+aZextrd+FMA==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/helper-string-parser': 7.25.9
+ '@babel/helper-validator-identifier': 7.25.9
+ dev: true
+
/@bcoe/v8-coverage@0.2.3:
resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==}
dev: true
@@ -2440,57 +2585,57 @@ packages:
engines: {node: '>=8'}
dev: true
- /@jest/console@28.1.3:
- resolution: {integrity: sha512-QPAkP5EwKdK/bxIr6C1I4Vs0rm2nHiANzj/Z5X2JQkrZo6IqvC4ldZ9K95tF0HdidhA8Bo6egxSzUFPYKcEXLw==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /@jest/console@29.7.0:
+ resolution: {integrity: sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- '@jest/types': 28.1.3
+ '@jest/types': 29.6.3
'@types/node': 16.18.25
chalk: 4.1.2
- jest-message-util: 28.1.3
- jest-util: 28.1.3
+ jest-message-util: 29.7.0
+ jest-util: 29.7.0
slash: 3.0.0
dev: true
- /@jest/core@28.1.3(ts-node@10.9.1):
- resolution: {integrity: sha512-CIKBrlaKOzA7YG19BEqCw3SLIsEwjZkeJzf5bdooVnW4bH5cktqe3JX+G2YV1aK5vP8N9na1IGWFzYaTp6k6NA==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /@jest/core@29.7.0(ts-node@10.9.1):
+ resolution: {integrity: sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
peerDependencies:
node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0
peerDependenciesMeta:
node-notifier:
optional: true
dependencies:
- '@jest/console': 28.1.3
- '@jest/reporters': 28.1.3
- '@jest/test-result': 28.1.3
- '@jest/transform': 28.1.3
- '@jest/types': 28.1.3
+ '@jest/console': 29.7.0
+ '@jest/reporters': 29.7.0
+ '@jest/test-result': 29.7.0
+ '@jest/transform': 29.7.0
+ '@jest/types': 29.6.3
'@types/node': 16.18.25
ansi-escapes: 4.3.2
chalk: 4.1.2
ci-info: 3.8.0
exit: 0.1.2
graceful-fs: 4.2.11
- jest-changed-files: 28.1.3
- jest-config: 28.1.3(@types/node@16.18.25)(ts-node@10.9.1)
- jest-haste-map: 28.1.3
- jest-message-util: 28.1.3
- jest-regex-util: 28.0.2
- jest-resolve: 28.1.3
- jest-resolve-dependencies: 28.1.3
- jest-runner: 28.1.3
- jest-runtime: 28.1.3
- jest-snapshot: 28.1.3
- jest-util: 28.1.3
- jest-validate: 28.1.3
- jest-watcher: 28.1.3
+ jest-changed-files: 29.7.0
+ jest-config: 29.7.0(@types/node@16.18.25)(ts-node@10.9.1)
+ jest-haste-map: 29.7.0
+ jest-message-util: 29.7.0
+ jest-regex-util: 29.6.3
+ jest-resolve: 29.7.0
+ jest-resolve-dependencies: 29.7.0
+ jest-runner: 29.7.0
+ jest-runtime: 29.7.0
+ jest-snapshot: 29.7.0
+ jest-util: 29.7.0
+ jest-validate: 29.7.0
+ jest-watcher: 29.7.0
micromatch: 4.0.5
- pretty-format: 28.1.3
- rimraf: 3.0.2
+ pretty-format: 29.7.0
slash: 3.0.0
strip-ansi: 6.0.1
transitivePeerDependencies:
+ - babel-plugin-macros
- supports-color
- ts-node
dev: true
@@ -2502,14 +2647,14 @@ packages:
'@jest/types': 27.5.1
dev: true
- /@jest/environment@28.1.3:
- resolution: {integrity: sha512-1bf40cMFTEkKyEf585R9Iz1WayDjHoHqvts0XFYEqyKM3cFWDpeMoqKKTAF9LSYQModPUlh8FKptoM2YcMWAXA==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /@jest/environment@29.7.0:
+ resolution: {integrity: sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- '@jest/fake-timers': 28.1.3
- '@jest/types': 28.1.3
+ '@jest/fake-timers': 29.7.0
+ '@jest/types': 29.6.3
'@types/node': 16.18.25
- jest-mock: 28.1.3
+ jest-mock: 29.7.0
dev: true
/@jest/expect-utils@28.1.3:
@@ -2519,42 +2664,50 @@ packages:
jest-get-type: 28.0.2
dev: true
- /@jest/expect@28.1.3:
- resolution: {integrity: sha512-lzc8CpUbSoE4dqT0U+g1qODQjBRHPpCPXissXD4mS9+sWQdmmpeJ9zSH1rS1HEkrsMN0fb7nKrJ9giAR1d3wBw==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /@jest/expect-utils@29.7.0:
+ resolution: {integrity: sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- expect: 28.1.3
- jest-snapshot: 28.1.3
+ jest-get-type: 29.6.3
+ dev: true
+
+ /@jest/expect@29.7.0:
+ resolution: {integrity: sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+ dependencies:
+ expect: 29.7.0
+ jest-snapshot: 29.7.0
transitivePeerDependencies:
- supports-color
dev: true
- /@jest/fake-timers@28.1.3:
- resolution: {integrity: sha512-D/wOkL2POHv52h+ok5Oj/1gOG9HSywdoPtFsRCUmlCILXNn5eIWmcnd3DIiWlJnpGvQtmajqBP95Ei0EimxfLw==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /@jest/fake-timers@29.7.0:
+ resolution: {integrity: sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- '@jest/types': 28.1.3
- '@sinonjs/fake-timers': 9.1.2
+ '@jest/types': 29.6.3
+ '@sinonjs/fake-timers': 10.3.0
'@types/node': 16.18.25
- jest-message-util: 28.1.3
- jest-mock: 28.1.3
- jest-util: 28.1.3
+ jest-message-util: 29.7.0
+ jest-mock: 29.7.0
+ jest-util: 29.7.0
dev: true
- /@jest/globals@28.1.3:
- resolution: {integrity: sha512-XFU4P4phyryCXu1pbcqMO0GSQcYe1IsalYCDzRNyhetyeyxMcIxa11qPNDpVNLeretItNqEmYYQn1UYz/5x1NA==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /@jest/globals@29.7.0:
+ resolution: {integrity: sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- '@jest/environment': 28.1.3
- '@jest/expect': 28.1.3
- '@jest/types': 28.1.3
+ '@jest/environment': 29.7.0
+ '@jest/expect': 29.7.0
+ '@jest/types': 29.6.3
+ jest-mock: 29.7.0
transitivePeerDependencies:
- supports-color
dev: true
- /@jest/reporters@28.1.3:
- resolution: {integrity: sha512-JuAy7wkxQZVNU/V6g9xKzCGC5LVXx9FDcABKsSXp5MiKPEE2144a/vXTEDoyzjUpZKfVwp08Wqg5A4WfTMAzjg==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /@jest/reporters@29.7.0:
+ resolution: {integrity: sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
peerDependencies:
node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0
peerDependenciesMeta:
@@ -2562,10 +2715,10 @@ packages:
optional: true
dependencies:
'@bcoe/v8-coverage': 0.2.3
- '@jest/console': 28.1.3
- '@jest/test-result': 28.1.3
- '@jest/transform': 28.1.3
- '@jest/types': 28.1.3
+ '@jest/console': 29.7.0
+ '@jest/test-result': 29.7.0
+ '@jest/transform': 29.7.0
+ '@jest/types': 29.6.3
'@jridgewell/trace-mapping': 0.3.18
'@types/node': 16.18.25
chalk: 4.1.2
@@ -2574,17 +2727,16 @@ packages:
glob: 7.2.3
graceful-fs: 4.2.11
istanbul-lib-coverage: 3.2.0
- istanbul-lib-instrument: 5.2.1
+ istanbul-lib-instrument: 6.0.3
istanbul-lib-report: 3.0.0
istanbul-lib-source-maps: 4.0.1
istanbul-reports: 3.1.5
- jest-message-util: 28.1.3
- jest-util: 28.1.3
- jest-worker: 28.1.3
+ jest-message-util: 29.7.0
+ jest-util: 29.7.0
+ jest-worker: 29.7.0
slash: 3.0.0
string-length: 4.0.2
strip-ansi: 6.0.1
- terminal-link: 2.1.1
v8-to-istanbul: 9.1.0
transitivePeerDependencies:
- supports-color
@@ -2597,50 +2749,57 @@ packages:
'@sinclair/typebox': 0.24.51
dev: true
- /@jest/source-map@28.1.2:
- resolution: {integrity: sha512-cV8Lx3BeStJb8ipPHnqVw/IM2VCMWO3crWZzYodSIkxXnRcXJipCdx1JCK0K5MsJJouZQTH73mzf4vgxRaH9ww==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /@jest/schemas@29.6.3:
+ resolution: {integrity: sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+ dependencies:
+ '@sinclair/typebox': 0.27.8
+ dev: true
+
+ /@jest/source-map@29.6.3:
+ resolution: {integrity: sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
'@jridgewell/trace-mapping': 0.3.18
callsites: 3.1.0
graceful-fs: 4.2.11
dev: true
- /@jest/test-result@28.1.3:
- resolution: {integrity: sha512-kZAkxnSE+FqE8YjW8gNuoVkkC9I7S1qmenl8sGcDOLropASP+BkcGKwhXoyqQuGOGeYY0y/ixjrd/iERpEXHNg==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /@jest/test-result@29.7.0:
+ resolution: {integrity: sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- '@jest/console': 28.1.3
- '@jest/types': 28.1.3
+ '@jest/console': 29.7.0
+ '@jest/types': 29.6.3
'@types/istanbul-lib-coverage': 2.0.4
collect-v8-coverage: 1.0.1
dev: true
- /@jest/test-sequencer@28.1.3:
- resolution: {integrity: sha512-NIMPEqqa59MWnDi1kvXXpYbqsfQmSJsIbnd85mdVGkiDfQ9WQQTXOLsvISUfonmnBT+w85WEgneCigEEdHDFxw==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /@jest/test-sequencer@29.7.0:
+ resolution: {integrity: sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- '@jest/test-result': 28.1.3
+ '@jest/test-result': 29.7.0
graceful-fs: 4.2.11
- jest-haste-map: 28.1.3
+ jest-haste-map: 29.7.0
slash: 3.0.0
dev: true
- /@jest/transform@28.1.3:
- resolution: {integrity: sha512-u5dT5di+oFI6hfcLOHGTAfmUxFRrjK+vnaP0kkVow9Md/M7V/MxqQMOz/VV25UZO8pzeA9PjfTpOu6BDuwSPQA==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /@jest/transform@29.7.0:
+ resolution: {integrity: sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
'@babel/core': 7.21.4
- '@jest/types': 28.1.3
+ '@jest/types': 29.6.3
'@jridgewell/trace-mapping': 0.3.18
babel-plugin-istanbul: 6.1.1
chalk: 4.1.2
- convert-source-map: 1.9.0
+ convert-source-map: 2.0.0
fast-json-stable-stringify: 2.1.0
graceful-fs: 4.2.11
- jest-haste-map: 28.1.3
- jest-regex-util: 28.0.2
- jest-util: 28.1.3
+ jest-haste-map: 29.7.0
+ jest-regex-util: 29.6.3
+ jest-util: 29.7.0
micromatch: 4.0.5
pirates: 4.0.5
slash: 3.0.0
@@ -2672,6 +2831,18 @@ packages:
chalk: 4.1.2
dev: true
+ /@jest/types@29.6.3:
+ resolution: {integrity: sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+ dependencies:
+ '@jest/schemas': 29.6.3
+ '@types/istanbul-lib-coverage': 2.0.4
+ '@types/istanbul-reports': 3.0.1
+ '@types/node': 16.18.25
+ '@types/yargs': 17.0.24
+ chalk: 4.1.2
+ dev: true
+
/@jridgewell/gen-mapping@0.3.3:
resolution: {integrity: sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==}
engines: {node: '>=6.0.0'}
@@ -2680,6 +2851,15 @@ packages:
'@jridgewell/sourcemap-codec': 1.4.15
'@jridgewell/trace-mapping': 0.3.18
+ /@jridgewell/gen-mapping@0.3.8:
+ resolution: {integrity: sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==}
+ engines: {node: '>=6.0.0'}
+ dependencies:
+ '@jridgewell/set-array': 1.2.1
+ '@jridgewell/sourcemap-codec': 1.4.15
+ '@jridgewell/trace-mapping': 0.3.25
+ dev: true
+
/@jridgewell/resolve-uri@3.1.0:
resolution: {integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==}
engines: {node: '>=6.0.0'}
@@ -2693,6 +2873,11 @@ packages:
resolution: {integrity: sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==}
engines: {node: '>=6.0.0'}
+ /@jridgewell/set-array@1.2.1:
+ resolution: {integrity: sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==}
+ engines: {node: '>=6.0.0'}
+ dev: true
+
/@jridgewell/sourcemap-codec@1.4.14:
resolution: {integrity: sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==}
@@ -2705,6 +2890,13 @@ packages:
'@jridgewell/resolve-uri': 3.1.0
'@jridgewell/sourcemap-codec': 1.4.14
+ /@jridgewell/trace-mapping@0.3.25:
+ resolution: {integrity: sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==}
+ dependencies:
+ '@jridgewell/resolve-uri': 3.1.1
+ '@jridgewell/sourcemap-codec': 1.4.15
+ dev: true
+
/@jridgewell/trace-mapping@0.3.9:
resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==}
dependencies:
@@ -2966,16 +3158,20 @@ packages:
resolution: {integrity: sha512-1P1OROm/rdubP5aFDSZQILU0vrLCJ4fvHt6EoqHEM+2D/G5MK3bIaymUKLit8Js9gbns5UyJnkP/TZROLw4tUA==}
dev: true
- /@sinonjs/commons@1.8.6:
- resolution: {integrity: sha512-Ky+XkAkqPZSm3NLBeUng77EBQl3cmeJhITaGHdYH8kjVB+aun3S4XBRti2zt17mtt0mIUDiNxYeoJm6drVvBJQ==}
+ /@sinclair/typebox@0.27.8:
+ resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==}
+ dev: true
+
+ /@sinonjs/commons@3.0.1:
+ resolution: {integrity: sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==}
dependencies:
type-detect: 4.0.8
dev: true
- /@sinonjs/fake-timers@9.1.2:
- resolution: {integrity: sha512-BPS4ynJW/o92PUR4wgriz2Ud5gpST5vz6GQfMixEDK0Z8ZCUv2M7SkBLykH56T++Xs+8ln9zTGbOvNGIe02/jw==}
+ /@sinonjs/fake-timers@10.3.0:
+ resolution: {integrity: sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==}
dependencies:
- '@sinonjs/commons': 1.8.6
+ '@sinonjs/commons': 3.0.1
dev: true
/@smithy/abort-controller@3.1.9:
@@ -3861,10 +4057,6 @@ packages:
pg-protocol: 1.6.0
pg-types: 2.2.0
- /@types/prettier@2.7.2:
- resolution: {integrity: sha512-KufADq8uQqo1pYKVIYzfKbJfBAc0sOeXqGbFaSpv8MRmC/zXgowNZmFcbngndGk922QDmOASEXUZCaY48gs4cg==}
- dev: true
-
/@types/qs@6.9.11:
resolution: {integrity: sha512-oGk0gmhnEJK4Yyk+oI7EfXsLayXatCWPHary1MtcmbAifkobT9cM9yutG/hZKIseOU0MqbIwQ/u2nn/Gb+ltuQ==}
dev: true
@@ -4471,17 +4663,17 @@ packages:
- supports-color
dev: true
- /babel-jest@28.1.3(@babel/core@7.21.4):
- resolution: {integrity: sha512-epUaPOEWMk3cWX0M/sPvCHHCe9fMFAa/9hXEgKP8nFfNl/jlGkE9ucq9NqkZGXLDduCJYS0UvSlPUwC0S+rH6Q==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /babel-jest@29.7.0(@babel/core@7.21.4):
+ resolution: {integrity: sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
peerDependencies:
'@babel/core': ^7.8.0
dependencies:
'@babel/core': 7.21.4
- '@jest/transform': 28.1.3
+ '@jest/transform': 29.7.0
'@types/babel__core': 7.20.0
babel-plugin-istanbul: 6.1.1
- babel-preset-jest: 28.1.3(@babel/core@7.21.4)
+ babel-preset-jest: 29.6.3(@babel/core@7.21.4)
chalk: 4.1.2
graceful-fs: 4.2.11
slash: 3.0.0
@@ -4502,9 +4694,9 @@ packages:
- supports-color
dev: true
- /babel-plugin-jest-hoist@28.1.3:
- resolution: {integrity: sha512-Ys3tUKAmfnkRUpPdpa98eYrAR0nV+sSFUZZEGuQ2EbFd1y4SOLtD5QDNHAq+bb9a+bbXvYQC4b+ID/THIMcU6Q==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /babel-plugin-jest-hoist@29.6.3:
+ resolution: {integrity: sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
'@babel/template': 7.20.7
'@babel/types': 7.21.4
@@ -4568,14 +4760,14 @@ packages:
'@babel/plugin-syntax-top-level-await': 7.14.5(@babel/core@7.21.4)
dev: true
- /babel-preset-jest@28.1.3(@babel/core@7.21.4):
- resolution: {integrity: sha512-L+fupJvlWAHbQfn74coNX3zf60LXMJsezNvvx8eIh7iOR1luJ1poxYgQk1F8PYtNq/6QODDHCqsSnTFSWC491A==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /babel-preset-jest@29.6.3(@babel/core@7.21.4):
+ resolution: {integrity: sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
peerDependencies:
'@babel/core': ^7.0.0
dependencies:
'@babel/core': 7.21.4
- babel-plugin-jest-hoist: 28.1.3
+ babel-plugin-jest-hoist: 29.6.3
babel-preset-current-node-syntax: 1.0.1(@babel/core@7.21.4)
dev: true
@@ -4803,6 +4995,17 @@ packages:
node-releases: 2.0.10
update-browserslist-db: 1.0.11(browserslist@4.21.5)
+ /browserslist@4.24.3:
+ resolution: {integrity: sha512-1CPmv8iobE2fyRMV97dAcMVegvvWKxmq94hkLiAkUGwKVTyDLw33K+ZxiFrREKmmps4rIw6grcCFCnTMSZ/YiA==}
+ engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7}
+ hasBin: true
+ dependencies:
+ caniuse-lite: 1.0.30001690
+ electron-to-chromium: 1.5.78
+ node-releases: 2.0.19
+ update-browserslist-db: 1.1.1(browserslist@4.24.3)
+ dev: true
+
/bser@2.1.1:
resolution: {integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==}
dependencies:
@@ -4977,6 +5180,10 @@ packages:
/caniuse-lite@1.0.30001481:
resolution: {integrity: sha512-KCqHwRnaa1InZBtqXzP98LPg0ajCVujMKjqKDhZEthIpAsJl/YEIa3YvXjGXPVqzZVguccuu7ga9KOE1J9rKPQ==}
+ /caniuse-lite@1.0.30001690:
+ resolution: {integrity: sha512-5ExiE3qQN6oF8Clf8ifIDcMRCRE/dMGcETG/XGMD8/XiXm6HXQgQTh1yZYLXXpSOsEUlJm1Xr7kGULZTuGtP/w==}
+ dev: true
+
/catharsis@0.9.0:
resolution: {integrity: sha512-prMTQVpcns/tzFgFVkVp6ak6RykZyWb3gu8ckUpd6YkTlacOd3DXGJjIpD4Q6zJirizvaiAjSSHlOsA+6sNh2A==}
engines: {node: '>= 10'}
@@ -5214,6 +5421,10 @@ packages:
/convert-source-map@1.9.0:
resolution: {integrity: sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==}
+ /convert-source-map@2.0.0:
+ resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==}
+ dev: true
+
/cookie-signature@1.0.6:
resolution: {integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==}
dev: false
@@ -5285,6 +5496,25 @@ packages:
sha.js: 2.4.11
dev: true
+ /create-jest@29.7.0(@types/node@16.18.25)(ts-node@10.9.1):
+ resolution: {integrity: sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+ hasBin: true
+ dependencies:
+ '@jest/types': 29.6.3
+ chalk: 4.1.2
+ exit: 0.1.2
+ graceful-fs: 4.2.11
+ jest-config: 29.7.0(@types/node@16.18.25)(ts-node@10.9.1)
+ jest-util: 29.7.0
+ prompts: 2.4.2
+ transitivePeerDependencies:
+ - '@types/node'
+ - babel-plugin-macros
+ - supports-color
+ - ts-node
+ dev: true
+
/create-require@1.1.1:
resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==}
dev: true
@@ -5489,8 +5719,13 @@ packages:
dependencies:
ms: 2.1.2
- /dedent@0.7.0:
- resolution: {integrity: sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==}
+ /dedent@1.5.3:
+ resolution: {integrity: sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ==}
+ peerDependencies:
+ babel-plugin-macros: ^3.1.0
+ peerDependenciesMeta:
+ babel-plugin-macros:
+ optional: true
dev: true
/deep-is@0.1.4:
@@ -5605,6 +5840,11 @@ packages:
engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
dev: true
+ /diff-sequences@29.6.3:
+ resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+ dev: true
+
/diff@4.0.2:
resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==}
engines: {node: '>=0.3.1'}
@@ -5682,6 +5922,10 @@ packages:
/electron-to-chromium@1.4.373:
resolution: {integrity: sha512-whGyixOVSRlyOBQDsRH9xltFaMij2/+DQRdaYahCq0P/fiVnAVGaW7OVsFnEjze/qUo298ez9C46gnALpo6ukg==}
+ /electron-to-chromium@1.5.78:
+ resolution: {integrity: sha512-UmwIt7HRKN1rsJfddG5UG7rCTCTAKoS9JeOy/R0zSenAyaZ8SU3RuXlwcratxhdxGRNpk03iq8O7BA3W7ibLVw==}
+ dev: true
+
/elliptic@6.5.4:
resolution: {integrity: sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==}
dependencies:
@@ -5694,8 +5938,8 @@ packages:
minimalistic-crypto-utils: 1.0.1
dev: true
- /emittery@0.10.2:
- resolution: {integrity: sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw==}
+ /emittery@0.13.1:
+ resolution: {integrity: sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==}
engines: {node: '>=12'}
dev: true
@@ -5823,6 +6067,11 @@ packages:
resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==}
engines: {node: '>=6'}
+ /escalade@3.2.0:
+ resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==}
+ engines: {node: '>=6'}
+ dev: true
+
/escape-html@1.0.3:
resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==}
dev: false
@@ -6181,6 +6430,17 @@ packages:
jest-util: 28.1.3
dev: true
+ /expect@29.7.0:
+ resolution: {integrity: sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+ dependencies:
+ '@jest/expect-utils': 29.7.0
+ jest-get-type: 29.6.3
+ jest-matcher-utils: 29.7.0
+ jest-message-util: 29.7.0
+ jest-util: 29.7.0
+ dev: true
+
/exponential-backoff@3.1.1:
resolution: {integrity: sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw==}
dev: false
@@ -7347,8 +7607,8 @@ packages:
resolution: {integrity: sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==}
engines: {node: '>=8'}
dependencies:
- '@babel/core': 7.21.4
- '@babel/parser': 7.21.4
+ '@babel/core': 7.26.0
+ '@babel/parser': 7.26.3
'@istanbuljs/schema': 0.1.3
istanbul-lib-coverage: 3.2.0
semver: 6.3.1
@@ -7356,6 +7616,19 @@ packages:
- supports-color
dev: true
+ /istanbul-lib-instrument@6.0.3:
+ resolution: {integrity: sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==}
+ engines: {node: '>=10'}
+ dependencies:
+ '@babel/core': 7.26.0
+ '@babel/parser': 7.26.3
+ '@istanbuljs/schema': 0.1.3
+ istanbul-lib-coverage: 3.2.0
+ semver: 7.5.4
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
/istanbul-lib-report@3.0.0:
resolution: {integrity: sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==}
engines: {node: '>=8'}
@@ -7393,44 +7666,47 @@ packages:
'@pkgjs/parseargs': 0.11.0
dev: false
- /jest-changed-files@28.1.3:
- resolution: {integrity: sha512-esaOfUWJXk2nfZt9SPyC8gA1kNfdKLkQWyzsMlqq8msYSlNKfmZxfRgZn4Cd4MGVUF+7v6dBs0d5TOAKa7iIiA==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-changed-files@29.7.0:
+ resolution: {integrity: sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
execa: 5.1.1
+ jest-util: 29.7.0
p-limit: 3.1.0
dev: true
- /jest-circus@28.1.3:
- resolution: {integrity: sha512-cZ+eS5zc79MBwt+IhQhiEp0OeBddpc1n8MBo1nMB8A7oPMKEO+Sre+wHaLJexQUj9Ya/8NOBY0RESUgYjB6fow==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-circus@29.7.0:
+ resolution: {integrity: sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- '@jest/environment': 28.1.3
- '@jest/expect': 28.1.3
- '@jest/test-result': 28.1.3
- '@jest/types': 28.1.3
+ '@jest/environment': 29.7.0
+ '@jest/expect': 29.7.0
+ '@jest/test-result': 29.7.0
+ '@jest/types': 29.6.3
'@types/node': 16.18.25
chalk: 4.1.2
co: 4.6.0
- dedent: 0.7.0
+ dedent: 1.5.3
is-generator-fn: 2.1.0
- jest-each: 28.1.3
- jest-matcher-utils: 28.1.3
- jest-message-util: 28.1.3
- jest-runtime: 28.1.3
- jest-snapshot: 28.1.3
- jest-util: 28.1.3
+ jest-each: 29.7.0
+ jest-matcher-utils: 29.7.0
+ jest-message-util: 29.7.0
+ jest-runtime: 29.7.0
+ jest-snapshot: 29.7.0
+ jest-util: 29.7.0
p-limit: 3.1.0
- pretty-format: 28.1.3
+ pretty-format: 29.7.0
+ pure-rand: 6.1.0
slash: 3.0.0
stack-utils: 2.0.6
transitivePeerDependencies:
+ - babel-plugin-macros
- supports-color
dev: true
- /jest-cli@28.1.3(@types/node@16.18.25)(ts-node@10.9.1):
- resolution: {integrity: sha512-roY3kvrv57Azn1yPgdTebPAXvdR2xfezaKKYzVxZ6It/5NCxzJym6tUI5P1zkdWhfUYkxEI9uZWcQdaFLo8mJQ==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-cli@29.7.0(@types/node@16.18.25)(ts-node@10.9.1):
+ resolution: {integrity: sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
hasBin: true
peerDependencies:
node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0
@@ -7438,27 +7714,27 @@ packages:
node-notifier:
optional: true
dependencies:
- '@jest/core': 28.1.3(ts-node@10.9.1)
- '@jest/test-result': 28.1.3
- '@jest/types': 28.1.3
+ '@jest/core': 29.7.0(ts-node@10.9.1)
+ '@jest/test-result': 29.7.0
+ '@jest/types': 29.6.3
chalk: 4.1.2
+ create-jest: 29.7.0(@types/node@16.18.25)(ts-node@10.9.1)
exit: 0.1.2
- graceful-fs: 4.2.11
import-local: 3.1.0
- jest-config: 28.1.3(@types/node@16.18.25)(ts-node@10.9.1)
- jest-util: 28.1.3
- jest-validate: 28.1.3
- prompts: 2.4.2
+ jest-config: 29.7.0(@types/node@16.18.25)(ts-node@10.9.1)
+ jest-util: 29.7.0
+ jest-validate: 29.7.0
yargs: 17.7.1
transitivePeerDependencies:
- '@types/node'
+ - babel-plugin-macros
- supports-color
- ts-node
dev: true
- /jest-config@28.1.3(@types/node@16.18.25)(ts-node@10.9.1):
- resolution: {integrity: sha512-MG3INjByJ0J4AsNBm7T3hsuxKQqFIiRo/AUqb1q9LRKI5UU6Aar9JHbr9Ivn1TVwfUD9KirRoM/T6u8XlcQPHQ==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-config@29.7.0(@types/node@16.18.25)(ts-node@10.9.1):
+ resolution: {integrity: sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
peerDependencies:
'@types/node': '*'
ts-node: '>=9.0.0'
@@ -7469,30 +7745,31 @@ packages:
optional: true
dependencies:
'@babel/core': 7.21.4
- '@jest/test-sequencer': 28.1.3
- '@jest/types': 28.1.3
+ '@jest/test-sequencer': 29.7.0
+ '@jest/types': 29.6.3
'@types/node': 16.18.25
- babel-jest: 28.1.3(@babel/core@7.21.4)
+ babel-jest: 29.7.0(@babel/core@7.21.4)
chalk: 4.1.2
ci-info: 3.8.0
deepmerge: 4.3.1
glob: 7.2.3
graceful-fs: 4.2.11
- jest-circus: 28.1.3
- jest-environment-node: 28.1.3
- jest-get-type: 28.0.2
- jest-regex-util: 28.0.2
- jest-resolve: 28.1.3
- jest-runner: 28.1.3
- jest-util: 28.1.3
- jest-validate: 28.1.3
+ jest-circus: 29.7.0
+ jest-environment-node: 29.7.0
+ jest-get-type: 29.6.3
+ jest-regex-util: 29.6.3
+ jest-resolve: 29.7.0
+ jest-runner: 29.7.0
+ jest-util: 29.7.0
+ jest-validate: 29.7.0
micromatch: 4.0.5
parse-json: 5.2.0
- pretty-format: 28.1.3
+ pretty-format: 29.7.0
slash: 3.0.0
strip-json-comments: 3.1.1
ts-node: 10.9.1(@swc/core@1.3.55)(@types/node@16.18.25)(typescript@4.9.5)
transitivePeerDependencies:
+ - babel-plugin-macros
- supports-color
dev: true
@@ -7506,34 +7783,44 @@ packages:
pretty-format: 28.1.3
dev: true
- /jest-docblock@28.1.1:
- resolution: {integrity: sha512-3wayBVNiOYx0cwAbl9rwm5kKFP8yHH3d/fkEaL02NPTkDojPtheGB7HZSFY4wzX+DxyrvhXz0KSCVksmCknCuA==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-diff@29.7.0:
+ resolution: {integrity: sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+ dependencies:
+ chalk: 4.1.2
+ diff-sequences: 29.6.3
+ jest-get-type: 29.6.3
+ pretty-format: 29.7.0
+ dev: true
+
+ /jest-docblock@29.7.0:
+ resolution: {integrity: sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
detect-newline: 3.1.0
dev: true
- /jest-each@28.1.3:
- resolution: {integrity: sha512-arT1z4sg2yABU5uogObVPvSlSMQlDA48owx07BDPAiasW0yYpYHYOo4HHLz9q0BVzDVU4hILFjzJw0So9aCL/g==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-each@29.7.0:
+ resolution: {integrity: sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- '@jest/types': 28.1.3
+ '@jest/types': 29.6.3
chalk: 4.1.2
- jest-get-type: 28.0.2
- jest-util: 28.1.3
- pretty-format: 28.1.3
+ jest-get-type: 29.6.3
+ jest-util: 29.7.0
+ pretty-format: 29.7.0
dev: true
- /jest-environment-node@28.1.3:
- resolution: {integrity: sha512-ugP6XOhEpjAEhGYvp5Xj989ns5cB1K6ZdjBYuS30umT4CQEETaxSiPcZ/E1kFktX4GkrcM4qu07IIlDYX1gp+A==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-environment-node@29.7.0:
+ resolution: {integrity: sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- '@jest/environment': 28.1.3
- '@jest/fake-timers': 28.1.3
- '@jest/types': 28.1.3
+ '@jest/environment': 29.7.0
+ '@jest/fake-timers': 29.7.0
+ '@jest/types': 29.6.3
'@types/node': 16.18.25
- jest-mock: 28.1.3
- jest-util: 28.1.3
+ jest-mock: 29.7.0
+ jest-util: 29.7.0
dev: true
/jest-get-type@28.0.2:
@@ -7541,31 +7828,36 @@ packages:
engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
dev: true
- /jest-haste-map@28.1.3:
- resolution: {integrity: sha512-3S+RQWDXccXDKSWnkHa/dPwt+2qwA8CJzR61w3FoYCvoo3Pn8tvGcysmMF0Bj0EX5RYvAI2EIvC57OmotfdtKA==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-get-type@29.6.3:
+ resolution: {integrity: sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+ dev: true
+
+ /jest-haste-map@29.7.0:
+ resolution: {integrity: sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- '@jest/types': 28.1.3
+ '@jest/types': 29.6.3
'@types/graceful-fs': 4.1.6
'@types/node': 16.18.25
anymatch: 3.1.3
fb-watchman: 2.0.2
graceful-fs: 4.2.11
- jest-regex-util: 28.0.2
- jest-util: 28.1.3
- jest-worker: 28.1.3
+ jest-regex-util: 29.6.3
+ jest-util: 29.7.0
+ jest-worker: 29.7.0
micromatch: 4.0.5
walker: 1.0.8
optionalDependencies:
fsevents: 2.3.3
dev: true
- /jest-leak-detector@28.1.3:
- resolution: {integrity: sha512-WFVJhnQsiKtDEo5lG2mM0v40QWnBM+zMdHHyJs8AWZ7J0QZJS59MsyKeJHWhpBZBH32S48FOVvGyOFT1h0DlqA==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-leak-detector@29.7.0:
+ resolution: {integrity: sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- jest-get-type: 28.0.2
- pretty-format: 28.1.3
+ jest-get-type: 29.6.3
+ pretty-format: 29.7.0
dev: true
/jest-matcher-utils@28.1.3:
@@ -7578,6 +7870,16 @@ packages:
pretty-format: 28.1.3
dev: true
+ /jest-matcher-utils@29.7.0:
+ resolution: {integrity: sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+ dependencies:
+ chalk: 4.1.2
+ jest-diff: 29.7.0
+ jest-get-type: 29.6.3
+ pretty-format: 29.7.0
+ dev: true
+
/jest-message-util@28.1.3:
resolution: {integrity: sha512-PFdn9Iewbt575zKPf1286Ht9EPoJmYT7P0kY+RibeYZ2XtOr53pDLEFoTWXbd1h4JiGiWpTBC84fc8xMXQMb7g==}
engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
@@ -7593,15 +7895,31 @@ packages:
stack-utils: 2.0.6
dev: true
- /jest-mock@28.1.3:
- resolution: {integrity: sha512-o3J2jr6dMMWYVH4Lh/NKmDXdosrsJgi4AviS8oXLujcjpCMBb1FMsblDnOXKZKfSiHLxYub1eS0IHuRXsio9eA==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-message-util@29.7.0:
+ resolution: {integrity: sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- '@jest/types': 28.1.3
+ '@babel/code-frame': 7.21.4
+ '@jest/types': 29.6.3
+ '@types/stack-utils': 2.0.1
+ chalk: 4.1.2
+ graceful-fs: 4.2.11
+ micromatch: 4.0.5
+ pretty-format: 29.7.0
+ slash: 3.0.0
+ stack-utils: 2.0.6
+ dev: true
+
+ /jest-mock@29.7.0:
+ resolution: {integrity: sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+ dependencies:
+ '@jest/types': 29.6.3
'@types/node': 16.18.25
+ jest-util: 29.7.0
dev: true
- /jest-pnp-resolver@1.2.3(jest-resolve@28.1.3):
+ /jest-pnp-resolver@1.2.3(jest-resolve@29.7.0):
resolution: {integrity: sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==}
engines: {node: '>=6'}
peerDependencies:
@@ -7610,124 +7928,121 @@ packages:
jest-resolve:
optional: true
dependencies:
- jest-resolve: 28.1.3
+ jest-resolve: 29.7.0
dev: true
- /jest-regex-util@28.0.2:
- resolution: {integrity: sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-regex-util@29.6.3:
+ resolution: {integrity: sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dev: true
- /jest-resolve-dependencies@28.1.3:
- resolution: {integrity: sha512-qa0QO2Q0XzQoNPouMbCc7Bvtsem8eQgVPNkwn9LnS+R2n8DaVDPL/U1gngC0LTl1RYXJU0uJa2BMC2DbTfFrHA==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-resolve-dependencies@29.7.0:
+ resolution: {integrity: sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- jest-regex-util: 28.0.2
- jest-snapshot: 28.1.3
+ jest-regex-util: 29.6.3
+ jest-snapshot: 29.7.0
transitivePeerDependencies:
- supports-color
dev: true
- /jest-resolve@28.1.3:
- resolution: {integrity: sha512-Z1W3tTjE6QaNI90qo/BJpfnvpxtaFTFw5CDgwpyE/Kz8U/06N1Hjf4ia9quUhCh39qIGWF1ZuxFiBiJQwSEYKQ==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-resolve@29.7.0:
+ resolution: {integrity: sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
chalk: 4.1.2
graceful-fs: 4.2.11
- jest-haste-map: 28.1.3
- jest-pnp-resolver: 1.2.3(jest-resolve@28.1.3)
- jest-util: 28.1.3
- jest-validate: 28.1.3
+ jest-haste-map: 29.7.0
+ jest-pnp-resolver: 1.2.3(jest-resolve@29.7.0)
+ jest-util: 29.7.0
+ jest-validate: 29.7.0
resolve: 1.22.8
- resolve.exports: 1.1.1
+ resolve.exports: 2.0.3
slash: 3.0.0
dev: true
- /jest-runner@28.1.3:
- resolution: {integrity: sha512-GkMw4D/0USd62OVO0oEgjn23TM+YJa2U2Wu5zz9xsQB1MxWKDOlrnykPxnMsN0tnJllfLPinHTka61u0QhaxBA==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-runner@29.7.0:
+ resolution: {integrity: sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- '@jest/console': 28.1.3
- '@jest/environment': 28.1.3
- '@jest/test-result': 28.1.3
- '@jest/transform': 28.1.3
- '@jest/types': 28.1.3
+ '@jest/console': 29.7.0
+ '@jest/environment': 29.7.0
+ '@jest/test-result': 29.7.0
+ '@jest/transform': 29.7.0
+ '@jest/types': 29.6.3
'@types/node': 16.18.25
chalk: 4.1.2
- emittery: 0.10.2
+ emittery: 0.13.1
graceful-fs: 4.2.11
- jest-docblock: 28.1.1
- jest-environment-node: 28.1.3
- jest-haste-map: 28.1.3
- jest-leak-detector: 28.1.3
- jest-message-util: 28.1.3
- jest-resolve: 28.1.3
- jest-runtime: 28.1.3
- jest-util: 28.1.3
- jest-watcher: 28.1.3
- jest-worker: 28.1.3
+ jest-docblock: 29.7.0
+ jest-environment-node: 29.7.0
+ jest-haste-map: 29.7.0
+ jest-leak-detector: 29.7.0
+ jest-message-util: 29.7.0
+ jest-resolve: 29.7.0
+ jest-runtime: 29.7.0
+ jest-util: 29.7.0
+ jest-watcher: 29.7.0
+ jest-worker: 29.7.0
p-limit: 3.1.0
source-map-support: 0.5.13
transitivePeerDependencies:
- supports-color
dev: true
- /jest-runtime@28.1.3:
- resolution: {integrity: sha512-NU+881ScBQQLc1JHG5eJGU7Ui3kLKrmwCPPtYsJtBykixrM2OhVQlpMmFWJjMyDfdkGgBMNjXCGB/ebzsgNGQw==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-runtime@29.7.0:
+ resolution: {integrity: sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- '@jest/environment': 28.1.3
- '@jest/fake-timers': 28.1.3
- '@jest/globals': 28.1.3
- '@jest/source-map': 28.1.2
- '@jest/test-result': 28.1.3
- '@jest/transform': 28.1.3
- '@jest/types': 28.1.3
+ '@jest/environment': 29.7.0
+ '@jest/fake-timers': 29.7.0
+ '@jest/globals': 29.7.0
+ '@jest/source-map': 29.6.3
+ '@jest/test-result': 29.7.0
+ '@jest/transform': 29.7.0
+ '@jest/types': 29.6.3
+ '@types/node': 16.18.25
chalk: 4.1.2
cjs-module-lexer: 1.2.2
collect-v8-coverage: 1.0.1
- execa: 5.1.1
glob: 7.2.3
graceful-fs: 4.2.11
- jest-haste-map: 28.1.3
- jest-message-util: 28.1.3
- jest-mock: 28.1.3
- jest-regex-util: 28.0.2
- jest-resolve: 28.1.3
- jest-snapshot: 28.1.3
- jest-util: 28.1.3
+ jest-haste-map: 29.7.0
+ jest-message-util: 29.7.0
+ jest-mock: 29.7.0
+ jest-regex-util: 29.6.3
+ jest-resolve: 29.7.0
+ jest-snapshot: 29.7.0
+ jest-util: 29.7.0
slash: 3.0.0
strip-bom: 4.0.0
transitivePeerDependencies:
- supports-color
dev: true
- /jest-snapshot@28.1.3:
- resolution: {integrity: sha512-4lzMgtiNlc3DU/8lZfmqxN3AYD6GGLbl+72rdBpXvcV+whX7mDrREzkPdp2RnmfIiWBg1YbuFSkXduF2JcafJg==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-snapshot@29.7.0:
+ resolution: {integrity: sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
'@babel/core': 7.21.4
'@babel/generator': 7.21.4
+ '@babel/plugin-syntax-jsx': 7.21.4(@babel/core@7.21.4)
'@babel/plugin-syntax-typescript': 7.21.4(@babel/core@7.21.4)
- '@babel/traverse': 7.21.4
'@babel/types': 7.21.4
- '@jest/expect-utils': 28.1.3
- '@jest/transform': 28.1.3
- '@jest/types': 28.1.3
- '@types/babel__traverse': 7.18.5
- '@types/prettier': 2.7.2
+ '@jest/expect-utils': 29.7.0
+ '@jest/transform': 29.7.0
+ '@jest/types': 29.6.3
babel-preset-current-node-syntax: 1.0.1(@babel/core@7.21.4)
chalk: 4.1.2
- expect: 28.1.3
+ expect: 29.7.0
graceful-fs: 4.2.11
- jest-diff: 28.1.3
- jest-get-type: 28.0.2
- jest-haste-map: 28.1.3
- jest-matcher-utils: 28.1.3
- jest-message-util: 28.1.3
- jest-util: 28.1.3
+ jest-diff: 29.7.0
+ jest-get-type: 29.6.3
+ jest-matcher-utils: 29.7.0
+ jest-message-util: 29.7.0
+ jest-util: 29.7.0
natural-compare: 1.4.0
- pretty-format: 28.1.3
+ pretty-format: 29.7.0
semver: 7.5.4
transitivePeerDependencies:
- supports-color
@@ -7745,44 +8060,57 @@ packages:
picomatch: 2.3.1
dev: true
- /jest-validate@28.1.3:
- resolution: {integrity: sha512-SZbOGBWEsaTxBGCOpsRWlXlvNkvTkY0XxRfh7zYmvd8uL5Qzyg0CHAXiXKROflh801quA6+/DsT4ODDthOC/OA==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-util@29.7.0:
+ resolution: {integrity: sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- '@jest/types': 28.1.3
+ '@jest/types': 29.6.3
+ '@types/node': 16.18.25
+ chalk: 4.1.2
+ ci-info: 3.8.0
+ graceful-fs: 4.2.11
+ picomatch: 2.3.1
+ dev: true
+
+ /jest-validate@29.7.0:
+ resolution: {integrity: sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+ dependencies:
+ '@jest/types': 29.6.3
camelcase: 6.3.0
chalk: 4.1.2
- jest-get-type: 28.0.2
+ jest-get-type: 29.6.3
leven: 3.1.0
- pretty-format: 28.1.3
+ pretty-format: 29.7.0
dev: true
- /jest-watcher@28.1.3:
- resolution: {integrity: sha512-t4qcqj9hze+jviFPUN3YAtAEeFnr/azITXQEMARf5cMwKY2SMBRnCQTXLixTl20OR6mLh9KLMrgVJgJISym+1g==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-watcher@29.7.0:
+ resolution: {integrity: sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- '@jest/test-result': 28.1.3
- '@jest/types': 28.1.3
+ '@jest/test-result': 29.7.0
+ '@jest/types': 29.6.3
'@types/node': 16.18.25
ansi-escapes: 4.3.2
chalk: 4.1.2
- emittery: 0.10.2
- jest-util: 28.1.3
+ emittery: 0.13.1
+ jest-util: 29.7.0
string-length: 4.0.2
dev: true
- /jest-worker@28.1.3:
- resolution: {integrity: sha512-CqRA220YV/6jCo8VWvAt1KKx6eek1VIHMPeLEbpcfSfkEeWyBNppynM/o6q+Wmw+sOhos2ml34wZbSX3G13//g==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest-worker@29.7.0:
+ resolution: {integrity: sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
'@types/node': 16.18.25
+ jest-util: 29.7.0
merge-stream: 2.0.0
supports-color: 8.1.1
dev: true
- /jest@28.1.3(@types/node@16.18.25)(ts-node@10.9.1):
- resolution: {integrity: sha512-N4GT5on8UkZgH0O5LUavMRV1EDEhNTL0KEfRmDIeZHSV7p2XgLoY9t9VDUgL6o+yfdgYHVxuz81G8oB9VG5uyA==}
- engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
+ /jest@29.7.0(@types/node@16.18.25)(ts-node@10.9.1):
+ resolution: {integrity: sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
hasBin: true
peerDependencies:
node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0
@@ -7790,12 +8118,13 @@ packages:
node-notifier:
optional: true
dependencies:
- '@jest/core': 28.1.3(ts-node@10.9.1)
- '@jest/types': 28.1.3
+ '@jest/core': 29.7.0(ts-node@10.9.1)
+ '@jest/types': 29.6.3
import-local: 3.1.0
- jest-cli: 28.1.3(@types/node@16.18.25)(ts-node@10.9.1)
+ jest-cli: 29.7.0(@types/node@16.18.25)(ts-node@10.9.1)
transitivePeerDependencies:
- '@types/node'
+ - babel-plugin-macros
- supports-color
- ts-node
dev: true
@@ -7866,6 +8195,12 @@ packages:
engines: {node: '>=4'}
hasBin: true
+ /jsesc@3.1.0:
+ resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==}
+ engines: {node: '>=6'}
+ hasBin: true
+ dev: true
+
/json-bigint@1.0.0:
resolution: {integrity: sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==}
dependencies:
@@ -8631,6 +8966,10 @@ packages:
/node-releases@2.0.10:
resolution: {integrity: sha512-5GFldHPXVG/YZmFzJvKK2zDSzPKhEp0+ZR5SVaoSag9fsL5YgHbUHDfnG5494ISANDcK4KwPXAx2xqVEydmd7w==}
+ /node-releases@2.0.19:
+ resolution: {integrity: sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==}
+ dev: true
+
/node-schedule@2.1.1:
resolution: {integrity: sha512-OXdegQq03OmXEjt2hZP33W2YPs/E5BcFQks46+G2gAxs4gHOIVD1u7EqlYLYSKsaIpyKCK9Gbk0ta1/gjRSMRQ==}
engines: {node: '>=6'}
@@ -9044,6 +9383,10 @@ packages:
/picocolors@1.0.0:
resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==}
+ /picocolors@1.1.1:
+ resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==}
+ dev: true
+
/picomatch@2.3.1:
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
engines: {node: '>=8.6'}
@@ -9168,6 +9511,15 @@ packages:
react-is: 18.2.0
dev: true
+ /pretty-format@29.7.0:
+ resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==}
+ engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+ dependencies:
+ '@jest/schemas': 29.6.3
+ ansi-styles: 5.2.0
+ react-is: 18.2.0
+ dev: true
+
/pretty-hrtime@1.0.3:
resolution: {integrity: sha512-66hKPCr+72mlfiSjlEB1+45IjXSqvVAIy6mocupoww4tBFE9R9IhwwUGoI4G++Tc9Aq+2rxOt0RFU6gPcrte0A==}
engines: {node: '>= 0.8'}
@@ -9311,6 +9663,10 @@ packages:
engines: {node: '>=6'}
dev: true
+ /pure-rand@6.1.0:
+ resolution: {integrity: sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==}
+ dev: true
+
/qs@6.11.0:
resolution: {integrity: sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==}
engines: {node: '>=0.6'}
@@ -9535,8 +9891,8 @@ packages:
engines: {node: '>=8'}
dev: true
- /resolve.exports@1.1.1:
- resolution: {integrity: sha512-/NtpHNDN7jWhAaQ9BvBUYZ6YTXsRBgfqWFWP7BZBaoMJO/I3G5OFzvTuWNlZC3aPjins1F+TNrLKsGbH4rfsRQ==}
+ /resolve.exports@2.0.3:
+ resolution: {integrity: sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==}
engines: {node: '>=10'}
dev: true
@@ -10141,14 +10497,6 @@ packages:
has-flag: 4.0.0
dev: true
- /supports-hyperlinks@2.3.0:
- resolution: {integrity: sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==}
- engines: {node: '>=8'}
- dependencies:
- has-flag: 4.0.0
- supports-color: 7.2.0
- dev: true
-
/supports-preserve-symlinks-flag@1.0.0:
resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==}
engines: {node: '>= 0.4'}
@@ -10200,14 +10548,6 @@ packages:
- supports-color
dev: false
- /terminal-link@2.1.1:
- resolution: {integrity: sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ==}
- engines: {node: '>=8'}
- dependencies:
- ansi-escapes: 4.3.2
- supports-hyperlinks: 2.3.0
- dev: true
-
/test-exclude@6.0.0:
resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==}
engines: {node: '>=8'}
@@ -10606,6 +10946,17 @@ packages:
escalade: 3.1.1
picocolors: 1.0.0
+ /update-browserslist-db@1.1.1(browserslist@4.24.3):
+ resolution: {integrity: sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==}
+ hasBin: true
+ peerDependencies:
+ browserslist: '>= 4.21.0'
+ dependencies:
+ browserslist: 4.24.3
+ escalade: 3.2.0
+ picocolors: 1.1.1
+ dev: true
+
/upper-case@1.1.3:
resolution: {integrity: sha512-WRbjgmYzgXkCV7zNVpy5YgrHgbBv126rMALQQMrmzOVC4GM2waQ9x7xtm8VU+1yF2kWyPzI9zbZ48n4vSxwfSA==}
dev: true
@@ -10925,4 +11276,5 @@ packages:
file:../rust/cyclotron-node:
resolution: {directory: ../rust/cyclotron-node, type: directory}
name: '@posthog/cyclotron'
+ version: 0.1.7
dev: false
diff --git a/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts b/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts
index 18d0d578cdaab..c54be841678d6 100644
--- a/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts
+++ b/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts
@@ -12,7 +12,7 @@ import { KafkaProducerWrapper } from '../../../../utils/db/kafka-producer-wrappe
import { status } from '../../../../utils/status'
import { captureIngestionWarning } from '../../../../worker/ingestion/utils'
import { eventDroppedCounter } from '../../metrics'
-import { createSessionReplayEvent } from '../process-event'
+import { createSessionReplayEvent, RRWebEventType } from '../process-event'
import { IncomingRecordingMessage } from '../types'
import { OffsetHighWaterMarker } from './offset-high-water-marker'
@@ -138,6 +138,16 @@ export class ReplayEventsIngester {
if (replayRecord !== null) {
const asDate = DateTime.fromSQL(replayRecord.first_timestamp)
if (!asDate.isValid || Math.abs(asDate.diffNow('day').days) >= 7) {
+ const eventTypes: { type: number; timestamp: number }[] = []
+ const customEvents: typeof rrwebEvents = []
+
+ for (const event of rrwebEvents) {
+ eventTypes.push({ type: event.type, timestamp: event.timestamp })
+ if (event.type === RRWebEventType.Custom) {
+ customEvents.push(event)
+ }
+ }
+
await captureIngestionWarning(
new KafkaProducerWrapper(this.producer),
event.team_id,
@@ -148,6 +158,8 @@ export class ReplayEventsIngester {
isValid: asDate.isValid,
daysFromNow: Math.round(Math.abs(asDate.diffNow('day').days)),
processingTimestamp: DateTime.now().toISO(),
+ eventTypes,
+ customEvents,
},
{ key: event.session_id }
)
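
For readers of this hunk: the ingestion warning now summarises the offending batch instead of attaching it wholesale — every rrweb event contributes a lightweight { type, timestamp } pair, and only custom events are kept in full. Below is a minimal standalone TypeScript sketch of that classification, not the repository's code: the RRWebEventType values are assumed to mirror rrweb's EventType enum (Custom = 5), and RRWebEvent is a simplified shape standing in for the events carried on IncomingRecordingMessage.

// Sketch only — enum values assumed to mirror rrweb's EventType; the real
// RRWebEventType is imported from ../process-event in the hunk above.
enum RRWebEventType {
    DomContentLoaded = 0,
    Load = 1,
    FullSnapshot = 2,
    IncrementalSnapshot = 3,
    Meta = 4,
    Custom = 5,
    Plugin = 6,
}

// Simplified event shape used for illustration.
interface RRWebEvent {
    type: RRWebEventType
    timestamp: number
    data: Record<string, unknown>
}

// Mirrors the loop added above: record { type, timestamp } for every event,
// and keep custom events whole so the warning carries debuggable context.
function summariseRRWebEvents(events: RRWebEvent[]): {
    eventTypes: { type: number; timestamp: number }[]
    customEvents: RRWebEvent[]
} {
    const eventTypes: { type: number; timestamp: number }[] = []
    const customEvents: RRWebEvent[] = []
    for (const event of events) {
        eventTypes.push({ type: event.type, timestamp: event.timestamp })
        if (event.type === RRWebEventType.Custom) {
            customEvents.push(event)
        }
    }
    return { eventTypes, customEvents }
}
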
diff --git a/plugin-server/tests/cdp/cdp-e2e.test.ts b/plugin-server/tests/cdp/cdp-e2e.test.ts
index 2dc228da8705d..1de80a324ad2e 100644
--- a/plugin-server/tests/cdp/cdp-e2e.test.ts
+++ b/plugin-server/tests/cdp/cdp-e2e.test.ts
@@ -129,11 +129,11 @@ describe('CDP E2E', () => {
expect(mockFetch).toHaveBeenCalledTimes(1)
expect(mockFetch.mock.calls[0]).toMatchInlineSnapshot(`
- Array [
+ [
"https://example.com/posthog-webhook",
- Object {
- "body": "{\\"event\\":{\\"uuid\\":\\"b3a1fe86-b10c-43cc-acaf-d208977608d0\\",\\"event\\":\\"$pageview\\",\\"elements_chain\\":\\"\\",\\"distinct_id\\":\\"distinct_id\\",\\"url\\":\\"http://localhost:8000/events/1\\",\\"properties\\":{\\"$current_url\\":\\"https://posthog.com\\",\\"$lib_version\\":\\"1.0.0\\"},\\"timestamp\\":\\"2024-09-03T09:00:00Z\\"},\\"groups\\":{},\\"nested\\":{\\"foo\\":\\"http://localhost:8000/events/1\\"},\\"person\\":{\\"id\\":\\"uuid\\",\\"name\\":\\"test\\",\\"url\\":\\"http://localhost:8000/persons/1\\",\\"properties\\":{\\"email\\":\\"test@posthog.com\\",\\"first_name\\":\\"Pumpkin\\"}},\\"event_url\\":\\"http://localhost:8000/events/1-test\\"}",
- "headers": Object {
+ {
+ "body": "{"event":{"uuid":"b3a1fe86-b10c-43cc-acaf-d208977608d0","event":"$pageview","elements_chain":"","distinct_id":"distinct_id","url":"http://localhost:8000/events/1","properties":{"$current_url":"https://posthog.com","$lib_version":"1.0.0"},"timestamp":"2024-09-03T09:00:00Z"},"groups":{},"nested":{"foo":"http://localhost:8000/events/1"},"person":{"id":"uuid","name":"test","url":"http://localhost:8000/persons/1","properties":{"email":"test@posthog.com","first_name":"Pumpkin"}},"event_url":"http://localhost:8000/events/1-test"}",
+ "headers": {
"version": "v=1.0.0",
},
"method": "POST",
diff --git a/plugin-server/tests/cdp/groups-manager.test.ts b/plugin-server/tests/cdp/groups-manager.test.ts
index f489d6b019045..ab315ac555a80 100644
--- a/plugin-server/tests/cdp/groups-manager.test.ts
+++ b/plugin-server/tests/cdp/groups-manager.test.ts
@@ -60,18 +60,18 @@ describe('Groups Manager', () => {
await groupsManager.enrichGroups([globals])
expect(globals.groups).toMatchInlineSnapshot(`
- Object {
- "GroupA": Object {
+ {
+ "GroupA": {
"id": "id-1",
"index": 0,
- "properties": Object {},
+ "properties": {},
"type": "GroupA",
"url": "http://localhost:8000/projects/1/groups/0/id-1",
},
- "GroupB": Object {
+ "GroupB": {
"id": "id-2",
"index": 1,
- "properties": Object {},
+ "properties": {},
"type": "GroupB",
"url": "http://localhost:8000/projects/1/groups/1/id-2",
},
@@ -95,20 +95,20 @@ describe('Groups Manager', () => {
await groupsManager.enrichGroups([globals])
expect(globals.groups).toMatchInlineSnapshot(`
- Object {
- "GroupA": Object {
+ {
+ "GroupA": {
"id": "id-1",
"index": 0,
- "properties": Object {
+ "properties": {
"prop": "value-1",
},
"type": "GroupA",
"url": "http://localhost:8000/projects/1/groups/0/id-1",
},
- "GroupB": Object {
+ "GroupB": {
"id": "id-2",
"index": 1,
- "properties": Object {
+ "properties": {
"prop": "value-2",
},
"type": "GroupB",
@@ -145,31 +145,31 @@ describe('Groups Manager', () => {
await groupsManager.enrichGroups(items)
expect(items[0].groups).toMatchInlineSnapshot(`
- Object {
- "GroupA": Object {
+ {
+ "GroupA": {
"id": "id-1",
"index": 0,
- "properties": Object {
+ "properties": {
"prop": "value-team-1",
},
"type": "GroupA",
"url": "http://localhost:8000/projects/1/groups/0/id-1",
},
- "GroupB": Object {
+ "GroupB": {
"id": "id-2",
"index": 1,
- "properties": Object {},
+ "properties": {},
"type": "GroupB",
"url": "http://localhost:8000/projects/1/groups/1/id-2",
},
}
`)
expect(items[1].groups).toMatchInlineSnapshot(`
- Object {
- "GroupA": Object {
+ {
+ "GroupA": {
"id": "id-1",
"index": 0,
- "properties": Object {
+ "properties": {
"prop": "value-team-1",
},
"type": "GroupA",
@@ -178,11 +178,11 @@ describe('Groups Manager', () => {
}
`)
expect(items[2].groups).toMatchInlineSnapshot(`
- Object {
- "GroupA": Object {
+ {
+ "GroupA": {
"id": "id-1",
"index": 1,
- "properties": Object {
+ "properties": {
"prop": "value-team-2",
},
"type": "GroupA",
diff --git a/plugin-server/tests/cdp/hog-executor.test.ts b/plugin-server/tests/cdp/hog-executor.test.ts
index 99feb53d62207..4d0c3d52ab815 100644
--- a/plugin-server/tests/cdp/hog-executor.test.ts
+++ b/plugin-server/tests/cdp/hog-executor.test.ts
@@ -125,13 +125,13 @@ describe('Hog Executor', () => {
const result = executor.execute(invocation)
expect(result.logs.map((x) => x.message)).toMatchInlineSnapshot(`
- Array [
+ [
"Executing function",
"test",
- "{\\"nested\\":{\\"foo\\":\\"***REDACTED***\\",\\"null\\":null,\\"bool\\":false}}",
- "{\\"foo\\":\\"***REDACTED***\\",\\"null\\":null,\\"bool\\":false}",
+ "{"nested":{"foo":"***REDACTED***","null":null,"bool":false}}",
+ "{"foo":"***REDACTED***","null":null,"bool":false}",
"substring: ***REDACTED***",
- "{\\"input_1\\":\\"test\\",\\"secret_input_2\\":{\\"foo\\":\\"***REDACTED***\\",\\"null\\":null,\\"bool\\":false},\\"secret_input_3\\":\\"***REDACTED***\\"}",
+ "{"input_1":"test","secret_input_2":{"foo":"***REDACTED***","null":null,"bool":false},"secret_input_3":"***REDACTED***"}",
"Function completed in 0ms. Sync: 0ms. Mem: 169 bytes. Ops: 28. Event: 'http://localhost:8000/events/1'",
]
`)
@@ -190,11 +190,11 @@ describe('Hog Executor', () => {
expect(secondResult.finished).toBe(true)
expect(secondResult.error).toBeUndefined()
expect(logs.map((log) => log.message)).toMatchInlineSnapshot(`
- Array [
+ [
"Executing function",
"Suspending function due to async function call 'fetch'. Payload: 1951 bytes. Event: uuid",
"Resuming function",
- "Fetch response:, {\\"status\\":200,\\"body\\":\\"success\\"}",
+ "Fetch response:, {"status":200,"body":"success"}",
"Function completed in 100ms. Sync: 0ms. Mem: 812 bytes. Ops: 22. Event: 'http://localhost:8000/events/1'",
]
`)
@@ -209,11 +209,11 @@ describe('Hog Executor', () => {
logs.push(...secondResult.logs)
expect(logs.map((log) => log.message)).toMatchInlineSnapshot(`
- Array [
+ [
"Executing function",
"Suspending function due to async function call 'fetch'. Payload: 1951 bytes. Event: uuid",
"Resuming function",
- "Fetch response:, {\\"status\\":200,\\"body\\":{\\"foo\\":\\"bar\\"}}",
+ "Fetch response:, {"status":200,"body":{"foo":"bar"}}",
"Function completed in 100ms. Sync: 0ms. Mem: 812 bytes. Ops: 22. Event: 'http://localhost:8000/events/1'",
]
`)
@@ -240,13 +240,13 @@ describe('Hog Executor', () => {
logs.push(...secondResult.logs)
expect(logs.map((log) => log.message)).toMatchInlineSnapshot(`
- Array [
+ [
"Executing function",
"Suspending function due to async function call 'fetch'. Payload: 1951 bytes. Event: uuid",
"Fetch failed after 1 attempts",
"Fetch failure of kind failurestatus with status 404 and message 404 Not Found",
"Resuming function",
- "Fetch response:, {\\"status\\":404,\\"body\\":{\\"foo\\":\\"bar\\"}}",
+ "Fetch response:, {"status":404,"body":{"foo":"bar"}}",
"Function completed in 100ms. Sync: 0ms. Mem: 812 bytes. Ops: 22. Event: 'http://localhost:8000/events/1'",
]
`)
diff --git a/plugin-server/tests/cdp/hog-watcher.test.ts b/plugin-server/tests/cdp/hog-watcher.test.ts
index b13df966e5ca3..c5a34b4554b6e 100644
--- a/plugin-server/tests/cdp/hog-watcher.test.ts
+++ b/plugin-server/tests/cdp/hog-watcher.test.ts
@@ -78,13 +78,13 @@ describe('HogWatcher', () => {
it('should retrieve empty state', async () => {
const res = await watcher.getStates(['id1', 'id2'])
expect(res).toMatchInlineSnapshot(`
- Object {
- "id1": Object {
+ {
+ "id1": {
"rating": 1,
"state": 1,
"tokens": 10000,
},
- "id2": Object {
+ "id2": {
"rating": 1,
"state": 1,
"tokens": 10000,
@@ -143,7 +143,7 @@ describe('HogWatcher', () => {
await watcher.observeResults(lotsOfResults)
expect(await watcher.getState('id1')).toMatchInlineSnapshot(`
- Object {
+ {
"rating": -0.0001,
"state": 3,
"tokens": -1,
@@ -155,7 +155,7 @@ describe('HogWatcher', () => {
await watcher.observeResults(lotsOfResults)
expect(await watcher.getState('id2')).toMatchInlineSnapshot(`
- Object {
+ {
"rating": 1,
"state": 1,
"tokens": 10000,
@@ -190,7 +190,7 @@ describe('HogWatcher', () => {
])
expect(await watcher.getState('id1')).toMatchInlineSnapshot(`
- Object {
+ {
"rating": 0,
"state": 3,
"tokens": 0,
@@ -201,7 +201,7 @@ describe('HogWatcher', () => {
// Should still be disabled even though tokens have been refilled
expect(await watcher.getState('id1')).toMatchInlineSnapshot(`
- Object {
+ {
"rating": 0.01,
"state": 3,
"tokens": 100,
@@ -213,7 +213,7 @@ describe('HogWatcher', () => {
it('should force healthy', async () => {
await watcher.forceStateChange('id1', HogWatcherState.healthy)
expect(await watcher.getState('id1')).toMatchInlineSnapshot(`
- Object {
+ {
"rating": 1,
"state": 1,
"tokens": 10000,
@@ -224,7 +224,7 @@ describe('HogWatcher', () => {
it('should force degraded', async () => {
await watcher.forceStateChange('id1', HogWatcherState.degraded)
expect(await watcher.getState('id1')).toMatchInlineSnapshot(`
- Object {
+ {
"rating": 0.8,
"state": 1,
"tokens": 8000,
@@ -235,7 +235,7 @@ describe('HogWatcher', () => {
it('should force disabledForPeriod', async () => {
await watcher.forceStateChange('id1', HogWatcherState.disabledForPeriod)
expect(await watcher.getState('id1')).toMatchInlineSnapshot(`
- Object {
+ {
"rating": 0,
"state": 3,
"tokens": 0,
@@ -249,7 +249,7 @@ describe('HogWatcher', () => {
it('should force disabledIndefinitely', async () => {
await watcher.forceStateChange('id1', HogWatcherState.disabledIndefinitely)
expect(await watcher.getState('id1')).toMatchInlineSnapshot(`
- Object {
+ {
"rating": 0,
"state": 4,
"tokens": 0,
diff --git a/plugin-server/tests/cdp/utils.test.ts b/plugin-server/tests/cdp/utils.test.ts
index c343f8e6461a1..7572d7aa95f5f 100644
--- a/plugin-server/tests/cdp/utils.test.ts
+++ b/plugin-server/tests/cdp/utils.test.ts
@@ -56,8 +56,8 @@ describe('Utils', () => {
const prepared = prepareLogEntriesForClickhouse(example)
expect(prepared).toMatchInlineSnapshot(`
- Array [
- Object {
+ [
+ {
"instance_id": "inv-1",
"level": "info",
"log_source": "hog_function",
@@ -66,7 +66,7 @@ describe('Utils', () => {
"team_id": 1,
"timestamp": "2021-05-03 00:00:00.000",
},
- Object {
+ {
"instance_id": "inv-1",
"level": "info",
"log_source": "hog_function",
@@ -75,7 +75,7 @@ describe('Utils', () => {
"team_id": 1,
"timestamp": "2021-05-03 00:00:00.001",
},
- Object {
+ {
"instance_id": "inv-1",
"level": "info",
"log_source": "hog_function",
@@ -84,7 +84,7 @@ describe('Utils', () => {
"team_id": 1,
"timestamp": "2021-05-03 00:00:00.002",
},
- Object {
+ {
"instance_id": "inv-1",
"level": "info",
"log_source": "hog_function",
diff --git a/plugin-server/tests/main/ingestion-queues/each-batch-webhooks.test.ts b/plugin-server/tests/main/ingestion-queues/each-batch-webhooks.test.ts
index 8bac349ef8633..d4210888fa3f0 100644
--- a/plugin-server/tests/main/ingestion-queues/each-batch-webhooks.test.ts
+++ b/plugin-server/tests/main/ingestion-queues/each-batch-webhooks.test.ts
@@ -157,16 +157,16 @@ describe('eachMessageWebhooksHandlers', () => {
// on hookCannon, but that would require a little more setup, and it
// is at the least testing a little bit more than we were before.
expect(matchSpy.mock.calls[0][0]).toMatchInlineSnapshot(`
- Object {
+ {
"distinctId": "my_id",
"elementsList": undefined,
"event": "$pageview",
"eventUuid": "uuid1",
- "groups": Object {
- "organization": Object {
+ "groups": {
+ "organization": {
"index": 0,
"key": "org_posthog",
- "properties": Object {
+ "properties": {
"name": "PostHog",
},
"type": "organization",
@@ -174,10 +174,10 @@ describe('eachMessageWebhooksHandlers', () => {
},
"person_created_at": "2020-02-20T02:15:00.000Z",
"person_id": "F99FA0A1-E0C2-4CFE-A09A-4C3C4327A4CC",
- "person_properties": Object {},
+ "person_properties": {},
"projectId": 2,
- "properties": Object {
- "$groups": Object {
+ "properties": {
+ "$groups": {
"organization": "org_posthog",
},
"$ip": "127.0.0.1",
@@ -189,8 +189,8 @@ describe('eachMessageWebhooksHandlers', () => {
expect(postWebhookSpy).toHaveBeenCalledTimes(1)
expect(JSON.parse(postWebhookSpy.mock.calls[0][0].webhook.body)).toMatchInlineSnapshot(`
- Object {
- "text": "[Test Action](/project/2/action/1) was triggered by [my\\\\_id](/project/2/person/my\\\\_id) in organization [PostHog](/project/2/groups/0/org\\\\_posthog)",
+ {
+ "text": "[Test Action](/project/2/action/1) was triggered by [my\\_id](/project/2/person/my\\_id) in organization [PostHog](/project/2/groups/0/org\\_posthog)",
}
`)
})
diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/__snapshots__/utils.test.ts.snap b/plugin-server/tests/main/ingestion-queues/session-recording/__snapshots__/utils.test.ts.snap
index 6a23a3d988282..21ffe0f2c3b0e 100644
--- a/plugin-server/tests/main/ingestion-queues/session-recording/__snapshots__/utils.test.ts.snap
+++ b/plugin-server/tests/main/ingestion-queues/session-recording/__snapshots__/utils.test.ts.snap
@@ -1,35 +1,35 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`session-recording utils parseKafkaBatch can parse and reduce a batch of messages 1`] = `
-Array [
- Object {
+[
+ {
"distinct_id": "c3936f0b-875f-4992-8e8a-26499d1f3a0a",
- "eventsByWindowId": Object {
- "b8d205d5-dd89-4465-b2d5-eb4d1eceb3ea": Array [
- Object {
- "data": Object {},
+ "eventsByWindowId": {
+ "b8d205d5-dd89-4465-b2d5-eb4d1eceb3ea": [
+ {
+ "data": {},
"timestamp": 123,
"type": 6,
},
- Object {
- "data": Object {},
+ {
+ "data": {},
"timestamp": 438,
"type": 6,
},
],
- "c74d85fa-ccbb-43ba-981c-5e7d17f211de": Array [
- Object {
- "data": Object {},
+ "c74d85fa-ccbb-43ba-981c-5e7d17f211de": [
+ {
+ "data": {},
"timestamp": 433,
"type": 6,
},
],
},
- "eventsRange": Object {
+ "eventsRange": {
"end": 438,
"start": 123,
},
- "metadata": Object {
+ "metadata": {
"consoleLogIngestionEnabled": true,
"highOffset": 235,
"lowOffset": 232,
@@ -42,22 +42,22 @@ Array [
"snapshot_source": undefined,
"team_id": 1,
},
- Object {
+ {
"distinct_id": "207f0e52-f265-4932-86e5-cec62844d990",
- "eventsByWindowId": Object {
- "0bbe7878-6516-46b2-80cf-e387839d7313": Array [
- Object {
- "data": Object {},
+ "eventsByWindowId": {
+ "0bbe7878-6516-46b2-80cf-e387839d7313": [
+ {
+ "data": {},
"timestamp": 222,
"type": 6,
},
],
},
- "eventsRange": Object {
+ "eventsRange": {
"end": 222,
"start": 222,
},
- "metadata": Object {
+ "metadata": {
"consoleLogIngestionEnabled": true,
"highOffset": 233,
"lowOffset": 233,
@@ -70,22 +70,22 @@ Array [
"snapshot_source": undefined,
"team_id": 1,
},
- Object {
+ {
"distinct_id": "9696eba5-4f24-4f06-957b-10f98e26f2a9",
- "eventsByWindowId": Object {
- "1260fae8-08b5-4e5f-bea1-b8abd6250b70": Array [
- Object {
- "data": Object {},
+ "eventsByWindowId": {
+ "1260fae8-08b5-4e5f-bea1-b8abd6250b70": [
+ {
+ "data": {},
"timestamp": 432,
"type": 6,
},
],
},
- "eventsRange": Object {
+ "eventsRange": {
"end": 432,
"start": 432,
},
- "metadata": Object {
+ "metadata": {
"consoleLogIngestionEnabled": true,
"highOffset": 500,
"lowOffset": 500,
@@ -102,28 +102,28 @@ Array [
`;
exports[`session-recording utils parseKafkaBatch does not merge sessions for different teams 1`] = `
-Array [
- Object {
+[
+ {
"distinct_id": "c3936f0b-875f-4992-8e8a-26499d1f3a0a",
- "eventsByWindowId": Object {
- "b8d205d5-dd89-4465-b2d5-eb4d1eceb3ea": Array [
- Object {
- "data": Object {},
+ "eventsByWindowId": {
+ "b8d205d5-dd89-4465-b2d5-eb4d1eceb3ea": [
+ {
+ "data": {},
"timestamp": 123,
"type": 6,
},
- Object {
- "data": Object {},
+ {
+ "data": {},
"timestamp": 124,
"type": 6,
},
],
},
- "eventsRange": Object {
+ "eventsRange": {
"end": 124,
"start": 123,
},
- "metadata": Object {
+ "metadata": {
"consoleLogIngestionEnabled": true,
"highOffset": 233,
"lowOffset": 232,
@@ -136,22 +136,22 @@ Array [
"snapshot_source": undefined,
"team_id": 9,
},
- Object {
+ {
"distinct_id": "c3936f0b-875f-4992-8e8a-26499d1f3a0a",
- "eventsByWindowId": Object {
- "b8d205d5-dd89-4465-b2d5-eb4d1eceb3ea": Array [
- Object {
- "data": Object {},
+ "eventsByWindowId": {
+ "b8d205d5-dd89-4465-b2d5-eb4d1eceb3ea": [
+ {
+ "data": {},
"timestamp": 127,
"type": 6,
},
],
},
- "eventsRange": Object {
+ "eventsRange": {
"end": 127,
"start": 127,
},
- "metadata": Object {
+ "metadata": {
"consoleLogIngestionEnabled": true,
"highOffset": 234,
"lowOffset": 234,
@@ -168,18 +168,18 @@ Array [
`;
exports[`session-recording utils parseKafkaMessage can parse a message correctly 1`] = `
-Object {
+{
"distinct_id": "my-distinct-id",
- "eventsByWindowId": Object {
- "018a47c2-2f4a-70a8-b480-5e52f5480448": Array [
- Object {
- "data": Object {
- "payload": Object {
+ "eventsByWindowId": {
+ "018a47c2-2f4a-70a8-b480-5e52f5480448": [
+ {
+ "data": {
+ "payload": {
"level": "log",
- "payload": Array [
- "\\"Hedgehog: Will 'jump' for 2916.6666666666665ms\\"",
+ "payload": [
+ ""Hedgehog: Will 'jump' for 2916.6666666666665ms"",
],
- "trace": Array [
+ "trace": [
"HedgehogActor.setAnimation (http://127.0.0.1:8000/static/toolbar.js?_ts=1693421010000:105543:17)",
"HedgehogActor.setRandomAnimation (http://127.0.0.1:8000/static/toolbar.js?_ts=1693421010000:105550:14)",
"HedgehogActor.update (http://127.0.0.1:8000/static/toolbar.js?_ts=1693421010000:105572:16)",
@@ -193,11 +193,11 @@ Object {
},
],
},
- "eventsRange": Object {
+ "eventsRange": {
"end": 1693422950693,
"start": 1693422950693,
},
- "metadata": Object {
+ "metadata": {
"consoleLogIngestionEnabled": false,
"highOffset": 1,
"lowOffset": 1,
diff --git a/plugin-server/tests/utils/db/redis.test.ts b/plugin-server/tests/utils/db/redis.test.ts
index 9aeb806af04ea..b1e368a599eea 100644
--- a/plugin-server/tests/utils/db/redis.test.ts
+++ b/plugin-server/tests/utils/db/redis.test.ts
@@ -18,8 +18,8 @@ describe('Redis', () => {
it('should respond with unique options if all values set', () => {
expect(getRedisConnectionOptions(config, 'posthog')).toMatchInlineSnapshot(`
- Object {
- "options": Object {
+ {
+ "options": {
"password": "posthog-password",
"port": 6379,
},
@@ -27,16 +27,16 @@ describe('Redis', () => {
}
`)
expect(getRedisConnectionOptions(config, 'ingestion')).toMatchInlineSnapshot(`
- Object {
- "options": Object {
+ {
+ "options": {
"port": 6479,
},
"url": "ingestion-redis",
}
`)
expect(getRedisConnectionOptions(config, 'session-recording')).toMatchInlineSnapshot(`
- Object {
- "options": Object {
+ {
+ "options": {
"port": 6579,
},
"url": "session-recording-redis",
@@ -50,17 +50,17 @@ describe('Redis', () => {
config.POSTHOG_SESSION_RECORDING_REDIS_HOST = ''
expect(getRedisConnectionOptions(config, 'posthog')).toMatchInlineSnapshot(`
- Object {
+ {
"url": "redis://localhost:6379",
}
`)
expect(getRedisConnectionOptions(config, 'ingestion')).toMatchInlineSnapshot(`
- Object {
+ {
"url": "redis://localhost:6379",
}
`)
expect(getRedisConnectionOptions(config, 'session-recording')).toMatchInlineSnapshot(`
- Object {
+ {
"url": "redis://localhost:6379",
}
`)
@@ -70,8 +70,8 @@ describe('Redis', () => {
config.INGESTION_REDIS_HOST = ''
expect(getRedisConnectionOptions(config, 'ingestion')).toMatchInlineSnapshot(`
- Object {
- "options": Object {
+ {
+ "options": {
"password": "posthog-password",
"port": 6379,
},
diff --git a/plugin-server/tests/utils/db/utils.test.ts b/plugin-server/tests/utils/db/utils.test.ts
index 1afcf4a06481e..9770fbb0b16dd 100644
--- a/plugin-server/tests/utils/db/utils.test.ts
+++ b/plugin-server/tests/utils/db/utils.test.ts
@@ -31,7 +31,7 @@ describe('personInitialAndUTMProperties()', () => {
}
expect(personInitialAndUTMProperties(properties)).toMatchInlineSnapshot(`
- Object {
+ {
"$app_build": 2,
"$app_name": "my app",
"$app_namespace": "com.posthog.myapp",
@@ -39,14 +39,14 @@ describe('personInitialAndUTMProperties()', () => {
"$browser": "Chrome",
"$browser_version": "95",
"$current_url": "https://test.com",
- "$elements": Array [
- Object {
+ "$elements": [
+ {
"attr__class": "btn btn-sm",
"nth_child": 1,
"nth_of_type": 2,
"tag_name": "a",
},
- Object {
+ {
"$el_text": "💻",
"nth_child": 1,
"nth_of_type": 2,
@@ -57,7 +57,7 @@ describe('personInitialAndUTMProperties()', () => {
"$os_version": "10.15.7",
"$referrer": "https://google.com/?q=posthog",
"$referring_domain": "https://google.com",
- "$set": Object {
+ "$set": {
"$app_build": 2,
"$app_name": "my app",
"$app_namespace": "com.posthog.myapp",
@@ -73,7 +73,7 @@ describe('personInitialAndUTMProperties()', () => {
"msclkid": "BING ADS ID",
"utm_medium": "twitter",
},
- "$set_once": Object {
+ "$set_once": {
"$initial_app_build": 2,
"$initial_app_name": "my app",
"$initial_app_namespace": "com.posthog.myapp",
diff --git a/plugin-server/tests/worker/ingestion/__snapshots__/app-metrics.test.ts.snap b/plugin-server/tests/worker/ingestion/__snapshots__/app-metrics.test.ts.snap
index 1894a82b49dbd..198c666d8cebb 100644
--- a/plugin-server/tests/worker/ingestion/__snapshots__/app-metrics.test.ts.snap
+++ b/plugin-server/tests/worker/ingestion/__snapshots__/app-metrics.test.ts.snap
@@ -1,13 +1,13 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`AppMetrics() flush() flushes queued messages 1`] = `
-Array [
- Array [
- Object {
- "kafkaMessage": Object {
- "messages": Array [
- Object {
- "value": "{\\"timestamp\\":\\"1970-01-01 00:16:40.000\\",\\"team_id\\":2,\\"plugin_config_id\\":2,\\"job_id\\":\\"000-000\\",\\"category\\":\\"processEvent\\",\\"successes\\":1,\\"successes_on_retry\\":0,\\"failures\\":0}",
+[
+ [
+ {
+ "kafkaMessage": {
+ "messages": [
+ {
+ "value": "{"timestamp":"1970-01-01 00:16:40.000","team_id":2,"plugin_config_id":2,"job_id":"000-000","category":"processEvent","successes":1,"successes_on_retry":0,"failures":0}",
},
],
"topic": "clickhouse_app_metrics_test",
diff --git a/plugin-server/tests/worker/ingestion/__snapshots__/webhook-formatter.test.ts.snap b/plugin-server/tests/worker/ingestion/__snapshots__/webhook-formatter.test.ts.snap
index c64b13e0431ce..bd89402f8bcd5 100644
--- a/plugin-server/tests/worker/ingestion/__snapshots__/webhook-formatter.test.ts.snap
+++ b/plugin-server/tests/worker/ingestion/__snapshots__/webhook-formatter.test.ts.snap
@@ -20,7 +20,7 @@ exports[`WebhookFormatter webhook formatting options {
exports[`WebhookFormatter webhook formatting options {
messageFormat: '[person.properties.pizza_ingredient_ranking]',
personProperties: [Object]
-} 1`] = `"[\\"pineapple\\",\\"broccoli\\",\\"aubergine\\"\\\\]"`;
+} 1`] = `"["pineapple","broccoli","aubergine"\\]"`;
exports[`WebhookFormatter webhook formatting options {
messageFormat: '[person]',
@@ -49,7 +49,7 @@ exports[`WebhookFormatter webhook formatting options { messageFormat: '[event.uu
exports[`WebhookFormatter webhook formatting options { messageFormat: '[event]' } 1`] = `"[$pageview](http://localhost:8000/project/123/events/123/2021-10-31T00%253A44%253A00.000Z)"`;
-exports[`WebhookFormatter webhook formatting options { messageFormat: '[event]', event: [Object] } 1`] = `"[text\\\\]\\\\(yes\\\\!\\\\), \\\\[new link](http://localhost:8000/project/123/events/\\\\*\\\\*\\\\)/2021-10-31T00%253A44%253A00.000Z)"`;
+exports[`WebhookFormatter webhook formatting options { messageFormat: '[event]', event: [Object] } 1`] = `"[text\\]\\(yes\\!\\), \\[new link](http://localhost:8000/project/123/events/\\*\\*\\)/2021-10-31T00%253A44%253A00.000Z)"`;
exports[`WebhookFormatter webhook formatting options { messageFormat: '[groups.missing]' } 1`] = `"(event without 'missing')"`;
diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/__snapshots__/extractHeatmapDataStep.test.ts.snap b/plugin-server/tests/worker/ingestion/event-pipeline/__snapshots__/extractHeatmapDataStep.test.ts.snap
index 12238bd7ab350..724035afff4e2 100644
--- a/plugin-server/tests/worker/ingestion/event-pipeline/__snapshots__/extractHeatmapDataStep.test.ts.snap
+++ b/plugin-server/tests/worker/ingestion/event-pipeline/__snapshots__/extractHeatmapDataStep.test.ts.snap
@@ -1,13 +1,13 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`extractHeatmapDataStep() parses and ingests correct $heatmap_data 2`] = `
-Array [
- Array [
- Object {
+[
+ [
+ {
"key": "018eebf3-cb48-750b-bfad-36409ea6f2b2",
"topic": undefined,
- "value": Object {
- "data": Array [
+ "value": {
+ "data": [
123,
34,
116,
@@ -330,12 +330,12 @@ Array [
"waitForAck": true,
},
],
- Array [
- Object {
+ [
+ {
"key": "018eebf3-cb48-750b-bfad-36409ea6f2b2",
"topic": undefined,
- "value": Object {
- "data": Array [
+ "value": {
+ "data": [
123,
34,
116,
@@ -654,12 +654,12 @@ Array [
"waitForAck": true,
},
],
- Array [
- Object {
+ [
+ {
"key": "018eebf3-cb48-750b-bfad-36409ea6f2b2",
"topic": undefined,
- "value": Object {
- "data": Array [
+ "value": {
+ "data": [
123,
34,
116,
@@ -978,12 +978,12 @@ Array [
"waitForAck": true,
},
],
- Array [
- Object {
+ [
+ {
"key": "018eebf3-cb48-750b-bfad-36409ea6f2b2",
"topic": undefined,
- "value": Object {
- "data": Array [
+ "value": {
+ "data": [
123,
34,
116,
@@ -1306,12 +1306,12 @@ Array [
"waitForAck": true,
},
],
- Array [
- Object {
+ [
+ {
"key": "018eebf3-cb48-750b-bfad-36409ea6f2b2",
"topic": undefined,
- "value": Object {
- "data": Array [
+ "value": {
+ "data": [
123,
34,
116,
@@ -1630,12 +1630,12 @@ Array [
"waitForAck": true,
},
],
- Array [
- Object {
+ [
+ {
"key": "018eebf3-cb48-750b-bfad-36409ea6f2b2",
"topic": undefined,
- "value": Object {
- "data": Array [
+ "value": {
+ "data": [
123,
34,
116,
@@ -1954,12 +1954,12 @@ Array [
"waitForAck": true,
},
],
- Array [
- Object {
+ [
+ {
"key": "018eebf3-cb48-750b-bfad-36409ea6f2b2",
"topic": undefined,
- "value": Object {
- "data": Array [
+ "value": {
+ "data": [
123,
34,
116,
@@ -2278,12 +2278,12 @@ Array [
"waitForAck": true,
},
],
- Array [
- Object {
+ [
+ {
"key": "018eebf3-cb48-750b-bfad-36409ea6f2b2",
"topic": undefined,
- "value": Object {
- "data": Array [
+ "value": {
+ "data": [
123,
34,
116,
@@ -2602,12 +2602,12 @@ Array [
"waitForAck": true,
},
],
- Array [
- Object {
+ [
+ {
"key": "018eebf3-cb48-750b-bfad-36409ea6f2b2",
"topic": undefined,
- "value": Object {
- "data": Array [
+ "value": {
+ "data": [
123,
34,
116,
@@ -2930,12 +2930,12 @@ Array [
"waitForAck": true,
},
],
- Array [
- Object {
+ [
+ {
"key": "018eebf3-cb48-750b-bfad-36409ea6f2b2",
"topic": undefined,
- "value": Object {
- "data": Array [
+ "value": {
+ "data": [
123,
34,
116,
@@ -3258,12 +3258,12 @@ Array [
"waitForAck": true,
},
],
- Array [
- Object {
+ [
+ {
"key": "018eebf3-cb48-750b-bfad-36409ea6f2b2",
"topic": undefined,
- "value": Object {
- "data": Array [
+ "value": {
+ "data": [
123,
34,
116,
@@ -3582,12 +3582,12 @@ Array [
"waitForAck": true,
},
],
- Array [
- Object {
+ [
+ {
"key": "018eebf3-cb48-750b-bfad-36409ea6f2b2",
"topic": undefined,
- "value": Object {
- "data": Array [
+ "value": {
+ "data": [
123,
34,
116,
@@ -3906,12 +3906,12 @@ Array [
"waitForAck": true,
},
],
- Array [
- Object {
+ [
+ {
"key": "018eebf3-cb48-750b-bfad-36409ea6f2b2",
"topic": undefined,
- "value": Object {
- "data": Array [
+ "value": {
+ "data": [
123,
34,
116,
@@ -4234,12 +4234,12 @@ Array [
"waitForAck": true,
},
],
- Array [
- Object {
+ [
+ {
"key": "018eebf3-cb48-750b-bfad-36409ea6f2b2",
"topic": undefined,
- "value": Object {
- "data": Array [
+ "value": {
+ "data": [
123,
34,
116,
@@ -4558,12 +4558,12 @@ Array [
"waitForAck": true,
},
],
- Array [
- Object {
+ [
+ {
"key": "018eebf3-cb48-750b-bfad-36409ea6f2b2",
"topic": undefined,
- "value": Object {
- "data": Array [
+ "value": {
+ "data": [
123,
34,
116,
@@ -4882,12 +4882,12 @@ Array [
"waitForAck": true,
},
],
- Array [
- Object {
+ [
+ {
"key": "018eebf3-cb48-750b-bfad-36409ea6f2b2",
"topic": undefined,
- "value": Object {
- "data": Array [
+ "value": {
+ "data": [
123,
34,
116,
diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/__snapshots__/runner.test.ts.snap b/plugin-server/tests/worker/ingestion/event-pipeline/__snapshots__/runner.test.ts.snap
index f4dc48882a622..13a552d23183c 100644
--- a/plugin-server/tests/worker/ingestion/event-pipeline/__snapshots__/runner.test.ts.snap
+++ b/plugin-server/tests/worker/ingestion/event-pipeline/__snapshots__/runner.test.ts.snap
@@ -1,16 +1,16 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`EventPipelineRunner runEventPipeline() runs steps starting from populateTeamDataStep 1`] = `
-Array [
- Array [
+[
+ [
"populateTeamDataStep",
- Array [
- Object {
+ [
+ {
"distinct_id": "my_id",
"event": "default event",
"ip": "127.0.0.1",
"now": "2020-02-23T02:15:00.000Z",
- "properties": Object {},
+ "properties": {},
"site_url": "http://localhost",
"team_id": null,
"timestamp": "2020-02-23T02:15:00.000Z",
@@ -19,15 +19,15 @@ Array [
},
],
],
- Array [
+ [
"pluginsProcessEventStep",
- Array [
- Object {
+ [
+ {
"distinct_id": "my_id",
"event": "default event",
"ip": "127.0.0.1",
"now": "2020-02-23T02:15:00.000Z",
- "properties": Object {},
+ "properties": {},
"site_url": "http://localhost",
"team_id": 2,
"timestamp": "2020-02-23T02:15:00.000Z",
@@ -35,21 +35,21 @@ Array [
},
],
],
- Array [
+ [
"normalizeEventStep",
- Array [
+ [
true,
],
],
- Array [
+ [
"processPersonsStep",
- Array [
- Object {
+ [
+ {
"distinct_id": "my_id",
"event": "default event",
"ip": null,
"now": "2020-02-23T02:15:00.000Z",
- "properties": Object {
+ "properties": {
"$ip": "127.0.0.1",
},
"site_url": "http://localhost",
@@ -61,15 +61,15 @@ Array [
true,
],
],
- Array [
+ [
"prepareEventStep",
- Array [
- Object {
+ [
+ {
"distinct_id": "my_id",
"event": "default event",
"ip": null,
"now": "2020-02-23T02:15:00.000Z",
- "properties": Object {
+ "properties": {
"$ip": "127.0.0.1",
},
"site_url": "http://localhost",
@@ -80,58 +80,58 @@ Array [
true,
],
],
- Array [
+ [
"extractHeatmapDataStep",
- Array [
- Object {
+ [
+ {
"distinctId": "my_id",
- "elementsList": Array [],
+ "elementsList": [],
"event": "$pageview",
"eventUuid": "uuid1",
"ip": "127.0.0.1",
"projectId": 1,
- "properties": Object {},
+ "properties": {},
"teamId": 2,
"timestamp": "2020-02-23T02:15:00.000Z",
},
],
],
- Array [
+ [
"createEventStep",
- Array [
- Object {
+ [
+ {
"distinctId": "my_id",
- "elementsList": Array [],
+ "elementsList": [],
"event": "$pageview",
"eventUuid": "uuid1",
"ip": "127.0.0.1",
"projectId": 1,
- "properties": Object {},
+ "properties": {},
"teamId": 2,
"timestamp": "2020-02-23T02:15:00.000Z",
},
- Object {
- "person": Object {
+ {
+ "person": {
"created_at": "2020-02-23T02:15:00.000Z",
"id": 123,
"is_identified": true,
"is_user_id": 0,
- "properties": Object {},
- "properties_last_operation": Object {},
- "properties_last_updated_at": Object {},
+ "properties": {},
+ "properties_last_operation": {},
+ "properties_last_updated_at": {},
"team_id": 2,
"uuid": "uuid",
"version": 0,
},
- "personUpdateProperties": Object {},
+ "personUpdateProperties": {},
},
true,
],
],
- Array [
+ [
"emitEventStep",
- Array [
- Object {
+ [
+ {
"created_at": "2024-11-18 14:54:33.606",
"distinct_id": "my_id",
"elements_chain": "",
diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/extractHeatmapDataStep.test.ts b/plugin-server/tests/worker/ingestion/event-pipeline/extractHeatmapDataStep.test.ts
index 52de14eabf552..11035a94472c4 100644
--- a/plugin-server/tests/worker/ingestion/event-pipeline/extractHeatmapDataStep.test.ts
+++ b/plugin-server/tests/worker/ingestion/event-pipeline/extractHeatmapDataStep.test.ts
@@ -154,7 +154,7 @@ describe('extractHeatmapDataStep()', () => {
const parsed = JSON.parse(firstProduceCall.value.toString())
expect(parsed).toMatchInlineSnapshot(`
- Object {
+ {
"current_url": "http://localhost:3000/",
"distinct_id": "018eebf3-79b1-7082-a7c6-eeb56a36002f",
"pointer_target_fixed": false,
@@ -196,7 +196,7 @@ describe('extractHeatmapDataStep()', () => {
)
expect(allParsedMessages.find((x) => x.type === 'scrolldepth')).toMatchInlineSnapshot(`
- Object {
+ {
"current_url": "http://localhost:3000/test",
"distinct_id": "018eebf3-79b1-7082-a7c6-eeb56a36002f",
"pointer_target_fixed": false,
diff --git a/plugin-server/tests/worker/ingestion/hooks.test.ts b/plugin-server/tests/worker/ingestion/hooks.test.ts
index 8b7de003c4657..c55fd2d8b0427 100644
--- a/plugin-server/tests/worker/ingestion/hooks.test.ts
+++ b/plugin-server/tests/worker/ingestion/hooks.test.ts
@@ -48,23 +48,23 @@ describe('hooks', () => {
expect(fetch).toHaveBeenCalledTimes(1)
expect(fetch.mock.calls[0]).toMatchInlineSnapshot(`
- Array [
+ [
"https://example.com/",
- Object {
+ {
"body": "{
- \\"hook\\": {
- \\"id\\": \\"id\\",
- \\"event\\": \\"foo\\",
- \\"target\\": \\"https://example.com/\\"
+ "hook": {
+ "id": "id",
+ "event": "foo",
+ "target": "https://example.com/"
},
- \\"data\\": {
- \\"event\\": \\"foo\\",
- \\"properties\\": {},
- \\"elementsList\\": [],
- \\"person\\": {}
+ "data": {
+ "event": "foo",
+ "properties": {},
+ "elementsList": [],
+ "person": {}
}
}",
- "headers": Object {
+ "headers": {
"Content-Type": "application/json",
},
"method": "POST",
@@ -95,33 +95,33 @@ describe('hooks', () => {
)
expect(fetch).toHaveBeenCalledTimes(1)
expect(fetch.mock.calls[0]).toMatchInlineSnapshot(`
- Array [
+ [
"https://example.com/",
- Object {
+ {
"body": "{
- \\"hook\\": {
- \\"id\\": \\"id\\",
- \\"event\\": \\"foo\\",
- \\"target\\": \\"https://example.com/\\"
+ "hook": {
+ "id": "id",
+ "event": "foo",
+ "target": "https://example.com/"
},
- \\"data\\": {
- \\"eventUuid\\": \\"018f39d3-d94c-0000-eeef-df4a793f8844\\",
- \\"event\\": \\"foo\\",
- \\"teamId\\": 1,
- \\"distinctId\\": \\"WALL-E\\",
- \\"properties\\": {},
- \\"timestamp\\": \\"2024-01-01T00:00:00.000Z\\",
- \\"elementsList\\": [],
- \\"person\\": {
- \\"uuid\\": \\"018f39d3-d94c-0000-eeef-df4a793f8844\\",
- \\"properties\\": {
- \\"foo\\": \\"bar\\"
+ "data": {
+ "eventUuid": "018f39d3-d94c-0000-eeef-df4a793f8844",
+ "event": "foo",
+ "teamId": 1,
+ "distinctId": "WALL-E",
+ "properties": {},
+ "timestamp": "2024-01-01T00:00:00.000Z",
+ "elementsList": [],
+ "person": {
+ "uuid": "018f39d3-d94c-0000-eeef-df4a793f8844",
+ "properties": {
+ "foo": "bar"
},
- \\"created_at\\": \\"2024-01-01T00:00:00.000Z\\"
+ "created_at": "2024-01-01T00:00:00.000Z"
}
}
}",
- "headers": Object {
+ "headers": {
"Content-Type": "application/json",
},
"method": "POST",
diff --git a/plugin-server/tests/worker/plugins/run.test.ts b/plugin-server/tests/worker/plugins/run.test.ts
index ea60a641e7448..a6347a3adccd7 100644
--- a/plugin-server/tests/worker/plugins/run.test.ts
+++ b/plugin-server/tests/worker/plugins/run.test.ts
@@ -170,14 +170,14 @@ describe('runOnEvent', () => {
expect(onEvent).toHaveBeenCalledTimes(2)
expect(onEvent.mock.calls[0][0]).toMatchInlineSnapshot(`
- Object {
+ {
"$set": undefined,
"$set_once": undefined,
"distinct_id": "my_id",
- "elements": Array [],
+ "elements": [],
"event": "$autocapture",
"ip": null,
- "properties": Object {},
+ "properties": {},
"team_id": 2,
"timestamp": "2020-02-23T02:15:00.000Z",
"uuid": "uuid1",
@@ -286,10 +286,10 @@ describe('runComposeWebhook', () => {
expect(composeWebhook).toHaveBeenCalledTimes(1)
expect(composeWebhook.mock.calls[0][0]).toMatchInlineSnapshot(`
- Object {
+ {
"distinct_id": "my_id",
"event": "$autocapture",
- "properties": Object {},
+ "properties": {},
"team_id": 2,
"timestamp": 2020-02-23T02:15:00.000Z,
"uuid": "uuid1",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 2ec8c34ed9528..46e9e8fc554c0 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -305,8 +305,8 @@ dependencies:
specifier: ^9.3.0
version: 9.3.0(postcss@8.4.31)
posthog-js:
- specifier: 1.203.3
- version: 1.203.3
+ specifier: 1.205.0
+ version: 1.205.0
posthog-js-lite:
specifier: 3.0.0
version: 3.0.0
@@ -17952,8 +17952,8 @@ packages:
resolution: {integrity: sha512-dyajjnfzZD1tht4N7p7iwf7nBnR1MjVaVu+MKr+7gBgA39bn28wizCIJZztZPtHy4PY0YwtSGgwfBCuG/hnHgA==}
dev: false
- /posthog-js@1.203.3:
- resolution: {integrity: sha512-DTK6LfL87xC7PPleKDParEIfkXl7hXtuDeSOPfhcyCXLuVspq0z7YyRB5dQE9Pbalf3yoGqUKvomYFp/BGVfQg==}
+ /posthog-js@1.205.0:
+ resolution: {integrity: sha512-zP4SQ9Dg9JwqkEteoAOviAAAMdT/nJ4vk1jqfE6fVudziEa3szkQWd7czk5ehlEdrKFUE85MonCKW4L/uwtybA==}
dependencies:
core-js: 3.39.0
fflate: 0.4.8
diff --git a/posthog/api/__init__.py b/posthog/api/__init__.py
index 358f00f826694..5e0cd8d81c1e5 100644
--- a/posthog/api/__init__.py
+++ b/posthog/api/__init__.py
@@ -513,12 +513,12 @@ def register_grandfathered_environment_nested_viewset(
["team_id"],
)
-# projects_router.register(
-# r"error_tracking",
-# error_tracking.ErrorTrackingGroupViewSet,
-# "project_error_tracking",
-# ["team_id"],
-# )
+projects_router.register(
+ r"error_tracking/issue",
+ error_tracking.ErrorTrackingIssueViewSet,
+ "project_error_tracking_issue",
+ ["team_id"],
+)
projects_router.register(
r"error_tracking/stack_frames",
diff --git a/posthog/api/error_tracking.py b/posthog/api/error_tracking.py
index 7f554880ff2cf..8519685ee2a3c 100644
--- a/posthog/api/error_tracking.py
+++ b/posthog/api/error_tracking.py
@@ -34,7 +34,7 @@ class Meta:
fields = ["assignee", "status"]
-class ErrorTrackingGroupViewSet(TeamAndOrgViewSetMixin, ForbidDestroyModel, viewsets.ModelViewSet):
+class ErrorTrackingIssueViewSet(TeamAndOrgViewSetMixin, ForbidDestroyModel, viewsets.ModelViewSet):
scope_object = "INTERNAL"
queryset = ErrorTrackingIssue.objects.all()
serializer_class = ErrorTrackingIssueSerializer
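Taken together, the two hunks above re-enable the error tracking API under a new name: the viewset becomes ErrorTrackingIssueViewSet and it is registered on the nested projects router at error_tracking/issue. A minimal sketch of how a client might reach the resulting endpoint follows; the host, auth scheme, team id, and issue id are placeholders for illustration and are not part of this diff.

# Minimal sketch, not from the PR: exercising the nested route registered above.
# The host, API key, and ids below are assumptions, not values from this diff.
import requests

POSTHOG_HOST = "https://posthog.example.com"   # assumed instance URL
PERSONAL_API_KEY = "phx_..."                   # assumed personal API key
TEAM_ID = 1                                    # assumed project/team id

# The router registration above nests the viewset under the project,
# so a list request would look roughly like this:
resp = requests.get(
    f"{POSTHOG_HOST}/api/projects/{TEAM_ID}/error_tracking/issue/",
    headers={"Authorization": f"Bearer {PERSONAL_API_KEY}"},
    timeout=10,
)
resp.raise_for_status()
print(resp.json())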
diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py
index f20c1a4a6105a..949fd33241a4b 100644
--- a/posthog/api/feature_flag.py
+++ b/posthog/api/feature_flag.py
@@ -385,6 +385,7 @@ def update(self, instance: FeatureFlag, validated_data: dict, *args: Any, **kwar
request = self.context["request"]
validated_key = validated_data.get("key", None)
if validated_key:
+ # Delete any soft deleted feature flags with the same key to prevent conflicts
FeatureFlag.objects.filter(
key=validated_key, team__project_id=instance.team.project_id, deleted=True
).delete()
@@ -396,6 +397,8 @@ def update(self, instance: FeatureFlag, validated_data: dict, *args: Any, **kwar
for dashboard in analytics_dashboards:
FeatureFlagDashboards.objects.get_or_create(dashboard=dashboard, feature_flag=instance)
+ old_key = instance.key
+
instance = super().update(instance, validated_data)
# Propagate the new variants and aggregation group type index to the linked experiments
@@ -415,6 +418,9 @@ def update(self, instance: FeatureFlag, validated_data: dict, *args: Any, **kwar
experiment.parameters.pop("aggregation_group_type_index", None)
experiment.save()
+ if old_key != instance.key:
+ _update_feature_flag_dashboard(instance, old_key)
+
report_user_action(request.user, "feature flag updated", instance.get_analytics_metadata())
return instance
@@ -446,6 +452,15 @@ def _create_usage_dashboard(feature_flag: FeatureFlag, user):
return usage_dashboard
+def _update_feature_flag_dashboard(feature_flag: FeatureFlag, old_key: str) -> None:
+ from posthog.helpers.dashboard_templates import update_feature_flag_dashboard
+
+ if not old_key:
+ return
+
+ update_feature_flag_dashboard(feature_flag, old_key)
+
+
class MinimalFeatureFlagSerializer(serializers.ModelSerializer):
filters = serializers.DictField(source="get_filters", required=False)
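The feature_flag.py changes above do three things when a flag's key is edited: soft-deleted flags with the same key are purged first to avoid a uniqueness conflict, the previous key is captured before the update is applied, and _update_feature_flag_dashboard runs afterwards so the usage dashboard's insights follow the rename. A condensed sketch of that flow, with names simplified for illustration (this is not the full serializer), is:

# Condensed illustration of the update flow introduced above (not the real serializer).
# `do_update` stands in for super().update(); `rename_dashboard` stands in for
# posthog.helpers.dashboard_templates.update_feature_flag_dashboard.
def update_flag(instance, validated_data, do_update, rename_dashboard):
    old_key = instance.key                     # remember the key before applying the update
    instance = do_update(instance, validated_data)
    if old_key and old_key != instance.key:    # only touch the dashboard on a real rename
        rename_dashboard(instance, old_key)
    return instance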
diff --git a/posthog/api/shared.py b/posthog/api/shared.py
index a3419c6dda293..cee64c8dfe43d 100644
--- a/posthog/api/shared.py
+++ b/posthog/api/shared.py
@@ -29,6 +29,7 @@ class Meta:
"email",
"is_email_verified",
"hedgehog_config",
+ "role_at_organization",
]
def get_hedgehog_config(self, user: User) -> Optional[dict]:
diff --git a/posthog/api/signup.py b/posthog/api/signup.py
index a781c7972b01f..b0e78139e893b 100644
--- a/posthog/api/signup.py
+++ b/posthog/api/signup.py
@@ -111,6 +111,7 @@ def create(self, validated_data, **kwargs):
create_team=self.create_team,
is_staff=is_instance_first_user,
is_email_verified=self.is_email_auto_verified(),
+ role_at_organization=role_at_organization,
**validated_data,
)
except IntegrityError:
@@ -241,6 +242,7 @@ def create(self, validated_data, **kwargs):
validated_data.pop("password"),
validated_data.pop("first_name"),
is_email_verified=False,
+ role_at_organization=role_at_organization,
**validated_data,
)
except IntegrityError:
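The shared.py and signup.py hunks above surface role_at_organization end to end: the value collected at signup is now passed through to user creation, and the field is included in the serialized user, which is why the .ambr SQL snapshots below gain a "posthog_user"."role_at_organization" column. A hedged illustration of the resulting serialized shape is below; every value is made up, and only the presence of the new field comes from this diff.

# Illustrative only: example of a serialized user dict including the new field.
# All values are placeholders; the diff only establishes that
# "role_at_organization" is part of the serialized fields.
serialized_user = {
    "uuid": "018f39d3-d94c-0000-eeef-df4a793f8844",
    "first_name": "Ada",
    "email": "ada@example.com",
    "is_email_verified": None,
    "hedgehog_config": None,
    "role_at_organization": "engineering",
}
print(serialized_user["role_at_organization"])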
diff --git a/posthog/api/test/__snapshots__/test_action.ambr b/posthog/api/test/__snapshots__/test_action.ambr
index eb4628373a9c8..be7461a298a6c 100644
--- a/posthog/api/test/__snapshots__/test_action.ambr
+++ b/posthog/api/test/__snapshots__/test_action.ambr
@@ -19,6 +19,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -284,6 +285,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -324,6 +326,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -601,6 +604,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -797,6 +801,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -837,6 +842,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
diff --git a/posthog/api/test/__snapshots__/test_annotation.ambr b/posthog/api/test/__snapshots__/test_annotation.ambr
index f746ffb3f3dc2..e12cd7a78b6ca 100644
--- a/posthog/api/test/__snapshots__/test_annotation.ambr
+++ b/posthog/api/test/__snapshots__/test_annotation.ambr
@@ -19,6 +19,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -281,6 +282,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -319,6 +321,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -605,6 +608,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -765,6 +769,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
diff --git a/posthog/api/test/__snapshots__/test_decide.ambr b/posthog/api/test/__snapshots__/test_decide.ambr
index aa8e0f8a2d7f9..f6affd2437a9c 100644
--- a/posthog/api/test/__snapshots__/test_decide.ambr
+++ b/posthog/api/test/__snapshots__/test_decide.ambr
@@ -19,6 +19,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -908,6 +909,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -2135,6 +2137,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -3177,6 +3180,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -4285,6 +4289,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -5174,6 +5179,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -7699,6 +7705,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -9382,6 +9389,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
diff --git a/posthog/api/test/__snapshots__/test_early_access_feature.ambr b/posthog/api/test/__snapshots__/test_early_access_feature.ambr
index fcfee36ed4ace..5b1dcc653d1d8 100644
--- a/posthog/api/test/__snapshots__/test_early_access_feature.ambr
+++ b/posthog/api/test/__snapshots__/test_early_access_feature.ambr
@@ -469,6 +469,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -1208,6 +1209,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -2196,6 +2198,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
diff --git a/posthog/api/test/__snapshots__/test_element.ambr b/posthog/api/test/__snapshots__/test_element.ambr
index d58b57cd97468..903b9ab705a8e 100644
--- a/posthog/api/test/__snapshots__/test_element.ambr
+++ b/posthog/api/test/__snapshots__/test_element.ambr
@@ -19,6 +19,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
diff --git a/posthog/api/test/__snapshots__/test_feature_flag.ambr b/posthog/api/test/__snapshots__/test_feature_flag.ambr
index 63dd2c1ae434a..07eb0a9ef7ed1 100644
--- a/posthog/api/test/__snapshots__/test_feature_flag.ambr
+++ b/posthog/api/test/__snapshots__/test_feature_flag.ambr
@@ -1298,6 +1298,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -1572,7 +1573,8 @@
"posthog_experiment"."type",
"posthog_experiment"."variants",
"posthog_experiment"."metrics",
- "posthog_experiment"."metrics_secondary"
+ "posthog_experiment"."metrics_secondary",
+ "posthog_experiment"."stats_config"
FROM "posthog_experiment"
WHERE "posthog_experiment"."exposure_cohort_id" = 99999
'''
@@ -1792,6 +1794,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
diff --git a/posthog/api/test/__snapshots__/test_insight.ambr b/posthog/api/test/__snapshots__/test_insight.ambr
index 9ff000b4d7a64..82d0c4fc65096 100644
--- a/posthog/api/test/__snapshots__/test_insight.ambr
+++ b/posthog/api/test/__snapshots__/test_insight.ambr
@@ -658,6 +658,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -1265,6 +1266,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -1649,6 +1651,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -1675,6 +1678,7 @@
T5."has_seen_product_intro_for",
T5."strapi_id",
T5."is_active",
+ T5."role_at_organization",
T5."theme_mode",
T5."partial_notification_settings",
T5."anonymize_data",
diff --git a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr
index 23980835406eb..1ac0107628d8a 100644
--- a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr
+++ b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr
@@ -19,6 +19,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -1857,7 +1858,8 @@
"posthog_experiment"."type",
"posthog_experiment"."variants",
"posthog_experiment"."metrics",
- "posthog_experiment"."metrics_secondary"
+ "posthog_experiment"."metrics_secondary",
+ "posthog_experiment"."stats_config"
FROM "posthog_experiment"
WHERE "posthog_experiment"."feature_flag_id" = 99999
'''
@@ -2666,6 +2668,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -2825,6 +2828,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -2858,6 +2862,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
diff --git a/posthog/api/test/__snapshots__/test_plugin.ambr b/posthog/api/test/__snapshots__/test_plugin.ambr
index 50037107d482e..e102d6c21bb76 100644
--- a/posthog/api/test/__snapshots__/test_plugin.ambr
+++ b/posthog/api/test/__snapshots__/test_plugin.ambr
@@ -19,6 +19,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -272,6 +273,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -551,6 +553,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -914,6 +917,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
diff --git a/posthog/api/test/__snapshots__/test_preflight.ambr b/posthog/api/test/__snapshots__/test_preflight.ambr
index 19b1ae0472e77..2077015ad7de9 100644
--- a/posthog/api/test/__snapshots__/test_preflight.ambr
+++ b/posthog/api/test/__snapshots__/test_preflight.ambr
@@ -19,6 +19,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
diff --git a/posthog/api/test/__snapshots__/test_survey.ambr b/posthog/api/test/__snapshots__/test_survey.ambr
index 42a59d41db143..af29d9df060c0 100644
--- a/posthog/api/test/__snapshots__/test_survey.ambr
+++ b/posthog/api/test/__snapshots__/test_survey.ambr
@@ -1791,6 +1791,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
diff --git a/posthog/api/test/batch_exports/conftest.py b/posthog/api/test/batch_exports/conftest.py
index 94ef6055d20f9..229f548052a51 100644
--- a/posthog/api/test/batch_exports/conftest.py
+++ b/posthog/api/test/batch_exports/conftest.py
@@ -6,15 +6,17 @@
from contextlib import contextmanager
import pytest
+import temporalio.worker
from asgiref.sync import async_to_sync
-from django.conf import settings
from temporalio.client import Client as TemporalClient
from temporalio.service import RPCError
from temporalio.worker import UnsandboxedWorkflowRunner, Worker
-from posthog.constants import BATCH_EXPORTS_TASK_QUEUE
+
+from posthog import constants
from posthog.batch_exports.models import BatchExport
-from posthog.temporal.common.client import sync_connect
+from posthog.constants import BATCH_EXPORTS_TASK_QUEUE
from posthog.temporal.batch_exports import ACTIVITIES, WORKFLOWS
+from posthog.temporal.common.client import sync_connect
class ThreadedWorker(Worker):
@@ -91,10 +93,10 @@ async def describe_workflow(temporal: TemporalClient, workflow_id: str):
def start_test_worker(temporal: TemporalClient):
with ThreadedWorker(
client=temporal,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=WORKFLOWS,
activities=ACTIVITIES,
- workflow_runner=UnsandboxedWorkflowRunner(),
+ workflow_runner=temporalio.worker.UnsandboxedWorkflowRunner(),
graceful_shutdown_timeout=dt.timedelta(seconds=5),
).run_in_thread():
yield
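The conftest.py change above points the threaded test worker at constants.BATCH_EXPORTS_TASK_QUEUE rather than settings.TEMPORAL_TASK_QUEUE, so batch export workflows started in tests are picked up by the worker. A stripped-down sketch of the same worker wiring, using only the pieces visible in this hunk (the Temporal client, WORKFLOWS, and ACTIVITIES are assumed to come from the surrounding test setup), is:

# Sketch mirroring the worker construction above; `temporal`, WORKFLOWS and ACTIVITIES
# are assumed to be provided by the surrounding test fixtures, as in the hunk.
import datetime as dt

import temporalio.worker

from posthog import constants
from posthog.temporal.batch_exports import ACTIVITIES, WORKFLOWS


def make_test_worker(temporal):
    return temporalio.worker.Worker(
        client=temporal,
        task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
        workflows=WORKFLOWS,
        activities=ACTIVITIES,
        workflow_runner=temporalio.worker.UnsandboxedWorkflowRunner(),
        graceful_shutdown_timeout=dt.timedelta(seconds=5),
    )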
diff --git a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
index c35ba96a70ce0..7da87ca1ef1ed 100644
--- a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
+++ b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
@@ -19,6 +19,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -209,6 +210,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -470,6 +472,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -629,6 +632,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -655,6 +659,7 @@
T6."has_seen_product_intro_for",
T6."strapi_id",
T6."is_active",
+ T6."role_at_organization",
T6."theme_mode",
T6."partial_notification_settings",
T6."anonymize_data",
@@ -927,6 +932,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -953,6 +959,7 @@
T6."has_seen_product_intro_for",
T6."strapi_id",
T6."is_active",
+ T6."role_at_organization",
T6."theme_mode",
T6."partial_notification_settings",
T6."anonymize_data",
@@ -1506,6 +1513,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -2197,6 +2205,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -2505,6 +2514,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -2883,6 +2893,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -3372,6 +3383,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -3911,6 +3923,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -3937,6 +3950,7 @@
T6."has_seen_product_intro_for",
T6."strapi_id",
T6."is_active",
+ T6."role_at_organization",
T6."theme_mode",
T6."partial_notification_settings",
T6."anonymize_data",
@@ -4565,6 +4579,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -4774,6 +4789,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -4824,6 +4840,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -4983,6 +5000,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -5009,6 +5027,7 @@
T6."has_seen_product_intro_for",
T6."strapi_id",
T6."is_active",
+ T6."role_at_organization",
T6."theme_mode",
T6."partial_notification_settings",
T6."anonymize_data",
@@ -5281,6 +5300,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -5307,6 +5327,7 @@
T6."has_seen_product_intro_for",
T6."strapi_id",
T6."is_active",
+ T6."role_at_organization",
T6."theme_mode",
T6."partial_notification_settings",
T6."anonymize_data",
@@ -6031,6 +6052,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -6057,6 +6079,7 @@
T6."has_seen_product_intro_for",
T6."strapi_id",
T6."is_active",
+ T6."role_at_organization",
T6."theme_mode",
T6."partial_notification_settings",
T6."anonymize_data",
@@ -6117,6 +6140,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -7545,6 +7569,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -8573,6 +8598,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -9287,6 +9313,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -9548,6 +9575,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -9707,6 +9735,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -9733,6 +9762,7 @@
T6."has_seen_product_intro_for",
T6."strapi_id",
T6."is_active",
+ T6."role_at_organization",
T6."theme_mode",
T6."partial_notification_settings",
T6."anonymize_data",
@@ -9895,6 +9925,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -9921,6 +9952,7 @@
T6."has_seen_product_intro_for",
T6."strapi_id",
T6."is_active",
+ T6."role_at_organization",
T6."theme_mode",
T6."partial_notification_settings",
T6."anonymize_data",
@@ -9979,6 +10011,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -10267,6 +10300,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -10293,6 +10327,7 @@
T6."has_seen_product_intro_for",
T6."strapi_id",
T6."is_active",
+ T6."role_at_organization",
T6."theme_mode",
T6."partial_notification_settings",
T6."anonymize_data",
@@ -10353,6 +10388,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -10857,6 +10893,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -10883,6 +10920,7 @@
T6."has_seen_product_intro_for",
T6."strapi_id",
T6."is_active",
+ T6."role_at_organization",
T6."theme_mode",
T6."partial_notification_settings",
T6."anonymize_data",
@@ -10943,6 +10981,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -11251,6 +11290,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
diff --git a/posthog/api/test/dashboards/test_dashboard_text_tiles.py b/posthog/api/test/dashboards/test_dashboard_text_tiles.py
index fa6bc85b65c9b..7d939cfe03f4c 100644
--- a/posthog/api/test/dashboards/test_dashboard_text_tiles.py
+++ b/posthog/api/test/dashboards/test_dashboard_text_tiles.py
@@ -29,6 +29,7 @@ def _serialised_user(user: Optional[User]) -> Optional[dict[str, Optional[Union[
"uuid": str(user.uuid),
"is_email_verified": None,
"hedgehog_config": None,
+ "role_at_organization": None,
}
def _expected_text(
diff --git a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
index 49cd5c39fed8b..c728a480610ed 100644
--- a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
+++ b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
@@ -19,6 +19,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -272,6 +273,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -298,6 +300,7 @@
T5."has_seen_product_intro_for",
T5."strapi_id",
T5."is_active",
+ T5."role_at_organization",
T5."theme_mode",
T5."partial_notification_settings",
T5."anonymize_data",
@@ -375,6 +378,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -407,6 +411,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -660,6 +665,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -807,6 +813,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
diff --git a/posthog/api/test/test_feature_flag.py b/posthog/api/test/test_feature_flag.py
index 4ae2d364d1f03..35bfa12fe3c34 100644
--- a/posthog/api/test/test_feature_flag.py
+++ b/posthog/api/test/test_feature_flag.py
@@ -792,6 +792,447 @@ def test_updating_feature_flag(self, mock_capture):
],
)
+ @patch("posthog.api.feature_flag.report_user_action")
+ def test_updating_feature_flag_key(self, mock_capture):
+ with freeze_time("2021-08-25T22:09:14.252Z") as frozen_datetime:
+ response = self.client.post(
+ f"/api/projects/{self.team.id}/feature_flags/",
+ {"name": "original name", "key": "a-feature-flag-that-is-updated"},
+ format="json",
+ )
+ self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+ flag_id = response.json()["id"]
+
+ frozen_datetime.tick(delta=datetime.timedelta(minutes=10))
+
+ # Assert that the insights were created properly.
+ feature_flag = FeatureFlag.objects.get(id=flag_id)
+ assert feature_flag.usage_dashboard is not None, "Usage dashboard was not created"
+ insights = feature_flag.usage_dashboard.insights
+ total_volume_insight = insights.get(name="Feature Flag Called Total Volume")
+ self.assertEqual(
+ total_volume_insight.description,
+ "Shows the number of total calls made on feature flag with key: a-feature-flag-that-is-updated",
+ )
+ self.assertEqual(
+ total_volume_insight.query["source"]["properties"]["values"][0]["values"][0]["value"],
+ "a-feature-flag-that-is-updated",
+ )
+ unique_users_insight = insights.get(name="Feature Flag calls made by unique users per variant")
+ self.assertEqual(
+ unique_users_insight.description,
+ "Shows the number of unique user calls made on feature flag per variant with key: a-feature-flag-that-is-updated",
+ )
+ self.assertEqual(
+ unique_users_insight.query["source"]["properties"]["values"][0]["values"][0]["value"],
+ "a-feature-flag-that-is-updated",
+ )
+
+ # Update the feature flag key
+ response = self.client.patch(
+ f"/api/projects/{self.team.id}/feature_flags/{flag_id}",
+ {
+ "key": "a-new-feature-flag-key",
+ "filters": {
+ "groups": [
+ {
+ "rollout_percentage": 65,
+ "properties": [
+ {
+ "key": "email",
+ "type": "person",
+ "value": "@posthog.com",
+ "operator": "icontains",
+ }
+ ],
+ }
+ ]
+ },
+ },
+ format="json",
+ )
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ self.assertEqual(response.json()["key"], "a-new-feature-flag-key")
+ self.assertEqual(response.json()["filters"]["groups"][0]["rollout_percentage"], 65)
+
+ # Assert analytics are sent
+ mock_capture.assert_called_with(
+ self.user,
+ "feature flag updated",
+ {
+ "groups_count": 1,
+ "has_variants": False,
+ "variants_count": 0,
+ "has_rollout_percentage": True,
+ "has_filters": True,
+ "filter_count": 1,
+ "created_at": datetime.datetime.fromisoformat("2021-08-25T22:09:14.252000+00:00"),
+ "aggregating_by_groups": False,
+ "payload_count": 0,
+ },
+ )
+
+ self.assert_feature_flag_activity(
+ flag_id,
+ [
+ {
+ "user": {
+ "first_name": self.user.first_name,
+ "email": self.user.email,
+ },
+ "activity": "updated",
+ "created_at": "2021-08-25T22:19:14.252000Z",
+ "scope": "FeatureFlag",
+ "item_id": str(flag_id),
+ "detail": {
+ "changes": [
+ {
+ "type": "FeatureFlag",
+ "action": "changed",
+ "field": "key",
+ "before": "a-feature-flag-that-is-updated",
+ "after": "a-new-feature-flag-key",
+ },
+ {
+ "type": "FeatureFlag",
+ "action": "created",
+ "field": "filters",
+ "before": None,
+ "after": {
+ "groups": [
+ {
+ "properties": [
+ {
+ "key": "email",
+ "type": "person",
+ "value": "@posthog.com",
+ "operator": "icontains",
+ }
+ ],
+ "rollout_percentage": 65,
+ }
+ ]
+ },
+ },
+ ],
+ "trigger": None,
+ "type": None,
+ "name": "a-new-feature-flag-key",
+ "short_id": None,
+ },
+ },
+ {
+ "user": {
+ "first_name": self.user.first_name,
+ "email": self.user.email,
+ },
+ "activity": "created",
+ "created_at": "2021-08-25T22:09:14.252000Z",
+ "scope": "FeatureFlag",
+ "item_id": str(flag_id),
+ "detail": {
+ "changes": None,
+ "trigger": None,
+ "type": None,
+ "name": "a-feature-flag-that-is-updated",
+ "short_id": None,
+ },
+ },
+ ],
+ )
+
+ feature_flag = FeatureFlag.objects.get(id=flag_id)
+ assert feature_flag.usage_dashboard is not None, "Usage dashboard was not created"
+ insights = feature_flag.usage_dashboard.insights
+ total_volume_insight = insights.get(name="Feature Flag Called Total Volume")
+ self.assertEqual(
+ total_volume_insight.description,
+ "Shows the number of total calls made on feature flag with key: a-new-feature-flag-key",
+ )
+ self.assertEqual(
+ total_volume_insight.query["source"]["properties"]["values"][0]["values"][0]["value"],
+ "a-new-feature-flag-key",
+ )
+ unique_users_insight = insights.get(name="Feature Flag calls made by unique users per variant")
+ self.assertEqual(
+ unique_users_insight.description,
+ "Shows the number of unique user calls made on feature flag per variant with key: a-new-feature-flag-key",
+ )
+ self.assertEqual(
+ unique_users_insight.query["source"]["properties"]["values"][0]["values"][0]["value"],
+ "a-new-feature-flag-key",
+ )
+
+ @patch("posthog.api.feature_flag.report_user_action")
+ def test_updating_feature_flag_key_does_not_update_insight_with_changed_description(self, mock_capture):
+ with freeze_time("2021-08-25T22:09:14.252Z") as frozen_datetime:
+ response = self.client.post(
+ f"/api/projects/{self.team.id}/feature_flags/",
+ {"name": "original name", "key": "a-feature-flag-that-is-updated"},
+ format="json",
+ )
+ self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+ flag_id = response.json()["id"]
+
+ frozen_datetime.tick(delta=datetime.timedelta(minutes=10))
+
+ # Assert that the insights were created properly.
+ feature_flag = FeatureFlag.objects.get(id=flag_id)
+ assert feature_flag.usage_dashboard is not None, "Usage dashboard was not created"
+ insights = feature_flag.usage_dashboard.insights
+ total_volume_insight = insights.get(name="Feature Flag Called Total Volume")
+ self.assertEqual(
+ total_volume_insight.description,
+ "Shows the number of total calls made on feature flag with key: a-feature-flag-that-is-updated",
+ )
+ self.assertEqual(
+ total_volume_insight.query["source"]["properties"]["values"][0]["values"][0]["value"],
+ "a-feature-flag-that-is-updated",
+ )
+ unique_users_insight = insights.get(name="Feature Flag calls made by unique users per variant")
+ self.assertEqual(
+ unique_users_insight.description,
+ "Shows the number of unique user calls made on feature flag per variant with key: a-feature-flag-that-is-updated",
+ )
+ self.assertEqual(
+ unique_users_insight.query["source"]["properties"]["values"][0]["values"][0]["value"],
+ "a-feature-flag-that-is-updated",
+ )
+ total_volume_insight.name = "This is a changed description"
+ total_volume_insight.save()
+
+ # Update the feature flag key
+ response = self.client.patch(
+ f"/api/projects/{self.team.id}/feature_flags/{flag_id}",
+ {
+ "key": "a-new-feature-flag-key",
+ "filters": {
+ "groups": [
+ {
+ "rollout_percentage": 65,
+ "properties": [
+ {
+ "key": "email",
+ "type": "person",
+ "value": "@posthog.com",
+ "operator": "icontains",
+ }
+ ],
+ }
+ ]
+ },
+ },
+ format="json",
+ )
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ # The total volume insight should not be updated because we changed its name
+ # The unique users insight should still be updated
+ feature_flag = FeatureFlag.objects.get(id=flag_id)
+ assert feature_flag.usage_dashboard is not None, "Usage dashboard was not created"
+ insights = feature_flag.usage_dashboard.insights
+ self.assertIsNone(insights.filter(name="Feature Flag Called Total Volume").first())
+ total_volume_insight = insights.get(name="This is a changed description")
+ self.assertEqual(
+ total_volume_insight.description,
+ "Shows the number of total calls made on feature flag with key: a-feature-flag-that-is-updated",
+ )
+ self.assertEqual(
+ total_volume_insight.query["source"]["properties"]["values"][0]["values"][0]["value"],
+ "a-feature-flag-that-is-updated",
+ )
+ unique_users_insight = insights.get(name="Feature Flag calls made by unique users per variant")
+ self.assertEqual(
+ unique_users_insight.description,
+ "Shows the number of unique user calls made on feature flag per variant with key: a-new-feature-flag-key",
+ )
+ self.assertEqual(
+ unique_users_insight.query["source"]["properties"]["values"][0]["values"][0]["value"],
+ "a-new-feature-flag-key",
+ )
+
+ @patch("posthog.api.feature_flag.report_user_action")
+ def test_updating_feature_flag_key_does_not_update_insight_with_changed_filter(self, mock_capture):
+ with freeze_time("2021-08-25T22:09:14.252Z") as frozen_datetime:
+ response = self.client.post(
+ f"/api/projects/{self.team.id}/feature_flags/",
+ {"name": "original name", "key": "a-feature-flag-that-is-updated"},
+ format="json",
+ )
+ self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+ flag_id = response.json()["id"]
+
+ frozen_datetime.tick(delta=datetime.timedelta(minutes=10))
+
+ # Assert that the insights were created properly.
+ feature_flag = FeatureFlag.objects.get(id=flag_id)
+ assert feature_flag.usage_dashboard is not None, "Usage dashboard was not created"
+ insights = feature_flag.usage_dashboard.insights
+ total_volume_insight = insights.get(name="Feature Flag Called Total Volume")
+ self.assertEqual(
+ total_volume_insight.description,
+ "Shows the number of total calls made on feature flag with key: a-feature-flag-that-is-updated",
+ )
+ self.assertEqual(
+ total_volume_insight.query["source"]["properties"]["values"][0]["values"][0]["value"],
+ "a-feature-flag-that-is-updated",
+ )
+ unique_users_insight = insights.get(name="Feature Flag calls made by unique users per variant")
+ self.assertEqual(
+ unique_users_insight.description,
+ "Shows the number of unique user calls made on feature flag per variant with key: a-feature-flag-that-is-updated",
+ )
+ self.assertEqual(
+ unique_users_insight.query["source"]["properties"]["values"][0]["values"][0]["value"],
+ "a-feature-flag-that-is-updated",
+ )
+ total_volume_insight.query["source"]["properties"]["values"][0]["values"][0]["value"] = (
+ "something_unexpected"
+ )
+ total_volume_insight.save()
+
+ # Update the feature flag key
+ response = self.client.patch(
+ f"/api/projects/{self.team.id}/feature_flags/{flag_id}",
+ {
+ "key": "a-new-feature-flag-key",
+ "filters": {
+ "groups": [
+ {
+ "rollout_percentage": 65,
+ "properties": [
+ {
+ "key": "email",
+ "type": "person",
+ "value": "@posthog.com",
+ "operator": "icontains",
+ }
+ ],
+ }
+ ]
+ },
+ },
+ format="json",
+ )
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ # The total volume insight should not be updated because we changed its filter value
+ # The unique users insight should still be updated
+ feature_flag = FeatureFlag.objects.get(id=flag_id)
+ assert feature_flag.usage_dashboard is not None, "Usage dashboard was not created"
+ insights = feature_flag.usage_dashboard.insights
+ total_volume_insight = insights.get(name="Feature Flag Called Total Volume")
+ self.assertEqual(
+ total_volume_insight.description,
+ "Shows the number of total calls made on feature flag with key: a-feature-flag-that-is-updated",
+ )
+ self.assertEqual(
+ total_volume_insight.query["source"]["properties"]["values"][0]["values"][0]["value"],
+ "something_unexpected",
+ )
+ unique_users_insight = insights.get(name="Feature Flag calls made by unique users per variant")
+ self.assertEqual(
+ unique_users_insight.description,
+ "Shows the number of unique user calls made on feature flag per variant with key: a-new-feature-flag-key",
+ )
+ self.assertEqual(
+ unique_users_insight.query["source"]["properties"]["values"][0]["values"][0]["value"],
+ "a-new-feature-flag-key",
+ )
+
+ @patch("posthog.api.feature_flag.report_user_action")
+ def test_updating_feature_flag_key_does_not_update_insight_with_removed_filter(self, mock_capture):
+ with freeze_time("2021-08-25T22:09:14.252Z") as frozen_datetime:
+ response = self.client.post(
+ f"/api/projects/{self.team.id}/feature_flags/",
+ {"name": "original name", "key": "a-feature-flag-that-is-updated"},
+ format="json",
+ )
+ self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+ flag_id = response.json()["id"]
+
+ frozen_datetime.tick(delta=datetime.timedelta(minutes=10))
+
+ # Assert that the insights were created properly.
+ feature_flag = FeatureFlag.objects.get(id=flag_id)
+ assert feature_flag.usage_dashboard is not None, "Usage dashboard was not created"
+ insights = feature_flag.usage_dashboard.insights
+ total_volume_insight = insights.get(name="Feature Flag Called Total Volume")
+ self.assertEqual(
+ total_volume_insight.description,
+ "Shows the number of total calls made on feature flag with key: a-feature-flag-that-is-updated",
+ )
+ self.assertEqual(
+ total_volume_insight.query["source"]["properties"]["values"][0]["values"][0]["value"],
+ "a-feature-flag-that-is-updated",
+ )
+ unique_users_insight = insights.get(name="Feature Flag calls made by unique users per variant")
+ self.assertEqual(
+ unique_users_insight.description,
+ "Shows the number of unique user calls made on feature flag per variant with key: a-feature-flag-that-is-updated",
+ )
+ self.assertEqual(
+ unique_users_insight.query["source"]["properties"]["values"][0]["values"][0]["value"],
+ "a-feature-flag-that-is-updated",
+ )
+ # clear the values from total_volume_insight.query["source"]["properties"]["values"]
+ total_volume_insight.query["source"]["properties"]["values"] = []
+ total_volume_insight.save()
+
+ # Update the feature flag key
+ response = self.client.patch(
+ f"/api/projects/{self.team.id}/feature_flags/{flag_id}",
+ {
+ "key": "a-new-feature-flag-key",
+ "filters": {
+ "groups": [
+ {
+ "rollout_percentage": 65,
+ "properties": [
+ {
+ "key": "email",
+ "type": "person",
+ "value": "@posthog.com",
+ "operator": "icontains",
+ }
+ ],
+ }
+ ]
+ },
+ },
+ format="json",
+ )
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ # The total volume insight should not be updated because we removed its filter values
+ # The unique users insight should still be updated
+ feature_flag = FeatureFlag.objects.get(id=flag_id)
+ assert feature_flag.usage_dashboard is not None, "Usage dashboard was not created"
+ insights = feature_flag.usage_dashboard.insights
+ total_volume_insight = insights.get(name="Feature Flag Called Total Volume")
+ self.assertEqual(
+ total_volume_insight.description,
+ "Shows the number of total calls made on feature flag with key: a-feature-flag-that-is-updated",
+ )
+ self.assertEqual(
+ total_volume_insight.query["source"]["properties"]["values"],
+ [],
+ )
+ unique_users_insight = insights.get(name="Feature Flag calls made by unique users per variant")
+ self.assertEqual(
+ unique_users_insight.description,
+ "Shows the number of unique user calls made on feature flag per variant with key: a-new-feature-flag-key",
+ )
+ self.assertEqual(
+ unique_users_insight.query["source"]["properties"]["values"][0]["values"][0]["value"],
+ "a-new-feature-flag-key",
+ )
+
def test_hard_deleting_feature_flag_is_forbidden(self):
new_user = User.objects.create_and_join(self.organization, "new_annotations@posthog.com", None)
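The three negative tests above all encode the same rule: when a flag's key changes, a usage-dashboard insight is only rewritten if it still looks exactly like the system-generated original for the old key; renaming it, editing its filter value, or clearing its filters opts it out. A minimal sketch of that matching rule, with a hypothetical helper name (the real logic lives in posthog/helpers/dashboard_templates.py further down this diff):

def _still_system_generated(insight, old_key: str) -> bool:
    # Hypothetical helper illustrating the tests' expectations, not PostHog's implementation.
    if insight.name != "Feature Flag Called Total Volume":
        return False  # the user renamed it, so leave it alone
    values = insight.query["source"]["properties"]["values"]
    if not values:
        return False  # the user removed the filter entirely
    return values[0]["values"][0]["value"] == old_key  # filter must still target the old key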
diff --git a/posthog/api/test/test_insight.py b/posthog/api/test/test_insight.py
index 57024518b2a68..bc1530f0f8683 100644
--- a/posthog/api/test/test_insight.py
+++ b/posthog/api/test/test_insight.py
@@ -143,6 +143,7 @@ def test_created_updated_and_last_modified(self, mock_capture: mock.Mock) -> Non
"email": self.user.email,
"is_email_verified": None,
"hedgehog_config": None,
+ "role_at_organization": None,
}
alt_user_basic_serialized = {
"id": alt_user.id,
@@ -153,6 +154,7 @@ def test_created_updated_and_last_modified(self, mock_capture: mock.Mock) -> Non
"email": alt_user.email,
"is_email_verified": None,
"hedgehog_config": None,
+ "role_at_organization": None,
}
# Newly created insight should have created_at being the current time, and same last_modified_at
diff --git a/posthog/api/test/test_organization_invites.py b/posthog/api/test/test_organization_invites.py
index 017fc5d05720e..4d863974bbab4 100644
--- a/posthog/api/test/test_organization_invites.py
+++ b/posthog/api/test/test_organization_invites.py
@@ -94,6 +94,7 @@ def test_add_organization_invite_with_email(self, mock_capture):
"last_name": self.user.last_name,
"is_email_verified": self.user.is_email_verified,
"hedgehog_config": None,
+ "role_at_organization": None,
},
"is_expired": False,
"level": 1,
diff --git a/posthog/api/test/test_organization_members.py b/posthog/api/test/test_organization_members.py
index d50d4ce4523f4..9c96a3375b13f 100644
--- a/posthog/api/test/test_organization_members.py
+++ b/posthog/api/test/test_organization_members.py
@@ -118,6 +118,7 @@ def test_change_organization_member_level(self, mock_update_billing_organization
"email": user.email,
"is_email_verified": None,
"hedgehog_config": None,
+ "role_at_organization": None,
},
"level": OrganizationMembership.Level.ADMIN.value,
},
diff --git a/posthog/api/test/test_session.py b/posthog/api/test/test_session.py
index 7199f5d8fd557..aba46af386053 100644
--- a/posthog/api/test/test_session.py
+++ b/posthog/api/test/test_session.py
@@ -37,8 +37,10 @@ def test_expected_session_properties(self):
"$end_timestamp",
"$entry_current_url",
"$entry_pathname",
+ "$entry_hostname",
"$end_current_url",
"$end_pathname",
+ "$end_hostname",
"$entry_gad_source",
"$entry_gclid",
"$entry_referring_domain",
diff --git a/posthog/api/test/test_signup.py b/posthog/api/test/test_signup.py
index a84a4cd9555b0..9deeaa9eb387f 100644
--- a/posthog/api/test/test_signup.py
+++ b/posthog/api/test/test_signup.py
@@ -73,6 +73,7 @@ def test_api_sign_up(self, mock_capture):
"redirect_url": "/",
"is_email_verified": False,
"hedgehog_config": None,
+ "role_at_organization": "product",
},
)
@@ -80,6 +81,7 @@ def test_api_sign_up(self, mock_capture):
self.assertEqual(user.first_name, "John")
self.assertEqual(user.last_name, "Doe")
self.assertEqual(user.email, "hedgehog@posthog.com")
+ self.assertEqual(user.role_at_organization, "product")
self.assertTrue(user.is_staff) # True because this is the first user in the instance
self.assertFalse(user.is_email_verified)
@@ -143,6 +145,7 @@ def test_api_sign_up_requires_verification(self, mock_email_verifier, mock_is_em
"redirect_url": f"/verify_email/{user.uuid}",
"is_email_verified": False,
"hedgehog_config": None,
+ "role_at_organization": "product",
},
)
@@ -193,6 +196,7 @@ def test_api_sign_up_doesnt_require_verification_if_disabled(
"redirect_url": "/",
"is_email_verified": False,
"hedgehog_config": None,
+ "role_at_organization": "product",
},
)
mock_is_email_available.assert_called()
@@ -297,6 +301,7 @@ def test_signup_minimum_attrs(self, mock_capture):
"first_name": "Jane",
"email": "hedgehog2@posthog.com",
"password": VALID_TEST_PASSWORD,
+ "role_at_organization": "product",
},
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
@@ -315,6 +320,7 @@ def test_signup_minimum_attrs(self, mock_capture):
"redirect_url": "/",
"is_email_verified": False,
"hedgehog_config": None,
+ "role_at_organization": "product",
},
)
@@ -322,6 +328,7 @@ def test_signup_minimum_attrs(self, mock_capture):
self.assertEqual(user.first_name, "Jane")
self.assertEqual(user.email, "hedgehog2@posthog.com")
self.assertEqual(organization.name, f"{user.first_name}'s Organization")
+ self.assertEqual(user.role_at_organization, "product")
self.assertTrue(user.is_staff) # True because this is the first user in the instance
# Assert that the sign up event & identify calls were sent to PostHog analytics
@@ -475,6 +482,7 @@ def test_default_dashboard_is_created_on_signup(self):
"first_name": "Jane",
"email": "hedgehog75@posthog.com",
"password": VALID_TEST_PASSWORD,
+ "role_at_organization": "product",
},
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
@@ -493,6 +501,7 @@ def test_default_dashboard_is_created_on_signup(self):
"redirect_url": "/",
"is_email_verified": False,
"hedgehog_config": None,
+ "role_at_organization": "product",
},
)
@@ -1079,6 +1088,7 @@ def test_api_invite_sign_up(self, mock_capture):
"redirect_url": "/",
"is_email_verified": False,
"hedgehog_config": None,
+ "role_at_organization": "Engineering",
},
)
@@ -1096,6 +1106,7 @@ def test_api_invite_sign_up(self, mock_capture):
# Assert that the user was properly created
self.assertEqual(user.first_name, "Alice")
self.assertEqual(user.email, "test+99@posthog.com")
+ self.assertEqual(user.role_at_organization, "Engineering")
# Assert that the sign up event & identify calls were sent to PostHog analytics
mock_capture.assert_called_once()
@@ -1314,7 +1325,7 @@ def test_api_invite_sign_up_member_joined_email_is_not_sent_if_disabled(self):
def test_existing_user_can_sign_up_to_a_new_organization(
self, mock_update_billing_organization_users, mock_capture
):
- user = self._create_user("test+159@posthog.com", VALID_TEST_PASSWORD)
+ user = self._create_user("test+159@posthog.com", VALID_TEST_PASSWORD, role_at_organization="product")
new_org = Organization.objects.create(name="TestCo")
new_team = Team.objects.create(organization=new_org)
invite: OrganizationInvite = OrganizationInvite.objects.create(
@@ -1351,6 +1362,7 @@ def test_existing_user_can_sign_up_to_a_new_organization(
"redirect_url": "/",
"is_email_verified": None,
"hedgehog_config": None,
+ "role_at_organization": "product",
},
)
@@ -1370,6 +1382,7 @@ def test_existing_user_can_sign_up_to_a_new_organization(
self.assertEqual(user.first_name, "")
self.assertEqual(user.email, "test+159@posthog.com")
self.assertFalse(user.is_staff) # Not first user in the instance
+ self.assertEqual(user.role_at_organization, "product")
# Assert that the sign up event & identify calls were sent to PostHog analytics
mock_capture.assert_called_once_with(
@@ -1428,6 +1441,7 @@ def test_cannot_use_claim_invite_endpoint_to_update_user(self, mock_capture):
"redirect_url": "/",
"is_email_verified": None,
"hedgehog_config": None,
+ "role_at_organization": None,
}, # note the unchanged attributes
)
diff --git a/posthog/api/test/test_user.py b/posthog/api/test/test_user.py
index a289a83239eaf..3ce1bc50d7bbf 100644
--- a/posthog/api/test/test_user.py
+++ b/posthog/api/test/test_user.py
@@ -10,6 +10,8 @@
from django.core.cache import cache
from django.utils import timezone
from django.utils.text import slugify
+from django_otp.plugins.otp_static.models import StaticDevice
+from django_otp.plugins.otp_totp.models import TOTPDevice
from freezegun.api import freeze_time
from rest_framework import status
@@ -18,8 +20,6 @@
from posthog.models.instance_setting import set_instance_setting
from posthog.models.organization import Organization, OrganizationMembership
from posthog.test.base import APIBaseTest
-from django_otp.plugins.otp_static.models import StaticDevice
-from django_otp.plugins.otp_totp.models import TOTPDevice
def create_user(email: str, password: str, organization: Organization):
@@ -80,6 +80,7 @@ def test_retrieve_current_user(self):
self.assertEqual(response_data["team"]["api_token"], "token123")
self.assertNotIn("test_account_filters", response_data["team"]) # Ensure we're not returning the full `Team`
self.assertNotIn("event_names", response_data["team"])
+ self.assertEqual(response_data["role_at_organization"], self.user.role_at_organization)
self.assertEqual(response_data["organization"]["name"], self.organization.name)
self.assertEqual(response_data["organization"]["membership_level"], 1)
@@ -191,6 +192,7 @@ def test_update_current_user(self, mock_capture, mock_identify_task):
"id": 1, # should be ignored
"organization": str(another_org.id), # should be ignored
"team": str(another_team.id), # should be ignored
+ "role_at_organization": "engineering",
},
)
@@ -204,6 +206,7 @@ def test_update_current_user(self, mock_capture, mock_identify_task):
self.assertEqual(response_data["organization"]["id"], str(self.organization.id))
self.assertEqual(response_data["team"]["id"], self.team.id)
self.assertEqual(response_data["has_seen_product_intro_for"], {"feature_flags": True})
+ self.assertEqual(response_data["role_at_organization"], "engineering")
user.refresh_from_db()
self.assertNotEqual(user.pk, 1)
@@ -212,6 +215,7 @@ def test_update_current_user(self, mock_capture, mock_identify_task):
self.assertEqual(user.anonymize_data, True)
self.assertDictContainsSubset({"plugin_disabled": False}, user.notification_settings)
self.assertEqual(user.has_seen_product_intro_for, {"feature_flags": True})
+ self.assertEqual(user.role_at_organization, "engineering")
mock_capture.assert_called_once_with(
user.distinct_id,
@@ -223,6 +227,7 @@ def test_update_current_user(self, mock_capture, mock_identify_task):
"first_name",
"has_seen_product_intro_for",
"partial_notification_settings",
+ "role_at_organization",
],
"$set": mock.ANY,
},
@@ -943,6 +948,102 @@ def test_user_cannot_update_protected_fields(self):
response.json()[field] == initial_user[field]
), f"Updating field '{field}' to '{value}' worked when it shouldn't! Was {initial_user[field]} and is now {response.json()[field]}"
+ def test_can_update_notification_settings(self):
+ response = self.client.patch(
+ "/api/users/@me/",
+ {
+ "notification_settings": {
+ "plugin_disabled": False,
+ "project_weekly_digest_disabled": {123: True},
+ }
+ },
+ )
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ response_data = response.json()
+ self.assertEqual(
+ response_data["notification_settings"],
+ {
+ "plugin_disabled": False,
+ "project_weekly_digest_disabled": {"123": True}, # Note: JSON converts int keys to strings
+ "all_weekly_digest_disabled": False,
+ },
+ )
+
+ self.user.refresh_from_db()
+ self.assertEqual(
+ self.user.partial_notification_settings,
+ {
+ "plugin_disabled": False,
+ "project_weekly_digest_disabled": {"123": True},
+ "all_weekly_digest_disabled": False,
+ },
+ )
+
+ def test_notification_settings_project_settings_are_merged_not_replaced(self):
+ # First update
+ response = self.client.patch(
+ "/api/users/@me/", {"notification_settings": {"project_weekly_digest_disabled": {123: True}}}
+ )
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ # Second update with different project
+ response = self.client.patch(
+ "/api/users/@me/", {"notification_settings": {"project_weekly_digest_disabled": {456: True}}}
+ )
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ response_data = response.json()
+ self.assertEqual(
+ response_data["notification_settings"]["project_weekly_digest_disabled"], {"123": True, "456": True}
+ )
+
+ def test_invalid_notification_settings_returns_error(self):
+ response = self.client.patch("/api/users/@me/", {"notification_settings": {"invalid_key": True}})
+ self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+ self.assertEqual(
+ response.json(),
+ {
+ "type": "validation_error",
+ "code": "invalid_input",
+ "detail": "Key invalid_key is not valid as a key for notification settings",
+ "attr": "notification_settings",
+ },
+ )
+
+ def test_notification_settings_wrong_type_returns_error(self):
+ response = self.client.patch(
+ "/api/users/@me/",
+ {
+ "notification_settings": {
+ "project_weekly_digest_disabled": {"123": "not a boolean"} # This should be True or False
+ }
+ },
+ )
+ self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+ self.assertEqual(
+ response.json(),
+ {
+ "type": "validation_error",
+ "code": "invalid_input",
+ "detail": "Project notification setting values must be boolean, got instead",
+ "attr": "notification_settings",
+ },
+ )
+
+ def test_can_disable_all_notifications(self):
+ response = self.client.patch("/api/users/@me/", {"notification_settings": {"all_weekly_digest_disabled": True}})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ response_data = response.json()
+ self.assertEqual(
+ response_data["notification_settings"],
+ {
+ "plugin_disabled": True, # Default value
+ "project_weekly_digest_disabled": {}, # Default value
+ "all_weekly_digest_disabled": True,
+ },
+ )
+
class TestUserSlackWebhook(APIBaseTest):
ENDPOINT: str = "/api/user/test_slack_webhook/"
diff --git a/posthog/api/user.py b/posthog/api/user.py
index 0610abf047df7..0a12bf8776bd5 100644
--- a/posthog/api/user.py
+++ b/posthog/api/user.py
@@ -21,26 +21,26 @@
from django.views.decorators.http import require_http_methods
from django_filters.rest_framework import DjangoFilterBackend
from django_otp import login as otp_login
+from django_otp.plugins.otp_static.models import StaticDevice, StaticToken
+from django_otp.plugins.otp_totp.models import TOTPDevice
from django_otp.util import random_hex
from loginas.utils import is_impersonated_session
from prometheus_client import Counter
from rest_framework import exceptions, mixins, serializers, viewsets
-from posthog.api.utils import action
from rest_framework.exceptions import NotFound
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from two_factor.forms import TOTPDeviceForm
from two_factor.utils import default_device
-from django_otp.plugins.otp_static.models import StaticDevice, StaticToken
-from django_otp.plugins.otp_totp.models import TOTPDevice
from posthog.api.email_verification import EmailVerifier
from posthog.api.organization import OrganizationSerializer
from posthog.api.shared import OrganizationBasicSerializer, TeamBasicSerializer
from posthog.api.utils import (
+ ClassicBehaviorBooleanFieldSerializer,
PublicIPOnlyHttpAdapter,
+ action,
raise_if_user_provided_url_unsafe,
- ClassicBehaviorBooleanFieldSerializer,
unparsed_hostname_in_allowed_url_list,
)
from posthog.auth import (
@@ -59,7 +59,7 @@
from posthog.middleware import get_impersonated_session_expires_at
from posthog.models import Dashboard, Team, User, UserScenePersonalisation
from posthog.models.organization import Organization
-from posthog.models.user import NOTIFICATION_DEFAULTS, Notifications
+from posthog.models.user import NOTIFICATION_DEFAULTS, Notifications, ROLE_CHOICES
from posthog.permissions import APIScopePermission
from posthog.rate_limit import UserAuthenticationThrottle, UserEmailVerificationThrottle
from posthog.tasks import user_identify
@@ -94,6 +94,7 @@ class UserSerializer(serializers.ModelSerializer):
notification_settings = serializers.DictField(required=False)
scene_personalisation = ScenePersonalisationBasicSerializer(many=True, read_only=True)
anonymize_data = ClassicBehaviorBooleanFieldSerializer()
+ role_at_organization = serializers.ChoiceField(choices=ROLE_CHOICES, required=False)
class Meta:
model = User
@@ -128,6 +129,7 @@ class Meta:
"scene_personalisation",
"theme_mode",
"hedgehog_config",
+ "role_at_organization",
]
read_only_fields = [
@@ -210,15 +212,42 @@ def validate_set_current_team(self, value: str) -> Team:
raise serializers.ValidationError(f"Object with id={value} does not exist.", code="does_not_exist")
def validate_notification_settings(self, notification_settings: Notifications) -> Notifications:
+ instance = cast(User, self.instance)
+ current_settings = {**NOTIFICATION_DEFAULTS, **(instance.partial_notification_settings or {})}
+
for key, value in notification_settings.items():
if key not in Notifications.__annotations__:
- raise serializers.ValidationError(f"Key {key} is not valid as a key for notification settings")
-
- if not isinstance(value, Notifications.__annotations__[key]):
raise serializers.ValidationError(
- f"{value} is not a valid type for notification settings, should be {Notifications.__annotations__[key]}"
+ f"Key {key} is not valid as a key for notification settings", code="invalid_input"
)
- return {**NOTIFICATION_DEFAULTS, **notification_settings}
+
+ expected_type = Notifications.__annotations__[key]
+
+ if key == "project_weekly_digest_disabled":
+ if not isinstance(value, dict):
+ raise serializers.ValidationError(
+ f"project_weekly_digest_disabled must be a dictionary mapping project IDs to boolean values",
+ code="invalid_input",
+ )
+ # Validate each project setting is a boolean
+ for _, disabled in value.items():
+ if not isinstance(disabled, bool):
+ raise serializers.ValidationError(
+ f"Project notification setting values must be boolean, got {type(disabled)} instead",
+ code="invalid_input",
+ )
+ # Merge with existing settings
+ current_settings[key] = {**current_settings.get("project_weekly_digest_disabled", {}), **value}
+ else:
+ # For non-dict settings, validate type directly
+ if not isinstance(value, expected_type):
+ raise serializers.ValidationError(
+ f"{value} is not a valid type for notification settings, should be {expected_type}",
+ code="invalid_input",
+ )
+ current_settings[key] = value
+
+ return cast(Notifications, current_settings)
def validate_password_change(
self, instance: User, current_password: Optional[str], password: Optional[str]
@@ -250,6 +279,11 @@ def validate_is_staff(self, value: bool) -> bool:
raise exceptions.PermissionDenied("You are not a staff user, contact your instance admin.")
return value
+ def validate_role_at_organization(self, value):
+ if value and value not in dict(ROLE_CHOICES):
+ raise serializers.ValidationError("Invalid role selected")
+ return value
+
def update(self, instance: "User", validated_data: Any) -> Any:
# Update current_organization and current_team
current_organization = validated_data.pop("set_current_organization", None)
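The notification-settings validation above merges project_weekly_digest_disabled into the user's existing settings instead of replacing the whole dict, which is what the new tests in test_user.py assert. A hedged usage sketch against the current-user endpoint (the instance URL and credentials are placeholders; an authenticated session cookie works equally well):

import requests

BASE = "https://app.posthog.example.com"  # placeholder instance URL
HEADERS = {"Authorization": "Bearer <personal_api_key>"}  # placeholder credentials

# Two PATCHes: per the serializer above, the second call is merged with the first.
requests.patch(f"{BASE}/api/users/@me/", headers=HEADERS,
               json={"notification_settings": {"project_weekly_digest_disabled": {"123": True}}})
r = requests.patch(f"{BASE}/api/users/@me/", headers=HEADERS,
                   json={"notification_settings": {"project_weekly_digest_disabled": {"456": True}}})
# r.json()["notification_settings"]["project_weekly_digest_disabled"] == {"123": True, "456": True}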
diff --git a/posthog/batch_exports/service.py b/posthog/batch_exports/service.py
index c7e47003a4e5b..44a5ffa3aa5c0 100644
--- a/posthog/batch_exports/service.py
+++ b/posthog/batch_exports/service.py
@@ -79,6 +79,7 @@ class S3BatchExportInputs:
region: The AWS region where the bucket is located.
prefix: A prefix for the file name to be created in S3.
For example, for one hour batches, this should be 3600.
+ max_file_size_mb: The maximum file size in MB for each file to be uploaded.
data_interval_end: For manual runs, the end date of the batch. This should be set to `None` for regularly
scheduled runs and for backfills.
"""
@@ -99,6 +100,7 @@ class S3BatchExportInputs:
kms_key_id: str | None = None
endpoint_url: str | None = None
file_format: str = "JSONLines"
+ max_file_size_mb: int | None = None
is_backfill: bool = False
is_earliest_backfill: bool = False
batch_export_model: BatchExportModel | None = None
@@ -492,10 +494,7 @@ async def start_backfill_batch_export_workflow(
"backfill-batch-export",
inputs,
id=workflow_id,
- # TODO: Backfills could also run in async queue.
- # But tests expect them not to, so we keep them in sync
- # queue after everything is migrated.
- task_queue=SYNC_BATCH_EXPORTS_TASK_QUEUE,
+ task_queue=BATCH_EXPORTS_TASK_QUEUE,
)
return workflow_id
@@ -646,11 +645,7 @@ def sync_batch_export(batch_export: BatchExport, created: bool):
destination_config_fields = {field.name for field in fields(workflow_inputs)}
destination_config = {k: v for k, v in batch_export.destination.config.items() if k in destination_config_fields}
- task_queue = (
- BATCH_EXPORTS_TASK_QUEUE
- if batch_export.destination.type in ("BigQuery", "Redshift")
- else SYNC_BATCH_EXPORTS_TASK_QUEUE
- )
+ task_queue = SYNC_BATCH_EXPORTS_TASK_QUEUE if batch_export.destination.type == "HTTP" else BATCH_EXPORTS_TASK_QUEUE
temporal = sync_connect()
schedule = Schedule(
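Two related routing changes above: backfills no longer stay on the synchronous Temporal queue, and per-destination routing is inverted so that only HTTP destinations remain synchronous. A sketch of the resulting rule, with the queue names passed in rather than guessed (the real constants are SYNC_BATCH_EXPORTS_TASK_QUEUE and BATCH_EXPORTS_TASK_QUEUE as used in service.py):

def pick_task_queue(destination_type: str, sync_queue: str, async_queue: str) -> str:
    # Only the HTTP destination keeps using the legacy synchronous workers;
    # BigQuery, Redshift, S3 and the rest, plus backfills, run on the async queue.
    return sync_queue if destination_type == "HTTP" else async_queue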
diff --git a/posthog/caching/test/test_warming.py b/posthog/caching/test/test_warming.py
index 7c877f73f1666..097e513a6a3aa 100644
--- a/posthog/caching/test/test_warming.py
+++ b/posthog/caching/test/test_warming.py
@@ -1,4 +1,4 @@
-from posthog.caching.warming import priority_insights, schedule_warming_for_teams_task
+from posthog.caching.warming import insights_to_keep_fresh, schedule_warming_for_teams_task
from posthog.models import Insight, DashboardTile, InsightViewed, Dashboard
from datetime import datetime, timedelta, UTC
@@ -46,69 +46,69 @@ def setUp(self) -> None:
)
@patch("posthog.hogql_queries.query_cache.QueryCacheManager.get_stale_insights")
- def test_priority_insights_no_stale_insights(self, mock_get_stale_insights):
+ def test_insights_to_keep_fresh_no_stale_insights(self, mock_get_stale_insights):
mock_get_stale_insights.return_value = []
- insights = list(priority_insights(self.team))
+ insights = list(insights_to_keep_fresh(self.team))
self.assertEqual(insights, [])
@patch("posthog.hogql_queries.query_cache.QueryCacheManager.get_stale_insights")
- def test_priority_insights_no_stale_dashboard_insights(self, mock_get_stale_insights):
+ def test_insights_to_keep_fresh_no_stale_dashboard_insights(self, mock_get_stale_insights):
mock_get_stale_insights.return_value = [
"2345:",
]
- insights = list(priority_insights(self.team))
+ insights = list(insights_to_keep_fresh(self.team))
exptected_results = [
(2345, None),
]
self.assertEqual(insights, exptected_results)
@patch("posthog.hogql_queries.query_cache.QueryCacheManager.get_stale_insights")
- def test_priority_insights_only_insights_with_dashboards(self, mock_get_stale_insights):
+ def test_insights_to_keep_fresh_only_insights_with_dashboards(self, mock_get_stale_insights):
mock_get_stale_insights.return_value = [
"1234:5678",
"3456:7890",
]
- insights = list(priority_insights(self.team))
+ insights = list(insights_to_keep_fresh(self.team))
expected_results = [
(3456, 7890),
]
self.assertEqual(insights, expected_results)
@patch("posthog.hogql_queries.query_cache.QueryCacheManager.get_stale_insights")
- def test_priority_insights_mixed_valid_and_invalid_combos(self, mock_get_stale_insights):
+ def test_insights_to_keep_fresh_mixed_valid_and_invalid_combos(self, mock_get_stale_insights):
mock_get_stale_insights.return_value = [
"1234:5678",
"9999:",
"3456:7890",
"8888:7777",
]
- insights = list(priority_insights(self.team))
+ insights = list(insights_to_keep_fresh(self.team))
expected_results = [
(3456, 7890),
]
self.assertEqual(insights, expected_results)
@patch("posthog.hogql_queries.query_cache.QueryCacheManager.get_stale_insights")
- def test_priority_insights_insights_not_viewed_recently(self, mock_get_stale_insights):
+ def test_insights_to_keep_fresh_insights_not_viewed_recently(self, mock_get_stale_insights):
mock_get_stale_insights.return_value = ["4567:"]
- insights = list(priority_insights(self.team))
+ insights = list(insights_to_keep_fresh(self.team))
self.assertEqual(insights, [])
@patch("posthog.hogql_queries.query_cache.QueryCacheManager.get_stale_insights")
- def test_priority_insights_dashboards_not_accessed_recently(self, mock_get_stale_insights):
+ def test_insights_to_keep_fresh_dashboards_not_accessed_recently(self, mock_get_stale_insights):
mock_get_stale_insights.return_value = ["5678:8901"]
- insights = list(priority_insights(self.team))
+ insights = list(insights_to_keep_fresh(self.team))
self.assertEqual(insights, [])
@patch("posthog.hogql_queries.query_cache.QueryCacheManager.get_stale_insights")
- def test_priority_insights_combination_of_cases(self, mock_get_stale_insights):
+ def test_insights_to_keep_fresh_combination_of_cases(self, mock_get_stale_insights):
mock_get_stale_insights.return_value = [
"1234:5678",
"2345:",
"3456:7890",
"4567:",
]
- insights = list(priority_insights(self.team))
+ insights = list(insights_to_keep_fresh(self.team))
expected_results = [
(2345, None),
(3456, 7890),
@@ -124,31 +124,31 @@ def setUp(self) -> None:
self.team2 = self.create_team_with_organization(organization=self.organization)
@patch("posthog.caching.warming.largest_teams")
- @patch("posthog.caching.warming.priority_insights")
+ @patch("posthog.caching.warming.insights_to_keep_fresh")
@patch("posthog.caching.warming.warm_insight_cache_task.si")
def test_schedule_warming_for_teams_task_with_empty_insight_tuples(
- self, mock_warm_insight_cache_task_si, mock_priority_insights, mock_largest_teams
+ self, mock_warm_insight_cache_task_si, mock_insights_to_keep_fresh, mock_largest_teams
):
mock_largest_teams.return_value = [self.team1.pk, self.team2.pk]
- mock_priority_insights.return_value = iter([])
+ mock_insights_to_keep_fresh.return_value = iter([])
schedule_warming_for_teams_task()
- mock_priority_insights.assert_called()
+ mock_insights_to_keep_fresh.assert_called()
mock_warm_insight_cache_task_si.assert_not_called()
@patch("posthog.caching.warming.largest_teams")
- @patch("posthog.caching.warming.priority_insights")
+ @patch("posthog.caching.warming.insights_to_keep_fresh")
@patch("posthog.caching.warming.warm_insight_cache_task.si")
def test_schedule_warming_for_teams_task_with_non_empty_insight_tuples(
- self, mock_warm_insight_cache_task_si, mock_priority_insights, mock_largest_teams
+ self, mock_warm_insight_cache_task_si, mock_insights_to_keep_fresh, mock_largest_teams
):
mock_largest_teams.return_value = [self.team1.pk, self.team2.pk]
- mock_priority_insights.return_value = iter([("1234", "5678"), ("2345", None)])
+ mock_insights_to_keep_fresh.return_value = iter([("1234", "5678"), ("2345", None)])
schedule_warming_for_teams_task()
- mock_priority_insights.assert_called()
+ mock_insights_to_keep_fresh.assert_called()
self.assertEqual(mock_warm_insight_cache_task_si.call_count, 2)
self.assertEqual(mock_warm_insight_cache_task_si.call_args_list[0][0][0], "1234")
self.assertEqual(mock_warm_insight_cache_task_si.call_args_list[0][0][1], "5678")
diff --git a/posthog/caching/warming.py b/posthog/caching/warming.py
index d4a2818aa1964..16ee52fc9f1d9 100644
--- a/posthog/caching/warming.py
+++ b/posthog/caching/warming.py
@@ -20,6 +20,9 @@
from posthog.hogql_queries.query_runner import ExecutionMode
from posthog.models import Team, Insight, DashboardTile
from posthog.tasks.utils import CeleryQueue
+from posthog.ph_client import ph_us_client
+import posthoganalytics
+
logger = structlog.get_logger(__name__)
@@ -37,9 +40,42 @@
LAST_VIEWED_THRESHOLD = timedelta(days=7)
-def priority_insights(team: Team, shared_only: bool = False) -> Generator[tuple[int, Optional[int]], None, None]:
+def teams_enabled_for_cache_warming() -> list[int]:
+ enabled_team_ids = []
+
+ for team_id, organization_id, uuid in Team.objects.values_list(
+ "id",
+ "organization_id",
+ "uuid",
+ ).iterator(chunk_size=1000):
+ enabled = posthoganalytics.feature_enabled(
+ "cache-warming",
+ str(uuid),
+ groups={
+ "organization": str(organization_id),
+ "project": str(team_id),
+ },
+ group_properties={
+ "organization": {
+ "id": str(organization_id),
+ },
+ "project": {
+ "id": str(team_id),
+ },
+ },
+ only_evaluate_locally=True,
+ send_feature_flag_events=False,
+ )
+
+ if enabled:
+ enabled_team_ids.append(team_id)
+
+ return enabled_team_ids
+
+
+def insights_to_keep_fresh(team: Team, shared_only: bool = False) -> Generator[tuple[int, Optional[int]], None, None]:
"""
- This is the place to decide which insights should be kept warm.
+ This is the place to decide which insights should be kept warm for the provided team.
The reasoning is that this will be a yes or no decision. If we need to keep it warm, we try our best
to not let the cache go stale. There isn't any middle ground, like trying to refresh it once a day, since
that would be like clock that's only right twice a day.
@@ -47,6 +83,8 @@ def priority_insights(team: Team, shared_only: bool = False) -> Generator[tuple[
threshold = datetime.now(UTC) - LAST_VIEWED_THRESHOLD
QueryCacheManager.clean_up_stale_insights(team_id=team.pk, threshold=threshold)
+
+ # get the insights tracked in the cache for this team that may be stale
combos = QueryCacheManager.get_stale_insights(team_id=team.pk, limit=500)
STALE_INSIGHTS_GAUGE.labels(team_id=team.pk).set(len(combos))
@@ -88,33 +126,59 @@ def priority_insights(team: Team, shared_only: bool = False) -> Generator[tuple[
@shared_task(ignore_result=True, expires=60 * 15)
def schedule_warming_for_teams_task():
+ """
+ Runs every hour and schedules warming for all insights (picked by insights_to_keep_fresh)
+ for each team enabled for cache warming.
+
+ We trigger recalculation using ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE
+ so even though we might pick all insights for a team to recalculate,
+ only the stale ones (determined by `staleness_threshold_map`) get recalculated.
+ """
team_ids = largest_teams(limit=10)
threshold = datetime.now(UTC) - LAST_VIEWED_THRESHOLD
- prio_teams = Team.objects.filter(Q(pk__in=team_ids) | Q(extra_settings__insights_cache_warming=True))
+ enabled_teams = Team.objects.filter(
+ Q(pk__in=team_ids)
+ | Q(extra_settings__insights_cache_warming=True)
+ | Q(pk__in=teams_enabled_for_cache_warming())
+ )
teams_with_recently_viewed_shared = Team.objects.filter(
Q(
Q(sharingconfiguration__dashboard__last_accessed_at__gte=threshold)
| Q(sharingconfiguration__insight__insightviewed__last_viewed_at__gte=threshold)
),
sharingconfiguration__enabled=True,
- ).difference(prio_teams)
+ ).difference(enabled_teams)
all_teams = itertools.chain(
- zip(prio_teams, [False] * len(prio_teams)),
+ zip(enabled_teams, [False] * len(enabled_teams)),
zip(teams_with_recently_viewed_shared, [True] * len(teams_with_recently_viewed_shared)),
)
# Use a fixed expiration time since tasks in the chain are executed sequentially
expire_after = datetime.now(UTC) + timedelta(minutes=50)
- for team, shared_only in all_teams:
- insight_tuples = priority_insights(team, shared_only=shared_only)
+ with ph_us_client() as capture_ph_event:
+ for team, shared_only in all_teams:
+ insight_tuples = list(insights_to_keep_fresh(team, shared_only=shared_only))
+
+ capture_ph_event(
+ str(team.uuid),
+ "cache warming - insights to cache",
+ properties={
+ "count": len(insight_tuples),
+ "team_id": team.id,
+ "organization_id": team.organization_id,
+ },
+ )
- # We chain the task execution to prevent queries *for a single team* running at the same time
- chain(
- *(warm_insight_cache_task.si(*insight_tuple).set(expires=expire_after) for insight_tuple in insight_tuples)
- )()
+ # We chain the task execution to prevent queries *for a single team* running at the same time
+ chain(
+ *(
+ warm_insight_cache_task.si(*insight_tuple).set(expires=expire_after)
+ for insight_tuple in insight_tuples
+ )
+ )()
@shared_task(
@@ -157,11 +221,27 @@ def warm_insight_cache_task(insight_id: int, dashboard_id: Optional[int]):
dashboard_id=dashboard_id,
)
+ is_cached = getattr(results, "is_cached", False)
+
PRIORITY_INSIGHTS_COUNTER.labels(
team_id=insight.team_id,
dashboard=dashboard_id is not None,
- is_cached=getattr(results, "is_cached", False),
+ is_cached=is_cached,
).inc()
+
+ with ph_us_client() as capture_ph_event:
+ capture_ph_event(
+ str(insight.team.uuid),
+ "cache warming - warming insight",
+ properties={
+ "insight_id": insight.pk,
+ "dashboard_id": dashboard_id,
+ "is_cached": is_cached,
+ "team_id": insight.team_id,
+ "organization_id": insight.team.organization_id,
+ },
+ )
+
except CHQueryErrorTooManySimultaneousQueries:
raise
except Exception as e:
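teams_enabled_for_cache_warming above gates teams on the "cache-warming" feature flag using local evaluation, so the hourly task never makes a network flag check per team. A hedged configuration sketch (keys are placeholders; local evaluation requires a personal API key so the posthoganalytics client can poll flag definitions, otherwise the flag cannot be evaluated locally and the check comes back falsy):

import posthoganalytics

posthoganalytics.project_api_key = "<project api key>"    # placeholder
posthoganalytics.personal_api_key = "<personal api key>"  # placeholder, needed for local evaluation

enabled = posthoganalytics.feature_enabled(
    "cache-warming",
    "<team uuid>",
    groups={"organization": "<organization id>", "project": "<team id>"},
    group_properties={"organization": {"id": "<organization id>"}, "project": {"id": "<team id>"}},
    only_evaluate_locally=True,      # never fall back to a network call per team
    send_feature_flag_events=False,  # avoid emitting $feature_flag_called for every team
)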
diff --git a/posthog/clickhouse/client/connection.py b/posthog/clickhouse/client/connection.py
index 26348eeddffb6..1769af54d59ab 100644
--- a/posthog/clickhouse/client/connection.py
+++ b/posthog/clickhouse/client/connection.py
@@ -2,6 +2,8 @@
from enum import Enum
from functools import cache
+from clickhouse_connect import get_client
+from clickhouse_connect.driver import Client as HttpClient, httputil
from clickhouse_driver import Client as SyncClient
from clickhouse_pool import ChPool
from django.conf import settings
@@ -19,7 +21,96 @@ class Workload(Enum):
_default_workload = Workload.ONLINE
-def get_pool(workload: Workload, team_id=None, readonly=False):
+class ProxyClient:
+ def __init__(self, client: HttpClient):
+ self._client = client
+
+ def execute(
+ self,
+ query,
+ params=None,
+ with_column_types=False,
+ external_tables=None,
+ query_id=None,
+ settings=None,
+ types_check=False,
+ columnar=False,
+ ):
+ if query_id:
+ settings = {**(settings or {}), "query_id": query_id} # tolerate the default settings=None
+ result = self._client.query(query=query, parameters=params, settings=settings, column_oriented=columnar)
+
+ # clickhouse_driver returns the number of written rows for INSERT-like queries, so mirror that using the query summary
+ written_rows = int(result.summary.get("written_rows", 0))
+ if written_rows > 0:
+ return written_rows
+ if with_column_types:
+ column_types_driver_format = list(zip(result.column_names, result.column_types))
+ return result.result_set, column_types_driver_format
+ return result.result_set
+
+ # Implement the context manager protocol (https://peps.python.org/pep-0343/) so ProxyClient can be used in all places a clickhouse_driver.Client is used.
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ pass
+
+
+_clickhouse_http_pool_mgr = httputil.get_pool_manager(
+ maxsize=settings.CLICKHOUSE_CONN_POOL_MAX, # max number of open connections per pool
+ block=True, # enforce maxsize as a hard per-pool limit and keep connections around for reuse
+ num_pools=12, # number of pools
+)
+
+
+def get_http_client(**overrides):
+ kwargs = {
+ "host": settings.CLICKHOUSE_HOST,
+ "database": settings.CLICKHOUSE_DATABASE,
+ "secure": settings.CLICKHOUSE_SECURE,
+ "username": settings.CLICKHOUSE_USER,
+ "password": settings.CLICKHOUSE_PASSWORD,
+ "ca_cert": settings.CLICKHOUSE_CA,
+ "verify": settings.CLICKHOUSE_VERIFY,
+ "settings": {"mutations_sync": "1"} if settings.TEST else {},
+ # Without this, OPTIMIZE table and other queries will regularly run into timeouts
+ "send_receive_timeout": 30 if settings.TEST else 999_999_999,
+ "autogenerate_session_id": True, # beware, this makes each query to run in a separate session - no temporary tables will work
+ "pool_mgr": _clickhouse_http_pool_mgr,
+ **overrides,
+ }
+ return ProxyClient(get_client(**kwargs))
+
+
+def get_client_from_pool(workload: Workload = Workload.DEFAULT, team_id=None, readonly=False):
+ """
+ Returns the client for a given workload.
+
+ The connection pool for HTTP is managed by a library.
+ """
+ if settings.CLICKHOUSE_USE_HTTP:
+ if team_id is not None and str(team_id) in settings.CLICKHOUSE_PER_TEAM_SETTINGS:
+ return get_http_client(**settings.CLICKHOUSE_PER_TEAM_SETTINGS[str(team_id)])
+
+ # Note that `readonly` does nothing if the relevant vars are not set!
+ if readonly and settings.READONLY_CLICKHOUSE_USER is not None and settings.READONLY_CLICKHOUSE_PASSWORD:
+ return get_http_client(
+ username=settings.READONLY_CLICKHOUSE_USER,
+ password=settings.READONLY_CLICKHOUSE_PASSWORD,
+ )
+
+ if (
+ workload == Workload.OFFLINE or workload == Workload.DEFAULT and _default_workload == Workload.OFFLINE
+ ) and settings.CLICKHOUSE_OFFLINE_CLUSTER_HOST is not None:
+ return get_http_client(host=settings.CLICKHOUSE_OFFLINE_CLUSTER_HOST, verify=False)
+
+ return get_http_client()
+
+ return get_pool(workload=workload, team_id=team_id, readonly=readonly).get_client()
+
+
+def get_pool(workload: Workload = Workload.DEFAULT, team_id=None, readonly=False):
"""
Returns the right connection pool given a workload.
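get_client_from_pool above lets call sites stay agnostic about whether they get a native clickhouse_driver client from the pool or an HTTP-backed ProxyClient: both expose execute() and the context manager protocol. A short usage sketch mirroring how sync_execute consumes it (the SELECT is just an example query):

from posthog.clickhouse.client.connection import Workload, get_client_from_pool

with get_client_from_pool(Workload.ONLINE, team_id=None, readonly=False) as client:
    # Same call shape on both backends; with_column_types also works via ProxyClient.
    rows, column_types = client.execute("SELECT 1", with_column_types=True)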
diff --git a/posthog/clickhouse/client/execute.py b/posthog/clickhouse/client/execute.py
index 7eb9ad3024817..93f7b503fbf0b 100644
--- a/posthog/clickhouse/client/execute.py
+++ b/posthog/clickhouse/client/execute.py
@@ -11,7 +11,7 @@
from clickhouse_driver import Client as SyncClient
from django.conf import settings as app_settings
-from posthog.clickhouse.client.connection import Workload, get_pool
+from posthog.clickhouse.client.connection import Workload, get_client_from_pool
from posthog.clickhouse.client.escape import substitute_params
from posthog.clickhouse.query_tagging import get_query_tag_value, get_query_tags
from posthog.errors import wrap_query_error
@@ -121,7 +121,7 @@ def sync_execute(
if get_query_tag_value("id") == "posthog.tasks.tasks.process_query_task":
workload = Workload.ONLINE
- with sync_client or get_pool(workload, team_id, readonly).get_client() as client:
+ with sync_client or get_client_from_pool(workload, team_id, readonly) as client:
start_time = perf_counter()
prepared_sql, prepared_args, tags = _prepare_query(client=client, query=query, args=args, workload=workload)
@@ -137,6 +137,7 @@ def sync_execute(
settings = {
**core_settings,
"log_comment": json.dumps(tags, separators=(",", ":")),
+ "query_id": query_id,
}
try:
diff --git a/posthog/clickhouse/client/test/test_execute_async.py b/posthog/clickhouse/client/test/test_execute_async.py
index 14199a488f712..fcdb7c68b8765 100644
--- a/posthog/clickhouse/client/test/test_execute_async.py
+++ b/posthog/clickhouse/client/test/test_execute_async.py
@@ -358,12 +358,12 @@ def test_client_strips_comments_from_request(self):
# request routing information for debugging purposes
self.assertIn(f"/* user_id:{self.user_id} request:1 */", first_query)
- @patch("posthog.clickhouse.client.execute.get_pool")
- def test_offline_workload_if_personal_api_key(self, mock_get_pool):
+ @patch("posthog.clickhouse.client.execute.get_client_from_pool")
+ def test_offline_workload_if_personal_api_key(self, mock_get_client):
from posthog.clickhouse.query_tagging import tag_queries
with self.capture_select_queries():
tag_queries(kind="request", id="1", access_method="personal_api_key")
sync_execute("select 1")
- self.assertEqual(mock_get_pool.call_args[0][0], Workload.OFFLINE)
+ self.assertEqual(mock_get_client.call_args[0][0], Workload.OFFLINE)
diff --git a/posthog/clickhouse/migrations/0064_materialize_elements_chain.py b/posthog/clickhouse/migrations/0064_materialize_elements_chain.py
index b32980dd65114..e9b468fe97673 100644
--- a/posthog/clickhouse/migrations/0064_materialize_elements_chain.py
+++ b/posthog/clickhouse/migrations/0064_materialize_elements_chain.py
@@ -1,6 +1,6 @@
from infi.clickhouse_orm import migrations
-from posthog.clickhouse.client.connection import ch_pool
+from posthog.clickhouse.client.connection import get_client_from_pool
from posthog.settings import CLICKHOUSE_CLUSTER
@@ -22,7 +22,7 @@
def add_columns_to_required_tables(_):
- with ch_pool.get_client() as client:
+ with get_client_from_pool() as client:
client.execute(ADD_COLUMNS_SHARDED_EVENTS.format(table="sharded_events", cluster=CLICKHOUSE_CLUSTER))
client.execute(ADD_COLUMNS_EVENTS.format(table="events", cluster=CLICKHOUSE_CLUSTER))
diff --git a/posthog/clickhouse/migrations/0072_materialize_elements_chain_ids.py b/posthog/clickhouse/migrations/0072_materialize_elements_chain_ids.py
index 89a9fa7cacd25..3263cee099b06 100644
--- a/posthog/clickhouse/migrations/0072_materialize_elements_chain_ids.py
+++ b/posthog/clickhouse/migrations/0072_materialize_elements_chain_ids.py
@@ -1,6 +1,6 @@
from infi.clickhouse_orm import migrations
-from posthog.clickhouse.client.connection import ch_pool
+from posthog.clickhouse.client.connection import get_client_from_pool
from posthog.settings import CLICKHOUSE_CLUSTER
@@ -16,7 +16,7 @@
def add_columns_to_required_tables(_):
- with ch_pool.get_client() as client:
+ with get_client_from_pool() as client:
client.execute(DROP_COLUMNS_SHARDED_EVENTS.format(table="sharded_events", cluster=CLICKHOUSE_CLUSTER))
client.execute(ADD_COLUMNS_SHARDED_EVENTS.format(table="sharded_events", cluster=CLICKHOUSE_CLUSTER))
diff --git a/posthog/clickhouse/migrations/0078_add_soft_delete_column_on_events.py b/posthog/clickhouse/migrations/0078_add_soft_delete_column_on_events.py
index c64810443c9b6..39a6cb0f9c7d3 100644
--- a/posthog/clickhouse/migrations/0078_add_soft_delete_column_on_events.py
+++ b/posthog/clickhouse/migrations/0078_add_soft_delete_column_on_events.py
@@ -1,6 +1,6 @@
from infi.clickhouse_orm import migrations
-from posthog.clickhouse.client.connection import ch_pool
+from posthog.clickhouse.client.connection import get_client_from_pool
from posthog.settings import CLICKHOUSE_CLUSTER
@@ -21,7 +21,7 @@
def add_columns_to_required_tables(_):
- with ch_pool.get_client() as client:
+ with get_client_from_pool() as client:
client.execute(DROP_COLUMNS_EVENTS.format(table="sharded_events", cluster=CLICKHOUSE_CLUSTER))
client.execute(DROP_COLUMNS_EVENTS.format(table="events", cluster=CLICKHOUSE_CLUSTER))
client.execute(ADD_COLUMNS_EVENTS.format(table="sharded_events", cluster=CLICKHOUSE_CLUSTER))
diff --git a/posthog/constants.py b/posthog/constants.py
index 7a04658989d6c..73a46e10f2476 100644
--- a/posthog/constants.py
+++ b/posthog/constants.py
@@ -263,6 +263,7 @@ class ExperimentNoResultsErrorKeys(StrEnum):
NO_CONTROL_VARIANT = "no-control-variant"
NO_TEST_VARIANT = "no-test-variant"
NO_RESULTS = "no-results"
+ NO_EXPOSURES = "no-exposures"
class PropertyOperatorType(StrEnum):
diff --git a/posthog/helpers/dashboard_templates.py b/posthog/helpers/dashboard_templates.py
index 313f8c6722a2a..7cfb390128335 100644
--- a/posthog/helpers/dashboard_templates.py
+++ b/posthog/helpers/dashboard_templates.py
@@ -557,6 +557,10 @@ def create_dashboard_from_template(template_key: str, dashboard: Dashboard) -> N
create_from_template(dashboard, template)
+FEATURE_FLAG_TOTAL_VOLUME_INSIGHT_NAME = "Feature Flag Called Total Volume"
+FEATURE_FLAG_UNIQUE_USERS_INSIGHT_NAME = "Feature Flag calls made by unique users per variant"
+
+
def create_feature_flag_dashboard(feature_flag, dashboard: Dashboard) -> None:
dashboard.filters = {"date_from": "-30d"}
if dashboard.team.organization.is_feature_available(AvailableFeature.TAGGING):
@@ -571,8 +575,8 @@ def create_feature_flag_dashboard(feature_flag, dashboard: Dashboard) -> None:
# 1 row
_create_tile_for_insight(
dashboard,
- name="Feature Flag Called Total Volume",
- description="Shows the number of total calls made on feature flag with key: " + feature_flag.key,
+ name=FEATURE_FLAG_TOTAL_VOLUME_INSIGHT_NAME,
+ description=_get_feature_flag_total_volume_insight_description(feature_flag.key),
query={
"kind": "InsightVizNode",
"source": {
@@ -627,9 +631,8 @@ def create_feature_flag_dashboard(feature_flag, dashboard: Dashboard) -> None:
_create_tile_for_insight(
dashboard,
- name="Feature Flag calls made by unique users per variant",
- description="Shows the number of unique user calls made on feature flag per variant with key: "
- + feature_flag.key,
+ name=FEATURE_FLAG_UNIQUE_USERS_INSIGHT_NAME,
+ description=_get_feature_flag_unique_users_insight_description(feature_flag.key),
query={
"kind": "InsightVizNode",
"source": {
@@ -690,6 +693,64 @@ def create_feature_flag_dashboard(feature_flag, dashboard: Dashboard) -> None:
)
+def _get_feature_flag_total_volume_insight_description(feature_flag_key: str) -> str:
+ return f"Shows the number of total calls made on feature flag with key: {feature_flag_key}"
+
+
+def _get_feature_flag_unique_users_insight_description(feature_flag_key: str) -> str:
+ return f"Shows the number of unique user calls made on feature flag per variant with key: {feature_flag_key}"
+
+
+def update_feature_flag_dashboard(feature_flag, old_key: str) -> None:
+ # We need to update the *system-created* insights with the new key, so we search for them by name
+ dashboard = feature_flag.usage_dashboard
+
+ if not dashboard:
+ return
+
+ total_volume_insight = dashboard.insights.filter(name=FEATURE_FLAG_TOTAL_VOLUME_INSIGHT_NAME).first()
+ if total_volume_insight:
+ _update_tile_with_new_key(
+ total_volume_insight,
+ feature_flag.key,
+ old_key,
+ _get_feature_flag_total_volume_insight_description,
+ )
+
+ unique_users_insight = dashboard.insights.filter(name=FEATURE_FLAG_UNIQUE_USERS_INSIGHT_NAME).first()
+ if unique_users_insight:
+ _update_tile_with_new_key(
+ unique_users_insight,
+ feature_flag.key,
+ old_key,
+ _get_feature_flag_unique_users_insight_description,
+ )
+
+
+def _update_tile_with_new_key(insight, new_key: str, old_key: str, description_function: Callable[[str], str]) -> None:
+ old_description = description_function(old_key)
+ new_description = description_function(new_key)
+
+ if insight.description != old_description: # We don't touch insights that have been manually edited
+ return
+
+ if insight.query:
+ property_values = insight.query.get("source", {}).get("properties", {}).get("values", [])
+ if len(property_values) != 1: # Exit if not exactly one property group
+ return
+
+ property_group = property_values[0]
+ values = property_group.get("values", [])
+ # Only proceed if there's exactly one value and it's a feature flag
+ if len(values) == 1 and values[0].get("key") == "$feature_flag" and values[0].get("value") == old_key:
+ values[0]["value"] = new_key
+ insight.query = insight.query # Trigger field update
+ # Only update the insight if it still matches what we expect for the system-created insights
+ insight.description = new_description
+ insight.save()
+ return
+
+
def add_enriched_insights_to_feature_flag_dashboard(feature_flag, dashboard: Dashboard) -> None:
# 1 row
_create_tile_for_insight(
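update_feature_flag_dashboard only rewrites the two system-generated tiles, and only while their descriptions still match the generated text, so manually edited insights are left alone after a key rename. A hypothetical call-site sketch (the rename flow itself is outside this diff):

old_key = feature_flag.key
feature_flag.key = "checkout-redesign-v2"  # hypothetical new key
feature_flag.save()
update_feature_flag_dashboard(feature_flag, old_key)  # re-points the usage dashboard insights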
diff --git a/posthog/hogql/ai.py b/posthog/hogql/ai.py
index 222f59423a9e7..12aa624644e70 100644
--- a/posthog/hogql/ai.py
+++ b/posthog/hogql/ai.py
@@ -17,6 +17,7 @@
IDENTITY_MESSAGE = "HogQL is PostHog's variant of SQL. It supports most of ClickHouse SQL. You write HogQL based on a prompt. You don't help with other knowledge."
HOGQL_EXAMPLE_MESSAGE = """Example HogQL query for prompt "weekly active users that performed event ACTIVATION_EVENT on example.com/foo/ 3 times or more, by week":
+
SELECT week_of, countIf(weekly_event_count >= 3)
FROM (
SELECT person.id AS person_id, toStartOfWeek(timestamp) AS week_of, count() AS weekly_event_count
@@ -42,8 +43,8 @@
REQUEST_MESSAGE = (
"I need a robust HogQL query to get the following results: {prompt}\n"
- "Return nothing besides the SQL, just the query. "
- f'If my request doesn\'t make sense, return short and succint message starting with "{UNCLEAR_PREFIX}". '
+ "Return nothing besides the SQL, just the query. Do not wrap the SQL in backticks or quotes. "
+ f'If my request is irrelevant or doesn\'t make sense, return a short and succinct message starting with "{UNCLEAR_PREFIX}". '
)
@@ -147,7 +148,7 @@ def write_sql_from_prompt(prompt: str, *, current_query: Optional[str] = None, t
def hit_openai(messages, user) -> tuple[str, int, int]:
result = openai.chat.completions.create(
model="gpt-4o-mini",
- temperature=0.8,
+ temperature=0,
messages=messages,
user=user, # The user ID is for tracking within OpenAI in case of overuse/abuse
)
diff --git a/posthog/hogql/database/schema/channel_type.py b/posthog/hogql/database/schema/channel_type.py
index ff664f241520f..32204ed3757c6 100644
--- a/posthog/hogql/database/schema/channel_type.py
+++ b/posthog/hogql/database/schema/channel_type.py
@@ -33,6 +33,9 @@ class ChannelTypeExprs:
medium: ast.Expr
campaign: ast.Expr
referring_domain: ast.Expr
+ url: ast.Expr
+ hostname: ast.Expr
+ pathname: ast.Expr
gclid: ast.Expr
gad_source: ast.Expr
@@ -68,6 +71,12 @@ def create_initial_channel_type(name: str, custom_rules: Optional[list[CustomCha
referring_domain=ast.Call(
name="toString", args=[ast.Field(chain=["properties", "$initial_referring_domain"])]
),
+ url=ast.Call(name="toString", args=[ast.Field(chain=["properties", "$initial_url"])]),
+ hostname=ast.Call(
+ name="domain",
+ args=[ast.Call(name="toString", args=[ast.Field(chain=["properties", "$initial_hostname"])])],
+ ),
+ pathname=ast.Call(name="toString", args=[ast.Field(chain=["properties", "$initial_pathname"])]),
gclid=ast.Call(name="toString", args=[ast.Field(chain=["properties", "$initial_gclid"])]),
gad_source=ast.Call(name="toString", args=[ast.Field(chain=["properties", "$initial_gad_source"])]),
),
@@ -155,6 +164,12 @@ def custom_rule_to_expr(custom_rule: CustomChannelRule, source_exprs: ChannelTyp
expr = source_exprs.medium
elif condition.key == CustomChannelField.UTM_CAMPAIGN:
expr = source_exprs.campaign
+ elif condition.key == CustomChannelField.URL:
+ expr = source_exprs.url
+ elif condition.key == CustomChannelField.HOSTNAME:
+ expr = source_exprs.hostname
+ elif condition.key == CustomChannelField.PATHNAME:
+ expr = source_exprs.pathname
elif condition.key == CustomChannelField.REFERRING_DOMAIN:
expr = source_exprs.referring_domain
else:
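With url, hostname and pathname now part of ChannelTypeExprs, custom channel rules can match on them, including several conditions at once. A hedged sketch using the models exercised in the tests further down; the channel type string and values are arbitrary examples:

rule = CustomChannelRule(
    id="docs",
    channel_type="Docs",
    combiner=FilterLogicalOperator.AND_,
    items=[
        CustomChannelCondition(id="1", key="hostname", op="exact", value="docs.example.com"),
        CustomChannelCondition(id="2", key="pathname", op="icontains", value="/tutorials"),
    ],
)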
diff --git a/posthog/hogql/database/schema/sessions_v1.py b/posthog/hogql/database/schema/sessions_v1.py
index 298a96a70d41d..afbc9099a0181 100644
--- a/posthog/hogql/database/schema/sessions_v1.py
+++ b/posthog/hogql/database/schema/sessions_v1.py
@@ -74,8 +74,10 @@
"$num_uniq_urls": IntegerDatabaseField(name="$num_uniq_urls"),
"$entry_current_url": StringDatabaseField(name="$entry_current_url"),
"$entry_pathname": StringDatabaseField(name="$entry_pathname"),
+ "$entry_hostname": StringDatabaseField(name="$entry_host"),
"$exit_current_url": StringDatabaseField(name="$exit_current_url"),
"$exit_pathname": StringDatabaseField(name="$exit_pathname"),
+ "$exit_hostname": StringDatabaseField(name="$exit_host"),
"$entry_utm_source": StringDatabaseField(name="$entry_utm_source"),
"$entry_utm_campaign": StringDatabaseField(name="$entry_utm_campaign"),
"$entry_utm_medium": StringDatabaseField(name="$entry_utm_medium"),
@@ -189,10 +191,18 @@ def arg_max_merge_field(field_name: str) -> ast.Call:
name="path",
args=[aggregate_fields["$entry_current_url"]],
)
+ aggregate_fields["$entry_hostname"] = ast.Call(
+ name="domain",
+ args=[aggregate_fields["$entry_current_url"]],
+ )
aggregate_fields["$exit_pathname"] = ast.Call(
name="path",
args=[aggregate_fields["$exit_current_url"]],
)
+ aggregate_fields["$exit_hostname"] = ast.Call(
+ name="domain",
+ args=[aggregate_fields["$exit_current_url"]],
+ )
aggregate_fields["$session_duration"] = ast.Call(
name="dateDiff",
args=[
@@ -255,6 +265,9 @@ def arg_max_merge_field(field_name: str) -> ast.Call:
medium=aggregate_fields["$entry_utm_medium"],
source=aggregate_fields["$entry_utm_source"],
referring_domain=aggregate_fields["$entry_referring_domain"],
+ url=aggregate_fields["$entry_current_url"],
+ hostname=aggregate_fields["$entry_hostname"],
+ pathname=aggregate_fields["$entry_pathname"],
gclid=aggregate_fields["$entry_gclid"],
gad_source=aggregate_fields["$entry_gad_source"],
),
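The new hostname columns are derived from the entry/exit URLs with ClickHouse's domain() function, which also drops any port (domain('https://google.com:3000/x') returns 'google.com'), so they can be queried like any other lazy session property. A HogQL sketch, not taken from this PR:

hogql = """
    SELECT $entry_hostname, $exit_hostname, count()
    FROM sessions
    GROUP BY $entry_hostname, $exit_hostname
"""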
diff --git a/posthog/hogql/database/schema/sessions_v2.py b/posthog/hogql/database/schema/sessions_v2.py
index 66bfae2d3952d..a2d623427685e 100644
--- a/posthog/hogql/database/schema/sessions_v2.py
+++ b/posthog/hogql/database/schema/sessions_v2.py
@@ -78,8 +78,10 @@
"$num_uniq_urls": IntegerDatabaseField(name="$num_uniq_urls"),
"$entry_current_url": StringDatabaseField(name="$entry_current_url"),
"$entry_pathname": StringDatabaseField(name="$entry_pathname"),
+ "$entry_hostname": StringDatabaseField(name="$entry_host"),
"$end_current_url": StringDatabaseField(name="$end_current_url"),
"$end_pathname": StringDatabaseField(name="$end_pathname"),
+ "$end_hostname": StringDatabaseField(name="$end_hostname"),
"$entry_utm_source": StringDatabaseField(name="$entry_utm_source"),
"$entry_utm_campaign": StringDatabaseField(name="$entry_utm_campaign"),
"$entry_utm_medium": StringDatabaseField(name="$entry_utm_medium"),
@@ -235,10 +237,18 @@ def arg_max_merge_field(field_name: str) -> ast.Call:
name="path",
args=[aggregate_fields["$entry_current_url"]],
)
+ aggregate_fields["$entry_hostname"] = ast.Call(
+ name="domain",
+ args=[aggregate_fields["$entry_current_url"]],
+ )
aggregate_fields["$end_pathname"] = ast.Call(
name="path",
args=[aggregate_fields["$end_current_url"]],
)
+ aggregate_fields["$end_hostname"] = ast.Call(
+ name="domain",
+ args=[aggregate_fields["$end_current_url"]],
+ )
aggregate_fields["$session_duration"] = ast.Call(
name="dateDiff",
args=[
@@ -328,6 +338,9 @@ def arg_max_merge_field(field_name: str) -> ast.Call:
campaign=aggregate_fields["$entry_utm_campaign"],
medium=aggregate_fields["$entry_utm_medium"],
source=aggregate_fields["$entry_utm_source"],
+ url=aggregate_fields["$entry_current_url"],
+ hostname=aggregate_fields["$entry_hostname"],
+ pathname=aggregate_fields["$entry_pathname"],
referring_domain=aggregate_fields["$entry_referring_domain"],
gclid=aggregate_fields["$entry_gclid"],
gad_source=aggregate_fields["$entry_gad_source"],
diff --git a/posthog/hogql/database/schema/test/test_channel_type.py b/posthog/hogql/database/schema/test/test_channel_type.py
index c15269536dbfa..8482757badb99 100644
--- a/posthog/hogql/database/schema/test/test_channel_type.py
+++ b/posthog/hogql/database/schema/test/test_channel_type.py
@@ -426,6 +426,74 @@ def test_custom_channel_type(self):
)
== "Test2"
)
+ # custom channel type using pathname
+ assert (
+ self._get_session_channel_type(
+ {
+ "$current_url": "https://www.google.com/some/path",
+ },
+ custom_channel_rules=[
+ CustomChannelRule(
+ items=[CustomChannelCondition(key="pathname", op="exact", value="/some/path", id="1")],
+ channel_type="Test",
+ combiner=FilterLogicalOperator.AND_,
+ id="a",
+ ),
+ ],
+ )
+ == "Test"
+ )
+ # custom channel type using hostname
+ assert (
+ self._get_session_channel_type(
+ {
+ "$current_url": "https://google.com/some/path",
+ },
+ custom_channel_rules=[
+ CustomChannelRule(
+ items=[CustomChannelCondition(key="hostname", op="exact", value="google.com", id="1")],
+ channel_type="Test",
+ combiner=FilterLogicalOperator.AND_,
+ id="a",
+ ),
+ ],
+ )
+ == "Test"
+ )
+ # custom channel type using hostname with port
+ assert (
+ self._get_session_channel_type(
+ {
+ "$current_url": "https://google.com:3000/some/path",
+ },
+ custom_channel_rules=[
+ CustomChannelRule(
+ items=[CustomChannelCondition(key="hostname", op="exact", value="google.com", id="1")],
+ channel_type="Test",
+ combiner=FilterLogicalOperator.AND_,
+ id="a",
+ ),
+ ],
+ )
+ == "Test"
+ )
+ # custom channel type using url
+ assert (
+ self._get_session_channel_type(
+ {
+ "$current_url": "https://www.google.com/some/path",
+ },
+ custom_channel_rules=[
+ CustomChannelRule(
+ items=[CustomChannelCondition(key="url", op="icontains", value="/some/path", id="1")],
+ channel_type="Test",
+ combiner=FilterLogicalOperator.AND_,
+ id="a",
+ ),
+ ],
+ )
+ == "Test"
+ )
def _get_initial_channel_type_from_wild_clicks(self, url: str, referrer: str):
session_id = str(uuid7())
diff --git a/posthog/hogql/database/schema/test/test_sessions_v1.py b/posthog/hogql/database/schema/test/test_sessions_v1.py
index 7c1a2f4562759..aa8295e2ecfde 100644
--- a/posthog/hogql/database/schema/test/test_sessions_v1.py
+++ b/posthog/hogql/database/schema/test/test_sessions_v1.py
@@ -299,7 +299,7 @@ def test_can_use_v1_and_v2_fields(self):
class TestGetLazySessionProperties(ClickhouseTestMixin, APIBaseTest):
def test_all(self):
results = get_lazy_session_table_properties_v1(None)
- self.assertEqual(len(results), 19)
+ self.assertEqual(len(results), 21)
self.assertEqual(
results[0],
{
diff --git a/posthog/hogql/database/schema/test/test_sessions_v2.py b/posthog/hogql/database/schema/test/test_sessions_v2.py
index f59d0075352bd..7dcfde92852ec 100644
--- a/posthog/hogql/database/schema/test/test_sessions_v2.py
+++ b/posthog/hogql/database/schema/test/test_sessions_v2.py
@@ -692,11 +692,13 @@ def test_all(self):
"$channel_type",
"$end_current_url",
"$end_pathname",
+ "$end_hostname",
"$end_timestamp",
"$entry_current_url",
"$entry_gad_source",
"$entry_gclid",
"$entry_pathname",
+ "$entry_hostname",
"$entry_referring_domain",
"$entry_utm_campaign",
"$entry_utm_content",
diff --git a/posthog/hogql/database/test/__snapshots__/test_database.ambr b/posthog/hogql/database/test/__snapshots__/test_database.ambr
index f30a908192a4d..480f8487f077a 100644
--- a/posthog/hogql/database/test/__snapshots__/test_database.ambr
+++ b/posthog/hogql/database/test/__snapshots__/test_database.ambr
@@ -384,8 +384,10 @@
"$num_uniq_urls",
"$entry_current_url",
"$entry_pathname",
+ "$entry_hostname",
"$end_current_url",
"$end_pathname",
+ "$end_hostname",
"$entry_utm_source",
"$entry_utm_campaign",
"$entry_utm_medium",
@@ -980,8 +982,10 @@
"$num_uniq_urls",
"$entry_current_url",
"$entry_pathname",
+ "$entry_hostname",
"$end_current_url",
"$end_pathname",
+ "$end_hostname",
"$entry_utm_source",
"$entry_utm_campaign",
"$entry_utm_medium",
@@ -1373,6 +1377,16 @@
"table": null,
"type": "string"
},
+ "$entry_hostname": {
+ "chain": null,
+ "fields": null,
+ "hogql_value": "`$entry_hostname`",
+ "id": null,
+ "name": "$entry_hostname",
+ "schema_valid": true,
+ "table": null,
+ "type": "string"
+ },
"$end_current_url": {
"chain": null,
"fields": null,
@@ -1393,6 +1407,16 @@
"table": null,
"type": "string"
},
+ "$end_hostname": {
+ "chain": null,
+ "fields": null,
+ "hogql_value": "`$end_hostname`",
+ "id": null,
+ "name": "$end_hostname",
+ "schema_valid": true,
+ "table": null,
+ "type": "string"
+ },
"$entry_utm_source": {
"chain": null,
"fields": null,
@@ -2210,8 +2234,10 @@
"$num_uniq_urls",
"$entry_current_url",
"$entry_pathname",
+ "$entry_hostname",
"$end_current_url",
"$end_pathname",
+ "$end_hostname",
"$entry_utm_source",
"$entry_utm_campaign",
"$entry_utm_medium",
@@ -2779,8 +2805,10 @@
"$num_uniq_urls",
"$entry_current_url",
"$entry_pathname",
+ "$entry_hostname",
"$end_current_url",
"$end_pathname",
+ "$end_hostname",
"$entry_utm_source",
"$entry_utm_campaign",
"$entry_utm_medium",
@@ -3172,6 +3200,16 @@
"table": null,
"type": "string"
},
+ "$entry_hostname": {
+ "chain": null,
+ "fields": null,
+ "hogql_value": "`$entry_hostname`",
+ "id": null,
+ "name": "$entry_hostname",
+ "schema_valid": true,
+ "table": null,
+ "type": "string"
+ },
"$end_current_url": {
"chain": null,
"fields": null,
@@ -3192,6 +3230,16 @@
"table": null,
"type": "string"
},
+ "$end_hostname": {
+ "chain": null,
+ "fields": null,
+ "hogql_value": "`$end_hostname`",
+ "id": null,
+ "name": "$end_hostname",
+ "schema_valid": true,
+ "table": null,
+ "type": "string"
+ },
"$entry_utm_source": {
"chain": null,
"fields": null,
diff --git a/posthog/hogql/test/__snapshots__/test_resolver.ambr b/posthog/hogql/test/__snapshots__/test_resolver.ambr
index a9f25124489a2..570de55393396 100644
--- a/posthog/hogql/test/__snapshots__/test_resolver.ambr
+++ b/posthog/hogql/test/__snapshots__/test_resolver.ambr
@@ -2390,11 +2390,13 @@
$autocapture_count: {},
$channel_type: {},
$end_current_url: {},
+ $end_hostname: {},
$end_pathname: {},
$end_timestamp: {},
$entry_current_url: {},
$entry_gad_source: {},
$entry_gclid: {},
+ $entry_hostname: {},
$entry_pathname: {},
$entry_referring_domain: {},
$entry_utm_campaign: {},
diff --git a/posthog/hogql_queries/actors_query_runner.py b/posthog/hogql_queries/actors_query_runner.py
index 62325d87e8c18..8b750196b8a98 100644
--- a/posthog/hogql_queries/actors_query_runner.py
+++ b/posthog/hogql_queries/actors_query_runner.py
@@ -19,6 +19,8 @@
ActorsQueryResponse,
CachedActorsQueryResponse,
DashboardFilter,
+ InsightActorsQuery,
+ TrendsQuery,
)
@@ -38,6 +40,7 @@ def __init__(self, *args, **kwargs):
self.source_query_runner = get_query_runner(self.query.source, self.team, self.timings, self.limit_context)
self.strategy = self.determine_strategy()
+ self.calculating = False
@property
def group_type_index(self) -> int | None:
@@ -100,7 +103,7 @@ def prepare_recordings(
matching_events_list = itertools.chain.from_iterable(row[column_index_events] for row in self.paginator.results)
return column_index_events, self.strategy.get_recordings(matching_events_list)
- def calculate(self) -> ActorsQueryResponse:
+ def _calculate(self) -> ActorsQueryResponse:
# Funnel queries require the experimental analyzer to run correctly
# Can remove once clickhouse moves to version 24.3 or above
settings = None
@@ -128,8 +131,8 @@ def calculate(self) -> ActorsQueryResponse:
actors_lookup = self.strategy.get_actors(actor_ids)
person_uuid_to_event_distinct_ids = None
- if "event_distinct_ids" in self.strategy.input_columns():
- event_distinct_ids_index = self.strategy.input_columns().index("event_distinct_ids")
+ if "event_distinct_ids" in input_columns:
+ event_distinct_ids_index = input_columns.index("event_distinct_ids")
person_uuid_to_event_distinct_ids = {
str(row[actor_column_index]): row[event_distinct_ids_index] for row in self.paginator.results
}
@@ -157,8 +160,22 @@ def calculate(self) -> ActorsQueryResponse:
**self.paginator.response_params(),
)
+ def calculate(self) -> ActorsQueryResponse:
+ try:
+ self.calculating = True
+ return self._calculate()
+ finally:
+ self.calculating = False
+
def input_columns(self) -> list[str]:
+ strategy_input_cols = self.strategy.input_columns()
if self.query.select:
+ if (
+ self.calculating
+ and "event_distinct_ids" in strategy_input_cols
+ and "event_distinct_ids" not in self.query.select
+ ):
+ return [*self.query.select, "event_distinct_ids"]
return self.query.select
return self.strategy.input_columns()
@@ -309,7 +326,14 @@ def to_query(self) -> ast.SelectQuery:
table=source_query,
alias=source_alias,
)
- if source_distinct_id_column is not None:
+ # While calculating (i.e. hydrating actors for the actors modal), we include event_distinct_ids
+ # See https://github.com/PostHog/posthog/pull/27131
+ if (
+ self.calculating
+ and isinstance(self.query.source, InsightActorsQuery)
+ and isinstance(self.query.source.source, TrendsQuery)
+ and source_distinct_id_column is not None
+ ):
select_query.select.append(ast.Field(chain=[source_distinct_id_column]))
try:
diff --git a/posthog/hogql_queries/error_tracking_query_runner.py b/posthog/hogql_queries/error_tracking_query_runner.py
index 88f66050e52e7..091665bf7caba 100644
--- a/posthog/hogql_queries/error_tracking_query_runner.py
+++ b/posthog/hogql_queries/error_tracking_query_runner.py
@@ -54,6 +54,10 @@ def select(self):
),
ast.Alias(alias="last_seen", expr=ast.Call(name="max", args=[ast.Field(chain=["timestamp"])])),
ast.Alias(alias="first_seen", expr=ast.Call(name="min", args=[ast.Field(chain=["timestamp"])])),
+ ast.Alias(
+ alias="earliest",
+ expr=ast.Call(name="argMin", args=[ast.Field(chain=["properties"]), ast.Field(chain=["timestamp"])]),
+ ),
ast.Alias(alias="id", expr=ast.Field(chain=["issue_id"])),
]
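The added earliest alias selects the properties payload of the oldest event in each issue group; written as plain HogQL rather than AST nodes, the alias amounts to this one-liner (sketch):

earliest_alias = "argMin(properties, timestamp) AS earliest"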
diff --git a/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py b/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py
index 08c7e3dd91de5..cd8203b5dbbf9 100644
--- a/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py
+++ b/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py
@@ -51,7 +51,7 @@ def __init__(self, *args, **kwargs):
if self.experiment.holdout:
self.variants.append(f"holdout-{self.experiment.holdout.id}")
- self.stats_version = self.query.stats_version or 1
+ self.stats_version = self.experiment.get_stats_config("version") or 1
self.prepared_funnels_query = self._prepare_funnel_query()
self.funnels_query_runner = FunnelsQueryRunner(
diff --git a/posthog/hogql_queries/experiments/experiment_trends_query_runner.py b/posthog/hogql_queries/experiments/experiment_trends_query_runner.py
index b3c72c788c951..b09187c4453aa 100644
--- a/posthog/hogql_queries/experiments/experiment_trends_query_runner.py
+++ b/posthog/hogql_queries/experiments/experiment_trends_query_runner.py
@@ -66,7 +66,7 @@ def __init__(self, *args, **kwargs):
self.variants.append(f"holdout-{self.experiment.holdout.id}")
self.breakdown_key = f"$feature/{self.feature_flag.key}"
- self.stats_version = self.query.stats_version or 1
+ self.stats_version = self.experiment.get_stats_config("version") or 1
self.prepared_count_query = self._prepare_count_query()
self.prepared_exposure_query = self._prepare_exposure_query()
@@ -307,7 +307,7 @@ def run(query_runner: TrendsQueryRunner, result_key: str, is_parallel: bool):
if count_result is None or exposure_result is None:
raise ValueError("One or both query runners failed to produce a response")
- self._validate_event_variants(count_result)
+ self._validate_event_variants(count_result, exposure_result)
# Statistical analysis
control_variant, test_variants = self._get_variants_with_base_stats(count_result, exposure_result)
@@ -369,21 +369,21 @@ def _get_variants_with_base_stats(
count = result.get("count", 0)
breakdown_value = result.get("breakdown_value")
if breakdown_value == CONTROL_VARIANT_KEY:
+ absolute_exposure = exposure_counts.get(breakdown_value, 0)
control_variant = ExperimentVariantTrendsBaseStats(
key=breakdown_value,
count=count,
exposure=1,
- # TODO: in the absence of exposure data, we should throw rather than default to 1
- absolute_exposure=exposure_counts.get(breakdown_value, 1),
+ absolute_exposure=absolute_exposure,
)
else:
+ absolute_exposure = exposure_counts.get(breakdown_value, 0)
test_variants.append(
ExperimentVariantTrendsBaseStats(
key=breakdown_value,
count=count,
- # TODO: in the absence of exposure data, we should throw rather than default to 1
- exposure=exposure_ratios.get(breakdown_value, 1),
- absolute_exposure=exposure_counts.get(breakdown_value, 1),
+ exposure=exposure_ratios.get(breakdown_value, 0),
+ absolute_exposure=absolute_exposure,
)
)
@@ -392,14 +392,20 @@ def _get_variants_with_base_stats(
return control_variant, test_variants
- def _validate_event_variants(self, count_result: TrendsQueryResponse):
+ def _validate_event_variants(self, count_result: TrendsQueryResponse, exposure_result: TrendsQueryResponse):
errors = {
+ ExperimentNoResultsErrorKeys.NO_EXPOSURES: True,
ExperimentNoResultsErrorKeys.NO_EVENTS: True,
ExperimentNoResultsErrorKeys.NO_FLAG_INFO: True,
ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT: True,
ExperimentNoResultsErrorKeys.NO_TEST_VARIANT: True,
}
+ # Don't raise right away: we still want to validate the metric (count) events below.
+ # If those checks pass but exposures are missing, the error raised at the end of this method still includes NO_EXPOSURES.
+ if exposure_result.results:
+ errors[ExperimentNoResultsErrorKeys.NO_EXPOSURES] = False
+
if not count_result.results or not count_result.results[0]:
raise ValidationError(code="no-results", detail=json.dumps(errors))
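With NO_EXPOSURES now part of the error payload, callers can tell "no exposure events yet" apart from other empty-result states. A hypothetical handling sketch that mirrors how the tests below unpack the ValidationError detail; experiment_query and team stand in for the caller's context:

try:
    ExperimentTrendsQueryRunner(query=experiment_query, team=team).calculate()
except ValidationError as err:
    errors = json.loads(err.detail[0])
    if errors.get(ExperimentNoResultsErrorKeys.NO_EXPOSURES.value):
        ...  # surface "no exposure events recorded yet" to the user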
diff --git a/posthog/hogql_queries/experiments/funnels_statistics_v2.py b/posthog/hogql_queries/experiments/funnels_statistics_v2.py
index 02f18d2f70740..73d23f9924a08 100644
--- a/posthog/hogql_queries/experiments/funnels_statistics_v2.py
+++ b/posthog/hogql_queries/experiments/funnels_statistics_v2.py
@@ -21,28 +21,41 @@ def calculate_probabilities_v2(
for funnel conversion rates.
This function computes the probability that each variant is the best (i.e., has the highest
- conversion rate) compared to all other variants, including the control. It uses samples
- drawn from the posterior Beta distributions of each variant's conversion rate.
+ conversion rate) compared to all other variants, including the control. It uses a Beta
+ distribution as the "conjugate prior" for binomial (success/failure) data, and starts with
+ Beta(1,1) as a minimally informative prior distribution. The "conjugate prior" means that
+ the prior and posterior distributions are the same family, and the posterior is easy
+ to compute.
Parameters:
-----------
control : ExperimentVariantFunnelsBaseStats
- Statistics for the control group, including success and failure counts
+ Statistics for the control group, containing success_count and failure_count
variants : list[ExperimentVariantFunnelsBaseStats]
List of statistics for test variants to compare against the control
Returns:
--------
list[float]
- A list of probabilities where:
+ A list of probabilities that sum to 1, where:
- The first element is the probability that the control variant is the best
- Subsequent elements are the probabilities that each test variant is the best
Notes:
------
- - Uses a Bayesian approach with Beta distributions as the posterior
- - Uses Beta(1,1) as the prior, which is uniform over [0,1]
- - Draws 10,000 samples from each variant's posterior distribution
+ - Uses a Bayesian approach with Beta distributions as conjugate prior for binomial data
+ - Uses Beta(1,1) as minimally informative prior (uniform over [0,1])
+ - Draws SAMPLE_SIZE (10,000) samples from each variant's posterior distribution
+ - Calculates win probability as frequency of samples where variant is maximum
+
+ Example:
+ --------
+ >>> from posthog.schema import ExperimentVariantFunnelsBaseStats
+ >>> from posthog.hogql_queries.experiments.funnels_statistics_v2 import calculate_probabilities_v2
+ >>> control = ExperimentVariantFunnelsBaseStats(key="control", success_count=100, failure_count=900)
+ >>> test = ExperimentVariantFunnelsBaseStats(key="test", success_count=150, failure_count=850)
+ >>> calculate_probabilities_v2(control, [test])
+ >>> # Returns: [0.001, 0.999] indicating the test variant is very likely to be best
"""
all_variants = [control, *variants]
@@ -179,27 +192,40 @@ def calculate_credible_intervals_v2(variants: list[ExperimentVariantFunnelsBaseS
Calculate Bayesian credible intervals for conversion rates of each variant.
This function computes the 95% credible intervals for the true conversion rate
- of each variant, representing the range where we believe the true rate lies
- with 95% probability.
+ of each variant using a Beta model. The interval represents the range where we
+ believe the true conversion rate lies with 95% probability.
Parameters:
-----------
variants : list[ExperimentVariantFunnelsBaseStats]
- List of all variants including control, containing success and failure counts
+ List of all variants (including control), each containing success_count and failure_count
Returns:
--------
dict[str, list[float]]
Dictionary mapping variant keys to [lower, upper] credible intervals, where:
- - lower is the 2.5th percentile of the posterior distribution
- - upper is the 97.5th percentile of the posterior distribution
+ - lower is the 2.5th percentile of the Beta posterior distribution
+ - upper is the 97.5th percentile of the Beta posterior distribution
+ - intervals represent conversion rates between 0 and 1
Notes:
------
- - Uses Beta distribution as the posterior
- - Uses Beta(1,1) as the prior, which is uniform over [0,1]
- - Returns 95% credible intervals
- - Intervals become narrower with larger sample sizes
+ - Uses Beta distribution as conjugate prior for binomial data
+ - Uses Beta(1,1) as minimally informative prior (uniform over [0,1])
+ - Computes 95% credible intervals (2.5th to 97.5th percentiles)
+ - Intervals become narrower with more data (larger success_count + failure_count)
+ - Returns empty dict if any calculations fail
+
+ Example:
+ --------
+ >>> from posthog.schema import ExperimentVariantFunnelsBaseStats
+ >>> from posthog.hogql_queries.experiments.funnels_statistics_v2 import calculate_credible_intervals_v2
+ >>> variants = [
+ ... ExperimentVariantFunnelsBaseStats(key="control", success_count=100, failure_count=900),
+ ... ExperimentVariantFunnelsBaseStats(key="test", success_count=150, failure_count=850)
+ ... ]
+ >>> calculate_credible_intervals_v2(variants)
+ >>> # Returns: {"control": [0.083, 0.120], "test": [0.129, 0.173]}
"""
intervals = {}
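For intuition, the Beta-posterior sampling these docstrings describe comes down to a few lines of numpy. A minimal sketch, not the PR's implementation, that reproduces the win-probability example above:

import numpy as np

SAMPLE_SIZE = 10_000

def win_probabilities(success_failure_pairs):
    # Beta(1, 1) prior + binomial data -> posterior Beta(1 + successes, 1 + failures) per variant.
    samples = np.column_stack(
        [np.random.beta(1 + s, 1 + f, SAMPLE_SIZE) for s, f in success_failure_pairs]
    )
    best = samples.argmax(axis=1)
    # Win probability = share of draws in which the variant had the highest conversion rate.
    return [float((best == i).mean()) for i in range(samples.shape[1])]

# win_probabilities([(100, 900), (150, 850)]) is approximately [0.001, 0.999], matching the docstring example.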
diff --git a/posthog/hogql_queries/experiments/test/test_experiment_funnels_query_runner.py b/posthog/hogql_queries/experiments/test/test_experiment_funnels_query_runner.py
index 3dc11499b1393..1da3e194bb838 100644
--- a/posthog/hogql_queries/experiments/test/test_experiment_funnels_query_runner.py
+++ b/posthog/hogql_queries/experiments/test/test_experiment_funnels_query_runner.py
@@ -141,6 +141,8 @@ def test_query_runner(self):
def test_query_runner_v2(self):
feature_flag = self.create_feature_flag()
experiment = self.create_experiment(feature_flag=feature_flag)
+ experiment.stats_config = {"version": 2}
+ experiment.save()
feature_flag_property = f"$feature/{feature_flag.key}"
@@ -152,7 +154,6 @@ def test_query_runner_v2(self):
experiment_id=experiment.id,
kind="ExperimentFunnelsQuery",
funnels_query=funnels_query,
- stats_version=2,
)
experiment.metrics = [{"type": "primary", "query": experiment_query.model_dump()}]
diff --git a/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py b/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py
index 0d3b37eeedc77..58aef9b8c05c7 100644
--- a/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py
+++ b/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py
@@ -1287,6 +1287,8 @@ def test_query_runner_with_avg_math(self):
def test_query_runner_with_avg_math_v2_stats(self):
feature_flag = self.create_feature_flag()
experiment = self.create_experiment(feature_flag=feature_flag)
+ experiment.stats_config = {"version": 2}
+ experiment.save()
feature_flag_property = f"$feature/{feature_flag.key}"
@@ -1302,7 +1304,6 @@ def test_query_runner_with_avg_math_v2_stats(self):
kind="ExperimentTrendsQuery",
count_query=count_query,
exposure_query=exposure_query,
- stats_version=2,
)
experiment.metrics = [{"type": "primary", "query": experiment_query.model_dump()}]
@@ -1522,6 +1523,8 @@ def test_query_runner_standard_flow(self):
def test_query_runner_standard_flow_v2_stats(self):
feature_flag = self.create_feature_flag()
experiment = self.create_experiment(feature_flag=feature_flag)
+ experiment.stats_config = {"version": 2}
+ experiment.save()
ff_property = f"$feature/{feature_flag.key}"
count_query = TrendsQuery(series=[EventsNode(event="$pageview")])
@@ -1532,7 +1535,6 @@ def test_query_runner_standard_flow_v2_stats(self):
kind="ExperimentTrendsQuery",
count_query=count_query,
exposure_query=exposure_query,
- stats_version=2,
)
experiment.metrics = [{"type": "primary", "query": experiment_query.model_dump()}]
@@ -1658,6 +1660,7 @@ def test_validate_event_variants_no_events(self):
expected_errors = json.dumps(
{
+ ExperimentNoResultsErrorKeys.NO_EXPOSURES: True,
ExperimentNoResultsErrorKeys.NO_EVENTS: True,
ExperimentNoResultsErrorKeys.NO_FLAG_INFO: True,
ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT: True,
@@ -1696,6 +1699,7 @@ def test_validate_event_variants_no_control(self):
expected_errors = json.dumps(
{
+ ExperimentNoResultsErrorKeys.NO_EXPOSURES: True,
ExperimentNoResultsErrorKeys.NO_EVENTS: False,
ExperimentNoResultsErrorKeys.NO_FLAG_INFO: False,
ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT: True,
@@ -1713,6 +1717,14 @@ def test_validate_event_variants_no_test(self):
journeys_for(
{
"user_control": [
+ {
+ "event": "$feature_flag_called",
+ "timestamp": "2020-01-02",
+ "properties": {
+ "$feature_flag_response": "control",
+ "$feature_flag": feature_flag.key,
+ },
+ },
{"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "control"}},
],
},
@@ -1734,6 +1746,7 @@ def test_validate_event_variants_no_test(self):
expected_errors = json.dumps(
{
+ ExperimentNoResultsErrorKeys.NO_EXPOSURES: False,
ExperimentNoResultsErrorKeys.NO_EVENTS: False,
ExperimentNoResultsErrorKeys.NO_FLAG_INFO: False,
ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT: False,
@@ -1750,9 +1763,25 @@ def test_validate_event_variants_no_flag_info(self):
journeys_for(
{
"user_no_flag_1": [
+ {
+ "event": "$feature_flag_called",
+ "timestamp": "2020-01-02",
+ "properties": {
+ "$feature_flag": feature_flag.key,
+ "$feature_flag_response": "control",
+ },
+ },
{"event": "$pageview", "timestamp": "2020-01-02"},
],
"user_no_flag_2": [
+ {
+ "event": "$feature_flag_called",
+ "timestamp": "2020-01-02",
+ "properties": {
+ "$feature_flag": feature_flag.key,
+ "$feature_flag_response": "control",
+ },
+ },
{"event": "$pageview", "timestamp": "2020-01-03"},
],
},
@@ -1774,6 +1803,7 @@ def test_validate_event_variants_no_flag_info(self):
expected_errors = json.dumps(
{
+ ExperimentNoResultsErrorKeys.NO_EXPOSURES: False,
ExperimentNoResultsErrorKeys.NO_EVENTS: True,
ExperimentNoResultsErrorKeys.NO_FLAG_INFO: True,
ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT: True,
@@ -1781,3 +1811,55 @@ def test_validate_event_variants_no_flag_info(self):
}
)
self.assertEqual(cast(list, context.exception.detail)[0], expected_errors)
+
+ @freeze_time("2020-01-01T12:00:00Z")
+ def test_validate_event_variants_no_exposure(self):
+ feature_flag = self.create_feature_flag()
+ experiment = self.create_experiment(feature_flag=feature_flag)
+
+ ff_property = f"$feature/{feature_flag.key}"
+
+ journeys_for(
+ {
+ "user_control": [
+ {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "control"}},
+ ],
+ "user_test": [
+ {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "test"}},
+ ],
+ },
+ self.team,
+ )
+
+ flush_persons_and_events()
+
+ count_query = TrendsQuery(series=[EventsNode(event="$pageview")])
+ exposure_query = TrendsQuery(series=[EventsNode(event="$feature_flag_called")])
+
+ experiment_query = ExperimentTrendsQuery(
+ experiment_id=experiment.id,
+ kind="ExperimentTrendsQuery",
+ count_query=count_query,
+ exposure_query=exposure_query,
+ )
+
+ experiment.metrics = [{"type": "primary", "query": experiment_query.model_dump()}]
+ experiment.save()
+
+ query_runner = ExperimentTrendsQueryRunner(query=experiment_query, team=self.team)
+ with self.assertRaises(ValidationError) as context:
+ query_runner.calculate()
+
+ expected_errors = json.dumps(
+ {
+ ExperimentNoResultsErrorKeys.NO_EXPOSURES: True,
+ ExperimentNoResultsErrorKeys.NO_EVENTS: False,
+ ExperimentNoResultsErrorKeys.NO_FLAG_INFO: False,
+ ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT: False,
+ ExperimentNoResultsErrorKeys.NO_TEST_VARIANT: False,
+ }
+ )
+ self.assertEqual(cast(list, context.exception.detail)[0], expected_errors)
diff --git a/posthog/hogql_queries/experiments/test/test_funnels_statistics.py b/posthog/hogql_queries/experiments/test/test_funnels_statistics.py
index 2206ff92b9305..bd70abf456162 100644
--- a/posthog/hogql_queries/experiments/test/test_funnels_statistics.py
+++ b/posthog/hogql_queries/experiments/test/test_funnels_statistics.py
@@ -11,6 +11,7 @@
calculate_credible_intervals,
)
from posthog.test.base import APIBaseTest
+from flaky import flaky
def create_variant(
@@ -45,6 +46,7 @@ def run_test_for_both_implementations(self, test_fn):
calculate_credible_intervals=calculate_credible_intervals_v2,
)
+ @flaky(max_runs=5, min_passes=1)
def test_small_sample_two_variants_not_significant(self):
"""Test with small sample size, two variants, no clear winner"""
@@ -58,16 +60,16 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.assertEqual(len(probabilities), 2)
if stats_version == 2:
- self.assertAlmostEqual(probabilities[0], 0.15, delta=0.1)
- self.assertAlmostEqual(probabilities[1], 0.85, delta=0.1)
+ self.assertAlmostEqual(probabilities[0], 0.149, delta=0.05)
+ self.assertAlmostEqual(probabilities[1], 0.850, delta=0.05)
self.assertEqual(significance, ExperimentSignificanceCode.LOW_WIN_PROBABILITY)
self.assertEqual(p_value, 1)
# Check credible intervals
- self.assertAlmostEqual(intervals["control"][0], 0.05, delta=0.05)
- self.assertAlmostEqual(intervals["control"][1], 0.20, delta=0.05)
- self.assertAlmostEqual(intervals["test"][0], 0.08, delta=0.05)
- self.assertAlmostEqual(intervals["test"][1], 0.25, delta=0.05)
+ self.assertAlmostEqual(intervals["control"][0], 0.055, places=2)
+ self.assertAlmostEqual(intervals["control"][1], 0.174, places=2)
+ self.assertAlmostEqual(intervals["test"][0], 0.093, places=2)
+ self.assertAlmostEqual(intervals["test"][1], 0.233, places=2)
else:
# Original implementation behavior
self.assertTrue(0.1 < probabilities[0] < 0.5)
@@ -76,13 +78,14 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.assertEqual(p_value, 1)
# Original implementation intervals
- self.assertAlmostEqual(intervals["control"][0], 0.05, delta=0.05)
- self.assertAlmostEqual(intervals["control"][1], 0.20, delta=0.05)
- self.assertAlmostEqual(intervals["test"][0], 0.08, delta=0.05)
- self.assertAlmostEqual(intervals["test"][1], 0.25, delta=0.05)
+ self.assertAlmostEqual(intervals["control"][0], 0.055, places=2)
+ self.assertAlmostEqual(intervals["control"][1], 0.174, places=2)
+ self.assertAlmostEqual(intervals["test"][0], 0.093, places=2)
+ self.assertAlmostEqual(intervals["test"][1], 0.233, places=2)
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_large_sample_two_variants_significant(self):
"""Test with large sample size, two variants, clear winner"""
@@ -102,10 +105,10 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.assertEqual(p_value, 0)
# Check credible intervals
- self.assertAlmostEqual(intervals["control"][0], 0.095, delta=0.01)
- self.assertAlmostEqual(intervals["control"][1], 0.105, delta=0.01)
- self.assertAlmostEqual(intervals["test"][0], 0.145, delta=0.01)
- self.assertAlmostEqual(intervals["test"][1], 0.155, delta=0.01)
+ self.assertAlmostEqual(intervals["control"][0], 0.095, places=2)
+ self.assertAlmostEqual(intervals["control"][1], 0.105, places=2)
+ self.assertAlmostEqual(intervals["test"][0], 0.145, places=2)
+ self.assertAlmostEqual(intervals["test"][1], 0.155, places=2)
else:
# Original implementation behavior
self.assertTrue(probabilities[1] > 0.5) # Test variant winning
@@ -114,13 +117,14 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.assertLess(p_value, 0.05)
# Original implementation intervals
- self.assertAlmostEqual(intervals["control"][0], 0.095, delta=0.01)
- self.assertAlmostEqual(intervals["control"][1], 0.105, delta=0.01)
- self.assertAlmostEqual(intervals["test"][0], 0.145, delta=0.01)
- self.assertAlmostEqual(intervals["test"][1], 0.155, delta=0.01)
+ self.assertAlmostEqual(intervals["control"][0], 0.095, places=2)
+ self.assertAlmostEqual(intervals["control"][1], 0.105, places=2)
+ self.assertAlmostEqual(intervals["test"][0], 0.145, places=2)
+ self.assertAlmostEqual(intervals["test"][1], 0.155, places=2)
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_many_variants_not_significant(self):
"""Test with multiple variants, no clear winner"""
@@ -142,14 +146,14 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
# Check credible intervals overlap
# Check credible intervals for control and all test variants
- self.assertAlmostEqual(intervals["control"][0], 0.09, delta=0.02)
- self.assertAlmostEqual(intervals["control"][1], 0.12, delta=0.02)
- self.assertAlmostEqual(intervals["test_a"][0], 0.09, delta=0.02)
- self.assertAlmostEqual(intervals["test_a"][1], 0.12, delta=0.02)
- self.assertAlmostEqual(intervals["test_b"][0], 0.09, delta=0.02)
- self.assertAlmostEqual(intervals["test_b"][1], 0.12, delta=0.02)
- self.assertAlmostEqual(intervals["test_c"][0], 0.09, delta=0.02)
- self.assertAlmostEqual(intervals["test_c"][1], 0.12, delta=0.02)
+ self.assertAlmostEqual(intervals["control"][0], 0.0829, places=2)
+ self.assertAlmostEqual(intervals["control"][1], 0.12, places=2)
+ self.assertAlmostEqual(intervals["test_a"][0], 0.0829, places=2)
+ self.assertAlmostEqual(intervals["test_a"][1], 0.12, places=2)
+ self.assertAlmostEqual(intervals["test_b"][0], 0.0829, places=2)
+ self.assertAlmostEqual(intervals["test_b"][1], 0.12, places=2)
+ self.assertAlmostEqual(intervals["test_c"][0], 0.0829, places=2)
+ self.assertAlmostEqual(intervals["test_c"][1], 0.12, places=2)
else:
# Original implementation behavior
self.assertTrue(all(0.1 < p < 0.9 for p in probabilities))
@@ -158,17 +162,18 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
# Check credible intervals overlap
# Check credible intervals for control and all test variants
- self.assertAlmostEqual(intervals["control"][0], 0.09, delta=0.02)
- self.assertAlmostEqual(intervals["control"][1], 0.12, delta=0.02)
- self.assertAlmostEqual(intervals["test_a"][0], 0.09, delta=0.02)
- self.assertAlmostEqual(intervals["test_a"][1], 0.12, delta=0.02)
- self.assertAlmostEqual(intervals["test_b"][0], 0.09, delta=0.02)
- self.assertAlmostEqual(intervals["test_b"][1], 0.12, delta=0.02)
- self.assertAlmostEqual(intervals["test_c"][0], 0.09, delta=0.02)
- self.assertAlmostEqual(intervals["test_c"][1], 0.12, delta=0.02)
+ self.assertAlmostEqual(intervals["control"][0], 0.081, places=2)
+ self.assertAlmostEqual(intervals["control"][1], 0.12, places=2)
+ self.assertAlmostEqual(intervals["test_a"][0], 0.081, places=2)
+ self.assertAlmostEqual(intervals["test_a"][1], 0.12, places=2)
+ self.assertAlmostEqual(intervals["test_b"][0], 0.081, places=2)
+ self.assertAlmostEqual(intervals["test_b"][1], 0.12, places=2)
+ self.assertAlmostEqual(intervals["test_c"][0], 0.081, places=2)
+ self.assertAlmostEqual(intervals["test_c"][1], 0.12, places=2)
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_insufficient_sample_size(self):
"""Test with sample size below threshold"""
@@ -199,6 +204,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_expected_loss_minimal_difference(self):
"""Test expected loss when variants have very similar performance"""
@@ -222,6 +228,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_expected_loss_test_variant_clear_winner(self):
"""Test expected loss when one variant is clearly better"""
diff --git a/posthog/hogql_queries/experiments/test/test_trends_statistics_continuous.py b/posthog/hogql_queries/experiments/test/test_trends_statistics_continuous.py
index a2b3c0a54fa7e..d1185abd73c80 100644
--- a/posthog/hogql_queries/experiments/test/test_trends_statistics_continuous.py
+++ b/posthog/hogql_queries/experiments/test/test_trends_statistics_continuous.py
@@ -11,6 +11,7 @@
calculate_credible_intervals,
)
from posthog.test.base import APIBaseTest
+from flaky import flaky
def create_variant(key: str, mean: float, exposure: float, absolute_exposure: int) -> ExperimentVariantTrendsBaseStats:
@@ -38,6 +39,7 @@ def run_test_for_both_implementations(self, test_fn):
calculate_credible_intervals=calculate_credible_intervals_v2_continuous,
)
+ @flaky(max_runs=5, min_passes=1)
def test_small_sample_two_variants_not_significant(self):
"""Test with small sample size, two variants, no clear winner"""
@@ -85,6 +87,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_large_sample_two_variants_significant(self):
"""Test with large sample size, two variants, clear winner"""
@@ -134,6 +137,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_large_sample_two_variants_strongly_significant(self):
"""Test with large sample size, two variants, very clear winner"""
@@ -179,6 +183,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_many_variants_not_significant(self):
"""Test with multiple variants, no clear winner"""
@@ -258,6 +263,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_many_variants_significant(self):
"""Test with multiple variants, one clear winner"""
@@ -327,6 +333,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_insufficient_sample_size(self):
"""Test with sample size below threshold"""
@@ -373,6 +380,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_edge_cases_zero_means(self):
"""Test edge cases like zero means"""
@@ -420,6 +428,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_edge_cases_near_zero_means(self):
"""Test edge cases like near-zero means"""
@@ -475,6 +484,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_expected_loss_minimal_difference(self):
"""Test expected loss when variants have very similar performance"""
@@ -504,6 +514,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_expected_loss_test_variant_clear_winner(self):
"""Test expected loss when one variant is clearly better"""
diff --git a/posthog/hogql_queries/experiments/test/test_trends_statistics_count.py b/posthog/hogql_queries/experiments/test/test_trends_statistics_count.py
index ee5cf1502492f..67c569407c849 100644
--- a/posthog/hogql_queries/experiments/test/test_trends_statistics_count.py
+++ b/posthog/hogql_queries/experiments/test/test_trends_statistics_count.py
@@ -11,6 +11,7 @@
calculate_credible_intervals,
)
from posthog.test.base import APIBaseTest
+from flaky import flaky
def create_variant(key: str, count: int, exposure: float, absolute_exposure: int) -> ExperimentVariantTrendsBaseStats:
@@ -48,6 +49,7 @@ def run_test_for_both_implementations(self, test_fn):
calculate_credible_intervals=calculate_credible_intervals_v2_count,
)
+ @flaky(max_runs=5, min_passes=1)
def test_small_sample_two_variants_not_significant(self):
"""Test with small sample size, two variants, no clear winner"""
@@ -82,6 +84,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_large_sample_two_variants_significant(self):
"""Test with large sample size, two variants, clear winner"""
@@ -119,6 +122,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_large_sample_two_variants_strongly_significant(self):
"""Test with large sample size, two variants, very clear winner"""
@@ -156,6 +160,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_many_variants_not_significant(self):
"""Test with multiple variants, no clear winner"""
@@ -208,6 +213,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_many_variants_significant(self):
"""Test with multiple variants, one clear winner"""
@@ -268,6 +274,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_real_world_data_1(self):
"""Test with multiple variants, one clear winner"""
@@ -286,24 +293,30 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
significance, p_value = are_results_significant(control, [test], probabilities)
intervals = calculate_credible_intervals([control, test])
self.assertEqual(len(probabilities), 2)
- self.assertAlmostEqual(probabilities[1], 0.966, places=2) # test should be winning
- self.assertAlmostEqual(probabilities[0], 0.034, places=2) # control should be losing
if stats_version == 2:
+ self.assertAlmostEqual(probabilities[1], 0.966, delta=0.05)
+ self.assertAlmostEqual(probabilities[0], 0.034, delta=0.05)
self.assertEqual(significance, ExperimentSignificanceCode.SIGNIFICANT)
self.assertLess(p_value, 0.01)
self.assertGreater(p_value, 0.0)
+ self.assertAlmostEqual(intervals["control"][0], 0.094, places=2)
+ self.assertAlmostEqual(intervals["control"][1], 0.116, places=2)
+ self.assertAlmostEqual(intervals["test"][0], 0.107, places=2)
+ self.assertAlmostEqual(intervals["test"][1], 0.134, places=2)
else:
+ self.assertAlmostEqual(probabilities[1], 0.966, delta=0.05)
+ self.assertAlmostEqual(probabilities[0], 0.034, delta=0.05)
self.assertEqual(significance, ExperimentSignificanceCode.HIGH_P_VALUE)
- self.assertAlmostEqual(p_value, 0.07, delta=0.01)
+ self.assertAlmostEqual(p_value, 0.07, places=2)
- self.assertAlmostEqual(intervals["control"][0], 0.094, delta=0.01)
- self.assertAlmostEqual(intervals["control"][1], 0.116, delta=0.01)
-
- self.assertAlmostEqual(intervals["test"][0], 0.107, delta=0.01)
- self.assertAlmostEqual(intervals["test"][1], 0.129, delta=0.01)
+ self.assertAlmostEqual(intervals["control"][0], 0.094, places=2)
+ self.assertAlmostEqual(intervals["control"][1], 0.116, places=2)
+ self.assertAlmostEqual(intervals["test"][0], 0.107, places=2)
+ self.assertAlmostEqual(intervals["test"][1], 0.134, places=2)
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_insufficient_sample_size(self):
"""Test with sample size below threshold"""
@@ -341,6 +354,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_edge_cases(self):
"""Test edge cases like zero counts"""
@@ -374,6 +388,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_expected_loss_minimal_difference(self):
"""Test expected loss when variants have very similar performance"""
@@ -403,6 +418,7 @@ def run_test(stats_version, calculate_probabilities, are_results_significant, ca
self.run_test_for_both_implementations(run_test)
+ @flaky(max_runs=5, min_passes=1)
def test_expected_loss_test_variant_clear_winner(self):
"""Test expected loss when one variant is clearly better"""
diff --git a/posthog/hogql_queries/experiments/trends_statistics_v2_continuous.py b/posthog/hogql_queries/experiments/trends_statistics_v2_continuous.py
index c0894302143ec..e03a0bb3fcecf 100644
--- a/posthog/hogql_queries/experiments/trends_statistics_v2_continuous.py
+++ b/posthog/hogql_queries/experiments/trends_statistics_v2_continuous.py
@@ -26,31 +26,56 @@ def calculate_probabilities_v2_continuous(
) -> list[float]:
"""
Calculate the win probabilities for each variant in an experiment using Bayesian analysis
- for continuous metrics (e.g., revenue).
+ for continuous metrics (e.g., revenue) with log-normal distribution assumptions.
- This function computes the probability that each variant is the best (i.e., has the highest
- mean value) compared to all other variants, including the control. It uses samples
- drawn from the posterior distributions of each variant's mean.
+ This function computes the probability that each variant is the best by comparing its
+ posterior distribution against all other variants. It uses a Normal-Inverse-Gamma prior
+ and performs analysis in log-space to handle right-skewed distributions typical of
+ metrics like revenue.
Parameters:
-----------
control_variant : ExperimentVariantTrendsBaseStats
- Statistics for the control group, including mean value and exposure (number of users)
+ Statistics for the control group, containing the mean value (in count field)
+ and exposure (number of users)
test_variants : list[ExperimentVariantTrendsBaseStats]
List of statistics for test variants to compare against the control
Returns:
--------
list[float]
- A list of probabilities where:
- - The first element is the probability that the control variant is the best
- - Subsequent elements are the probabilities that each test variant is the best
+ A list of probabilities where each element represents the probability that the
+ corresponding variant is the best (has highest mean value) among all variants:
+ - index 0: probability control variant is best
+ - index i>0: probability test variant i-1 is best
+ All probabilities sum to 1.0
Notes:
------
- - Uses a Bayesian approach with a t-distribution as the posterior
- - Assumes a Normal-Inverse-Gamma prior
- - Log-transforms the data to handle typical revenue distributions
+ - Uses log-transformation of data to handle right-skewed distributions
+ - Employs a Normal-Inverse-Gamma prior with parameters:
+ MU_0=0.0, KAPPA_0=1.0, ALPHA_0=1.0, BETA_0=1.0
+ - Assumes constant variance in log-space (LOG_VARIANCE=0.75)
+ - Draws SAMPLE_SIZE=10000 samples from each posterior for probability estimation
+
+ Example:
+ --------
+ >>> from posthog.schema import ExperimentVariantTrendsBaseStats
+ >>> from posthog.hogql_queries.experiments.trends_statistics_v2_continuous import calculate_probabilities_v2_continuous
+ >>> control = ExperimentVariantTrendsBaseStats(
+ ... key="control",
+ ... count=50, # mean revenue per user
+ ... exposure=1.0, # exposure relative to control
+ ... absolute_exposure=500 # number of users
+ ... )
+ >>> test = ExperimentVariantTrendsBaseStats(
+ ... key="test",
+ ... count=60, # mean revenue per user
+ ... exposure=1, # exposure relative to control
+ ... absolute_exposure=500 # number of users
+ ... )
+ >>> calculate_probabilities_v2_continuous(control, [test])
+ >>> # Returns: [0.0004, 0.9996] indicating the test variant is very likely to be best
"""
if len(test_variants) >= 10:
raise ValidationError("Can't calculate experiment results for more than 10 variants", code="too_much_data")
@@ -161,12 +186,19 @@ def are_results_significant_v2_continuous(
def calculate_credible_intervals_v2_continuous(variants, lower_bound=0.025, upper_bound=0.975):
"""
- Calculate Bayesian credible intervals for each variant's mean value.
+ Calculate Bayesian credible intervals for each variant's mean value using a log-normal model.
+
+ This function computes credible intervals in log-space using a t-distribution posterior
+ derived from a Normal-Inverse-Gamma prior, then transforms the results back to the original
+ scale. This approach is particularly suitable for right-skewed metrics like revenue.
Parameters:
-----------
variants : list[ExperimentVariantTrendsBaseStats]
- List of variants containing mean values and exposure data
+ List of variants where each variant contains:
+ - count: the mean value of the metric
+ - absolute_exposure: number of users/observations
+ - key: identifier for the variant
lower_bound : float, optional (default=0.025)
Lower percentile for the credible interval (2.5% for 95% CI)
upper_bound : float, optional (default=0.975)
@@ -175,7 +207,44 @@ def calculate_credible_intervals_v2_continuous(variants, lower_bound=0.025, uppe
Returns:
--------
dict[str, tuple[float, float]]
- Dictionary mapping variant keys to their credible intervals
+ Dictionary mapping variant keys to their credible intervals where:
+ - Key: variant identifier
+ - Value: tuple of (lower_bound, upper_bound) in original scale
+ Returns empty dict if any calculation errors occur
+
+ Notes:
+ ------
+ - Uses log-transformation to handle right-skewed distributions
+ - Employs Normal-Inverse-Gamma prior with parameters:
+ MU_0=0.0, KAPPA_0=1.0, ALPHA_0=1.0, BETA_0=1.0
+ - Assumes constant variance in log-space (LOG_VARIANCE=0.75)
+ - Results are transformed back to original scale and guaranteed non-negative
+ - Handles potential calculation errors gracefully by returning empty dict
+
+ Example:
+ --------
+ >>> from posthog.schema import ExperimentVariantTrendsBaseStats
+ >>> from posthog.hogql_queries.experiments.trends_statistics_v2_continuous import calculate_credible_intervals_v2_continuous
+ >>> variants = [
+ ... ExperimentVariantTrendsBaseStats(
+ ... key="control",
+ ... count=50.0, # mean revenue per user
+ ... exposure=1.0, # exposure relative to control
+ ... absolute_exposure=500 # number of users
+ ... ),
+ ... ExperimentVariantTrendsBaseStats(
+ ... key="test",
+ ... count=60.0, # mean revenue per user
+ ... exposure=1, # exposure relative to control
+ ... absolute_exposure=500 # number of users
+ ... )
+ ... ]
+ >>> calculate_credible_intervals_v2_continuous(variants)
+ >>> # Returns something like:
+ >>> # {
+ >>> # 'control': (45.98, 53.53), # 95% confident true mean is between $45.98-$53.53
+ >>> # 'test': (55.15, 64.22) # 95% confident true mean is between $55.15-$64.22
+ >>> # }
"""
intervals = {}
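A hedged sketch of the interval computation described in this docstring, using the standard Normal-Inverse-Gamma conjugate updates and the Student-t marginal for the mean. The log1p transform and the way the constant LOG_VARIANCE stands in for the sum of squared deviations are assumptions of this sketch, not a copy of the module's code:

    import numpy as np
    from scipy import stats

    MU_0, KAPPA_0, ALPHA_0, BETA_0 = 0.0, 1.0, 1.0, 1.0  # prior from the docstring
    LOG_VARIANCE = 0.75                                   # assumed variance of log-values

    def log_normal_credible_interval(mean_value: float, n: int, lower=0.025, upper=0.975):
        log_mean = np.log(mean_value + 1)  # assumed log1p transform of the observed mean
        kappa_n = KAPPA_0 + n
        mu_n = (KAPPA_0 * MU_0 + n * log_mean) / kappa_n
        alpha_n = ALPHA_0 + n / 2
        beta_n = BETA_0 + 0.5 * n * LOG_VARIANCE + (KAPPA_0 * n * (log_mean - MU_0) ** 2) / (2 * kappa_n)
        # Marginal posterior of the mean is Student-t; take its percentiles in log-space.
        t_posterior = stats.t(df=2 * alpha_n, loc=mu_n, scale=np.sqrt(beta_n / (alpha_n * kappa_n)))
        lo, hi = t_posterior.ppf(lower), t_posterior.ppf(upper)
        # Back-transform and clamp to non-negative values.
        return max(np.exp(lo) - 1, 0.0), max(np.exp(hi) - 1, 0.0)

    log_normal_credible_interval(50.0, 500)  # roughly (46, 54), in line with the example above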
diff --git a/posthog/hogql_queries/experiments/trends_statistics_v2_count.py b/posthog/hogql_queries/experiments/trends_statistics_v2_count.py
index 208747a14c1a1..38bf41ac0ca1d 100644
--- a/posthog/hogql_queries/experiments/trends_statistics_v2_count.py
+++ b/posthog/hogql_queries/experiments/trends_statistics_v2_count.py
@@ -23,35 +23,38 @@ def calculate_probabilities_v2_count(
Calculate the win probabilities for each variant in an experiment using Bayesian analysis.
This function computes the probability that each variant is the best (i.e., has the highest
- conversion rate) compared to all other variants, including the control. It uses samples
- drawn from the posterior distributions of each variant's conversion rate.
+ rate) compared to all other variants, including the control. It uses a Gamma-Poisson model
+ where samples are drawn from the posterior Gamma distributions of each variant's rate.
Parameters:
-----------
control_variant : ExperimentVariantTrendsBaseStats
- Statistics for the control group, including count (successes) and exposure (total trials)
+ Statistics for the control group, including count (events) and absolute_exposure
test_variants : list[ExperimentVariantTrendsBaseStats]
List of statistics for test variants to compare against the control
Returns:
--------
list[float]
- A list of probabilities where:
+ A list of probabilities that sum to 1, where:
- The first element is the probability that the control variant is the best
- Subsequent elements are the probabilities that each test variant is the best
Notes:
------
- - Uses a Bayesian approach with a Beta distribution as the posterior
- - Assumes a minimally informative prior (alpha=1, beta=1)
+ - Uses a Bayesian approach with a Gamma distribution as the posterior
+ - Assumes a minimally informative Gamma prior (alpha=1, beta=1)
- Draws samples from the posterior to estimate win probabilities
+ - Suitable for count/rate data following a Poisson distribution
Example:
--------
- >>> control = ExperimentVariantTrendsBaseStats(key="control", count=100, exposure=1000, absolute_exposure=1000)
- >>> test = ExperimentVariantTrendsBaseStats(key="test", count=120, exposure=1000, absolute_exposure=1000)
- >>> probabilities = calculate_probabilities_v2(control, [test])
- >>> # Returns: [0.085, 0.915] indicating the test variant is more likely to be the best
+ >>> from posthog.schema import ExperimentVariantTrendsBaseStats
+ >>> from posthog.hogql_queries.experiments.trends_statistics_v2_count import calculate_probabilities_v2_count
+ >>> control = ExperimentVariantTrendsBaseStats(key="control", count=100, exposure=1, absolute_exposure=1000)
+ >>> test = ExperimentVariantTrendsBaseStats(key="test", count=120, exposure=1, absolute_exposure=1000)
+ >>> calculate_probabilities_v2_count(control, [test])
+ >>> # Returns: [0.088, 0.912] indicating the test variant is more likely to be the best
"""
if len(test_variants) >= 10:
raise ValidationError("Can't calculate experiment results for more than 10 variants", code="too_much_data")
@@ -96,28 +99,38 @@ def are_results_significant_v2_count(
probabilities: list[Probability],
) -> tuple[ExperimentSignificanceCode, Probability]:
"""
- Determines if experiment results are statistically significant using Bayesian analysis.
+ Determines if experiment results are statistically significant.
- This function evaluates the win probabilities of each variant to determine if any variant
- is significantly better than the others. The method:
- 1. Checks if sample sizes meet minimum threshold requirements
- 2. Evaluates win probabilities from the posterior distributions
- 3. Calculates expected loss for the winning variant
+ This function evaluates whether any variant can be confidently declared the best by:
+ 1. Checking if variants have sufficient exposure (minimum threshold)
+ 2. Evaluating if the highest win probability exceeds the significance threshold
+ 3. For the variant with the highest rate, calculating its expected loss compared to the alternatives
Parameters:
-----------
control_variant : ExperimentVariantTrendsBaseStats
- Statistics for the control group, including count and exposure data
+ Statistics for the control group, including count and absolute_exposure
test_variants : list[ExperimentVariantTrendsBaseStats]
List of statistics for test variants to compare against control
probabilities : list[Probability]
- List of win probabilities for each variant, as calculated by calculate_probabilities
+ Win probabilities for each variant (must sum to 1), as calculated by calculate_probabilities_v2_count
Returns:
--------
tuple[ExperimentSignificanceCode, Probability]
- - ExperimentSignificanceCode indicating the significance status
- - Expected loss value for significant results, 1.0 for non-significant results
+ - ExperimentSignificanceCode indicating result status:
+ * NOT_ENOUGH_EXPOSURE: if any variant has exposure below threshold
+ * LOW_WIN_PROBABILITY: if no variant exceeds probability threshold
+ * HIGH_LOSS: if expected loss is too high for best variant
+ * SIGNIFICANT: if a variant is confidently best
+ - Expected loss value (between 0 and 1) for significant results, 1.0 for non-significant results
+
+ Notes:
+ ------
+ - Uses FF_DISTRIBUTION_THRESHOLD for minimum exposure check
+ - Uses MIN_PROBABILITY_FOR_SIGNIFICANCE (default 0.9) for win probability threshold
+ - Uses EXPECTED_LOSS_SIGNIFICANCE_LEVEL for maximum acceptable expected loss
+ - Expected loss represents the expected rate difference between the chosen variant and potentially better alternatives
"""
# Check exposure thresholds
for variant in test_variants:
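The decision flow in the docstring collapses to three ordered checks. A condensed sketch using the constant names mentioned above (their actual values live in the module and are not restated here):

    def significance_sketch(exposures, probabilities, expected_loss,
                            min_exposure, min_probability, max_loss):
        # 1. Every variant needs enough exposure before any call is made.
        if any(exposure < min_exposure for exposure in exposures):
            return "NOT_ENOUGH_EXPOSURE", 1.0
        # 2. Some variant must be the clear favourite.
        if max(probabilities) < min_probability:
            return "LOW_WIN_PROBABILITY", 1.0
        # 3. Even the favourite must not carry too much expected loss.
        if expected_loss >= max_loss:
            return "HIGH_LOSS", expected_loss
        return "SIGNIFICANT", expected_loss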
@@ -151,17 +164,16 @@ def are_results_significant_v2_count(
def calculate_credible_intervals_v2_count(variants, lower_bound=0.025, upper_bound=0.975):
"""
- Calculate Bayesian credible intervals for each variant's conversion rate.
+ Calculate Bayesian credible intervals for each variant's rate using a Gamma-Poisson model.
- Credible intervals represent the range where we believe the true conversion rate lies
- with a specified probability (default 95%). Unlike frequentist confidence intervals,
- these have a direct probabilistic interpretation: "There is a 95% probability that
- the true conversion rate lies within this interval."
+ Credible intervals represent the range where we believe the true rate lies
+ with a specified probability (default 95%). These intervals have a direct probabilistic
+ interpretation: "There is a 95% probability that the true rate lies within this interval."
Parameters:
-----------
variants : list[ExperimentVariantTrendsBaseStats]
- List of variants containing count (successes) and exposure (total trials) data
+ List of variants containing count (number of events) and absolute_exposure data
lower_bound : float, optional (default=0.025)
Lower percentile for the credible interval (2.5% for 95% CI)
upper_bound : float, optional (default=0.975)
@@ -171,22 +183,25 @@ def calculate_credible_intervals_v2_count(variants, lower_bound=0.025, upper_bou
--------
dict[str, tuple[float, float]]
Dictionary mapping variant keys to their credible intervals
- Each interval is a tuple of (lower_bound, upper_bound)
+ Each interval is a tuple of (lower_bound, upper_bound) representing rates
Notes:
------
- - Uses a Gamma distribution as the posterior distribution
- - Assumes a minimally informative prior (alpha=1, beta=1)
- - Intervals are calculated for visualization purposes, not for significance testing
+ - Uses a Gamma distribution as the posterior for the rate parameter
+ - Assumes a minimally informative Gamma prior (alpha=1, beta=1)
+ - Suitable for count/rate data following a Poisson distribution
- Returns empty dict if any calculations fail
+ - Intervals represent rates (events per exposure)
Example:
--------
+ >>> from posthog.schema import ExperimentVariantTrendsBaseStats
+ >>> from posthog.hogql_queries.experiments.trends_statistics_v2_count import calculate_credible_intervals_v2_count
>>> variants = [
- ... ExperimentVariantTrendsBaseStats(key="control", count=100, exposure=1000, absolute_exposure=1000),
- ... ExperimentVariantTrendsBaseStats(key="test", count=150, exposure=1000, absolute_exposure=1000)
+ ... ExperimentVariantTrendsBaseStats(key="control", count=100, exposure=1, absolute_exposure=1000),
+ ... ExperimentVariantTrendsBaseStats(key="test", count=150, exposure=1, absolute_exposure=1000)
... ]
- >>> intervals = calculate_credible_intervals_v2(variants)
+ >>> calculate_credible_intervals_v2_count(variants)
>>> # Returns: {"control": (0.082, 0.122), "test": (0.128, 0.176)}
"""
intervals = {}
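A small sketch of the Gamma-posterior interval described above, assuming the bounds are read directly off the posterior of the rate (events per unit of exposure):

    from scipy import stats

    ALPHA_0, BETA_0 = 1.0, 1.0  # minimally informative Gamma prior

    def rate_credible_interval(events: int, exposure: float, lower=0.025, upper=0.975):
        posterior = stats.gamma(a=ALPHA_0 + events, scale=1.0 / (BETA_0 + exposure))
        return posterior.ppf(lower), posterior.ppf(upper)

    rate_credible_interval(100, 1000)  # roughly (0.082, 0.122), matching the example above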
diff --git a/posthog/hogql_queries/insights/test/test_insight_actors_query_runner.py b/posthog/hogql_queries/insights/test/test_insight_actors_query_runner.py
index 7497112d1a175..44e4768b886c9 100644
--- a/posthog/hogql_queries/insights/test/test_insight_actors_query_runner.py
+++ b/posthog/hogql_queries/insights/test/test_insight_actors_query_runner.py
@@ -5,10 +5,20 @@
from posthog.hogql import ast
from posthog.hogql.query import execute_hogql_query
+from posthog.hogql_queries.actors_query_runner import ActorsQueryRunner
from posthog.models.group.util import create_group
from posthog.models.group_type_mapping import GroupTypeMapping
from posthog.models.team import WeekStartDay
-from posthog.schema import HogQLQueryModifiers, PersonsArgMaxVersion
+from posthog.schema import (
+ HogQLQueryModifiers,
+ PersonsArgMaxVersion,
+ ActorsQuery,
+ InsightActorsQuery,
+ PersonPropertyFilter,
+ TrendsQuery,
+ DateRange,
+ EventsNode,
+)
from posthog.test.base import (
APIBaseTest,
ClickhouseTestMixin,
@@ -214,6 +224,55 @@ def test_insight_persons_stickiness_groups_query(self):
self.assertEqual([("org1",)], response.results)
+ def test_insight_persons_trends_query_with_argmaxV1_calculate_adds_event_distinct_ids(self):
+ self._create_test_events()
+ self.team.timezone = "US/Pacific"
+ self.team.save()
+
+ actors_query = ActorsQuery(
+ select=["properties.name"],
+ source=InsightActorsQuery(
+ day="2020-01-09",
+ source=TrendsQuery(
+ dateRange=DateRange(date_from="2020-01-09", date_to="2020-01-19"),
+ series=[EventsNode(event="$pageview")],
+ properties=[
+ PersonPropertyFilter(type="person", key="email", value="tom@posthog.com", operator="is_not")
+ ],
+ ),
+ ),
+ )
+ actor_query_response = ActorsQueryRunner(query=actors_query, team=self.team).calculate()
+
+ self.assertTrue("event_distinct_ids" in actor_query_response.columns)
+
+ def test_insight_persons_trends_query_with_argmaxV1_no_event_distinct(self):
+ self._create_test_events()
+ self.team.timezone = "US/Pacific"
+ self.team.save()
+
+ with self.capture_queries(lambda query: re.match(r"^SELECT\s+name\s+AS\s+name", query) is not None) as queries:
+ response = self.select(
+ """
+ select * from (
+
+
+ }
+ series={[]}
+ properties={[]}
+ />
+
+
+ )
+ """,
+ modifiers={"personsArgMaxVersion": PersonsArgMaxVersion.V1},
+ )
+
+ self.assertEqual([("p2",)], response.results)
+ assert "in(id," in queries[0]
+ self.assertEqual(2, queries[0].count("toTimeZone(e.timestamp, 'US/Pacific') AS timestamp"))
+
@snapshot_clickhouse_queries
def test_insight_persons_trends_query_with_argmaxV1(self):
self._create_test_events()
@@ -224,7 +283,7 @@ def test_insight_persons_trends_query_with_argmaxV1(self):
response = self.select(
"""
select * from (
-
+
}
@@ -252,7 +311,7 @@ def test_insight_persons_trends_query_with_argmaxV2(self):
response = self.select(
"""
select * from (
-
+
}
diff --git a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
index 2a216d55bf026..cec3b18a3ff10 100644
--- a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
+++ b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
@@ -241,6 +241,7 @@
persons.created_at AS created_at,
source.event_count AS event_count,
source.matching_events AS matching_events,
+ source.event_distinct_ids,
source.event_distinct_ids AS event_distinct_ids
FROM
(SELECT actor_id AS actor_id,
@@ -1552,6 +1553,7 @@
persons.created_at AS created_at,
source.event_count AS event_count,
source.matching_events AS matching_events,
+ source.event_distinct_ids,
source.event_distinct_ids AS event_distinct_ids
FROM
(SELECT actor_id AS actor_id,
diff --git a/posthog/hogql_queries/query_cache.py b/posthog/hogql_queries/query_cache.py
index da00238c7f026..6090e10c72c02 100644
--- a/posthog/hogql_queries/query_cache.py
+++ b/posthog/hogql_queries/query_cache.py
@@ -10,6 +10,16 @@
class QueryCacheManager:
+ """
+ Stores query results in Redis, keyed by the hash of the query (the cache_key param):
+ '{cache_key}' -> query_results
+
+ Also uses Redis sorted sets to record when each query result was calculated.
+
+ Sorted sets are keyed by team_id:
+ 'cache_timestamps:{team_id}' -> '{self.insight_id}:{self.dashboard_id or ''}' -> timestamp (epoch time when calculated)
+ """
+
def __init__(
self,
*,
@@ -46,6 +56,7 @@ def get_stale_insights(*, team_id: int, limit: Optional[int] = None) -> list[str
first calculation to refresh it will refresh all of them.
"""
current_time = datetime.now(UTC)
+ # get least stale insights first
insights = redis.get_client().zrevrangebyscore(
f"cache_timestamps:{team_id}",
min="-inf",
diff --git a/posthog/hogql_queries/test/__snapshots__/test_error_tracking_query_runner.ambr b/posthog/hogql_queries/test/__snapshots__/test_error_tracking_query_runner.ambr
index 20abb16f555dd..2a6cf10f4f8f7 100644
--- a/posthog/hogql_queries/test/__snapshots__/test_error_tracking_query_runner.ambr
+++ b/posthog/hogql_queries/test/__snapshots__/test_error_tracking_query_runner.ambr
@@ -6,6 +6,7 @@
count(DISTINCT events.distinct_id) AS users,
max(toTimeZone(events.timestamp, 'UTC')) AS last_seen,
min(toTimeZone(events.timestamp, 'UTC')) AS first_seen,
+ argMin(events.properties, toTimeZone(events.timestamp, 'UTC')) AS earliest,
if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id
FROM events
LEFT OUTER JOIN
@@ -34,6 +35,7 @@
count(DISTINCT events.distinct_id) AS users,
max(toTimeZone(events.timestamp, 'UTC')) AS last_seen,
min(toTimeZone(events.timestamp, 'UTC')) AS first_seen,
+ argMin(events.properties, toTimeZone(events.timestamp, 'UTC')) AS earliest,
if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id
FROM events
LEFT OUTER JOIN
@@ -79,6 +81,7 @@
count(DISTINCT events.distinct_id) AS users,
max(toTimeZone(events.timestamp, 'UTC')) AS last_seen,
min(toTimeZone(events.timestamp, 'UTC')) AS first_seen,
+ argMin(events.properties, toTimeZone(events.timestamp, 'UTC')) AS earliest,
if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id
FROM events
LEFT OUTER JOIN
@@ -124,6 +127,7 @@
count(DISTINCT events.distinct_id) AS users,
max(toTimeZone(events.timestamp, 'UTC')) AS last_seen,
min(toTimeZone(events.timestamp, 'UTC')) AS first_seen,
+ argMin(events.properties, toTimeZone(events.timestamp, 'UTC')) AS earliest,
if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id
FROM events
LEFT OUTER JOIN
@@ -169,6 +173,7 @@
count(DISTINCT events.distinct_id) AS users,
max(toTimeZone(events.timestamp, 'UTC')) AS last_seen,
min(toTimeZone(events.timestamp, 'UTC')) AS first_seen,
+ argMin(events.properties, toTimeZone(events.timestamp, 'UTC')) AS earliest,
if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id
FROM events
LEFT OUTER JOIN
@@ -197,6 +202,7 @@
count(DISTINCT events.distinct_id) AS users,
max(toTimeZone(events.timestamp, 'UTC')) AS last_seen,
min(toTimeZone(events.timestamp, 'UTC')) AS first_seen,
+ argMin(events.properties, toTimeZone(events.timestamp, 'UTC')) AS earliest,
if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id
FROM events
LEFT OUTER JOIN
@@ -226,6 +232,7 @@
count(DISTINCT events.distinct_id) AS users,
max(toTimeZone(events.timestamp, 'UTC')) AS last_seen,
min(toTimeZone(events.timestamp, 'UTC')) AS first_seen,
+ argMin(events.properties, toTimeZone(events.timestamp, 'UTC')) AS earliest,
if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id
FROM events
LEFT OUTER JOIN
@@ -255,6 +262,7 @@
count(DISTINCT events.distinct_id) AS users,
max(toTimeZone(events.timestamp, 'UTC')) AS last_seen,
min(toTimeZone(events.timestamp, 'UTC')) AS first_seen,
+ argMin(events.properties, toTimeZone(events.timestamp, 'UTC')) AS earliest,
if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id
FROM events
LEFT OUTER JOIN
@@ -300,6 +308,7 @@
count(DISTINCT events.distinct_id) AS users,
max(toTimeZone(events.timestamp, 'UTC')) AS last_seen,
min(toTimeZone(events.timestamp, 'UTC')) AS first_seen,
+ argMin(events.properties, toTimeZone(events.timestamp, 'UTC')) AS earliest,
if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id
FROM events
LEFT OUTER JOIN
diff --git a/posthog/hogql_queries/test/test_error_tracking_query_runner.py b/posthog/hogql_queries/test/test_error_tracking_query_runner.py
index 6e15387162643..91b40b4fda255 100644
--- a/posthog/hogql_queries/test/test_error_tracking_query_runner.py
+++ b/posthog/hogql_queries/test/test_error_tracking_query_runner.py
@@ -1,7 +1,5 @@
from unittest import TestCase
from freezegun import freeze_time
-from datetime import datetime
-from zoneinfo import ZoneInfo
from dateutil.relativedelta import relativedelta
from django.utils.timezone import now
@@ -287,6 +285,7 @@ def test_column_names(self):
"users",
"last_seen",
"first_seen",
+ "earliest",
"id",
],
)
@@ -310,6 +309,7 @@ def test_column_names(self):
"users",
"last_seen",
"first_seen",
+ "earliest",
"id",
],
)
@@ -518,38 +518,15 @@ def test_overrides_aggregation(self):
)
results = self._calculate(runner)["results"]
- self.assertEqual(
- results,
- [
- {
- "id": self.issue_id_one,
- "name": None,
- "description": None,
- "assignee": None,
- "volume": None,
- "status": ErrorTrackingIssue.Status.ACTIVE,
- "first_seen": datetime(2020, 1, 10, 9, 11, tzinfo=ZoneInfo("UTC")),
- "last_seen": datetime(2020, 1, 10, 11, 11, tzinfo=ZoneInfo("UTC")),
- # count is (2 x issue_one) + (1 x issue_three)
- "occurrences": 3,
- "sessions": 1,
- "users": 2,
- },
- {
- "id": self.issue_id_two,
- "name": None,
- "description": None,
- "assignee": None,
- "volume": None,
- "status": ErrorTrackingIssue.Status.ACTIVE,
- "first_seen": datetime(2020, 1, 10, 10, 11, tzinfo=ZoneInfo("UTC")),
- "last_seen": datetime(2020, 1, 10, 10, 11, tzinfo=ZoneInfo("UTC")),
- "occurrences": 1,
- "sessions": 1,
- "users": 1,
- },
- ],
- )
+
+ self.assertEqual(len(results), 2)
+
+ # count is (2 x issue_one) + (1 x issue_three)
+ self.assertEqual(results[0]["id"], self.issue_id_one)
+ self.assertEqual(results[0]["occurrences"], 3)
+
+ self.assertEqual(results[1]["id"], self.issue_id_two)
+ self.assertEqual(results[1]["occurrences"], 1)
@snapshot_clickhouse_queries
def test_assignee_groups(self):
diff --git a/posthog/hogql_queries/web_analytics/stats_table.py b/posthog/hogql_queries/web_analytics/stats_table.py
index 8b5ff36f76df3..c63f5775c7955 100644
--- a/posthog/hogql_queries/web_analytics/stats_table.py
+++ b/posthog/hogql_queries/web_analytics/stats_table.py
@@ -149,7 +149,7 @@ def to_path_scroll_bounce_query(self) -> ast.SelectQuery:
min(session.$start_timestamp ) AS start_timestamp
FROM events
WHERE and(
- events.event == '$pageview',
+ or(events.event == '$pageview', events.event == '$screen'),
breakdown_value IS NOT NULL,
{inside_periods},
{event_properties},
@@ -172,7 +172,7 @@ def to_path_scroll_bounce_query(self) -> ast.SelectQuery:
min(session.$start_timestamp) as start_timestamp
FROM events
WHERE and(
- events.event == '$pageview',
+ or(events.event == '$pageview', events.event == '$screen'),
breakdown_value IS NOT NULL,
{inside_periods},
{event_properties},
@@ -204,7 +204,7 @@ def to_path_scroll_bounce_query(self) -> ast.SelectQuery:
min(session.$start_timestamp) AS start_timestamp
FROM events
WHERE and(
- or(events.event == '$pageview', events.event == '$pageleave'),
+ or(events.event == '$pageview', events.event == '$pageleave', events.event == '$screen'),
breakdown_value IS NOT NULL,
{inside_periods},
{event_properties_for_scroll},
@@ -263,7 +263,7 @@ def to_path_bounce_query(self) -> ast.SelectQuery:
min(session.$start_timestamp) AS start_timestamp
FROM events
WHERE and(
- events.event == '$pageview',
+ or(events.event == '$pageview', events.event == '$screen'),
{inside_periods},
{event_properties},
{session_properties},
@@ -286,7 +286,7 @@ def to_path_bounce_query(self) -> ast.SelectQuery:
min(session.$start_timestamp) AS start_timestamp
FROM events
WHERE and(
- events.event == '$pageview',
+ or(events.event == '$pageview', events.event == '$screen'),
breakdown_value IS NOT NULL,
{inside_periods},
{event_properties},
@@ -481,6 +481,8 @@ def _counts_breakdown_value(self):
return self._apply_path_cleaning(ast.Field(chain=["session", "$end_pathname"]))
case WebStatsBreakdown.EXIT_CLICK:
return ast.Field(chain=["session", "$last_external_click_url"])
+ case WebStatsBreakdown.SCREEN_NAME:
+ return ast.Field(chain=["events", "properties", "$screen_name"])
case WebStatsBreakdown.INITIAL_REFERRING_DOMAIN:
return ast.Field(chain=["session", "$entry_referring_domain"])
case WebStatsBreakdown.INITIAL_UTM_SOURCE:
diff --git a/posthog/hogql_queries/web_analytics/test/test_web_overview.py b/posthog/hogql_queries/web_analytics/test/test_web_overview.py
index 3a5c421cb890a..4024e99f3c6fb 100644
--- a/posthog/hogql_queries/web_analytics/test/test_web_overview.py
+++ b/posthog/hogql_queries/web_analytics/test/test_web_overview.py
@@ -16,6 +16,7 @@
HogQLQueryModifiers,
CustomEventConversionGoal,
ActionConversionGoal,
+ BounceRatePageViewMode,
)
from posthog.settings import HOGQL_INCREASED_MAX_EXECUTION_TIME
from posthog.test.base import (
@@ -79,8 +80,11 @@ def _run_web_overview_query(
action: Optional[Action] = None,
custom_event: Optional[str] = None,
includeLCPScore: Optional[bool] = False,
+ bounce_rate_mode: Optional[BounceRatePageViewMode] = BounceRatePageViewMode.COUNT_PAGEVIEWS,
):
- modifiers = HogQLQueryModifiers(sessionTableVersion=session_table_version)
+ modifiers = HogQLQueryModifiers(
+ sessionTableVersion=session_table_version, bounceRatePageViewMode=bounce_rate_mode
+ )
query = WebOverviewQuery(
dateRange=DateRange(date_from=date_from, date_to=date_to),
properties=[],
@@ -185,6 +189,55 @@ def test_increase_in_users(self):
self.assertEqual(0, bounce.previous)
self.assertEqual(None, bounce.changeFromPreviousPct)
+ def test_increase_in_users_using_mobile(self):
+ s1a = str(uuid7("2023-12-02"))
+ s1b = str(uuid7("2023-12-12"))
+ s2 = str(uuid7("2023-12-11"))
+
+ self._create_events(
+ [
+ ("p1", [("2023-12-02", s1a), ("2023-12-03", s1a), ("2023-12-12", s1b)]),
+ ("p2", [("2023-12-11", s2)]),
+ ],
+ event="$screen",
+ )
+
+ results = self._run_web_overview_query(
+ "2023-12-08",
+ "2023-12-15",
+ bounce_rate_mode=BounceRatePageViewMode.UNIQ_PAGE_SCREEN_AUTOCAPTURES, # bounce rate won't work in the other modes
+ ).results
+
+ visitors = results[0]
+ self.assertEqual("visitors", visitors.key)
+ self.assertEqual(2, visitors.value)
+ self.assertEqual(1, visitors.previous)
+ self.assertEqual(100, visitors.changeFromPreviousPct)
+
+ views = results[1]
+ self.assertEqual("views", views.key)
+ self.assertEqual(2, views.value)
+ self.assertEqual(2, views.previous)
+ self.assertEqual(0, views.changeFromPreviousPct)
+
+ sessions = results[2]
+ self.assertEqual("sessions", sessions.key)
+ self.assertEqual(2, sessions.value)
+ self.assertEqual(1, sessions.previous)
+ self.assertEqual(100, sessions.changeFromPreviousPct)
+
+ duration_s = results[3]
+ self.assertEqual("session duration", duration_s.key)
+ self.assertEqual(0, duration_s.value)
+ self.assertEqual(60 * 60 * 24, duration_s.previous)
+ self.assertEqual(-100, duration_s.changeFromPreviousPct)
+
+ bounce = results[4]
+ self.assertEqual("bounce rate", bounce.key)
+ self.assertEqual(100, bounce.value)
+ self.assertEqual(0, bounce.previous)
+ self.assertEqual(None, bounce.changeFromPreviousPct)
+
def test_all_time(self):
s1a = str(uuid7("2023-12-02"))
s1b = str(uuid7("2023-12-12"))
diff --git a/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py b/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py
index 74c12e30586cf..84a9f650222ad 100644
--- a/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py
+++ b/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py
@@ -16,6 +16,7 @@
HogQLQueryModifiers,
CustomEventConversionGoal,
ActionConversionGoal,
+ BounceRatePageViewMode,
)
from posthog.test.base import (
APIBaseTest,
@@ -44,8 +45,11 @@ def _create_events(self, data, event="$pageview"):
for timestamp, session_id, *extra in timestamps:
url = None
elements = None
+ screen_name = None
if event == "$pageview":
url = extra[0] if extra else None
+ elif event == "$screen":
+ screen_name = extra[0] if extra else None
elif event == "$autocapture":
elements = extra[0] if extra else None
properties = extra[1] if extra and len(extra) > 1 else {}
@@ -59,6 +63,7 @@ def _create_events(self, data, event="$pageview"):
"$session_id": session_id,
"$pathname": url,
"$current_url": url,
+ "$screen_name": screen_name,
**properties,
},
elements=elements,
@@ -129,8 +134,11 @@ def _run_web_stats_table_query(
custom_event: Optional[str] = None,
session_table_version: SessionTableVersion = SessionTableVersion.V2,
filter_test_accounts: Optional[bool] = False,
+ bounce_rate_mode: Optional[BounceRatePageViewMode] = BounceRatePageViewMode.COUNT_PAGEVIEWS,
):
- modifiers = HogQLQueryModifiers(sessionTableVersion=session_table_version)
+ modifiers = HogQLQueryModifiers(
+ sessionTableVersion=session_table_version, bounceRatePageViewMode=bounce_rate_mode
+ )
query = WebStatsTableQuery(
dateRange=DateRange(date_from=date_from, date_to=date_to),
properties=properties or [],
@@ -179,6 +187,30 @@ def test_increase_in_users(self):
results,
)
+ def test_increase_in_users_on_mobile(self):
+ s1a = str(uuid7("2023-12-02"))
+ s1b = str(uuid7("2023-12-13"))
+ s2 = str(uuid7("2023-12-10"))
+ self._create_events(
+ [
+ ("p1", [("2023-12-02", s1a, "Home"), ("2023-12-03", s1a, "Login"), ("2023-12-13", s1b, "Docs")]),
+ ("p2", [("2023-12-10", s2, "Home")]),
+ ],
+ event="$screen",
+ )
+
+ results = self._run_web_stats_table_query(
+ "2023-12-01", "2023-12-11", breakdown_by=WebStatsBreakdown.SCREEN_NAME
+ ).results
+
+ self.assertEqual(
+ [
+ ["Home", (2, None), (2, None)],
+ ["Login", (1, None), (1, None)],
+ ],
+ results,
+ )
+
def test_all_time(self):
s1a = str(uuid7("2023-12-02"))
s1b = str(uuid7("2023-12-13"))
diff --git a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py
index 5d848a5057c57..28d87ecb0be1e 100644
--- a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py
+++ b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py
@@ -172,8 +172,15 @@ def conversion_person_id_expr(self) -> Optional[ast.Expr]:
@cached_property
def event_type_expr(self) -> ast.Expr:
- pageview_expr = ast.CompareOperation(
- op=ast.CompareOperationOp.Eq, left=ast.Field(chain=["event"]), right=ast.Constant(value="$pageview")
+ pageview_expr = ast.Or(
+ exprs=[
+ ast.CompareOperation(
+ op=ast.CompareOperationOp.Eq, left=ast.Field(chain=["event"]), right=ast.Constant(value="$pageview")
+ ),
+ ast.CompareOperation(
+ op=ast.CompareOperationOp.Eq, left=ast.Field(chain=["event"]), right=ast.Constant(value="$screen")
+ ),
+ ]
)
if self.conversion_goal_expr:
diff --git a/posthog/hogql_queries/web_analytics/web_goals.py b/posthog/hogql_queries/web_analytics/web_goals.py
index a89c933a369c8..1fa1511edb95f 100644
--- a/posthog/hogql_queries/web_analytics/web_goals.py
+++ b/posthog/hogql_queries/web_analytics/web_goals.py
@@ -143,7 +143,7 @@ def to_query(self) -> ast.SelectQuery | ast.SelectSetQuery:
FROM events
WHERE and(
events.`$session_id` IS NOT NULL,
- event = '$pageview' OR {action_where},
+ event = '$pageview' OR event = '$screen' OR {action_where},
{periods_expression},
{event_properties},
{session_properties}
diff --git a/posthog/hogql_queries/web_analytics/web_overview.py b/posthog/hogql_queries/web_analytics/web_overview.py
index 8eb58f32adefb..032fc7615de08 100644
--- a/posthog/hogql_queries/web_analytics/web_overview.py
+++ b/posthog/hogql_queries/web_analytics/web_overview.py
@@ -88,10 +88,19 @@ def pageview_count_expression(self) -> ast.Expr:
return ast.Call(
name="countIf",
args=[
- ast.CompareOperation(
- left=ast.Field(chain=["event"]),
- op=ast.CompareOperationOp.Eq,
- right=ast.Constant(value="$pageview"),
+ ast.Or(
+ exprs=[
+ ast.CompareOperation(
+ left=ast.Field(chain=["event"]),
+ op=ast.CompareOperationOp.Eq,
+ right=ast.Constant(value="$pageview"),
+ ),
+ ast.CompareOperation(
+ left=ast.Field(chain=["event"]),
+ op=ast.CompareOperationOp.Eq,
+ right=ast.Constant(value="$screen"),
+ ),
+ ]
)
],
)
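In SQL terms, the countIf expression assembled above is simply countIf(event = '$pageview' OR event = '$screen'). A hedged sketch of building the same predicate from a string, assuming the parse_expr helper used elsewhere in the HogQL code is available:

    # Assumes posthog.hogql.parser.parse_expr keeps its current import path.
    from posthog.hogql.parser import parse_expr

    pageview_or_screen = parse_expr("event = '$pageview' OR event = '$screen'")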
diff --git a/posthog/management/commands/delete_persons.py b/posthog/management/commands/delete_persons.py
index 382a723b6041c..be26a3925a6b7 100644
--- a/posthog/management/commands/delete_persons.py
+++ b/posthog/management/commands/delete_persons.py
@@ -55,25 +55,25 @@ def run(options):
delete_query_person_distinct_ids = f"""
WITH to_delete AS ({select_query})
DELETE FROM posthog_persondistinctid
- WHERE team_id = %(team_id)s AND person_id IN (SELECT id FROM to_delete);
+ WHERE person_id IN (SELECT id FROM to_delete);
"""
delete_query_person_override = f"""
WITH to_delete AS ({select_query})
DELETE FROM posthog_personoverride
- WHERE team_id = %(team_id)s AND (old_person_id IN (SELECT id FROM to_delete) OR override_person_id IN (SELECT id FROM to_delete));
+ WHERE (old_person_id IN (SELECT id FROM to_delete) OR override_person_id IN (SELECT id FROM to_delete));
"""
delete_query_cohort_people = f"""
WITH to_delete AS ({select_query})
DELETE FROM posthog_cohortpeople
- WHERE team_id = %(team_id)s AND person_id IN (SELECT id FROM to_delete);
+ WHERE person_id IN (SELECT id FROM to_delete);
"""
delete_query_person = f"""
WITH to_delete AS ({select_query})
DELETE FROM posthog_person
- WHERE team_id = %(team_id)s AND id IN (SELECT id FROM to_delete);
+ WHERE id IN (SELECT id FROM to_delete);
"""
with connection.cursor() as cursor:
diff --git a/posthog/management/commands/generate_experiment_data.py b/posthog/management/commands/generate_experiment_data.py
index 5116d786be76e..1eb468e675b4b 100644
--- a/posthog/management/commands/generate_experiment_data.py
+++ b/posthog/management/commands/generate_experiment_data.py
@@ -4,11 +4,22 @@
import time
import uuid
import json
+from typing import Any, Literal, Union
from django.conf import settings
from django.core.management.base import BaseCommand
import posthoganalytics
-from pydantic import BaseModel, ValidationError
+from pydantic import BaseModel, ValidationError, Field
+
+
+class NormalDistributionParams(BaseModel):
+ mean: float
+ stddev: float
+
+
+class Distribution(BaseModel):
+ distribution: Literal["normal"]
+ params: NormalDistributionParams
class ActionConfig(BaseModel):
@@ -16,11 +27,17 @@ class ActionConfig(BaseModel):
probability: float
count: int = 1
required_for_next: bool = False
+ properties: dict[str, Union[Distribution, object]] = Field(default_factory=dict)
def model_post_init(self, __context) -> None:
if self.required_for_next and self.count > 1:
raise ValueError("'required_for_next' cannot be used with 'count' greater than 1")
+ # Convert any raw distribution dictionaries to Distribution objects
+ for key, value in self.properties.items():
+ if isinstance(value, dict) and "distribution" in value:
+ self.properties[key] = Distribution(**value)
+
class VariantConfig(BaseModel):
weight: float
@@ -76,12 +93,54 @@ def get_default_trend_experiment_config() -> ExperimentConfig:
)
+def get_default_revenue_experiment_config() -> ExperimentConfig:
+ return ExperimentConfig(
+ number_of_users=2000,
+ start_timestamp=datetime.now() - timedelta(days=7),
+ end_timestamp=datetime.now(),
+ variants={
+ "control": VariantConfig(
+ weight=0.5,
+ actions=[
+ ActionConfig(
+ event="checkout completed",
+ count=5,
+ probability=0.25,
+ properties={
+ "revenue": Distribution(
+ distribution="normal", params=NormalDistributionParams(mean=100, stddev=10)
+ )
+ },
+ )
+ ],
+ ),
+ "test": VariantConfig(
+ weight=0.5,
+ actions=[
+ ActionConfig(
+ event="checkout completed",
+ count=5,
+ probability=0.35,
+ properties={
+ "revenue": Distribution(
+ distribution="normal", params=NormalDistributionParams(mean=105, stddev=10)
+ )
+ },
+ )
+ ],
+ ),
+ },
+ )
+
+
def get_default_config(type) -> ExperimentConfig:
match type:
case "funnel":
return get_default_funnel_experiment_config()
case "trend":
return get_default_trend_experiment_config()
+ case "revenue":
+ return get_default_revenue_experiment_config()
case _:
raise ValueError(f"Invalid experiment type: {type}")
@@ -93,7 +152,7 @@ def add_arguments(self, parser):
parser.add_argument(
"--type",
type=str,
- choices=["trend", "funnel"],
+ choices=["trend", "funnel", "revenue"],
default="trend",
help="Type of experiment data to generate or configuration to initialize.",
)
@@ -172,13 +231,27 @@ def handle(self, *args, **options):
for action in experiment_config.variants[variant].actions:
for _ in range(action.count):
if random.random() < action.probability:
+ # Prepare properties dictionary
+ properties: dict[str, Any] = {
+ f"$feature/{experiment_id}": variant,
+ }
+
+ # Add custom properties, sampling from distributions if needed
+ for prop_name, prop_value in action.properties.items():
+ if isinstance(prop_value, Distribution):
+ # Sample from normal distribution
+ if prop_value.distribution == "normal":
+ properties[prop_name] = random.gauss(
+ prop_value.params.mean, prop_value.params.stddev
+ )
+ else:
+ properties[prop_name] = prop_value
+
posthoganalytics.capture(
distinct_id=distinct_id,
event=action.event,
timestamp=random_timestamp + timedelta(minutes=1),
- properties={
- f"$feature/{experiment_id}": variant,
- },
+ properties=properties,
)
else:
if action.required_for_next:
diff --git a/posthog/management/commands/get_temporal_workflow_count.py b/posthog/management/commands/get_temporal_workflow_count.py
index 27b2bd776a1dd..94f86e5b694c9 100644
--- a/posthog/management/commands/get_temporal_workflow_count.py
+++ b/posthog/management/commands/get_temporal_workflow_count.py
@@ -1,8 +1,9 @@
import asyncio
+import logging
from django.conf import settings
from django.core.management.base import BaseCommand
-from prometheus_client import Gauge
+from prometheus_client import CollectorRegistry, Gauge, push_to_gateway
from posthog.temporal.common.client import connect
@@ -90,11 +91,18 @@ def handle(self, *args, **options):
client.count_workflows(query=f'`TaskQueue`="{task_queue}" AND `ExecutionStatus`="{execution_status}"')
)
- if track_gauge:
+ if track_gauge and settings.PROM_PUSHGATEWAY_ADDRESS is not None:
+ logging.debug(f"Tracking count in Gauge: {track_gauge}")
+ registry = CollectorRegistry()
gauge = Gauge(
track_gauge,
- f"Number of Temporal Workflow executions in '{task_queue}' with status '{execution_status}'.",
+ f"Number of current Temporal Workflow executions.",
+ labelnames=["task_queue", "status"],
+ registry=registry,
)
- gauge.set(result.count)
+ gauge.labels(task_queue=task_queue, status=execution_status.lower()).set(result.count)
+ push_to_gateway(settings.PROM_PUSHGATEWAY_ADDRESS, job="get_temporal_workflow_count", registry=registry)
+
+ logging.info(f"Count of '{execution_status.lower()}' workflows in '{task_queue}': {result.count}")
return str(result.count)
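The push-based metric flow above follows the usual prometheus_client pattern: a throwaway registry, a labelled Gauge, and a single push. A minimal standalone sketch under those assumptions (the metric name, gateway address, and label values are placeholders):

    from prometheus_client import CollectorRegistry, Gauge, push_to_gateway

    registry = CollectorRegistry()
    gauge = Gauge(
        "temporal_workflow_count",  # placeholder metric name
        "Number of current Temporal Workflow executions.",
        labelnames=["task_queue", "status"],
        registry=registry,
    )
    gauge.labels(task_queue="general-purpose-task-queue", status="running").set(42)
    push_to_gateway("localhost:9091", job="get_temporal_workflow_count", registry=registry)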
diff --git a/posthog/migrations/0538_experiment_stats_config.py b/posthog/migrations/0538_experiment_stats_config.py
new file mode 100644
index 0000000000000..f6b0aa39471d3
--- /dev/null
+++ b/posthog/migrations/0538_experiment_stats_config.py
@@ -0,0 +1,17 @@
+# Generated by Django 4.2.15 on 2025-01-06 12:12
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ("posthog", "0537_data_color_themes"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="experiment",
+ name="stats_config",
+ field=models.JSONField(blank=True, default=dict, null=True),
+ ),
+ ]
diff --git a/posthog/migrations/0539_user_role_at_organization.py b/posthog/migrations/0539_user_role_at_organization.py
new file mode 100644
index 0000000000000..f0d87d4ef971a
--- /dev/null
+++ b/posthog/migrations/0539_user_role_at_organization.py
@@ -0,0 +1,31 @@
+# Generated by Django 4.2.15 on 2025-01-06 17:07
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ("posthog", "0538_experiment_stats_config"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="user",
+ name="role_at_organization",
+ field=models.CharField(
+ blank=True,
+ choices=[
+ ("engineering", "Engineering"),
+ ("data", "Data"),
+ ("product", "Product Management"),
+ ("founder", "Founder"),
+ ("leadership", "Leadership"),
+ ("marketing", "Marketing"),
+ ("sales", "Sales / Success"),
+ ("other", "Other"),
+ ],
+ max_length=64,
+ null=True,
+ ),
+ ),
+ ]
diff --git a/posthog/migrations/max_migration.txt b/posthog/migrations/max_migration.txt
index 5224bd35001ac..b182af0cabf6e 100644
--- a/posthog/migrations/max_migration.txt
+++ b/posthog/migrations/max_migration.txt
@@ -1 +1 @@
-0537_data_color_themes
+0539_user_role_at_organization
diff --git a/posthog/models/experiment.py b/posthog/models/experiment.py
index 87119d292b689..eeb59ed9eae73 100644
--- a/posthog/models/experiment.py
+++ b/posthog/models/experiment.py
@@ -46,9 +46,14 @@ class ExperimentType(models.TextChoices):
"ExperimentSavedMetric", blank=True, related_name="experiments", through="ExperimentToSavedMetric"
)
+ stats_config = models.JSONField(default=dict, null=True, blank=True)
+
def get_feature_flag_key(self):
return self.feature_flag.key
+ def get_stats_config(self, key: str):
+ return self.stats_config.get(key) if self.stats_config else None
+
@property
def is_draft(self):
return not self.start_date
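A minimal usage sketch for the new stats_config accessor; the "version" key is purely hypothetical, since the field is free-form JSON:

    experiment.stats_config = {"version": 2}  # hypothetical key, for illustration only
    experiment.save()
    assert experiment.get_stats_config("version") == 2
    assert experiment.get_stats_config("missing") is None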
diff --git a/posthog/models/user.py b/posthog/models/user.py
index 3b25009931c7c..871ae26c5119c 100644
--- a/posthog/models/user.py
+++ b/posthog/models/user.py
@@ -22,15 +22,30 @@
class Notifications(TypedDict, total=False):
plugin_disabled: bool
+ project_weekly_digest_disabled: dict[str, Any]  # Maps team ID (as a string) to whether the weekly digest is disabled for that project
+ all_weekly_digest_disabled: bool
NOTIFICATION_DEFAULTS: Notifications = {
- "plugin_disabled": True # Catch all for any Pipeline destination issue (plugins, hog functions, batch exports)
+ "plugin_disabled": True, # Catch all for any Pipeline destination issue (plugins, hog functions, batch exports)
+ "project_weekly_digest_disabled": {}, # Empty dict by default - no projects disabled
+ "all_weekly_digest_disabled": False, # Weekly digests enabled by default
}
# We don't need the following attributes in most cases, so we defer them by default
DEFERED_ATTRS = ["requested_password_reset_at"]
+ROLE_CHOICES = (
+ ("engineering", "Engineering"),
+ ("data", "Data"),
+ ("product", "Product Management"),
+ ("founder", "Founder"),
+ ("leadership", "Leadership"),
+ ("marketing", "Marketing"),
+ ("sales", "Sales / Success"),
+ ("other", "Other"),
+)
+
class UserManager(BaseUserManager):
"""Define a model manager for User model with no username field."""
@@ -154,7 +169,7 @@ class User(AbstractUser, UUIDClassicModel):
default=True,
help_text=_("Unselect this to temporarily disable an account."),
)
-
+ role_at_organization = models.CharField(max_length=64, choices=ROLE_CHOICES, null=True, blank=True)
# Preferences / configuration options
theme_mode = models.CharField(max_length=20, null=True, blank=True, choices=ThemeMode.choices)
diff --git a/posthog/queries/app_metrics/test/__snapshots__/test_historical_exports.ambr b/posthog/queries/app_metrics/test/__snapshots__/test_historical_exports.ambr
index cc3315c4cf369..0d7bc347ee7d8 100644
--- a/posthog/queries/app_metrics/test/__snapshots__/test_historical_exports.ambr
+++ b/posthog/queries/app_metrics/test/__snapshots__/test_historical_exports.ambr
@@ -96,6 +96,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -152,6 +153,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -208,6 +210,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -264,6 +267,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
diff --git a/posthog/schema.py b/posthog/schema.py
index 43eb5209fee9d..805c7387b4e6e 100644
--- a/posthog/schema.py
+++ b/posthog/schema.py
@@ -2,7220 +2,10 @@
from __future__ import annotations
-from enum import Enum, StrEnum
-from typing import Any, Literal, Optional, Union
+from typing import Any
-from pydantic import AwareDatetime, BaseModel, ConfigDict, Field, RootModel
+from pydantic import RootModel
class SchemaRoot(RootModel[Any]):
root: Any
-
-
-class MathGroupTypeIndex(float, Enum):
- NUMBER_0 = 0
- NUMBER_1 = 1
- NUMBER_2 = 2
- NUMBER_3 = 3
- NUMBER_4 = 4
-
-
-class AggregationAxisFormat(StrEnum):
- NUMERIC = "numeric"
- DURATION = "duration"
- DURATION_MS = "duration_ms"
- PERCENTAGE = "percentage"
- PERCENTAGE_SCALED = "percentage_scaled"
-
-
-class AlertCalculationInterval(StrEnum):
- HOURLY = "hourly"
- DAILY = "daily"
- WEEKLY = "weekly"
- MONTHLY = "monthly"
-
-
-class AlertConditionType(StrEnum):
- ABSOLUTE_VALUE = "absolute_value"
- RELATIVE_INCREASE = "relative_increase"
- RELATIVE_DECREASE = "relative_decrease"
-
-
-class AlertState(StrEnum):
- FIRING = "Firing"
- NOT_FIRING = "Not firing"
- ERRORED = "Errored"
- SNOOZED = "Snoozed"
-
-
-class AssistantArrayPropertyFilterOperator(StrEnum):
- EXACT = "exact"
- IS_NOT = "is_not"
-
-
-class AssistantBaseMultipleBreakdownFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- property: str = Field(..., description="Property name from the plan to break down by.")
-
-
-class AssistantDateTimePropertyFilterOperator(StrEnum):
- IS_DATE_EXACT = "is_date_exact"
- IS_DATE_BEFORE = "is_date_before"
- IS_DATE_AFTER = "is_date_after"
-
-
-class AssistantEventMultipleBreakdownFilterType(StrEnum):
- PERSON = "person"
- EVENT = "event"
- SESSION = "session"
- HOGQL = "hogql"
-
-
-class AssistantEventType(StrEnum):
- STATUS = "status"
- MESSAGE = "message"
- CONVERSATION = "conversation"
-
-
-class AssistantFunnelsBreakdownType(StrEnum):
- PERSON = "person"
- EVENT = "event"
- GROUP = "group"
- SESSION = "session"
-
-
-class AssistantGenerationStatusType(StrEnum):
- ACK = "ack"
- GENERATION_ERROR = "generation_error"
-
-
-class AssistantGenericMultipleBreakdownFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- property: str = Field(..., description="Property name from the plan to break down by.")
- type: AssistantEventMultipleBreakdownFilterType
-
-
-class AssistantGenericPropertyFilter2(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- key: str = Field(..., description="Use one of the properties the user has provided in the plan.")
- operator: AssistantArrayPropertyFilterOperator = Field(
- ..., description="`exact` - exact match of any of the values. `is_not` - does not match any of the values."
- )
- type: str
- value: list[str] = Field(
- ...,
- description=(
- "Only use property values from the plan. Always use strings as values. If you have a number, convert it to"
- ' a string first. If you have a boolean, convert it to a string "true" or "false".'
- ),
- )
-
-
-class AssistantGenericPropertyFilter3(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- key: str = Field(..., description="Use one of the properties the user has provided in the plan.")
- operator: AssistantDateTimePropertyFilterOperator
- type: str
- value: str = Field(..., description="Value must be a date in ISO 8601 format.")
-
-
-class AssistantMessage(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- content: str
- id: Optional[str] = None
- type: Literal["ai"] = "ai"
-
-
-class AssistantMessageType(StrEnum):
- HUMAN = "human"
- AI = "ai"
- AI_REASONING = "ai/reasoning"
- AI_VIZ = "ai/viz"
- AI_FAILURE = "ai/failure"
- AI_ROUTER = "ai/router"
-
-
-class AssistantSetPropertyFilterOperator(StrEnum):
- IS_SET = "is_set"
- IS_NOT_SET = "is_not_set"
-
-
-class AssistantSingleValuePropertyFilterOperator(StrEnum):
- EXACT = "exact"
- IS_NOT = "is_not"
- ICONTAINS = "icontains"
- NOT_ICONTAINS = "not_icontains"
- REGEX = "regex"
- NOT_REGEX = "not_regex"
-
-
-class AssistantTrendsDisplayType(RootModel[Union[str, Any]]):
- root: Union[str, Any]
-
-
-class Display(StrEnum):
- ACTIONS_LINE_GRAPH = "ActionsLineGraph"
- ACTIONS_BAR = "ActionsBar"
- ACTIONS_AREA_GRAPH = "ActionsAreaGraph"
- ACTIONS_LINE_GRAPH_CUMULATIVE = "ActionsLineGraphCumulative"
- BOLD_NUMBER = "BoldNumber"
- ACTIONS_PIE = "ActionsPie"
- ACTIONS_BAR_VALUE = "ActionsBarValue"
- ACTIONS_TABLE = "ActionsTable"
- WORLD_MAP = "WorldMap"
-
-
-class YAxisScaleType(StrEnum):
- LOG10 = "log10"
- LINEAR = "linear"
-
-
-class AssistantTrendsFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- aggregationAxisFormat: Optional[AggregationAxisFormat] = Field(
- default=AggregationAxisFormat.NUMERIC,
- description=(
- "Formats the trends value axis. Do not use the formatting unless you are absolutely sure that formatting"
- " will match the data. `numeric` - no formatting. Prefer this option by default. `duration` - formats the"
- " value in seconds to a human-readable duration, e.g., `132` becomes `2 minutes 12 seconds`. Use this"
- " option only if you are sure that the values are in seconds. `duration_ms` - formats the value in"
- " miliseconds to a human-readable duration, e.g., `1050` becomes `1 second 50 milliseconds`. Use this"
- " option only if you are sure that the values are in miliseconds. `percentage` - adds a percentage sign to"
- " the value, e.g., `50` becomes `50%`. `percentage_scaled` - formats the value as a percentage scaled to"
- " 0-100, e.g., `0.5` becomes `50%`."
- ),
- )
- aggregationAxisPostfix: Optional[str] = Field(
- default=None,
- description=(
- "Custom postfix to add to the aggregation axis, e.g., ` clicks` to format 5 as `5 clicks`. You may need to"
- " add a space before postfix."
- ),
- )
- aggregationAxisPrefix: Optional[str] = Field(
- default=None,
- description=(
- "Custom prefix to add to the aggregation axis, e.g., `$` for USD dollars. You may need to add a space after"
- " prefix."
- ),
- )
- decimalPlaces: Optional[float] = Field(
- default=None,
- description=(
- "Number of decimal places to show. Do not add this unless you are sure that values will have a decimal"
- " point."
- ),
- )
- display: Optional[Display] = Field(
- default=Display.ACTIONS_LINE_GRAPH,
- description=(
- "Visualization type. Available values: `ActionsLineGraph` - time-series line chart; most common option, as"
- " it shows change over time. `ActionsBar` - time-series bar chart. `ActionsAreaGraph` - time-series area"
- " chart. `ActionsLineGraphCumulative` - cumulative time-series line chart; good for cumulative metrics."
- " `BoldNumber` - total value single large number. You can't use this with breakdown; use when user"
- " explicitly asks for a single output number. `ActionsBarValue` - total value (NOT time-series) bar chart;"
- " good for categorical data. `ActionsPie` - total value pie chart; good for visualizing proportions."
- " `ActionsTable` - total value table; good when using breakdown to list users or other entities. `WorldMap`"
- " - total value world map; use when breaking down by country name using property `$geoip_country_name`, and"
- " only then."
- ),
- )
- formula: Optional[str] = Field(default=None, description="If the formula is provided, apply it here.")
- showLegend: Optional[bool] = Field(
- default=False, description="Whether to show the legend describing series and breakdowns."
- )
- showPercentStackView: Optional[bool] = Field(
- default=False, description="Whether to show a percentage of each series. Use only with"
- )
- showValuesOnSeries: Optional[bool] = Field(default=False, description="Whether to show a value on each data point.")
- yAxisScaleType: Optional[YAxisScaleType] = Field(
- default=YAxisScaleType.LINEAR, description="Whether to scale the y-axis."
- )
-
-
-class AssistantTrendsMath(StrEnum):
- FIRST_TIME_FOR_USER = "first_time_for_user"
- FIRST_TIME_FOR_USER_WITH_FILTERS = "first_time_for_user_with_filters"
-
-
-class AutocompleteCompletionItemKind(StrEnum):
- METHOD = "Method"
- FUNCTION = "Function"
- CONSTRUCTOR = "Constructor"
- FIELD = "Field"
- VARIABLE = "Variable"
- CLASS_ = "Class"
- STRUCT = "Struct"
- INTERFACE = "Interface"
- MODULE = "Module"
- PROPERTY = "Property"
- EVENT = "Event"
- OPERATOR = "Operator"
- UNIT = "Unit"
- VALUE = "Value"
- CONSTANT = "Constant"
- ENUM = "Enum"
- ENUM_MEMBER = "EnumMember"
- KEYWORD = "Keyword"
- TEXT = "Text"
- COLOR = "Color"
- FILE = "File"
- REFERENCE = "Reference"
- CUSTOMCOLOR = "Customcolor"
- FOLDER = "Folder"
- TYPE_PARAMETER = "TypeParameter"
- USER = "User"
- ISSUE = "Issue"
- SNIPPET = "Snippet"
-
-
-class BaseAssistantMessage(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- id: Optional[str] = None
-
-
-class BaseMathType(StrEnum):
- TOTAL = "total"
- DAU = "dau"
- WEEKLY_ACTIVE = "weekly_active"
- MONTHLY_ACTIVE = "monthly_active"
- UNIQUE_SESSION = "unique_session"
- FIRST_TIME_FOR_USER = "first_time_for_user"
- FIRST_MATCHING_EVENT_FOR_USER = "first_matching_event_for_user"
-
-
-class BreakdownAttributionType(StrEnum):
- FIRST_TOUCH = "first_touch"
- LAST_TOUCH = "last_touch"
- ALL_EVENTS = "all_events"
- STEP = "step"
-
-
-class BreakdownType(StrEnum):
- COHORT = "cohort"
- PERSON = "person"
- EVENT = "event"
- GROUP = "group"
- SESSION = "session"
- HOGQL = "hogql"
- DATA_WAREHOUSE = "data_warehouse"
- DATA_WAREHOUSE_PERSON_PROPERTY = "data_warehouse_person_property"
-
-
-class CompareItem(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- label: str
- value: str
-
-
-class StatusItem(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- label: str
- value: str
-
-
-class ChartDisplayCategory(StrEnum):
- TIME_SERIES = "TimeSeries"
- CUMULATIVE_TIME_SERIES = "CumulativeTimeSeries"
- TOTAL_VALUE = "TotalValue"
-
-
-class ChartDisplayType(StrEnum):
- ACTIONS_LINE_GRAPH = "ActionsLineGraph"
- ACTIONS_BAR = "ActionsBar"
- ACTIONS_STACKED_BAR = "ActionsStackedBar"
- ACTIONS_AREA_GRAPH = "ActionsAreaGraph"
- ACTIONS_LINE_GRAPH_CUMULATIVE = "ActionsLineGraphCumulative"
- BOLD_NUMBER = "BoldNumber"
- ACTIONS_PIE = "ActionsPie"
- ACTIONS_BAR_VALUE = "ActionsBarValue"
- ACTIONS_TABLE = "ActionsTable"
- WORLD_MAP = "WorldMap"
-
-
-class DisplayType(StrEnum):
- AUTO = "auto"
- LINE = "line"
- BAR = "bar"
-
-
-class YAxisPosition(StrEnum):
- LEFT = "left"
- RIGHT = "right"
-
-
-class ChartSettingsDisplay(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- color: Optional[str] = None
- displayType: Optional[DisplayType] = None
- label: Optional[str] = None
- trendLine: Optional[bool] = None
- yAxisPosition: Optional[YAxisPosition] = None
-
-
-class Style(StrEnum):
- NONE = "none"
- NUMBER = "number"
- PERCENT = "percent"
-
-
-class ChartSettingsFormatting(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- decimalPlaces: Optional[float] = None
- prefix: Optional[str] = None
- style: Optional[Style] = None
- suffix: Optional[str] = None
-
-
-class CompareFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- compare: Optional[bool] = Field(
- default=False, description="Whether to compare the current date range to a previous date range."
- )
- compare_to: Optional[str] = Field(
- default=None,
- description=(
- "The date range to compare to. The value is a relative date. Examples of relative dates are: `-1y` for 1"
- " year ago, `-14m` for 14 months ago, `-100w` for 100 weeks ago, `-14d` for 14 days ago, `-30h` for 30"
- " hours ago."
- ),
- )
-
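As a quick reference, here is a minimal sketch of how `CompareFilter` is populated using the relative-date format spelled out in the `compare_to` description above (the module path `posthog.schema` is an assumption about where these generated models live):

    # Compare the current date range against the same range one year earlier.
    from posthog.schema import CompareFilter  # assumed module path for the generated models

    year_over_year = CompareFilter(compare=True, compare_to="-1y")
    print(year_over_year.model_dump(exclude_none=True))  # {'compare': True, 'compare_to': '-1y'}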
-
-class ColorMode(StrEnum):
- LIGHT = "light"
- DARK = "dark"
-
-
-class ConditionalFormattingRule(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- bytecode: list
- color: str
- colorMode: Optional[ColorMode] = None
- columnName: str
- id: str
- input: str
- templateId: str
-
-
-class CountPerActorMathType(StrEnum):
- AVG_COUNT_PER_ACTOR = "avg_count_per_actor"
- MIN_COUNT_PER_ACTOR = "min_count_per_actor"
- MAX_COUNT_PER_ACTOR = "max_count_per_actor"
- MEDIAN_COUNT_PER_ACTOR = "median_count_per_actor"
- P90_COUNT_PER_ACTOR = "p90_count_per_actor"
- P95_COUNT_PER_ACTOR = "p95_count_per_actor"
- P99_COUNT_PER_ACTOR = "p99_count_per_actor"
-
-
-class CustomChannelField(StrEnum):
- UTM_SOURCE = "utm_source"
- UTM_MEDIUM = "utm_medium"
- UTM_CAMPAIGN = "utm_campaign"
- REFERRING_DOMAIN = "referring_domain"
-
-
-class CustomChannelOperator(StrEnum):
- EXACT = "exact"
- IS_NOT = "is_not"
- IS_SET = "is_set"
- IS_NOT_SET = "is_not_set"
- ICONTAINS = "icontains"
- NOT_ICONTAINS = "not_icontains"
- REGEX = "regex"
- NOT_REGEX = "not_regex"
-
-
-class CustomEventConversionGoal(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- customEventName: str
-
-
-class DataColorToken(StrEnum):
- PRESET_1 = "preset-1"
- PRESET_2 = "preset-2"
- PRESET_3 = "preset-3"
- PRESET_4 = "preset-4"
- PRESET_5 = "preset-5"
- PRESET_6 = "preset-6"
- PRESET_7 = "preset-7"
- PRESET_8 = "preset-8"
- PRESET_9 = "preset-9"
- PRESET_10 = "preset-10"
- PRESET_11 = "preset-11"
- PRESET_12 = "preset-12"
- PRESET_13 = "preset-13"
- PRESET_14 = "preset-14"
- PRESET_15 = "preset-15"
-
-
-class DataWarehouseEventsModifier(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- distinct_id_field: str
- id_field: str
- table_name: str
- timestamp_field: str
-
-
-class DatabaseSchemaSchema(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- id: str
- incremental: bool
- last_synced_at: Optional[str] = None
- name: str
- should_sync: bool
- status: Optional[str] = None
-
-
-class DatabaseSchemaSource(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- id: str
- last_synced_at: Optional[str] = None
- prefix: str
- source_type: str
- status: str
-
-
-class Type(StrEnum):
- POSTHOG = "posthog"
- DATA_WAREHOUSE = "data_warehouse"
- VIEW = "view"
- BATCH_EXPORT = "batch_export"
- MATERIALIZED_VIEW = "materialized_view"
-
-
-class DatabaseSerializedFieldType(StrEnum):
- INTEGER = "integer"
- FLOAT = "float"
- STRING = "string"
- DATETIME = "datetime"
- DATE = "date"
- BOOLEAN = "boolean"
- ARRAY = "array"
- JSON = "json"
- LAZY_TABLE = "lazy_table"
- VIRTUAL_TABLE = "virtual_table"
- FIELD_TRAVERSER = "field_traverser"
- EXPRESSION = "expression"
- VIEW = "view"
- MATERIALIZED_VIEW = "materialized_view"
-
-
-class DateRange(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- date_from: Optional[str] = None
- date_to: Optional[str] = None
- explicitDate: Optional[bool] = Field(
- default=False,
- description=(
- "Whether the date_from and date_to should be used verbatim. Disables rounding to the start and end of"
- " period."
- ),
- )
-
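The `explicitDate` flag above is easiest to see with concrete values; a small illustrative sketch (the dates are placeholders):

    # Use the dates verbatim: no rounding to the start/end of the period.
    exact_range = DateRange(date_from="2024-01-01", date_to="2024-01-31", explicitDate=True)

    # Default behaviour (explicitDate=False) allows rounding to period boundaries.
    rounded_range = DateRange(date_from="2024-01-01", date_to="2024-01-31")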
-
-class DatetimeDay(RootModel[AwareDatetime]):
- root: AwareDatetime
-
-
-class DefaultChannelTypes(StrEnum):
- CROSS_NETWORK = "Cross Network"
- PAID_SEARCH = "Paid Search"
- PAID_SOCIAL = "Paid Social"
- PAID_VIDEO = "Paid Video"
- PAID_SHOPPING = "Paid Shopping"
- PAID_UNKNOWN = "Paid Unknown"
- DIRECT = "Direct"
- ORGANIC_SEARCH = "Organic Search"
- ORGANIC_SOCIAL = "Organic Social"
- ORGANIC_VIDEO = "Organic Video"
- ORGANIC_SHOPPING = "Organic Shopping"
- PUSH = "Push"
- SMS = "SMS"
- AUDIO = "Audio"
- EMAIL = "Email"
- REFERRAL = "Referral"
- AFFILIATE = "Affiliate"
- UNKNOWN = "Unknown"
-
-
-class DurationType(StrEnum):
- DURATION = "duration"
- ACTIVE_SECONDS = "active_seconds"
- INACTIVE_SECONDS = "inactive_seconds"
-
-
-class Key(StrEnum):
- TAG_NAME = "tag_name"
- TEXT = "text"
- HREF = "href"
- SELECTOR = "selector"
-
-
-class ElementType(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- attr_class: Optional[list[str]] = None
- attr_id: Optional[str] = None
- attributes: dict[str, str]
- href: Optional[str] = None
- nth_child: Optional[float] = None
- nth_of_type: Optional[float] = None
- order: Optional[float] = None
- tag_name: str
- text: Optional[str] = None
-
-
-class EmptyPropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
-
-
-class EntityType(StrEnum):
- ACTIONS = "actions"
- EVENTS = "events"
- DATA_WAREHOUSE = "data_warehouse"
- NEW_ENTITY = "new_entity"
-
-
-class Status(StrEnum):
- ARCHIVED = "archived"
- ACTIVE = "active"
- RESOLVED = "resolved"
- PENDING_RELEASE = "pending_release"
-
-
-class ErrorTrackingIssue(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- assignee: Optional[float] = None
- description: Optional[str] = None
- first_seen: AwareDatetime
- id: str
- last_seen: AwareDatetime
- name: Optional[str] = None
- occurrences: float
- sessions: float
- status: Status
- users: float
- volume: Optional[Any] = None
-
-
-class OrderBy(StrEnum):
- LAST_SEEN = "last_seen"
- FIRST_SEEN = "first_seen"
- OCCURRENCES = "occurrences"
- USERS = "users"
- SESSIONS = "sessions"
-
-
-class EventDefinition(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- elements: list
- event: str
- properties: dict[str, Any]
-
-
-class CorrelationType(StrEnum):
- SUCCESS = "success"
- FAILURE = "failure"
-
-
-class Person(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- distinct_ids: list[str]
- is_identified: Optional[bool] = None
- properties: dict[str, Any]
-
-
-class EventType(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- distinct_id: str
- elements: list[ElementType]
- elements_chain: Optional[str] = None
- event: str
- id: str
- person: Optional[Person] = None
- properties: dict[str, Any]
- timestamp: str
- uuid: Optional[str] = None
-
-
-class Properties(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- email: Optional[str] = None
- name: Optional[str] = None
-
-
-class EventsQueryPersonColumn(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- created_at: str
- distinct_id: str
- properties: Properties
- uuid: str
-
-
-class ExperimentSignificanceCode(StrEnum):
- SIGNIFICANT = "significant"
- NOT_ENOUGH_EXPOSURE = "not_enough_exposure"
- LOW_WIN_PROBABILITY = "low_win_probability"
- HIGH_LOSS = "high_loss"
- HIGH_P_VALUE = "high_p_value"
-
-
-class ExperimentVariantFunnelsBaseStats(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- failure_count: float
- key: str
- success_count: float
-
-
-class ExperimentVariantTrendsBaseStats(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- absolute_exposure: float
- count: float
- exposure: float
- key: str
-
-
-class FailureMessage(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- content: Optional[str] = None
- id: Optional[str] = None
- type: Literal["ai/failure"] = "ai/failure"
-
-
-class FilterLogicalOperator(StrEnum):
- AND_ = "AND"
- OR_ = "OR"
-
-
-class FunnelConversionWindowTimeUnit(StrEnum):
- SECOND = "second"
- MINUTE = "minute"
- HOUR = "hour"
- DAY = "day"
- WEEK = "week"
- MONTH = "month"
-
-
-class FunnelCorrelationResultsType(StrEnum):
- EVENTS = "events"
- PROPERTIES = "properties"
- EVENT_WITH_PROPERTIES = "event_with_properties"
-
-
-class FunnelExclusionLegacy(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- custom_name: Optional[str] = None
- funnel_from_step: float
- funnel_to_step: float
- id: Optional[Union[str, float]] = None
- index: Optional[float] = None
- name: Optional[str] = None
- order: Optional[float] = None
- type: Optional[EntityType] = None
-
-
-class FunnelLayout(StrEnum):
- HORIZONTAL = "horizontal"
- VERTICAL = "vertical"
-
-
-class FunnelMathType(StrEnum):
- TOTAL = "total"
- FIRST_TIME_FOR_USER = "first_time_for_user"
- FIRST_TIME_FOR_USER_WITH_FILTERS = "first_time_for_user_with_filters"
-
-
-class FunnelPathType(StrEnum):
- FUNNEL_PATH_BEFORE_STEP = "funnel_path_before_step"
- FUNNEL_PATH_BETWEEN_STEPS = "funnel_path_between_steps"
- FUNNEL_PATH_AFTER_STEP = "funnel_path_after_step"
-
-
-class FunnelStepReference(StrEnum):
- TOTAL = "total"
- PREVIOUS = "previous"
-
-
-class FunnelTimeToConvertResults(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- average_conversion_time: Optional[float] = None
- bins: list[list[int]]
-
-
-class FunnelVizType(StrEnum):
- STEPS = "steps"
- TIME_TO_CONVERT = "time_to_convert"
- TRENDS = "trends"
-
-
-class GoalLine(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- label: str
- value: float
-
-
-class HogCompileResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- bytecode: list
- locals: list
-
-
-class HogLanguage(StrEnum):
- HOG = "hog"
- HOG_JSON = "hogJson"
- HOG_QL = "hogQL"
- HOG_QL_EXPR = "hogQLExpr"
- HOG_TEMPLATE = "hogTemplate"
-
-
-class BounceRatePageViewMode(StrEnum):
- COUNT_PAGEVIEWS = "count_pageviews"
- UNIQ_URLS = "uniq_urls"
- UNIQ_PAGE_SCREEN_AUTOCAPTURES = "uniq_page_screen_autocaptures"
-
-
-class InCohortVia(StrEnum):
- AUTO = "auto"
- LEFTJOIN = "leftjoin"
- SUBQUERY = "subquery"
- LEFTJOIN_CONJOINED = "leftjoin_conjoined"
-
-
-class MaterializationMode(StrEnum):
- AUTO = "auto"
- LEGACY_NULL_AS_STRING = "legacy_null_as_string"
- LEGACY_NULL_AS_NULL = "legacy_null_as_null"
- DISABLED = "disabled"
-
-
-class PersonsArgMaxVersion(StrEnum):
- AUTO = "auto"
- V1 = "v1"
- V2 = "v2"
-
-
-class PersonsJoinMode(StrEnum):
- INNER = "inner"
- LEFT = "left"
-
-
-class PersonsOnEventsMode(StrEnum):
- DISABLED = "disabled"
- PERSON_ID_NO_OVERRIDE_PROPERTIES_ON_EVENTS = "person_id_no_override_properties_on_events"
- PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS = "person_id_override_properties_on_events"
- PERSON_ID_OVERRIDE_PROPERTIES_JOINED = "person_id_override_properties_joined"
-
-
-class PropertyGroupsMode(StrEnum):
- ENABLED = "enabled"
- DISABLED = "disabled"
- OPTIMIZED = "optimized"
-
-
-class SessionTableVersion(StrEnum):
- AUTO = "auto"
- V1 = "v1"
- V2 = "v2"
-
-
-class HogQLVariable(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- code_name: str
- value: Optional[Any] = None
- variableId: str
-
-
-class HogQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- bytecode: Optional[list] = None
- coloredBytecode: Optional[list] = None
- results: Any
- stdout: Optional[str] = None
-
-
-class HumanMessage(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- content: str
- id: Optional[str] = None
- type: Literal["human"] = "human"
-
-
-class Compare(StrEnum):
- CURRENT = "current"
- PREVIOUS = "previous"
-
-
-class InsightFilterProperty(StrEnum):
- TRENDS_FILTER = "trendsFilter"
- FUNNELS_FILTER = "funnelsFilter"
- RETENTION_FILTER = "retentionFilter"
- PATHS_FILTER = "pathsFilter"
- STICKINESS_FILTER = "stickinessFilter"
- LIFECYCLE_FILTER = "lifecycleFilter"
-
-
-class InsightNodeKind(StrEnum):
- TRENDS_QUERY = "TrendsQuery"
- FUNNELS_QUERY = "FunnelsQuery"
- RETENTION_QUERY = "RetentionQuery"
- PATHS_QUERY = "PathsQuery"
- STICKINESS_QUERY = "StickinessQuery"
- LIFECYCLE_QUERY = "LifecycleQuery"
-
-
-class InsightThresholdType(StrEnum):
- ABSOLUTE = "absolute"
- PERCENTAGE = "percentage"
-
-
-class InsightsThresholdBounds(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- lower: Optional[float] = None
- upper: Optional[float] = None
-
-
-class IntervalType(StrEnum):
- MINUTE = "minute"
- HOUR = "hour"
- DAY = "day"
- WEEK = "week"
- MONTH = "month"
-
-
-class LifecycleToggle(StrEnum):
- NEW = "new"
- RESURRECTING = "resurrecting"
- RETURNING = "returning"
- DORMANT = "dormant"
-
-
-class MatchedRecordingEvent(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- uuid: str
-
-
-class MultipleBreakdownType(StrEnum):
- PERSON = "person"
- EVENT = "event"
- GROUP = "group"
- SESSION = "session"
- HOGQL = "hogql"
-
-
-class NodeKind(StrEnum):
- EVENTS_NODE = "EventsNode"
- ACTIONS_NODE = "ActionsNode"
- DATA_WAREHOUSE_NODE = "DataWarehouseNode"
- EVENTS_QUERY = "EventsQuery"
- PERSONS_NODE = "PersonsNode"
- HOG_QUERY = "HogQuery"
- HOG_QL_QUERY = "HogQLQuery"
- HOG_QL_METADATA = "HogQLMetadata"
- HOG_QL_AUTOCOMPLETE = "HogQLAutocomplete"
- ACTORS_QUERY = "ActorsQuery"
- FUNNELS_ACTORS_QUERY = "FunnelsActorsQuery"
- FUNNEL_CORRELATION_ACTORS_QUERY = "FunnelCorrelationActorsQuery"
- SESSIONS_TIMELINE_QUERY = "SessionsTimelineQuery"
- RECORDINGS_QUERY = "RecordingsQuery"
- SESSION_ATTRIBUTION_EXPLORER_QUERY = "SessionAttributionExplorerQuery"
- ERROR_TRACKING_QUERY = "ErrorTrackingQuery"
- DATA_TABLE_NODE = "DataTableNode"
- DATA_VISUALIZATION_NODE = "DataVisualizationNode"
- SAVED_INSIGHT_NODE = "SavedInsightNode"
- INSIGHT_VIZ_NODE = "InsightVizNode"
- TRENDS_QUERY = "TrendsQuery"
- FUNNELS_QUERY = "FunnelsQuery"
- RETENTION_QUERY = "RetentionQuery"
- PATHS_QUERY = "PathsQuery"
- STICKINESS_QUERY = "StickinessQuery"
- LIFECYCLE_QUERY = "LifecycleQuery"
- INSIGHT_ACTORS_QUERY = "InsightActorsQuery"
- INSIGHT_ACTORS_QUERY_OPTIONS = "InsightActorsQueryOptions"
- FUNNEL_CORRELATION_QUERY = "FunnelCorrelationQuery"
- WEB_OVERVIEW_QUERY = "WebOverviewQuery"
- WEB_STATS_TABLE_QUERY = "WebStatsTableQuery"
- WEB_EXTERNAL_CLICKS_TABLE_QUERY = "WebExternalClicksTableQuery"
- WEB_GOALS_QUERY = "WebGoalsQuery"
- EXPERIMENT_FUNNELS_QUERY = "ExperimentFunnelsQuery"
- EXPERIMENT_TRENDS_QUERY = "ExperimentTrendsQuery"
- DATABASE_SCHEMA_QUERY = "DatabaseSchemaQuery"
- SUGGESTED_QUESTIONS_QUERY = "SuggestedQuestionsQuery"
- TEAM_TAXONOMY_QUERY = "TeamTaxonomyQuery"
- EVENT_TAXONOMY_QUERY = "EventTaxonomyQuery"
- ACTORS_PROPERTY_TAXONOMY_QUERY = "ActorsPropertyTaxonomyQuery"
-
-
-class PathCleaningFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- alias: Optional[str] = None
- regex: Optional[str] = None
-
-
-class PathType(StrEnum):
- FIELD_PAGEVIEW = "$pageview"
- FIELD_SCREEN = "$screen"
- CUSTOM_EVENT = "custom_event"
- HOGQL = "hogql"
-
-
-class PathsFilterLegacy(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- edge_limit: Optional[int] = None
- end_point: Optional[str] = None
- exclude_events: Optional[list[str]] = None
- funnel_filter: Optional[dict[str, Any]] = None
- funnel_paths: Optional[FunnelPathType] = None
- include_event_types: Optional[list[PathType]] = None
- local_path_cleaning_filters: Optional[list[PathCleaningFilter]] = None
- max_edge_weight: Optional[int] = None
- min_edge_weight: Optional[int] = None
- path_groupings: Optional[list[str]] = None
- path_replacements: Optional[bool] = None
- path_type: Optional[PathType] = None
- paths_hogql_expression: Optional[str] = None
- start_point: Optional[str] = None
- step_limit: Optional[int] = None
-
-
-class PersonType(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- created_at: Optional[str] = None
- distinct_ids: list[str]
- id: Optional[str] = None
- is_identified: Optional[bool] = None
- name: Optional[str] = None
- properties: dict[str, Any]
- uuid: Optional[str] = None
-
-
-class PropertyFilterType(StrEnum):
- META = "meta"
- EVENT = "event"
- PERSON = "person"
- ELEMENT = "element"
- FEATURE = "feature"
- SESSION = "session"
- COHORT = "cohort"
- RECORDING = "recording"
- LOG_ENTRY = "log_entry"
- GROUP = "group"
- HOGQL = "hogql"
- DATA_WAREHOUSE = "data_warehouse"
- DATA_WAREHOUSE_PERSON_PROPERTY = "data_warehouse_person_property"
-
-
-class PropertyMathType(StrEnum):
- AVG = "avg"
- SUM = "sum"
- MIN = "min"
- MAX = "max"
- MEDIAN = "median"
- P90 = "p90"
- P95 = "p95"
- P99 = "p99"
-
-
-class PropertyOperator(StrEnum):
- EXACT = "exact"
- IS_NOT = "is_not"
- ICONTAINS = "icontains"
- NOT_ICONTAINS = "not_icontains"
- REGEX = "regex"
- NOT_REGEX = "not_regex"
- GT = "gt"
- GTE = "gte"
- LT = "lt"
- LTE = "lte"
- IS_SET = "is_set"
- IS_NOT_SET = "is_not_set"
- IS_DATE_EXACT = "is_date_exact"
- IS_DATE_BEFORE = "is_date_before"
- IS_DATE_AFTER = "is_date_after"
- BETWEEN = "between"
- NOT_BETWEEN = "not_between"
- MIN = "min"
- MAX = "max"
- IN_ = "in"
- NOT_IN = "not_in"
-
-
-class QueryResponseAlternative5(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- bytecode: Optional[list] = None
- coloredBytecode: Optional[list] = None
- results: Any
- stdout: Optional[str] = None
-
-
-class QueryResponseAlternative36(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- questions: list[str]
-
-
-class QueryTiming(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- k: str = Field(..., description="Key. Shortened to 'k' to save on data.")
- t: float = Field(..., description="Time in seconds. Shortened to 't' to save on data.")
-
-
-class ReasoningMessage(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- content: str
- id: Optional[str] = None
- substeps: Optional[list[str]] = None
- type: Literal["ai/reasoning"] = "ai/reasoning"
-
-
-class RecordingOrder(StrEnum):
- DURATION = "duration"
- RECORDING_DURATION = "recording_duration"
- INACTIVE_SECONDS = "inactive_seconds"
- ACTIVE_SECONDS = "active_seconds"
- START_TIME = "start_time"
- CONSOLE_ERROR_COUNT = "console_error_count"
- CLICK_COUNT = "click_count"
- KEYPRESS_COUNT = "keypress_count"
- MOUSE_ACTIVITY_COUNT = "mouse_activity_count"
- ACTIVITY_SCORE = "activity_score"
-
-
-class RecordingPropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- key: Union[DurationType, str]
- label: Optional[str] = None
- operator: PropertyOperator
- type: Literal["recording"] = "recording"
- value: Optional[Union[str, float, list[Union[str, float]]]] = None
-
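To illustrate the `key: Union[DurationType, str]` signature, a hedged sketch filtering recordings by duration (the 60-second threshold is an arbitrary example value):

    # Recordings longer than 60 seconds; `duration` comes from the DurationType enum above.
    long_recordings = RecordingPropertyFilter(
        key=DurationType.DURATION,
        operator=PropertyOperator.GT,
        value=60.0,
    )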
-
-class ResultCustomizationBase(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- color: DataColorToken
-
-
-class ResultCustomizationBy(StrEnum):
- VALUE = "value"
- POSITION = "position"
-
-
-class ResultCustomizationByPosition(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- assignmentBy: Literal["position"] = "position"
- color: DataColorToken
-
-
-class ResultCustomizationByValue(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- assignmentBy: Literal["value"] = "value"
- color: DataColorToken
-
-
-class RetentionEntityKind(StrEnum):
- ACTIONS_NODE = "ActionsNode"
- EVENTS_NODE = "EventsNode"
-
-
-class RetentionReference(StrEnum):
- TOTAL = "total"
- PREVIOUS = "previous"
-
-
-class RetentionPeriod(StrEnum):
- HOUR = "Hour"
- DAY = "Day"
- WEEK = "Week"
- MONTH = "Month"
-
-
-class RetentionType(StrEnum):
- RETENTION_RECURRING = "retention_recurring"
- RETENTION_FIRST_TIME = "retention_first_time"
-
-
-class RouterMessage(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- content: str
- id: Optional[str] = None
- type: Literal["ai/router"] = "ai/router"
-
-
-class SamplingRate(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- denominator: Optional[float] = None
- numerator: float
-
-
-class SessionAttributionGroupBy(StrEnum):
- CHANNEL_TYPE = "ChannelType"
- MEDIUM = "Medium"
- SOURCE = "Source"
- CAMPAIGN = "Campaign"
- AD_IDS = "AdIds"
- REFERRING_DOMAIN = "ReferringDomain"
- INITIAL_URL = "InitialURL"
-
-
-class SessionPropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- key: str
- label: Optional[str] = None
- operator: PropertyOperator
- type: Literal["session"] = "session"
- value: Optional[Union[str, float, list[Union[str, float]]]] = None
-
-
-class SnapshotSource(StrEnum):
- WEB = "web"
- MOBILE = "mobile"
- UNKNOWN = "unknown"
-
-
-class Storage(StrEnum):
- OBJECT_STORAGE_LTS = "object_storage_lts"
- OBJECT_STORAGE = "object_storage"
-
-
-class StepOrderValue(StrEnum):
- STRICT = "strict"
- UNORDERED = "unordered"
- ORDERED = "ordered"
-
-
-class StickinessFilterLegacy(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- compare: Optional[bool] = None
- compare_to: Optional[str] = None
- display: Optional[ChartDisplayType] = None
- hidden_legend_keys: Optional[dict[str, Union[bool, Any]]] = None
- show_legend: Optional[bool] = None
- show_values_on_series: Optional[bool] = None
-
-
-class StickinessOperator(StrEnum):
- GTE = "gte"
- LTE = "lte"
- EXACT = "exact"
-
-
-class SuggestedQuestionsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- questions: list[str]
-
-
-class TaxonomicFilterGroupType(StrEnum):
- METADATA = "metadata"
- ACTIONS = "actions"
- COHORTS = "cohorts"
- COHORTS_WITH_ALL = "cohorts_with_all"
- DATA_WAREHOUSE = "data_warehouse"
- DATA_WAREHOUSE_PROPERTIES = "data_warehouse_properties"
- DATA_WAREHOUSE_PERSON_PROPERTIES = "data_warehouse_person_properties"
- ELEMENTS = "elements"
- EVENTS = "events"
- EVENT_PROPERTIES = "event_properties"
- EVENT_FEATURE_FLAGS = "event_feature_flags"
- NUMERICAL_EVENT_PROPERTIES = "numerical_event_properties"
- PERSON_PROPERTIES = "person_properties"
- PAGEVIEW_URLS = "pageview_urls"
- SCREENS = "screens"
- CUSTOM_EVENTS = "custom_events"
- WILDCARD = "wildcard"
- GROUPS = "groups"
- PERSONS = "persons"
- FEATURE_FLAGS = "feature_flags"
- INSIGHTS = "insights"
- EXPERIMENTS = "experiments"
- PLUGINS = "plugins"
- DASHBOARDS = "dashboards"
- NAME_GROUPS = "name_groups"
- SESSION_PROPERTIES = "session_properties"
- HOGQL_EXPRESSION = "hogql_expression"
- NOTEBOOKS = "notebooks"
- LOG_ENTRIES = "log_entries"
- REPLAY = "replay"
-
-
-class TimelineEntry(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- events: list[EventType]
- recording_duration_s: Optional[float] = Field(default=None, description="Duration of the recording in seconds.")
- sessionId: Optional[str] = Field(default=None, description="Session ID. None means out-of-session events")
-
-
-class TrendsFilterLegacy(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- aggregation_axis_format: Optional[AggregationAxisFormat] = None
- aggregation_axis_postfix: Optional[str] = None
- aggregation_axis_prefix: Optional[str] = None
- breakdown_histogram_bin_count: Optional[float] = None
- compare: Optional[bool] = None
- compare_to: Optional[str] = None
- decimal_places: Optional[float] = None
- display: Optional[ChartDisplayType] = None
- formula: Optional[str] = None
- hidden_legend_keys: Optional[dict[str, Union[bool, Any]]] = None
- show_alert_threshold_lines: Optional[bool] = None
- show_labels_on_series: Optional[bool] = None
- show_legend: Optional[bool] = None
- show_percent_stack_view: Optional[bool] = None
- show_values_on_series: Optional[bool] = None
- smoothing_intervals: Optional[float] = None
- y_axis_scale_type: Optional[YAxisScaleType] = YAxisScaleType.LINEAR
-
-
-class ActionsPie(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- disableHoverOffset: Optional[bool] = None
- hideAggregation: Optional[bool] = None
-
-
-class RETENTION(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- hideLineGraph: Optional[bool] = None
- hideSizeColumn: Optional[bool] = None
- useSmallLayout: Optional[bool] = None
-
-
-class VizSpecificOptions(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- ActionsPie: Optional[ActionsPie] = None
- RETENTION: Optional[RETENTION] = None
-
-
-class Sampling(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- enabled: Optional[bool] = None
- forceSamplingRate: Optional[SamplingRate] = None
-
-
-class WebOverviewItemKind(StrEnum):
- UNIT = "unit"
- DURATION_S = "duration_s"
- PERCENTAGE = "percentage"
-
-
-class WebStatsBreakdown(StrEnum):
- PAGE = "Page"
- INITIAL_PAGE = "InitialPage"
- EXIT_PAGE = "ExitPage"
- EXIT_CLICK = "ExitClick"
- INITIAL_CHANNEL_TYPE = "InitialChannelType"
- INITIAL_REFERRING_DOMAIN = "InitialReferringDomain"
- INITIAL_UTM_SOURCE = "InitialUTMSource"
- INITIAL_UTM_CAMPAIGN = "InitialUTMCampaign"
- INITIAL_UTM_MEDIUM = "InitialUTMMedium"
- INITIAL_UTM_TERM = "InitialUTMTerm"
- INITIAL_UTM_CONTENT = "InitialUTMContent"
- INITIAL_UTM_SOURCE_MEDIUM_CAMPAIGN = "InitialUTMSourceMediumCampaign"
- BROWSER = "Browser"
- OS = "OS"
- VIEWPORT = "Viewport"
- DEVICE_TYPE = "DeviceType"
- COUNTRY = "Country"
- REGION = "Region"
- CITY = "City"
- TIMEZONE = "Timezone"
- LANGUAGE = "Language"
-
-
-class Scale(StrEnum):
- LINEAR = "linear"
- LOGARITHMIC = "logarithmic"
-
-
-class YAxisSettings(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- scale: Optional[Scale] = None
- startAtZero: Optional[bool] = Field(default=None, description="Whether the Y axis should start at zero")
-
-
-class Integer(RootModel[int]):
- root: int
-
-
-class ActionConversionGoal(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- actionId: int
-
-
-class ActorsPropertyTaxonomyResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- sample_count: int
- sample_values: list[Union[str, float, bool, int]]
-
-
-class AlertCondition(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- type: AlertConditionType
-
-
-class AssistantArrayPropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- operator: AssistantArrayPropertyFilterOperator = Field(
- ..., description="`exact` - exact match of any of the values. `is_not` - does not match any of the values."
- )
- value: list[str] = Field(
- ...,
- description=(
- "Only use property values from the plan. Always use strings as values. If you have a number, convert it to"
- ' a string first. If you have a boolean, convert it to a string "true" or "false".'
- ),
- )
-
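The value-coercion rule in the description above (always strings, booleans as "true"/"false") looks like this in practice; "exact" is one of the operator values named in the description, and the values themselves are placeholders:

    # Numbers and booleans are stringified before being passed as values.
    plan_filter = AssistantArrayPropertyFilter(
        operator="exact",          # validated into AssistantArrayPropertyFilterOperator
        value=["42", "true"],      # "42" not 42, "true" not True
    )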
-
-class AssistantBreakdownFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- breakdown_limit: Optional[int] = Field(default=25, description="How many distinct values to show.")
-
-
-class AssistantDateTimePropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- operator: AssistantDateTimePropertyFilterOperator
- value: str = Field(..., description="Value must be a date in ISO 8601 format.")
-
-
-class AssistantFunnelsBreakdownFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- breakdown: str = Field(..., description="The entity property to break down by.")
- breakdown_group_type_index: Optional[int] = Field(
- default=None,
- description=(
- "If `breakdown_type` is `group`, this is the index of the group. Use the index from the group mapping."
- ),
- )
- breakdown_limit: Optional[int] = Field(default=25, description="How many distinct values to show.")
- breakdown_type: Optional[AssistantFunnelsBreakdownType] = Field(
- default=AssistantFunnelsBreakdownType.EVENT,
- description=(
- "Type of the entity to break down by. If `group` is used, you must also provide"
- " `breakdown_group_type_index` from the group mapping."
- ),
- )
-
-
-class AssistantFunnelsExclusionEventsNode(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- event: str
- funnelFromStep: int
- funnelToStep: int
- kind: Literal["EventsNode"] = "EventsNode"
-
-
-class AssistantFunnelsFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- binCount: Optional[int] = Field(
- default=None,
- description=(
- "Use this setting only when `funnelVizType` is `time_to_convert`: number of bins to show in histogram."
- ),
- )
- exclusions: Optional[list[AssistantFunnelsExclusionEventsNode]] = Field(
- default=[],
- description=(
- "Users may want to use exclusion events to filter out conversions in which a particular event occurred"
- " between specific steps. These events must not be included in the main sequence. You must include start"
- " and end indexes for each exclusion where the minimum index is one and the maximum index is the number of"
- " steps in the funnel. For example, there is a sequence with three steps: sign up, finish onboarding,"
- " purchase. If the user wants to exclude all conversions in which users left the page before finishing the"
- " onboarding, the exclusion step would be the event `$pageleave` with start index 2 and end index 3."
- ),
- )
- funnelAggregateByHogQL: Literal["properties.$session_id"] = Field(
- default="properties.$session_id",
- description="Use this field only if the user explicitly asks to aggregate the funnel by unique sessions.",
- )
- funnelOrderType: Optional[StepOrderValue] = Field(
- default=StepOrderValue.ORDERED,
- description=(
- "Defines the behavior of event matching between steps. Prefer the `strict` option unless explicitly told to"
- " use a different one. `ordered` - defines a sequential funnel. Step B must happen after Step A, but any"
- " number of events can happen between A and B. `strict` - defines a funnel where all events must happen in"
- " order. Step B must happen directly after Step A without any events in between. `any` - order doesn't"
- " matter. Steps can be completed in any sequence."
- ),
- )
- funnelStepReference: Optional[FunnelStepReference] = Field(
- default=FunnelStepReference.TOTAL,
- description=(
- "Whether conversion shown in the graph should be across all steps or just relative to the previous step."
- ),
- )
- funnelVizType: Optional[FunnelVizType] = Field(
- default=FunnelVizType.STEPS,
- description=(
- "Defines the type of visualization to use. The `steps` option is recommended. `steps` - shows a"
- " step-by-step funnel. Perfect to show a conversion rate of a sequence of events (default)."
- " `time_to_convert` - shows a histogram of the time it took to complete the funnel. Use this if the user"
- " asks about the average time it takes to complete the funnel. `trends` - shows a trend of the whole"
- " sequence's conversion rate over time. Use this if the user wants to see how the conversion rate changes"
- " over time."
- ),
- )
- funnelWindowInterval: Optional[int] = Field(
- default=14,
- description=(
- "Controls a time frame value for a conversion to be considered. Select a reasonable value based on the"
- " user's query. Use in combination with `funnelWindowIntervalUnit`. The default value is 14 days."
- ),
- )
- funnelWindowIntervalUnit: Optional[FunnelConversionWindowTimeUnit] = Field(
- default=FunnelConversionWindowTimeUnit.DAY,
- description=(
- "Controls a time frame interval for a conversion to be considered. Select a reasonable value based on the"
- " user's query. Use in combination with `funnelWindowInterval`. The default value is 14 days."
- ),
- )
- layout: Optional[FunnelLayout] = Field(
- default=FunnelLayout.VERTICAL,
- description="Controls how the funnel chart is displayed: vertically (preferred) or horizontally.",
- )
-
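A sketch mirroring the exclusion example given in the `exclusions` description above (the event name and step indexes come from that description; the window values restate the documented defaults):

    # Exclude conversions where `$pageleave` happened between steps 2 and 3 of the funnel,
    # and only count conversions completed within 14 days.
    funnel_filter = AssistantFunnelsFilter(
        exclusions=[
            AssistantFunnelsExclusionEventsNode(event="$pageleave", funnelFromStep=2, funnelToStep=3),
        ],
        funnelWindowInterval=14,
        funnelWindowIntervalUnit=FunnelConversionWindowTimeUnit.DAY,
    )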
-
-class AssistantGenerationStatusEvent(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- type: AssistantGenerationStatusType
-
-
-class AssistantGenericPropertyFilter1(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- key: str = Field(..., description="Use one of the properties the user has provided in the plan.")
- operator: AssistantSingleValuePropertyFilterOperator = Field(
- ...,
- description=(
- "`icontains` - case insensitive contains. `not_icontains` - case insensitive does not contain. `regex` -"
- " matches the regex pattern. `not_regex` - does not match the regex pattern."
- ),
- )
- type: str
- value: str = Field(
- ...,
- description=(
- "Only use property values from the plan. If the operator is `regex` or `not_regex`, the value must be a"
- " valid ClickHouse regex pattern to match against. Otherwise, the value must be a substring that will be"
- " matched against the property value."
- ),
- )
-
-
-class AssistantGenericPropertyFilter4(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- key: str = Field(..., description="Use one of the properties the user has provided in the plan.")
- operator: AssistantSetPropertyFilterOperator = Field(
- ...,
- description=(
- "`is_set` - the property has any value. `is_not_set` - the property doesn't have a value or wasn't"
- " collected."
- ),
- )
- type: str
-
-
-class AssistantGroupMultipleBreakdownFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- group_type_index: Optional[int] = Field(default=None, description="Index of the group type from the group mapping.")
- property: str = Field(..., description="Property name from the plan to break down by.")
- type: Literal["group"] = "group"
-
-
-class AssistantGroupPropertyFilter1(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- group_type_index: int = Field(..., description="Index of the group type from the group mapping.")
- key: str = Field(..., description="Use one of the properties the user has provided in the plan.")
- operator: AssistantSingleValuePropertyFilterOperator = Field(
- ...,
- description=(
- "`icontains` - case insensitive contains. `not_icontains` - case insensitive does not contain. `regex` -"
- " matches the regex pattern. `not_regex` - does not match the regex pattern."
- ),
- )
- type: Literal["group"] = "group"
- value: str = Field(
- ...,
- description=(
- "Only use property values from the plan. If the operator is `regex` or `not_regex`, the value must be a"
- " valid ClickHouse regex pattern to match against. Otherwise, the value must be a substring that will be"
- " matched against the property value."
- ),
- )
-
-
-class AssistantGroupPropertyFilter2(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- group_type_index: int = Field(..., description="Index of the group type from the group mapping.")
- key: str = Field(..., description="Use one of the properties the user has provided in the plan.")
- operator: AssistantArrayPropertyFilterOperator = Field(
- ..., description="`exact` - exact match of any of the values. `is_not` - does not match any of the values."
- )
- type: Literal["group"] = "group"
- value: list[str] = Field(
- ...,
- description=(
- "Only use property values from the plan. Always use strings as values. If you have a number, convert it to"
- ' a string first. If you have a boolean, convert it to a string "true" or "false".'
- ),
- )
-
-
-class AssistantGroupPropertyFilter3(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- group_type_index: int = Field(..., description="Index of the group type from the group mapping.")
- key: str = Field(..., description="Use one of the properties the user has provided in the plan.")
- operator: AssistantDateTimePropertyFilterOperator
- type: Literal["group"] = "group"
- value: str = Field(..., description="Value must be a date in ISO 8601 format.")
-
-
-class AssistantGroupPropertyFilter4(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- group_type_index: int = Field(..., description="Index of the group type from the group mapping.")
- key: str = Field(..., description="Use one of the properties the user has provided in the plan.")
- operator: AssistantSetPropertyFilterOperator = Field(
- ...,
- description=(
- "`is_set` - the property has any value. `is_not_set` - the property doesn't have a value or wasn't"
- " collected."
- ),
- )
- type: Literal["group"] = "group"
-
-
-class AssistantSetPropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- operator: AssistantSetPropertyFilterOperator = Field(
- ...,
- description=(
- "`is_set` - the property has any value. `is_not_set` - the property doesn't have a value or wasn't"
- " collected."
- ),
- )
-
-
-class AssistantSingleValuePropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- operator: AssistantSingleValuePropertyFilterOperator = Field(
- ...,
- description=(
- "`icontains` - case insensitive contains. `not_icontains` - case insensitive does not contain. `regex` -"
- " matches the regex pattern. `not_regex` - does not match the regex pattern."
- ),
- )
- value: str = Field(
- ...,
- description=(
- "Only use property values from the plan. If the operator is `regex` or `not_regex`, the value must be a"
- " valid ClickHouse regex pattern to match against. Otherwise, the value must be a substring that will be"
- " matched against the property value."
- ),
- )
-
-
-class AssistantTrendsBreakdownFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- breakdown_limit: Optional[int] = Field(default=25, description="How many distinct values to show.")
- breakdowns: list[Union[AssistantGroupMultipleBreakdownFilter, AssistantGenericMultipleBreakdownFilter]] = Field(
- ..., description="Use this field to define breakdowns.", max_length=3
- )
-
-
-class AutocompleteCompletionItem(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- detail: Optional[str] = Field(
- default=None,
- description=(
- "A human-readable string with additional information about this item, like type or symbol information."
- ),
- )
- documentation: Optional[str] = Field(
- default=None, description="A human-readable string that represents a doc-comment."
- )
- insertText: str = Field(
- ..., description="A string or snippet that should be inserted in a document when selecting this completion."
- )
- kind: AutocompleteCompletionItemKind = Field(
- ..., description="The kind of this completion item. Based on the kind an icon is chosen by the editor."
- )
- label: str = Field(
- ...,
- description=(
- "The label of this completion item. By default this is also the text that is inserted when selecting this"
- " completion."
- ),
- )
-
-
-class Breakdown(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- group_type_index: Optional[int] = None
- histogram_bin_count: Optional[int] = None
- normalize_url: Optional[bool] = None
- property: str
- type: Optional[MultipleBreakdownType] = None
-
-
-class BreakdownFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- breakdown: Optional[Union[str, list[Union[str, int]], int]] = None
- breakdown_group_type_index: Optional[int] = None
- breakdown_hide_other_aggregation: Optional[bool] = None
- breakdown_histogram_bin_count: Optional[int] = None
- breakdown_limit: Optional[int] = None
- breakdown_normalize_url: Optional[bool] = None
- breakdown_type: Optional[BreakdownType] = BreakdownType.EVENT
- breakdowns: Optional[list[Breakdown]] = Field(default=None, max_length=3)
-
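Since `breakdowns` is capped at three entries (`max_length=3`), multi-breakdown queries are built from small `Breakdown` items; a sketch with placeholder property names:

    browser_and_email = BreakdownFilter(
        breakdowns=[
            Breakdown(property="$browser", type=MultipleBreakdownType.EVENT),
            Breakdown(property="email", type=MultipleBreakdownType.PERSON),
        ]
    )
    # A fourth Breakdown in the list would fail validation because of max_length=3.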
-
-class IntervalItem(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- label: str
- value: int = Field(..., description="An interval selected out of available intervals in source query")
-
-
-class Series(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- label: str
- value: int
-
-
-class Settings(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- display: Optional[ChartSettingsDisplay] = None
- formatting: Optional[ChartSettingsFormatting] = None
-
-
-class ChartAxis(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- column: str
- settings: Optional[Settings] = None
-
-
-class ChartSettings(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- goalLines: Optional[list[GoalLine]] = None
- leftYAxisSettings: Optional[YAxisSettings] = None
- rightYAxisSettings: Optional[YAxisSettings] = None
- seriesBreakdownColumn: Optional[str] = None
- stackBars100: Optional[bool] = Field(default=None, description="Whether we fill the bars to 100% in stacked mode")
- xAxis: Optional[ChartAxis] = None
- yAxis: Optional[list[ChartAxis]] = None
- yAxisAtZero: Optional[bool] = Field(
- default=None, description="Deprecated: use `[left|right]YAxisSettings`. Whether the Y axis should start at zero"
- )
-
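Per the deprecation note on `yAxisAtZero`, new settings are expected to go through the per-axis `YAxisSettings` objects instead; an illustrative sketch (the goal-line label and value are placeholders):

    settings = ChartSettings(
        leftYAxisSettings=YAxisSettings(scale=Scale.LOGARITHMIC, startAtZero=True),
        goalLines=[GoalLine(label="Target", value=100.0)],
    )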
-
-class ClickhouseQueryProgress(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- active_cpu_time: int
- bytes_read: int
- estimated_rows_total: int
- rows_read: int
- time_elapsed: int
-
-
-class CohortPropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- key: Literal["id"] = "id"
- label: Optional[str] = None
- operator: Optional[PropertyOperator] = PropertyOperator.IN_
- type: Literal["cohort"] = "cohort"
- value: int
-
-
-class CustomChannelCondition(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- id: str
- key: CustomChannelField
- op: CustomChannelOperator
- value: Optional[Union[str, list[str]]] = None
-
-
-class CustomChannelRule(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- channel_type: str
- combiner: FilterLogicalOperator
- id: str
- items: list[CustomChannelCondition]
-
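A hedged example of wiring a `CustomChannelCondition` into a `CustomChannelRule` (the ids and the `utm_source` value are placeholders; the channel type reuses a `DefaultChannelTypes` member, though any string is accepted):

    # Sessions whose utm_source equals "newsletter" are attributed to the Email channel.
    rule = CustomChannelRule(
        id="rule-1",
        channel_type=DefaultChannelTypes.EMAIL,
        combiner=FilterLogicalOperator.AND_,
        items=[
            CustomChannelCondition(
                id="condition-1",
                key=CustomChannelField.UTM_SOURCE,
                op=CustomChannelOperator.EXACT,
                value="newsletter",
            ),
        ],
    )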
-
-class DataWarehousePersonPropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- key: str
- label: Optional[str] = None
- operator: PropertyOperator
- type: Literal["data_warehouse_person_property"] = "data_warehouse_person_property"
- value: Optional[Union[str, float, list[Union[str, float]]]] = None
-
-
-class DataWarehousePropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- key: str
- label: Optional[str] = None
- operator: PropertyOperator
- type: Literal["data_warehouse"] = "data_warehouse"
- value: Optional[Union[str, float, list[Union[str, float]]]] = None
-
-
-class DatabaseSchemaField(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- chain: Optional[list[Union[str, int]]] = None
- fields: Optional[list[str]] = None
- hogql_value: str
- id: Optional[str] = None
- name: str
- schema_valid: bool
- table: Optional[str] = None
- type: DatabaseSerializedFieldType
-
-
-class DatabaseSchemaPostHogTable(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- fields: dict[str, DatabaseSchemaField]
- id: str
- name: str
- type: Literal["posthog"] = "posthog"
-
-
-class DatabaseSchemaTableCommon(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- fields: dict[str, DatabaseSchemaField]
- id: str
- name: str
- type: Type
-
-
-class Day(RootModel[int]):
- root: int
-
-
-class ElementPropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- key: Key
- label: Optional[str] = None
- operator: PropertyOperator
- type: Literal["element"] = "element"
- value: Optional[Union[str, float, list[Union[str, float]]]] = None
-
-
-class EventOddsRatioSerialized(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- correlation_type: CorrelationType
- event: EventDefinition
- failure_count: int
- odds_ratio: float
- success_count: int
-
-
-class EventPropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- key: str
- label: Optional[str] = None
- operator: Optional[PropertyOperator] = PropertyOperator.EXACT
- type: Literal["event"] = Field(default="event", description="Event properties")
- value: Optional[Union[str, float, list[Union[str, float]]]] = None
-
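Because `operator` defaults to `exact` and `type` defaults to "event", the common case needs only a key and a value; a sketch with illustrative property values:

    chrome_only = EventPropertyFilter(key="$browser", value="Chrome")
    pricing_pages = EventPropertyFilter(
        key="$current_url",
        operator=PropertyOperator.ICONTAINS,
        value="/pricing",
    )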
-
-class EventTaxonomyItem(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- property: str
- sample_count: int
- sample_values: list[str]
-
-
-class FeaturePropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- key: str
- label: Optional[str] = None
- operator: PropertyOperator
- type: Literal["feature"] = Field(default="feature", description='Event property with "$feature/" prepended')
- value: Optional[Union[str, float, list[Union[str, float]]]] = None
-
-
-class FunnelCorrelationResult(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- events: list[EventOddsRatioSerialized]
- skewed: bool
-
-
-class FunnelExclusionSteps(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- funnelFromStep: int
- funnelToStep: int
-
-
-class FunnelsFilterLegacy(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- bin_count: Optional[Union[float, str]] = None
- breakdown_attribution_type: Optional[BreakdownAttributionType] = None
- breakdown_attribution_value: Optional[float] = None
- exclusions: Optional[list[FunnelExclusionLegacy]] = None
- funnel_aggregate_by_hogql: Optional[str] = None
- funnel_from_step: Optional[float] = None
- funnel_order_type: Optional[StepOrderValue] = None
- funnel_step_reference: Optional[FunnelStepReference] = None
- funnel_to_step: Optional[float] = None
- funnel_viz_type: Optional[FunnelVizType] = None
- funnel_window_interval: Optional[float] = None
- funnel_window_interval_unit: Optional[FunnelConversionWindowTimeUnit] = None
- hidden_legend_keys: Optional[dict[str, Union[bool, Any]]] = None
- layout: Optional[FunnelLayout] = None
-
-
-class GroupPropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- group_type_index: Optional[int] = None
- key: str
- label: Optional[str] = None
- operator: PropertyOperator
- type: Literal["group"] = "group"
- value: Optional[Union[str, float, list[Union[str, float]]]] = None
-
-
-class HogQLAutocompleteResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- incomplete_list: bool = Field(..., description="Whether or not the suggestions returned are complete")
- suggestions: list[AutocompleteCompletionItem]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class HogQLNotice(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- end: Optional[int] = None
- fix: Optional[str] = None
- message: str
- start: Optional[int] = None
-
-
-class HogQLPropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- key: str
- label: Optional[str] = None
- type: Literal["hogql"] = "hogql"
- value: Optional[Union[str, float, list[Union[str, float]]]] = None
-
-
-class HogQLQueryModifiers(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- bounceRateDurationSeconds: Optional[float] = None
- bounceRatePageViewMode: Optional[BounceRatePageViewMode] = None
- customChannelTypeRules: Optional[list[CustomChannelRule]] = None
- dataWarehouseEventsModifiers: Optional[list[DataWarehouseEventsModifier]] = None
- debug: Optional[bool] = None
- inCohortVia: Optional[InCohortVia] = None
- materializationMode: Optional[MaterializationMode] = None
- optimizeJoinedFilters: Optional[bool] = None
- personsArgMaxVersion: Optional[PersonsArgMaxVersion] = None
- personsJoinMode: Optional[PersonsJoinMode] = None
- personsOnEventsMode: Optional[PersonsOnEventsMode] = None
- propertyGroupsMode: Optional[PropertyGroupsMode] = None
- s3TableUseInvalidColumns: Optional[bool] = None
- sessionTableVersion: Optional[SessionTableVersion] = None
- useMaterializedViews: Optional[bool] = None
-
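Every field on `HogQLQueryModifiers` is optional, so callers only set the behaviours they want to override; a sketch using the enums defined above:

    modifiers = HogQLQueryModifiers(
        personsOnEventsMode=PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS,
        bounceRatePageViewMode=BounceRatePageViewMode.COUNT_PAGEVIEWS,
        sessionTableVersion=SessionTableVersion.V2,
    )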
-
-class HogQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- code: Optional[str] = None
- kind: Literal["HogQuery"] = "HogQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- response: Optional[HogQueryResponse] = None
-
-
-class DayItem(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- label: str
- value: Union[str, AwareDatetime, int]
-
-
-class InsightThreshold(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- bounds: Optional[InsightsThresholdBounds] = None
- type: InsightThresholdType
-
-
-class LifecycleFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- showLegend: Optional[bool] = False
- showValuesOnSeries: Optional[bool] = None
- toggledLifecycles: Optional[list[LifecycleToggle]] = None
-
-
-class LifecycleFilterLegacy(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- show_legend: Optional[bool] = None
- show_values_on_series: Optional[bool] = None
- toggledLifecycles: Optional[list[LifecycleToggle]] = None
-
-
-class LogEntryPropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- key: str
- label: Optional[str] = None
- operator: PropertyOperator
- type: Literal["log_entry"] = "log_entry"
- value: Optional[Union[str, float, list[Union[str, float]]]] = None
-
-
-class MatchedRecording(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- events: list[MatchedRecordingEvent]
- session_id: Optional[str] = None
-
-
-class PathsFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- edgeLimit: Optional[int] = 50
- endPoint: Optional[str] = None
- excludeEvents: Optional[list[str]] = None
- includeEventTypes: Optional[list[PathType]] = None
- localPathCleaningFilters: Optional[list[PathCleaningFilter]] = None
- maxEdgeWeight: Optional[int] = None
- minEdgeWeight: Optional[int] = None
- pathDropoffKey: Optional[str] = Field(default=None, description="Relevant only within actors query")
- pathEndKey: Optional[str] = Field(default=None, description="Relevant only within actors query")
- pathGroupings: Optional[list[str]] = None
- pathReplacements: Optional[bool] = None
- pathStartKey: Optional[str] = Field(default=None, description="Relevant only within actors query")
- pathsHogQLExpression: Optional[str] = None
- startPoint: Optional[str] = None
- stepLimit: Optional[int] = 5
-
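A sketch of a paths configuration built from the fields above (the start URL is a placeholder; `stepLimit=5` simply restates the declared default):

    pageview_paths = PathsFilter(
        includeEventTypes=[PathType.FIELD_PAGEVIEW],
        startPoint="/pricing",
        stepLimit=5,
    )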
-
-class PersonPropertyFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- key: str
- label: Optional[str] = None
- operator: PropertyOperator
- type: Literal["person"] = Field(default="person", description="Person properties")
- value: Optional[Union[str, float, list[Union[str, float]]]] = None
-
-
-class QueryResponseAlternative7(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- errors: list[HogQLNotice]
- isValid: Optional[bool] = None
- isValidView: Optional[bool] = None
- notices: list[HogQLNotice]
- query: Optional[str] = None
- table_names: Optional[list[str]] = None
- warnings: list[HogQLNotice]
-
-
-class QueryResponseAlternative8(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- incomplete_list: bool = Field(..., description="Whether or not the suggestions returned are complete")
- suggestions: list[AutocompleteCompletionItem]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class QueryStatus(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- complete: Optional[bool] = Field(
- default=False,
- description=(
- "Whether the query is still running. Will be true if the query is complete, even if it errored. Either"
- " result or error will be set."
- ),
- )
- dashboard_id: Optional[int] = None
- end_time: Optional[AwareDatetime] = Field(
- default=None, description="When did the query execution task finish (whether successfully or not)."
- )
- error: Optional[bool] = Field(
- default=False,
- description=(
- "If the query failed, this will be set to true. More information can be found in the error_message field."
- ),
- )
- error_message: Optional[str] = None
- expiration_time: Optional[AwareDatetime] = None
- id: str
- insight_id: Optional[int] = None
- labels: Optional[list[str]] = None
- pickup_time: Optional[AwareDatetime] = Field(
- default=None, description="When was the query execution task picked up by a worker."
- )
- query_async: Literal[True] = Field(default=True, description="ONLY async queries use QueryStatus.")
- query_progress: Optional[ClickhouseQueryProgress] = None
- results: Optional[Any] = None
- start_time: Optional[AwareDatetime] = Field(default=None, description="When was query execution task enqueued.")
- task_id: Optional[str] = None
- team_id: int
-
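As the field descriptions note, `complete` is set even when the query errored, so both flags have to be checked before trusting `results`; a hedged sketch with placeholder ids and error text:

    status = QueryStatus(id="q-123", team_id=1, complete=True, error=True, error_message="query timed out")
    if status.complete and not status.error:
        rows = status.results
    elif status.error:
        raise RuntimeError(status.error_message)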
-
-class QueryStatusResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- query_status: QueryStatus
-
-
-class ResultCustomization(RootModel[Union[ResultCustomizationByValue, ResultCustomizationByPosition]]):
- root: Union[ResultCustomizationByValue, ResultCustomizationByPosition]
-
-
-class RetentionEntity(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- custom_name: Optional[str] = None
- id: Optional[Union[str, float]] = None
- kind: Optional[RetentionEntityKind] = None
- name: Optional[str] = None
- order: Optional[int] = None
- type: Optional[EntityType] = None
- uuid: Optional[str] = None
-
-
-class RetentionFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cumulative: Optional[bool] = None
- period: Optional[RetentionPeriod] = RetentionPeriod.DAY
- retentionReference: Optional[RetentionReference] = None
- retentionType: Optional[RetentionType] = None
- returningEntity: Optional[RetentionEntity] = None
- showMean: Optional[bool] = None
- targetEntity: Optional[RetentionEntity] = None
- totalIntervals: Optional[int] = 11
-
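An illustrative weekly retention setup built from the models above (using `$pageview` as both the target and returning event is an example choice, not a requirement):

    weekly_retention = RetentionFilter(
        period=RetentionPeriod.WEEK,
        totalIntervals=8,
        targetEntity=RetentionEntity(id="$pageview", kind=RetentionEntityKind.EVENTS_NODE),
        returningEntity=RetentionEntity(id="$pageview", kind=RetentionEntityKind.EVENTS_NODE),
    )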
-
-class RetentionFilterLegacy(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cumulative: Optional[bool] = None
- period: Optional[RetentionPeriod] = None
- retention_reference: Optional[RetentionReference] = None
- retention_type: Optional[RetentionType] = None
- returning_entity: Optional[RetentionEntity] = None
- show_mean: Optional[bool] = None
- target_entity: Optional[RetentionEntity] = None
- total_intervals: Optional[int] = None
-
-
-class RetentionValue(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- count: int
-
-
-class SavedInsightNode(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- allowSorting: Optional[bool] = Field(
- default=None, description="Can the user click on column headers to sort the table? (default: true)"
- )
- embedded: Optional[bool] = Field(default=None, description="Query is embedded inside another bordered component")
- expandable: Optional[bool] = Field(
- default=None, description="Can expand row to show raw event data (default: true)"
- )
- full: Optional[bool] = Field(
- default=None, description="Show with most visual options enabled. Used in insight scene."
- )
- hidePersonsModal: Optional[bool] = None
- kind: Literal["SavedInsightNode"] = "SavedInsightNode"
- propertiesViaUrl: Optional[bool] = Field(default=None, description="Link properties via the URL (default: false)")
- shortId: str
- showActions: Optional[bool] = Field(default=None, description="Show the kebab menu at the end of the row")
- showColumnConfigurator: Optional[bool] = Field(
- default=None, description="Show a button to configure the table's columns if possible"
- )
- showCorrelationTable: Optional[bool] = None
- showDateRange: Optional[bool] = Field(default=None, description="Show date range selector")
- showElapsedTime: Optional[bool] = Field(default=None, description="Show the time it takes to run a query")
- showEventFilter: Optional[bool] = Field(
- default=None, description="Include an event filter above the table (EventsNode only)"
- )
- showExport: Optional[bool] = Field(default=None, description="Show the export button")
- showFilters: Optional[bool] = None
- showHeader: Optional[bool] = None
- showHogQLEditor: Optional[bool] = Field(default=None, description="Include a HogQL query editor above HogQL tables")
- showLastComputation: Optional[bool] = None
- showLastComputationRefresh: Optional[bool] = None
- showOpenEditorButton: Optional[bool] = Field(
- default=None, description="Show a button to open the current query as a new insight. (default: true)"
- )
- showPersistentColumnConfigurator: Optional[bool] = Field(
- default=None, description="Show a button to configure and persist the table's default columns if possible"
- )
- showPropertyFilter: Optional[Union[bool, list[TaxonomicFilterGroupType]]] = Field(
- default=None, description="Include a property filter above the table"
- )
- showReload: Optional[bool] = Field(default=None, description="Show a reload button")
- showResults: Optional[bool] = None
- showResultsTable: Optional[bool] = Field(default=None, description="Show a results table")
- showSavedQueries: Optional[bool] = Field(default=None, description="Shows a list of saved queries")
- showSearch: Optional[bool] = Field(default=None, description="Include a free text search field (PersonsNode only)")
- showTable: Optional[bool] = None
- showTestAccountFilters: Optional[bool] = Field(default=None, description="Show filter to exclude test accounts")
- showTimings: Optional[bool] = Field(default=None, description="Show a detailed query timing breakdown")
- suppressSessionAnalysisWarning: Optional[bool] = None
- vizSpecificOptions: Optional[VizSpecificOptions] = None
-
-
-class Filters(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- dateRange: Optional[DateRange] = None
- properties: Optional[list[SessionPropertyFilter]] = None
-
-
-class SessionAttributionExplorerQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: Any
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class SessionRecordingType(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- active_seconds: Optional[float] = None
- activity_score: Optional[float] = Field(
- default=None, description="calculated on the backend so that we can sort by it, definition may change over time"
- )
- click_count: Optional[float] = None
- console_error_count: Optional[float] = None
- console_log_count: Optional[float] = None
- console_warn_count: Optional[float] = None
- distinct_id: Optional[str] = None
- email: Optional[str] = None
- end_time: str = Field(..., description="When the recording ends in ISO format.")
- id: str
- inactive_seconds: Optional[float] = None
- keypress_count: Optional[float] = None
-    matching_events: Optional[list[MatchedRecording]] = Field(default=None, description="List of matching events.")
- mouse_activity_count: Optional[float] = Field(
- default=None, description="count of all mouse activity in the recording, not just clicks"
- )
-    ongoing: Optional[bool] = Field(
-        default=None,
-        description=(
-            "whether we have received data for this recording in the last 5 minutes (assumes the recording was loaded"
-            " from ClickHouse)"
-        ),
-    )
- person: Optional[PersonType] = None
- recording_duration: float = Field(..., description="Length of recording in seconds.")
- snapshot_source: SnapshotSource
- start_time: str = Field(..., description="When the recording starts in ISO format.")
- start_url: Optional[str] = None
- storage: Optional[Storage] = Field(default=None, description="Where this recording information was loaded from")
- summary: Optional[str] = None
- viewed: bool = Field(..., description="Whether this recording has been viewed already.")
-
-
-class SessionsTimelineQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[TimelineEntry]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class StickinessCriteria(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- operator: StickinessOperator
- value: int
-
-
-class StickinessFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- display: Optional[ChartDisplayType] = None
- hiddenLegendIndexes: Optional[list[int]] = None
- showLegend: Optional[bool] = None
- showValuesOnSeries: Optional[bool] = None
- stickinessCriteria: Optional[StickinessCriteria] = None
-
-
-class StickinessQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[dict[str, Any]]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class SuggestedQuestionsQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- kind: Literal["SuggestedQuestionsQuery"] = "SuggestedQuestionsQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- response: Optional[SuggestedQuestionsQueryResponse] = None
-
-
-class TableSettings(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list[ChartAxis]] = None
- conditionalFormatting: Optional[list[ConditionalFormattingRule]] = None
-
-
-class TeamTaxonomyItem(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- count: int
- event: str
-
-
-class TestBasicQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class TestCachedBasicQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class TrendsAlertConfig(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- check_ongoing_interval: Optional[bool] = None
- series_index: int
- type: Literal["TrendsAlertConfig"] = "TrendsAlertConfig"
-
-
-class TrendsFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- aggregationAxisFormat: Optional[AggregationAxisFormat] = AggregationAxisFormat.NUMERIC
- aggregationAxisPostfix: Optional[str] = None
- aggregationAxisPrefix: Optional[str] = None
- breakdown_histogram_bin_count: Optional[float] = None
- decimalPlaces: Optional[float] = None
- display: Optional[ChartDisplayType] = ChartDisplayType.ACTIONS_LINE_GRAPH
- formula: Optional[str] = None
- hiddenLegendIndexes: Optional[list[int]] = None
- resultCustomizationBy: Optional[ResultCustomizationBy] = Field(
- default=ResultCustomizationBy.VALUE,
-        description="Whether result datasets are associated by their values or by their order.",
- )
- resultCustomizations: Optional[
- Union[dict[str, ResultCustomizationByValue], dict[str, ResultCustomizationByPosition]]
- ] = Field(default=None, description="Customizations for the appearance of result datasets.")
- showAlertThresholdLines: Optional[bool] = False
- showLabelsOnSeries: Optional[bool] = None
- showLegend: Optional[bool] = False
- showPercentStackView: Optional[bool] = False
- showValuesOnSeries: Optional[bool] = False
- smoothingIntervals: Optional[int] = 1
- yAxisScaleType: Optional[YAxisScaleType] = YAxisScaleType.LINEAR
-
-
-class TrendsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
-    hasMore: Optional[bool] = Field(default=None, description="Whether more breakdown values are available.")
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[dict[str, Any]]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class WebExternalClicksTableQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list
- samplingRate: Optional[SamplingRate] = None
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class WebGoalsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list
- samplingRate: Optional[SamplingRate] = None
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class WebOverviewItem(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- changeFromPreviousPct: Optional[float] = None
- isIncreaseBad: Optional[bool] = None
- key: str
- kind: WebOverviewItemKind
- previous: Optional[float] = None
- value: Optional[float] = None
-
-
-class WebOverviewQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- dateFrom: Optional[str] = None
- dateTo: Optional[str] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[WebOverviewItem]
- samplingRate: Optional[SamplingRate] = None
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class WebStatsTableQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list
- samplingRate: Optional[SamplingRate] = None
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class ActorsPropertyTaxonomyQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: ActorsPropertyTaxonomyResponse
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class ActorsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: list
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: str = Field(..., description="Generated HogQL query.")
- limit: int
- missing_actors_count: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: int
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[list]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: list[str]
-
-
-class AssistantBasePropertyFilter(
- RootModel[
- Union[
- AssistantDateTimePropertyFilter,
- AssistantSetPropertyFilter,
- Union[AssistantSingleValuePropertyFilter, AssistantArrayPropertyFilter],
- ]
- ]
-):
- root: Union[
- AssistantDateTimePropertyFilter,
- AssistantSetPropertyFilter,
- Union[AssistantSingleValuePropertyFilter, AssistantArrayPropertyFilter],
- ]
-
-
-class AssistantFunnelsEventsNode(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- custom_name: Optional[str] = Field(
-        default=None, description="Optional custom name for the event if it needs to be renamed."
- )
- event: str = Field(..., description="Name of the event.")
- kind: Literal["EventsNode"] = "EventsNode"
- math: Optional[AssistantTrendsMath] = Field(
- default=None,
- description=(
- "Optional math aggregation type for the series. Only specify this math type if the user wants one of these."
- " `first_time_for_user` - counts the number of users who have completed the event for the first time ever."
- " `first_time_for_user_with_filters` - counts the number of users who have completed the event with"
- " specified filters for the first time."
- ),
- )
- properties: Optional[
- list[
- Union[
- Union[
- AssistantGenericPropertyFilter1,
- AssistantGenericPropertyFilter2,
- AssistantGenericPropertyFilter3,
- AssistantGenericPropertyFilter4,
- ],
- Union[
- AssistantGroupPropertyFilter1,
- AssistantGroupPropertyFilter2,
- AssistantGroupPropertyFilter3,
- AssistantGroupPropertyFilter4,
- ],
- ]
- ]
- ] = None
- response: Optional[dict[str, Any]] = None
-
-
-class AssistantFunnelsQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- aggregation_group_type_index: Optional[int] = Field(
- default=None,
- description=(
- "Use this field to define the aggregation by a specific group from the group mapping that the user has"
- " provided."
- ),
- )
- breakdownFilter: Optional[AssistantFunnelsBreakdownFilter] = Field(
- default=None, description="Breakdown the chart by a property"
- )
- dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
- filterTestAccounts: Optional[bool] = Field(
- default=False, description="Exclude internal and test users by applying the respective filters"
- )
- funnelsFilter: Optional[AssistantFunnelsFilter] = Field(
- default=None, description="Properties specific to the funnels insight"
- )
- interval: Optional[IntervalType] = Field(
- default=None, description="Granularity of the response. Can be one of `hour`, `day`, `week` or `month`"
- )
- kind: Literal["FunnelsQuery"] = "FunnelsQuery"
- properties: Optional[
- list[
- Union[
- Union[
- AssistantGenericPropertyFilter1,
- AssistantGenericPropertyFilter2,
- AssistantGenericPropertyFilter3,
- AssistantGenericPropertyFilter4,
- ],
- Union[
- AssistantGroupPropertyFilter1,
- AssistantGroupPropertyFilter2,
- AssistantGroupPropertyFilter3,
- AssistantGroupPropertyFilter4,
- ],
- ]
- ]
- ] = Field(default=[], description="Property filters for all series")
- samplingFactor: Optional[float] = Field(
- default=None, description="Sampling rate from 0 to 1 where 1 is 100% of the data."
- )
- series: list[AssistantFunnelsEventsNode] = Field(..., description="Events to include")
-
-
-class AssistantInsightsQueryBase(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
- filterTestAccounts: Optional[bool] = Field(
- default=False, description="Exclude internal and test users by applying the respective filters"
- )
- properties: Optional[
- list[
- Union[
- Union[
- AssistantGenericPropertyFilter1,
- AssistantGenericPropertyFilter2,
- AssistantGenericPropertyFilter3,
- AssistantGenericPropertyFilter4,
- ],
- Union[
- AssistantGroupPropertyFilter1,
- AssistantGroupPropertyFilter2,
- AssistantGroupPropertyFilter3,
- AssistantGroupPropertyFilter4,
- ],
- ]
- ]
- ] = Field(default=[], description="Property filters for all series")
- samplingFactor: Optional[float] = Field(
- default=None, description="Sampling rate from 0 to 1 where 1 is 100% of the data."
- )
-
-
-class AssistantTrendsEventsNode(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- custom_name: Optional[str] = None
- event: Optional[str] = Field(default=None, description="The event or `null` for all events.")
- kind: Literal["EventsNode"] = "EventsNode"
- math: Optional[
- Union[
- BaseMathType,
- FunnelMathType,
- PropertyMathType,
- CountPerActorMathType,
- Literal["unique_group"],
- Literal["hogql"],
- ]
- ] = None
- math_group_type_index: Optional[MathGroupTypeIndex] = None
- math_property: Optional[str] = None
- math_property_type: Optional[str] = None
- name: Optional[str] = None
- orderBy: Optional[list[str]] = Field(default=None, description="Columns to order by")
- properties: Optional[
- list[
- Union[
- Union[
- AssistantGenericPropertyFilter1,
- AssistantGenericPropertyFilter2,
- AssistantGenericPropertyFilter3,
- AssistantGenericPropertyFilter4,
- ],
- Union[
- AssistantGroupPropertyFilter1,
- AssistantGroupPropertyFilter2,
- AssistantGroupPropertyFilter3,
- AssistantGroupPropertyFilter4,
- ],
- ]
- ]
- ] = None
- response: Optional[dict[str, Any]] = None
-
-
-class AssistantTrendsQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- breakdownFilter: Optional[AssistantTrendsBreakdownFilter] = Field(
- default=None, description="Breakdown of the events"
- )
- compareFilter: Optional[CompareFilter] = Field(default=None, description="Compare to date range")
- dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
- filterTestAccounts: Optional[bool] = Field(
- default=False, description="Exclude internal and test users by applying the respective filters"
- )
- interval: Optional[IntervalType] = Field(
- default=IntervalType.DAY,
- description="Granularity of the response. Can be one of `hour`, `day`, `week` or `month`",
- )
- kind: Literal["TrendsQuery"] = "TrendsQuery"
- properties: Optional[
- list[
- Union[
- Union[
- AssistantGenericPropertyFilter1,
- AssistantGenericPropertyFilter2,
- AssistantGenericPropertyFilter3,
- AssistantGenericPropertyFilter4,
- ],
- Union[
- AssistantGroupPropertyFilter1,
- AssistantGroupPropertyFilter2,
- AssistantGroupPropertyFilter3,
- AssistantGroupPropertyFilter4,
- ],
- ]
- ]
- ] = Field(default=[], description="Property filters for all series")
- samplingFactor: Optional[float] = Field(
- default=None, description="Sampling rate from 0 to 1 where 1 is 100% of the data."
- )
- series: list[AssistantTrendsEventsNode] = Field(..., description="Events to include")
- trendsFilter: Optional[AssistantTrendsFilter] = Field(
- default=None, description="Properties specific to the trends insight"
- )
-
-
-class BreakdownItem(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- label: str
- value: Union[str, int]
-
-
-class CacheMissResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: Optional[str] = None
- query_status: Optional[QueryStatus] = None
-
-
-class CachedActorsPropertyTaxonomyQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: ActorsPropertyTaxonomyResponse
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class CachedActorsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- columns: list
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: str = Field(..., description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- limit: int
- missing_actors_count: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- offset: int
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[list]
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: list[str]
-
-
-class CachedErrorTrackingQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- columns: Optional[list[str]] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[ErrorTrackingIssue]
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class CachedEventTaxonomyQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[EventTaxonomyItem]
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class CachedEventsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- columns: list
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: str = Field(..., description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[list]
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: list[str]
-
-
-class CachedFunnelCorrelationResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: FunnelCorrelationResult
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class CachedFunnelsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- isUdf: Optional[bool] = None
- is_cached: bool
- last_refresh: AwareDatetime
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: Union[FunnelTimeToConvertResults, list[dict[str, Any]], list[list[dict[str, Any]]]]
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class CachedLifecycleQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[dict[str, Any]]
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class CachedPathsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[dict[str, Any]]
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class CachedSessionAttributionExplorerQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: Any
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class CachedSessionsTimelineQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[TimelineEntry]
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class CachedStickinessQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[dict[str, Any]]
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class CachedSuggestedQuestionsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- is_cached: bool
- last_refresh: AwareDatetime
- next_allowed_client_refresh: AwareDatetime
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- questions: list[str]
- timezone: str
-
-
-class CachedTeamTaxonomyQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[TeamTaxonomyItem]
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class CachedTrendsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
-    hasMore: Optional[bool] = Field(default=None, description="Whether more breakdown values are available.")
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[dict[str, Any]]
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class CachedWebExternalClicksTableQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list
- samplingRate: Optional[SamplingRate] = None
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class CachedWebGoalsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list
- samplingRate: Optional[SamplingRate] = None
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class CachedWebOverviewQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- dateFrom: Optional[str] = None
- dateTo: Optional[str] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[WebOverviewItem]
- samplingRate: Optional[SamplingRate] = None
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class CachedWebStatsTableQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list
- samplingRate: Optional[SamplingRate] = None
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class DashboardFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- date_from: Optional[str] = None
- date_to: Optional[str] = None
- properties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = None
-
-
-class Response(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: list
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: str = Field(..., description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[list]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: list[str]
-
-
-class Response1(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: list
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: str = Field(..., description="Generated HogQL query.")
- limit: int
- missing_actors_count: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: int
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[list]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: list[str]
-
-
-class Response3(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- dateFrom: Optional[str] = None
- dateTo: Optional[str] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[WebOverviewItem]
- samplingRate: Optional[SamplingRate] = None
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class Response4(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list
- samplingRate: Optional[SamplingRate] = None
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class Response7(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: Any
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class Response8(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list[str]] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[ErrorTrackingIssue]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class DataWarehouseNode(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- custom_name: Optional[str] = None
- distinct_id_field: str
- fixedProperties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = Field(
- default=None,
- description="Fixed properties in the query, can't be edited in the interface (e.g. scoping down by person)",
- )
- id: str
- id_field: str
- kind: Literal["DataWarehouseNode"] = "DataWarehouseNode"
- math: Optional[
- Union[
- BaseMathType,
- FunnelMathType,
- PropertyMathType,
- CountPerActorMathType,
- Literal["unique_group"],
- Literal["hogql"],
- ]
- ] = None
- math_group_type_index: Optional[MathGroupTypeIndex] = None
- math_hogql: Optional[str] = None
- math_property: Optional[str] = None
- math_property_type: Optional[str] = None
- name: Optional[str] = None
- properties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = Field(default=None, description="Properties configurable in the interface")
- response: Optional[dict[str, Any]] = None
- table_name: str
- timestamp_field: str
-
-
-class DatabaseSchemaBatchExportTable(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- fields: dict[str, DatabaseSchemaField]
- id: str
- name: str
- type: Literal["batch_export"] = "batch_export"
-
-
-class DatabaseSchemaDataWarehouseTable(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- fields: dict[str, DatabaseSchemaField]
- format: str
- id: str
- name: str
- schema_: Optional[DatabaseSchemaSchema] = Field(default=None, alias="schema")
- source: Optional[DatabaseSchemaSource] = None
- type: Literal["data_warehouse"] = "data_warehouse"
- url_pattern: str
-
-
-class EntityNode(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- custom_name: Optional[str] = None
- fixedProperties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = Field(
- default=None,
- description="Fixed properties in the query, can't be edited in the interface (e.g. scoping down by person)",
- )
- kind: NodeKind
- math: Optional[
- Union[
- BaseMathType,
- FunnelMathType,
- PropertyMathType,
- CountPerActorMathType,
- Literal["unique_group"],
- Literal["hogql"],
- ]
- ] = None
- math_group_type_index: Optional[MathGroupTypeIndex] = None
- math_hogql: Optional[str] = None
- math_property: Optional[str] = None
- math_property_type: Optional[str] = None
- name: Optional[str] = None
- properties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = Field(default=None, description="Properties configurable in the interface")
- response: Optional[dict[str, Any]] = None
-
-
-class ErrorTrackingQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list[str]] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[ErrorTrackingIssue]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class EventTaxonomyQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[EventTaxonomyItem]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class EventsNode(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- custom_name: Optional[str] = None
- event: Optional[str] = Field(default=None, description="The event or `null` for all events.")
- fixedProperties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = Field(
- default=None,
- description="Fixed properties in the query, can't be edited in the interface (e.g. scoping down by person)",
- )
- kind: Literal["EventsNode"] = "EventsNode"
- limit: Optional[int] = None
- math: Optional[
- Union[
- BaseMathType,
- FunnelMathType,
- PropertyMathType,
- CountPerActorMathType,
- Literal["unique_group"],
- Literal["hogql"],
- ]
- ] = None
- math_group_type_index: Optional[MathGroupTypeIndex] = None
- math_hogql: Optional[str] = None
- math_property: Optional[str] = None
- math_property_type: Optional[str] = None
- name: Optional[str] = None
- orderBy: Optional[list[str]] = Field(default=None, description="Columns to order by")
- properties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = Field(default=None, description="Properties configurable in the interface")
- response: Optional[dict[str, Any]] = None
-
-
-class EventsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: list
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: str = Field(..., description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[list]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: list[str]
-
-
-class FunnelCorrelationResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: FunnelCorrelationResult
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class FunnelExclusionActionsNode(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- custom_name: Optional[str] = None
- fixedProperties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = Field(
- default=None,
- description="Fixed properties in the query, can't be edited in the interface (e.g. scoping down by person)",
- )
- funnelFromStep: int
- funnelToStep: int
- id: int
- kind: Literal["ActionsNode"] = "ActionsNode"
- math: Optional[
- Union[
- BaseMathType,
- FunnelMathType,
- PropertyMathType,
- CountPerActorMathType,
- Literal["unique_group"],
- Literal["hogql"],
- ]
- ] = None
- math_group_type_index: Optional[MathGroupTypeIndex] = None
- math_hogql: Optional[str] = None
- math_property: Optional[str] = None
- math_property_type: Optional[str] = None
- name: Optional[str] = None
- properties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = Field(default=None, description="Properties configurable in the interface")
- response: Optional[dict[str, Any]] = None
-
-
-class FunnelExclusionEventsNode(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- custom_name: Optional[str] = None
- event: Optional[str] = Field(default=None, description="The event or `null` for all events.")
- fixedProperties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = Field(
- default=None,
- description="Fixed properties in the query, can't be edited in the interface (e.g. scoping down by person)",
- )
- funnelFromStep: int
- funnelToStep: int
- kind: Literal["EventsNode"] = "EventsNode"
- limit: Optional[int] = None
- math: Optional[
- Union[
- BaseMathType,
- FunnelMathType,
- PropertyMathType,
- CountPerActorMathType,
- Literal["unique_group"],
- Literal["hogql"],
- ]
- ] = None
- math_group_type_index: Optional[MathGroupTypeIndex] = None
- math_hogql: Optional[str] = None
- math_property: Optional[str] = None
- math_property_type: Optional[str] = None
- name: Optional[str] = None
- orderBy: Optional[list[str]] = Field(default=None, description="Columns to order by")
- properties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = Field(default=None, description="Properties configurable in the interface")
- response: Optional[dict[str, Any]] = None
-
-
-class FunnelsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- isUdf: Optional[bool] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: Union[FunnelTimeToConvertResults, list[dict[str, Any]], list[list[dict[str, Any]]]]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class GenericCachedQueryResponse(BaseModel):
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- is_cached: bool
- last_refresh: AwareDatetime
- next_allowed_client_refresh: AwareDatetime
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- timezone: str
-
-
-class HogQLFilters(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- dateRange: Optional[DateRange] = None
- filterTestAccounts: Optional[bool] = None
- properties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = None
-
-
-class HogQLMetadataResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- errors: list[HogQLNotice]
- isValid: Optional[bool] = None
- isValidView: Optional[bool] = None
- notices: list[HogQLNotice]
- query: Optional[str] = None
- table_names: Optional[list[str]] = None
- warnings: list[HogQLNotice]
-
-
-class HogQLQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- clickhouse: Optional[str] = Field(default=None, description="Executed ClickHouse query")
- columns: Optional[list] = Field(default=None, description="Returned columns")
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- explain: Optional[list[str]] = Field(default=None, description="Query explanation output")
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- metadata: Optional[HogQLMetadataResponse] = Field(default=None, description="Query metadata output")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query: Optional[str] = Field(default=None, description="Input query string")
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = Field(default=None, description="Types of returned columns")
-
-
-class InsightActorsQueryBase(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- includeRecordings: Optional[bool] = None
- kind: NodeKind
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- response: Optional[ActorsQueryResponse] = None
-
-
-class LifecycleQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[dict[str, Any]]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class MultipleBreakdownOptions(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- values: list[BreakdownItem]
-
-
-class PathsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[dict[str, Any]]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class PersonsNode(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cohort: Optional[int] = None
- distinctId: Optional[str] = None
- fixedProperties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = Field(
- default=None,
- description="Fixed properties in the query, can't be edited in the interface (e.g. scoping down by person)",
- )
- kind: Literal["PersonsNode"] = "PersonsNode"
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- properties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = Field(default=None, description="Properties configurable in the interface")
- response: Optional[dict[str, Any]] = None
- search: Optional[str] = None
-
-
-class PropertyGroupFilterValue(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- type: FilterLogicalOperator
- values: list[
- Union[
- PropertyGroupFilterValue,
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ],
- ]
- ]
-
-
-class QueryResponseAlternative1(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: list
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: str = Field(..., description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[list]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: list[str]
-
-
-class QueryResponseAlternative2(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: list
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: str = Field(..., description="Generated HogQL query.")
- limit: int
- missing_actors_count: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: int
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[list]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: list[str]
-
-
-class QueryResponseAlternative3(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- breakdown: Optional[list[BreakdownItem]] = None
- breakdowns: Optional[list[MultipleBreakdownOptions]] = None
- compare: Optional[list[CompareItem]] = None
- day: Optional[list[DayItem]] = None
- interval: Optional[list[IntervalItem]] = None
- series: Optional[list[Series]] = None
- status: Optional[list[StatusItem]] = None
-
-
-class QueryResponseAlternative4(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[TimelineEntry]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class QueryResponseAlternative6(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- clickhouse: Optional[str] = Field(default=None, description="Executed ClickHouse query")
- columns: Optional[list] = Field(default=None, description="Returned columns")
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- explain: Optional[list[str]] = Field(default=None, description="Query explanation output")
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- metadata: Optional[HogQLMetadataResponse] = Field(default=None, description="Query metadata output")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query: Optional[str] = Field(default=None, description="Input query string")
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = Field(default=None, description="Types of returned columns")
-
-
-class QueryResponseAlternative9(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- dateFrom: Optional[str] = None
- dateTo: Optional[str] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[WebOverviewItem]
- samplingRate: Optional[SamplingRate] = None
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class QueryResponseAlternative10(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list
- samplingRate: Optional[SamplingRate] = None
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class QueryResponseAlternative13(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: Any
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class QueryResponseAlternative14(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list[str]] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[ErrorTrackingIssue]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class QueryResponseAlternative17(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: list
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: str = Field(..., description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[list]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: list[str]
-
-
-class QueryResponseAlternative18(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: list
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: str = Field(..., description="Generated HogQL query.")
- limit: int
- missing_actors_count: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: int
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[list]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: list[str]
-
-
-class QueryResponseAlternative19(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- clickhouse: Optional[str] = Field(default=None, description="Executed ClickHouse query")
- columns: Optional[list] = Field(default=None, description="Returned columns")
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- explain: Optional[list[str]] = Field(default=None, description="Query explanation output")
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- metadata: Optional[HogQLMetadataResponse] = Field(default=None, description="Query metadata output")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query: Optional[str] = Field(default=None, description="Input query string")
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = Field(default=None, description="Types of returned columns")
-
-
-class QueryResponseAlternative20(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- dateFrom: Optional[str] = None
- dateTo: Optional[str] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[WebOverviewItem]
- samplingRate: Optional[SamplingRate] = None
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class QueryResponseAlternative21(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list
- samplingRate: Optional[SamplingRate] = None
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class QueryResponseAlternative24(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: Any
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class QueryResponseAlternative25(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list[str]] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[ErrorTrackingIssue]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class QueryResponseAlternative28(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = Field(default=None, description="Wether more breakdown values are available.")
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[dict[str, Any]]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class QueryResponseAlternative29(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- isUdf: Optional[bool] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: Union[FunnelTimeToConvertResults, list[dict[str, Any]], list[list[dict[str, Any]]]]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class QueryResponseAlternative31(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[dict[str, Any]]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class QueryResponseAlternative34(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- columns: Optional[list] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: FunnelCorrelationResult
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = None
-
-
-class QueryResponseAlternative37(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[TeamTaxonomyItem]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class QueryResponseAlternative38(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[EventTaxonomyItem]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class QueryResponseAlternative39(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: ActorsPropertyTaxonomyResponse
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class RecordingsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- has_next: bool
- results: list[SessionRecordingType]
-
-
-class RetentionResult(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- date: AwareDatetime
- label: str
- values: list[RetentionValue]
-
-
-class SessionAttributionExplorerQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- filters: Optional[Filters] = None
- groupBy: list[SessionAttributionGroupBy]
- kind: Literal["SessionAttributionExplorerQuery"] = "SessionAttributionExplorerQuery"
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- response: Optional[SessionAttributionExplorerQueryResponse] = None
-
-
-class SessionsTimelineQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- after: Optional[str] = Field(
- default=None, description="Only fetch sessions that started after this timestamp (default: '-24h')"
- )
- before: Optional[str] = Field(
- default=None, description="Only fetch sessions that started before this timestamp (default: '+5s')"
- )
- kind: Literal["SessionsTimelineQuery"] = "SessionsTimelineQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- personId: Optional[str] = Field(default=None, description="Fetch sessions only for a given person")
- response: Optional[SessionsTimelineQueryResponse] = None
-
-
-class TeamTaxonomyQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[TeamTaxonomyItem]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class VisualizationMessage(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- answer: Optional[Union[AssistantTrendsQuery, AssistantFunnelsQuery]] = None
- id: Optional[str] = None
- initiator: Optional[str] = None
- plan: Optional[str] = None
- type: Literal["ai/viz"] = "ai/viz"
-
-
-class WebExternalClicksTableQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- compareFilter: Optional[CompareFilter] = None
- conversionGoal: Optional[Union[ActionConversionGoal, CustomEventConversionGoal]] = None
- dateRange: Optional[DateRange] = None
- filterTestAccounts: Optional[bool] = None
- kind: Literal["WebExternalClicksTableQuery"] = "WebExternalClicksTableQuery"
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- properties: list[Union[EventPropertyFilter, PersonPropertyFilter, SessionPropertyFilter]]
- response: Optional[WebExternalClicksTableQueryResponse] = None
- sampling: Optional[Sampling] = None
- stripQueryParams: Optional[bool] = None
- useSessionsTable: Optional[bool] = None
-
-
-class WebGoalsQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- compareFilter: Optional[CompareFilter] = None
- conversionGoal: Optional[Union[ActionConversionGoal, CustomEventConversionGoal]] = None
- dateRange: Optional[DateRange] = None
- filterTestAccounts: Optional[bool] = None
- kind: Literal["WebGoalsQuery"] = "WebGoalsQuery"
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- properties: list[Union[EventPropertyFilter, PersonPropertyFilter, SessionPropertyFilter]]
- response: Optional[WebGoalsQueryResponse] = None
- sampling: Optional[Sampling] = None
- useSessionsTable: Optional[bool] = None
-
-
-class WebOverviewQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- compareFilter: Optional[CompareFilter] = None
- conversionGoal: Optional[Union[ActionConversionGoal, CustomEventConversionGoal]] = None
- dateRange: Optional[DateRange] = None
- filterTestAccounts: Optional[bool] = None
- includeLCPScore: Optional[bool] = None
- kind: Literal["WebOverviewQuery"] = "WebOverviewQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- properties: list[Union[EventPropertyFilter, PersonPropertyFilter, SessionPropertyFilter]]
- response: Optional[WebOverviewQueryResponse] = None
- sampling: Optional[Sampling] = None
- useSessionsTable: Optional[bool] = None
-
-
-class WebStatsTableQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- breakdownBy: WebStatsBreakdown
- compareFilter: Optional[CompareFilter] = None
- conversionGoal: Optional[Union[ActionConversionGoal, CustomEventConversionGoal]] = None
- dateRange: Optional[DateRange] = None
- doPathCleaning: Optional[bool] = None
- filterTestAccounts: Optional[bool] = None
- includeBounceRate: Optional[bool] = None
- includeScrollDepth: Optional[bool] = None
- kind: Literal["WebStatsTableQuery"] = "WebStatsTableQuery"
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- properties: list[Union[EventPropertyFilter, PersonPropertyFilter, SessionPropertyFilter]]
- response: Optional[WebStatsTableQueryResponse] = None
- sampling: Optional[Sampling] = None
- useSessionsTable: Optional[bool] = None
-
-
-class ActionsNode(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- custom_name: Optional[str] = None
- fixedProperties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = Field(
- default=None,
- description="Fixed properties in the query, can't be edited in the interface (e.g. scoping down by person)",
- )
- id: int
- kind: Literal["ActionsNode"] = "ActionsNode"
- math: Optional[
- Union[
- BaseMathType,
- FunnelMathType,
- PropertyMathType,
- CountPerActorMathType,
- Literal["unique_group"],
- Literal["hogql"],
- ]
- ] = None
- math_group_type_index: Optional[MathGroupTypeIndex] = None
- math_hogql: Optional[str] = None
- math_property: Optional[str] = None
- math_property_type: Optional[str] = None
- name: Optional[str] = None
- properties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = Field(default=None, description="Properties configurable in the interface")
- response: Optional[dict[str, Any]] = None
-
-
-class ActorsPropertyTaxonomyQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- group_type_index: Optional[int] = None
- kind: Literal["ActorsPropertyTaxonomyQuery"] = "ActorsPropertyTaxonomyQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- property: str
- response: Optional[ActorsPropertyTaxonomyQueryResponse] = None
-
-
-class AnyResponseType(
- RootModel[
- Union[
- dict[str, Any],
- HogQueryResponse,
- HogQLQueryResponse,
- HogQLMetadataResponse,
- HogQLAutocompleteResponse,
- Any,
- EventsQueryResponse,
- ErrorTrackingQueryResponse,
- ]
- ]
-):
- root: Union[
- dict[str, Any],
- HogQueryResponse,
- HogQLQueryResponse,
- HogQLMetadataResponse,
- HogQLAutocompleteResponse,
- Any,
- EventsQueryResponse,
- ErrorTrackingQueryResponse,
- ]
-
-
-class CachedHogQLQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- clickhouse: Optional[str] = Field(default=None, description="Executed ClickHouse query")
- columns: Optional[list] = Field(default=None, description="Returned columns")
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- explain: Optional[list[str]] = Field(default=None, description="Query explanation output")
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- limit: Optional[int] = None
- metadata: Optional[HogQLMetadataResponse] = Field(default=None, description="Query metadata output")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- offset: Optional[int] = None
- query: Optional[str] = Field(default=None, description="Input query string")
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = Field(default=None, description="Types of returned columns")
-
-
-class CachedInsightActorsQueryOptionsResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- breakdown: Optional[list[BreakdownItem]] = None
- breakdowns: Optional[list[MultipleBreakdownOptions]] = None
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- compare: Optional[list[CompareItem]] = None
- day: Optional[list[DayItem]] = None
- interval: Optional[list[IntervalItem]] = None
- is_cached: bool
- last_refresh: AwareDatetime
- next_allowed_client_refresh: AwareDatetime
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- series: Optional[list[Series]] = None
- status: Optional[list[StatusItem]] = None
- timezone: str
-
-
-class CachedRetentionQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[RetentionResult]
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class Response2(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- clickhouse: Optional[str] = Field(default=None, description="Executed ClickHouse query")
- columns: Optional[list] = Field(default=None, description="Returned columns")
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- explain: Optional[list[str]] = Field(default=None, description="Query explanation output")
- hasMore: Optional[bool] = None
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- limit: Optional[int] = None
- metadata: Optional[HogQLMetadataResponse] = Field(default=None, description="Query metadata output")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- query: Optional[str] = Field(default=None, description="Input query string")
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
- types: Optional[list] = Field(default=None, description="Types of returned columns")
-
-
-class EventTaxonomyQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- event: str
- kind: Literal["EventTaxonomyQuery"] = "EventTaxonomyQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- properties: Optional[list[str]] = None
- response: Optional[EventTaxonomyQueryResponse] = None
-
-
-class FunnelsFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- binCount: Optional[int] = None
- breakdownAttributionType: Optional[BreakdownAttributionType] = BreakdownAttributionType.FIRST_TOUCH
- breakdownAttributionValue: Optional[int] = None
- exclusions: Optional[list[Union[FunnelExclusionEventsNode, FunnelExclusionActionsNode]]] = []
- funnelAggregateByHogQL: Optional[str] = None
- funnelFromStep: Optional[int] = None
- funnelOrderType: Optional[StepOrderValue] = StepOrderValue.ORDERED
- funnelStepReference: Optional[FunnelStepReference] = FunnelStepReference.TOTAL
- funnelToStep: Optional[int] = None
- funnelVizType: Optional[FunnelVizType] = FunnelVizType.STEPS
- funnelWindowInterval: Optional[int] = 14
- funnelWindowIntervalUnit: Optional[FunnelConversionWindowTimeUnit] = FunnelConversionWindowTimeUnit.DAY
- hiddenLegendBreakdowns: Optional[list[str]] = None
- layout: Optional[FunnelLayout] = FunnelLayout.VERTICAL
- resultCustomizations: Optional[dict[str, ResultCustomizationByValue]] = Field(
- default=None, description="Customizations for the appearance of result datasets."
- )
- useUdf: Optional[bool] = None
-
-
-class HogQLQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- explain: Optional[bool] = None
- filters: Optional[HogQLFilters] = None
- kind: Literal["HogQLQuery"] = "HogQLQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query: str
- response: Optional[HogQLQueryResponse] = None
- values: Optional[dict[str, Any]] = Field(
- default=None, description="Constant values that can be referenced with the {placeholder} syntax in the query"
- )
- variables: Optional[dict[str, HogQLVariable]] = Field(
- default=None, description="Variables to be subsituted into the query"
- )
-
-
-class InsightActorsQueryOptionsResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- breakdown: Optional[list[BreakdownItem]] = None
- breakdowns: Optional[list[MultipleBreakdownOptions]] = None
- compare: Optional[list[CompareItem]] = None
- day: Optional[list[DayItem]] = None
- interval: Optional[list[IntervalItem]] = None
- series: Optional[list[Series]] = None
- status: Optional[list[StatusItem]] = None
-
-
-class InsightFilter(
- RootModel[Union[TrendsFilter, FunnelsFilter, RetentionFilter, PathsFilter, StickinessFilter, LifecycleFilter]]
-):
- root: Union[TrendsFilter, FunnelsFilter, RetentionFilter, PathsFilter, StickinessFilter, LifecycleFilter]
-
-
-class PropertyGroupFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- type: FilterLogicalOperator
- values: list[PropertyGroupFilterValue]
-
-
-class QueryResponseAlternative30(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[RetentionResult]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class RecordingsQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- actions: Optional[list[dict[str, Any]]] = None
- console_log_filters: Optional[list[LogEntryPropertyFilter]] = None
- date_from: Optional[str] = "-3d"
- date_to: Optional[str] = None
- events: Optional[list[dict[str, Any]]] = None
- filter_test_accounts: Optional[bool] = None
- having_predicates: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = None
- kind: Literal["RecordingsQuery"] = "RecordingsQuery"
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- operand: Optional[FilterLogicalOperator] = FilterLogicalOperator.AND_
- order: Optional[RecordingOrder] = RecordingOrder.START_TIME
- person_uuid: Optional[str] = None
- properties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = None
- response: Optional[RecordingsQueryResponse] = None
- session_ids: Optional[list[str]] = None
- user_modified_filters: Optional[dict[str, Any]] = None
-
-
-class RetentionQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[RetentionResult]
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
-class RootAssistantMessage(
- RootModel[
- Union[VisualizationMessage, ReasoningMessage, AssistantMessage, HumanMessage, FailureMessage, RouterMessage]
- ]
-):
- root: Union[VisualizationMessage, ReasoningMessage, AssistantMessage, HumanMessage, FailureMessage, RouterMessage]
-
-
-class StickinessQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- compareFilter: Optional[CompareFilter] = Field(default=None, description="Compare to date range")
- dataColorTheme: Optional[float] = Field(default=None, description="Colors used in the insight's visualization")
- dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
- filterTestAccounts: Optional[bool] = Field(
- default=False, description="Exclude internal and test users by applying the respective filters"
- )
- interval: Optional[IntervalType] = Field(
- default=IntervalType.DAY,
- description="Granularity of the response. Can be one of `hour`, `day`, `week` or `month`",
- )
- kind: Literal["StickinessQuery"] = "StickinessQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- properties: Optional[
- Union[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ],
- PropertyGroupFilter,
- ]
- ] = Field(default=[], description="Property filters for all series")
- response: Optional[StickinessQueryResponse] = None
- samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
- series: list[Union[EventsNode, ActionsNode, DataWarehouseNode]] = Field(
- ..., description="Events and actions to include"
- )
- stickinessFilter: Optional[StickinessFilter] = Field(
- default=None, description="Properties specific to the stickiness insight"
- )
-
-
-class TeamTaxonomyQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- kind: Literal["TeamTaxonomyQuery"] = "TeamTaxonomyQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- response: Optional[TeamTaxonomyQueryResponse] = None
-
-
-class TrendsQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- aggregation_group_type_index: Optional[int] = Field(default=None, description="Groups aggregation")
- breakdownFilter: Optional[BreakdownFilter] = Field(default=None, description="Breakdown of the events and actions")
- compareFilter: Optional[CompareFilter] = Field(default=None, description="Compare to date range")
- conversionGoal: Optional[Union[ActionConversionGoal, CustomEventConversionGoal]] = Field(
- default=None, description="Whether we should be comparing against a specific conversion goal"
- )
- dataColorTheme: Optional[float] = Field(default=None, description="Colors used in the insight's visualization")
- dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
- filterTestAccounts: Optional[bool] = Field(
- default=False, description="Exclude internal and test users by applying the respective filters"
- )
- interval: Optional[IntervalType] = Field(
- default=IntervalType.DAY,
- description="Granularity of the response. Can be one of `hour`, `day`, `week` or `month`",
- )
- kind: Literal["TrendsQuery"] = "TrendsQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- properties: Optional[
- Union[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ],
- PropertyGroupFilter,
- ]
- ] = Field(default=[], description="Property filters for all series")
- response: Optional[TrendsQueryResponse] = None
- samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
- series: list[Union[EventsNode, ActionsNode, DataWarehouseNode]] = Field(
- ..., description="Events and actions to include"
- )
- trendsFilter: Optional[TrendsFilter] = Field(default=None, description="Properties specific to the trends insight")
-
-
-class CachedExperimentTrendsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- count_query: Optional[TrendsQuery] = None
- credible_intervals: dict[str, list[float]]
- exposure_query: Optional[TrendsQuery] = None
- insight: list[dict[str, Any]]
- is_cached: bool
- kind: Literal["ExperimentTrendsQuery"] = "ExperimentTrendsQuery"
- last_refresh: AwareDatetime
- next_allowed_client_refresh: AwareDatetime
- p_value: float
- probability: dict[str, float]
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- significance_code: ExperimentSignificanceCode
- significant: bool
- stats_version: Optional[int] = None
- timezone: str
- variants: list[ExperimentVariantTrendsBaseStats]
-
-
-class Response10(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- count_query: Optional[TrendsQuery] = None
- credible_intervals: dict[str, list[float]]
- exposure_query: Optional[TrendsQuery] = None
- insight: list[dict[str, Any]]
- kind: Literal["ExperimentTrendsQuery"] = "ExperimentTrendsQuery"
- p_value: float
- probability: dict[str, float]
- significance_code: ExperimentSignificanceCode
- significant: bool
- stats_version: Optional[int] = None
- variants: list[ExperimentVariantTrendsBaseStats]
-
-
-class DataVisualizationNode(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- chartSettings: Optional[ChartSettings] = None
- display: Optional[ChartDisplayType] = None
- kind: Literal["DataVisualizationNode"] = "DataVisualizationNode"
- source: HogQLQuery
- tableSettings: Optional[TableSettings] = None
-
-
-class DatabaseSchemaMaterializedViewTable(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- fields: dict[str, DatabaseSchemaField]
- id: str
- last_run_at: Optional[str] = None
- name: str
- query: HogQLQuery
- status: Optional[str] = None
- type: Literal["materialized_view"] = "materialized_view"
-
-
-class DatabaseSchemaViewTable(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- fields: dict[str, DatabaseSchemaField]
- id: str
- name: str
- query: HogQLQuery
- type: Literal["view"] = "view"
-
-
-class ErrorTrackingQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- assignee: Optional[int] = None
- dateRange: DateRange
- filterGroup: Optional[PropertyGroupFilter] = None
- filterTestAccounts: Optional[bool] = None
- issueId: Optional[str] = None
- kind: Literal["ErrorTrackingQuery"] = "ErrorTrackingQuery"
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- orderBy: Optional[OrderBy] = None
- response: Optional[ErrorTrackingQueryResponse] = None
- searchQuery: Optional[str] = None
- select: Optional[list[str]] = None
-
-
-class EventsQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- actionId: Optional[int] = Field(default=None, description="Show events matching a given action")
- after: Optional[str] = Field(default=None, description="Only fetch events that happened after this timestamp")
- before: Optional[str] = Field(default=None, description="Only fetch events that happened before this timestamp")
- event: Optional[str] = Field(default=None, description="Limit to events matching this string")
- filterTestAccounts: Optional[bool] = Field(default=None, description="Filter test accounts")
- fixedProperties: Optional[
- list[
- Union[
- PropertyGroupFilter,
- PropertyGroupFilterValue,
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ],
- ]
- ]
- ] = Field(
- default=None,
- description="Fixed properties in the query, can't be edited in the interface (e.g. scoping down by person)",
- )
- kind: Literal["EventsQuery"] = "EventsQuery"
- limit: Optional[int] = Field(default=None, description="Number of rows to return")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = Field(default=None, description="Number of rows to skip before returning rows")
- orderBy: Optional[list[str]] = Field(default=None, description="Columns to order by")
- personId: Optional[str] = Field(default=None, description="Show events for a given person")
- properties: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = Field(default=None, description="Properties configurable in the interface")
- response: Optional[EventsQueryResponse] = None
- select: list[str] = Field(..., description="Return a limited set of data. Required.")
- where: Optional[list[str]] = Field(default=None, description="HogQL filters to apply on returned data")
-
-
-class ExperimentTrendsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- count_query: Optional[TrendsQuery] = None
- credible_intervals: dict[str, list[float]]
- exposure_query: Optional[TrendsQuery] = None
- insight: list[dict[str, Any]]
- kind: Literal["ExperimentTrendsQuery"] = "ExperimentTrendsQuery"
- p_value: float
- probability: dict[str, float]
- significance_code: ExperimentSignificanceCode
- significant: bool
- stats_version: Optional[int] = None
- variants: list[ExperimentVariantTrendsBaseStats]
-
-
-class FunnelsQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- aggregation_group_type_index: Optional[int] = Field(default=None, description="Groups aggregation")
- breakdownFilter: Optional[BreakdownFilter] = Field(default=None, description="Breakdown of the events and actions")
- dataColorTheme: Optional[float] = Field(default=None, description="Colors used in the insight's visualization")
- dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
- filterTestAccounts: Optional[bool] = Field(
- default=False, description="Exclude internal and test users by applying the respective filters"
- )
- funnelsFilter: Optional[FunnelsFilter] = Field(
- default=None, description="Properties specific to the funnels insight"
- )
- interval: Optional[IntervalType] = Field(
- default=None, description="Granularity of the response. Can be one of `hour`, `day`, `week` or `month`"
- )
- kind: Literal["FunnelsQuery"] = "FunnelsQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- properties: Optional[
- Union[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ],
- PropertyGroupFilter,
- ]
- ] = Field(default=[], description="Property filters for all series")
- response: Optional[FunnelsQueryResponse] = None
- samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
- series: list[Union[EventsNode, ActionsNode, DataWarehouseNode]] = Field(
- ..., description="Events and actions to include"
- )
-
-
-class HasPropertiesNode(RootModel[Union[EventsNode, EventsQuery, PersonsNode]]):
- root: Union[EventsNode, EventsQuery, PersonsNode]
-
-
-class InsightsQueryBaseFunnelsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- aggregation_group_type_index: Optional[int] = Field(default=None, description="Groups aggregation")
- dataColorTheme: Optional[float] = Field(default=None, description="Colors used in the insight's visualization")
- dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
- filterTestAccounts: Optional[bool] = Field(
- default=False, description="Exclude internal and test users by applying the respective filters"
- )
- kind: NodeKind
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- properties: Optional[
- Union[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ],
- PropertyGroupFilter,
- ]
- ] = Field(default=[], description="Property filters for all series")
- response: Optional[FunnelsQueryResponse] = None
- samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
-
-
-class InsightsQueryBaseLifecycleQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- aggregation_group_type_index: Optional[int] = Field(default=None, description="Groups aggregation")
- dataColorTheme: Optional[float] = Field(default=None, description="Colors used in the insight's visualization")
- dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
- filterTestAccounts: Optional[bool] = Field(
- default=False, description="Exclude internal and test users by applying the respective filters"
- )
- kind: NodeKind
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- properties: Optional[
- Union[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ],
- PropertyGroupFilter,
- ]
- ] = Field(default=[], description="Property filters for all series")
- response: Optional[LifecycleQueryResponse] = None
- samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
-
-
-class InsightsQueryBasePathsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- aggregation_group_type_index: Optional[int] = Field(default=None, description="Groups aggregation")
- dataColorTheme: Optional[float] = Field(default=None, description="Colors used in the insight's visualization")
- dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
- filterTestAccounts: Optional[bool] = Field(
- default=False, description="Exclude internal and test users by applying the respective filters"
- )
- kind: NodeKind
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- properties: Optional[
- Union[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ],
- PropertyGroupFilter,
- ]
- ] = Field(default=[], description="Property filters for all series")
- response: Optional[PathsQueryResponse] = None
- samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
-
-
-class InsightsQueryBaseRetentionQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- aggregation_group_type_index: Optional[int] = Field(default=None, description="Groups aggregation")
- dataColorTheme: Optional[float] = Field(default=None, description="Colors used in the insight's visualization")
- dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
- filterTestAccounts: Optional[bool] = Field(
- default=False, description="Exclude internal and test users by applying the respective filters"
- )
- kind: NodeKind
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- properties: Optional[
- Union[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ],
- PropertyGroupFilter,
- ]
- ] = Field(default=[], description="Property filters for all series")
- response: Optional[RetentionQueryResponse] = None
- samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
-
-
-class InsightsQueryBaseTrendsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- aggregation_group_type_index: Optional[int] = Field(default=None, description="Groups aggregation")
- dataColorTheme: Optional[float] = Field(default=None, description="Colors used in the insight's visualization")
- dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
- filterTestAccounts: Optional[bool] = Field(
- default=False, description="Exclude internal and test users by applying the respective filters"
- )
- kind: NodeKind
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- properties: Optional[
- Union[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ],
- PropertyGroupFilter,
- ]
- ] = Field(default=[], description="Property filters for all series")
- response: Optional[TrendsQueryResponse] = None
- samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
-
-
-class LifecycleQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- aggregation_group_type_index: Optional[int] = Field(default=None, description="Groups aggregation")
- dataColorTheme: Optional[float] = Field(default=None, description="Colors used in the insight's visualization")
- dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
- filterTestAccounts: Optional[bool] = Field(
- default=False, description="Exclude internal and test users by applying the respective filters"
- )
- interval: Optional[IntervalType] = Field(
- default=IntervalType.DAY,
- description="Granularity of the response. Can be one of `hour`, `day`, `week` or `month`",
- )
- kind: Literal["LifecycleQuery"] = "LifecycleQuery"
- lifecycleFilter: Optional[LifecycleFilter] = Field(
- default=None, description="Properties specific to the lifecycle insight"
- )
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- properties: Optional[
- Union[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ],
- PropertyGroupFilter,
- ]
- ] = Field(default=[], description="Property filters for all series")
- response: Optional[LifecycleQueryResponse] = None
- samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
- series: list[Union[EventsNode, ActionsNode, DataWarehouseNode]] = Field(
- ..., description="Events and actions to include"
- )
-
-
-class QueryResponseAlternative15(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- credible_intervals: dict[str, list[float]]
- expected_loss: float
- funnels_query: Optional[FunnelsQuery] = None
- insight: list[list[dict[str, Any]]]
- kind: Literal["ExperimentFunnelsQuery"] = "ExperimentFunnelsQuery"
- probability: dict[str, float]
- significance_code: ExperimentSignificanceCode
- significant: bool
- stats_version: Optional[int] = None
- variants: list[ExperimentVariantFunnelsBaseStats]
-
-
-class QueryResponseAlternative16(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- count_query: Optional[TrendsQuery] = None
- credible_intervals: dict[str, list[float]]
- exposure_query: Optional[TrendsQuery] = None
- insight: list[dict[str, Any]]
- kind: Literal["ExperimentTrendsQuery"] = "ExperimentTrendsQuery"
- p_value: float
- probability: dict[str, float]
- significance_code: ExperimentSignificanceCode
- significant: bool
- stats_version: Optional[int] = None
- variants: list[ExperimentVariantTrendsBaseStats]
-
-
-class QueryResponseAlternative26(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- credible_intervals: dict[str, list[float]]
- expected_loss: float
- funnels_query: Optional[FunnelsQuery] = None
- insight: list[list[dict[str, Any]]]
- kind: Literal["ExperimentFunnelsQuery"] = "ExperimentFunnelsQuery"
- probability: dict[str, float]
- significance_code: ExperimentSignificanceCode
- significant: bool
- stats_version: Optional[int] = None
- variants: list[ExperimentVariantFunnelsBaseStats]
-
-
-class QueryResponseAlternative27(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- count_query: Optional[TrendsQuery] = None
- credible_intervals: dict[str, list[float]]
- exposure_query: Optional[TrendsQuery] = None
- insight: list[dict[str, Any]]
- kind: Literal["ExperimentTrendsQuery"] = "ExperimentTrendsQuery"
- p_value: float
- probability: dict[str, float]
- significance_code: ExperimentSignificanceCode
- significant: bool
- stats_version: Optional[int] = None
- variants: list[ExperimentVariantTrendsBaseStats]
-
-
-class RetentionQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- aggregation_group_type_index: Optional[int] = Field(default=None, description="Groups aggregation")
- dataColorTheme: Optional[float] = Field(default=None, description="Colors used in the insight's visualization")
- dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
- filterTestAccounts: Optional[bool] = Field(
- default=False, description="Exclude internal and test users by applying the respective filters"
- )
- kind: Literal["RetentionQuery"] = "RetentionQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- properties: Optional[
- Union[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ],
- PropertyGroupFilter,
- ]
- ] = Field(default=[], description="Property filters for all series")
- response: Optional[RetentionQueryResponse] = None
- retentionFilter: RetentionFilter = Field(..., description="Properties specific to the retention insight")
- samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
-
-
-class CachedExperimentFunnelsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- credible_intervals: dict[str, list[float]]
- expected_loss: float
- funnels_query: Optional[FunnelsQuery] = None
- insight: list[list[dict[str, Any]]]
- is_cached: bool
- kind: Literal["ExperimentFunnelsQuery"] = "ExperimentFunnelsQuery"
- last_refresh: AwareDatetime
- next_allowed_client_refresh: AwareDatetime
- probability: dict[str, float]
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- significance_code: ExperimentSignificanceCode
- significant: bool
- stats_version: Optional[int] = None
- timezone: str
- variants: list[ExperimentVariantFunnelsBaseStats]
-
-
-class Response9(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- credible_intervals: dict[str, list[float]]
- expected_loss: float
- funnels_query: Optional[FunnelsQuery] = None
- insight: list[list[dict[str, Any]]]
- kind: Literal["ExperimentFunnelsQuery"] = "ExperimentFunnelsQuery"
- probability: dict[str, float]
- significance_code: ExperimentSignificanceCode
- significant: bool
- stats_version: Optional[int] = None
- variants: list[ExperimentVariantFunnelsBaseStats]
-
-
-class ExperimentFunnelsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- credible_intervals: dict[str, list[float]]
- expected_loss: float
- funnels_query: Optional[FunnelsQuery] = None
- insight: list[list[dict[str, Any]]]
- kind: Literal["ExperimentFunnelsQuery"] = "ExperimentFunnelsQuery"
- probability: dict[str, float]
- significance_code: ExperimentSignificanceCode
- significant: bool
- stats_version: Optional[int] = None
- variants: list[ExperimentVariantFunnelsBaseStats]
-
-
-class ExperimentTrendsQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- count_query: TrendsQuery
- experiment_id: Optional[int] = None
- exposure_query: Optional[TrendsQuery] = None
- kind: Literal["ExperimentTrendsQuery"] = "ExperimentTrendsQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- name: Optional[str] = None
- response: Optional[ExperimentTrendsQueryResponse] = None
- stats_version: Optional[int] = None
-
-
-class FunnelPathsFilter(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- funnelPathType: Optional[FunnelPathType] = None
- funnelSource: FunnelsQuery
- funnelStep: Optional[int] = None
-
-
-class FunnelsActorsQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- funnelCustomSteps: Optional[list[int]] = Field(
- default=None,
- description=(
- "Custom step numbers to get persons for. This overrides `funnelStep`. Primarily for correlation use."
- ),
- )
- funnelStep: Optional[int] = Field(
- default=None,
- description=(
- "Index of the step for which we want to get the timestamp for, per person. Positive for converted persons,"
- " negative for dropped of persons."
- ),
- )
- funnelStepBreakdown: Optional[Union[int, str, float, list[Union[int, str, float]]]] = Field(
- default=None,
- description=(
- "The breakdown value for which to get persons for. This is an array for person and event properties, a"
- " string for groups and an integer for cohorts."
- ),
- )
- funnelTrendsDropOff: Optional[bool] = None
- funnelTrendsEntrancePeriodStart: Optional[str] = Field(
- default=None,
- description="Used together with `funnelTrendsDropOff` for funnels time conversion date for the persons modal.",
- )
- includeRecordings: Optional[bool] = None
- kind: Literal["FunnelsActorsQuery"] = "FunnelsActorsQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- response: Optional[ActorsQueryResponse] = None
- source: FunnelsQuery
-
-
-class PathsQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- aggregation_group_type_index: Optional[int] = Field(default=None, description="Groups aggregation")
- dataColorTheme: Optional[float] = Field(default=None, description="Colors used in the insight's visualization")
- dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
- filterTestAccounts: Optional[bool] = Field(
- default=False, description="Exclude internal and test users by applying the respective filters"
- )
- funnelPathsFilter: Optional[FunnelPathsFilter] = Field(
- default=None, description="Used for displaying paths in relation to funnel steps."
- )
- kind: Literal["PathsQuery"] = "PathsQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- pathsFilter: PathsFilter = Field(..., description="Properties specific to the paths insight")
- properties: Optional[
- Union[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ],
- PropertyGroupFilter,
- ]
- ] = Field(default=[], description="Property filters for all series")
- response: Optional[PathsQueryResponse] = None
- samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
-
-
-class QueryResponseAlternative35(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- tables: dict[
- str,
- Union[
- DatabaseSchemaPostHogTable,
- DatabaseSchemaDataWarehouseTable,
- DatabaseSchemaViewTable,
- DatabaseSchemaBatchExportTable,
- DatabaseSchemaMaterializedViewTable,
- ],
- ]
-
-
-class QueryResponseAlternative(
- RootModel[
- Union[
- dict[str, Any],
- QueryResponseAlternative1,
- QueryResponseAlternative2,
- QueryResponseAlternative3,
- QueryResponseAlternative4,
- QueryResponseAlternative5,
- QueryResponseAlternative6,
- QueryResponseAlternative7,
- QueryResponseAlternative8,
- QueryResponseAlternative9,
- QueryResponseAlternative10,
- QueryResponseAlternative13,
- QueryResponseAlternative14,
- QueryResponseAlternative15,
- QueryResponseAlternative16,
- Any,
- QueryResponseAlternative17,
- QueryResponseAlternative18,
- QueryResponseAlternative19,
- QueryResponseAlternative20,
- QueryResponseAlternative21,
- QueryResponseAlternative24,
- QueryResponseAlternative25,
- QueryResponseAlternative26,
- QueryResponseAlternative27,
- QueryResponseAlternative28,
- QueryResponseAlternative29,
- QueryResponseAlternative30,
- QueryResponseAlternative31,
- QueryResponseAlternative34,
- QueryResponseAlternative35,
- QueryResponseAlternative36,
- QueryResponseAlternative37,
- QueryResponseAlternative38,
- QueryResponseAlternative39,
- ]
- ]
-):
- root: Union[
- dict[str, Any],
- QueryResponseAlternative1,
- QueryResponseAlternative2,
- QueryResponseAlternative3,
- QueryResponseAlternative4,
- QueryResponseAlternative5,
- QueryResponseAlternative6,
- QueryResponseAlternative7,
- QueryResponseAlternative8,
- QueryResponseAlternative9,
- QueryResponseAlternative10,
- QueryResponseAlternative13,
- QueryResponseAlternative14,
- QueryResponseAlternative15,
- QueryResponseAlternative16,
- Any,
- QueryResponseAlternative17,
- QueryResponseAlternative18,
- QueryResponseAlternative19,
- QueryResponseAlternative20,
- QueryResponseAlternative21,
- QueryResponseAlternative24,
- QueryResponseAlternative25,
- QueryResponseAlternative26,
- QueryResponseAlternative27,
- QueryResponseAlternative28,
- QueryResponseAlternative29,
- QueryResponseAlternative30,
- QueryResponseAlternative31,
- QueryResponseAlternative34,
- QueryResponseAlternative35,
- QueryResponseAlternative36,
- QueryResponseAlternative37,
- QueryResponseAlternative38,
- QueryResponseAlternative39,
- ]
-
-
-class DatabaseSchemaQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- tables: dict[
- str,
- Union[
- DatabaseSchemaPostHogTable,
- DatabaseSchemaDataWarehouseTable,
- DatabaseSchemaViewTable,
- DatabaseSchemaBatchExportTable,
- DatabaseSchemaMaterializedViewTable,
- ],
- ]
-
-
-class ExperimentFunnelsQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- experiment_id: Optional[int] = None
- funnels_query: FunnelsQuery
- kind: Literal["ExperimentFunnelsQuery"] = "ExperimentFunnelsQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- name: Optional[str] = None
- response: Optional[ExperimentFunnelsQueryResponse] = None
- stats_version: Optional[int] = None
-
-
-class FunnelCorrelationQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- funnelCorrelationEventExcludePropertyNames: Optional[list[str]] = None
- funnelCorrelationEventNames: Optional[list[str]] = None
- funnelCorrelationExcludeEventNames: Optional[list[str]] = None
- funnelCorrelationExcludeNames: Optional[list[str]] = None
- funnelCorrelationNames: Optional[list[str]] = None
- funnelCorrelationType: FunnelCorrelationResultsType
- kind: Literal["FunnelCorrelationQuery"] = "FunnelCorrelationQuery"
- response: Optional[FunnelCorrelationResponse] = None
- source: FunnelsActorsQuery
-
-
-class InsightVizNode(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- embedded: Optional[bool] = Field(default=None, description="Query is embedded inside another bordered component")
- full: Optional[bool] = Field(
- default=None, description="Show with most visual options enabled. Used in insight scene."
- )
- hidePersonsModal: Optional[bool] = None
- kind: Literal["InsightVizNode"] = "InsightVizNode"
- showCorrelationTable: Optional[bool] = None
- showFilters: Optional[bool] = None
- showHeader: Optional[bool] = None
- showLastComputation: Optional[bool] = None
- showLastComputationRefresh: Optional[bool] = None
- showResults: Optional[bool] = None
- showTable: Optional[bool] = None
- source: Union[TrendsQuery, FunnelsQuery, RetentionQuery, PathsQuery, StickinessQuery, LifecycleQuery] = Field(
- ..., discriminator="kind"
- )
- suppressSessionAnalysisWarning: Optional[bool] = None
- vizSpecificOptions: Optional[VizSpecificOptions] = None
-
-
-class DatabaseSchemaQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- kind: Literal["DatabaseSchemaQuery"] = "DatabaseSchemaQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- response: Optional[DatabaseSchemaQueryResponse] = None
-
-
-class FunnelCorrelationActorsQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- funnelCorrelationPersonConverted: Optional[bool] = None
- funnelCorrelationPersonEntity: Optional[Union[EventsNode, ActionsNode, DataWarehouseNode]] = None
- funnelCorrelationPropertyValues: Optional[
- list[
- Union[
- EventPropertyFilter,
- PersonPropertyFilter,
- ElementPropertyFilter,
- SessionPropertyFilter,
- CohortPropertyFilter,
- RecordingPropertyFilter,
- LogEntryPropertyFilter,
- GroupPropertyFilter,
- FeaturePropertyFilter,
- HogQLPropertyFilter,
- EmptyPropertyFilter,
- DataWarehousePropertyFilter,
- DataWarehousePersonPropertyFilter,
- ]
- ]
- ] = None
- includeRecordings: Optional[bool] = None
- kind: Literal["FunnelCorrelationActorsQuery"] = "FunnelCorrelationActorsQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- response: Optional[ActorsQueryResponse] = None
- source: FunnelCorrelationQuery
-
-
-class InsightActorsQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- breakdown: Optional[Union[str, list[str], int]] = None
- compare: Optional[Compare] = None
- day: Optional[Union[str, int]] = None
- includeRecordings: Optional[bool] = None
- interval: Optional[int] = Field(
- default=None, description="An interval selected out of available intervals in source query."
- )
- kind: Literal["InsightActorsQuery"] = "InsightActorsQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- response: Optional[ActorsQueryResponse] = None
- series: Optional[int] = None
- source: Union[TrendsQuery, FunnelsQuery, RetentionQuery, PathsQuery, StickinessQuery, LifecycleQuery] = Field(
- ..., discriminator="kind"
- )
- status: Optional[str] = None
-
-
-class InsightActorsQueryOptions(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- kind: Literal["InsightActorsQueryOptions"] = "InsightActorsQueryOptions"
- response: Optional[InsightActorsQueryOptionsResponse] = None
- source: Union[InsightActorsQuery, FunnelsActorsQuery, FunnelCorrelationActorsQuery]
-
-
-class ActorsQuery(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- fixedProperties: Optional[
- list[Union[PersonPropertyFilter, CohortPropertyFilter, HogQLPropertyFilter, EmptyPropertyFilter]]
- ] = Field(
- default=None,
- description=(
- "Currently only person filters supported. No filters for querying groups. See `filter_conditions()` in"
- " actor_strategies.py."
- ),
- )
- kind: Literal["ActorsQuery"] = "ActorsQuery"
- limit: Optional[int] = None
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- offset: Optional[int] = None
- orderBy: Optional[list[str]] = None
- properties: Optional[
- list[Union[PersonPropertyFilter, CohortPropertyFilter, HogQLPropertyFilter, EmptyPropertyFilter]]
- ] = Field(
- default=None,
- description=(
- "Currently only person filters supported. No filters for querying groups. See `filter_conditions()` in"
- " actor_strategies.py."
- ),
- )
- response: Optional[ActorsQueryResponse] = None
- search: Optional[str] = None
- select: Optional[list[str]] = None
- source: Optional[Union[InsightActorsQuery, FunnelsActorsQuery, FunnelCorrelationActorsQuery, HogQLQuery]] = None
-
-
-class DataTableNode(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- allowSorting: Optional[bool] = Field(
- default=None, description="Can the user click on column headers to sort the table? (default: true)"
- )
- columns: Optional[list[str]] = Field(
- default=None, description="Columns shown in the table, unless the `source` provides them."
- )
- embedded: Optional[bool] = Field(default=None, description="Uses the embedded version of LemonTable")
- expandable: Optional[bool] = Field(
- default=None, description="Can expand row to show raw event data (default: true)"
- )
- full: Optional[bool] = Field(default=None, description="Show with most visual options enabled. Used in scenes.")
- hiddenColumns: Optional[list[str]] = Field(
- default=None, description="Columns that aren't shown in the table, even if in columns or returned data"
- )
- kind: Literal["DataTableNode"] = "DataTableNode"
- propertiesViaUrl: Optional[bool] = Field(default=None, description="Link properties via the URL (default: false)")
- response: Optional[
- Union[
- dict[str, Any],
- Response,
- Response1,
- Response2,
- Response3,
- Response4,
- Response7,
- Response8,
- Response9,
- Response10,
- ]
- ] = None
- showActions: Optional[bool] = Field(default=None, description="Show the kebab menu at the end of the row")
- showColumnConfigurator: Optional[bool] = Field(
- default=None, description="Show a button to configure the table's columns if possible"
- )
- showDateRange: Optional[bool] = Field(default=None, description="Show date range selector")
- showElapsedTime: Optional[bool] = Field(default=None, description="Show the time it takes to run a query")
- showEventFilter: Optional[bool] = Field(
- default=None, description="Include an event filter above the table (EventsNode only)"
- )
- showExport: Optional[bool] = Field(default=None, description="Show the export button")
- showHogQLEditor: Optional[bool] = Field(default=None, description="Include a HogQL query editor above HogQL tables")
- showOpenEditorButton: Optional[bool] = Field(
- default=None, description="Show a button to open the current query as a new insight. (default: true)"
- )
- showPersistentColumnConfigurator: Optional[bool] = Field(
- default=None, description="Show a button to configure and persist the table's default columns if possible"
- )
- showPropertyFilter: Optional[Union[bool, list[TaxonomicFilterGroupType]]] = Field(
- default=None, description="Include a property filter above the table"
- )
- showReload: Optional[bool] = Field(default=None, description="Show a reload button")
- showResultsTable: Optional[bool] = Field(default=None, description="Show a results table")
- showSavedQueries: Optional[bool] = Field(default=None, description="Shows a list of saved queries")
- showSearch: Optional[bool] = Field(default=None, description="Include a free text search field (PersonsNode only)")
- showTestAccountFilters: Optional[bool] = Field(default=None, description="Show filter to exclude test accounts")
- showTimings: Optional[bool] = Field(default=None, description="Show a detailed query timing breakdown")
- source: Union[
- EventsNode,
- EventsQuery,
- PersonsNode,
- ActorsQuery,
- HogQLQuery,
- WebOverviewQuery,
- WebStatsTableQuery,
- WebExternalClicksTableQuery,
- WebGoalsQuery,
- SessionAttributionExplorerQuery,
- ErrorTrackingQuery,
- ExperimentFunnelsQuery,
- ExperimentTrendsQuery,
- ] = Field(..., description="Source of the events")
-
-
-class HogQLAutocomplete(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- endPosition: int = Field(..., description="End position of the editor word")
- filters: Optional[HogQLFilters] = Field(default=None, description="Table to validate the expression against")
- globals: Optional[dict[str, Any]] = Field(default=None, description="Global values in scope")
- kind: Literal["HogQLAutocomplete"] = "HogQLAutocomplete"
- language: HogLanguage = Field(..., description="Language to validate")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query: str = Field(..., description="Query to validate")
- response: Optional[HogQLAutocompleteResponse] = None
- sourceQuery: Optional[
- Union[
- EventsNode,
- ActionsNode,
- PersonsNode,
- EventsQuery,
- ActorsQuery,
- InsightActorsQuery,
- InsightActorsQueryOptions,
- SessionsTimelineQuery,
- HogQuery,
- HogQLQuery,
- HogQLMetadata,
- HogQLAutocomplete,
- WebOverviewQuery,
- WebStatsTableQuery,
- WebExternalClicksTableQuery,
- WebGoalsQuery,
- SessionAttributionExplorerQuery,
- ErrorTrackingQuery,
- ExperimentFunnelsQuery,
- ExperimentTrendsQuery,
- RecordingsQuery,
- ]
- ] = Field(default=None, description="Query in whose context to validate.")
- startPosition: int = Field(..., description="Start position of the editor word")
-
-
-class HogQLMetadata(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- debug: Optional[bool] = Field(
- default=None, description="Enable more verbose output, usually run from the /debug page"
- )
- filters: Optional[HogQLFilters] = Field(default=None, description="Extra filters applied to query via {filters}")
- globals: Optional[dict[str, Any]] = Field(default=None, description="Extra globals for the query")
- kind: Literal["HogQLMetadata"] = "HogQLMetadata"
- language: HogLanguage = Field(..., description="Language to validate")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query: str = Field(..., description="Query to validate")
- response: Optional[HogQLMetadataResponse] = None
- sourceQuery: Optional[
- Union[
- EventsNode,
- ActionsNode,
- PersonsNode,
- EventsQuery,
- ActorsQuery,
- InsightActorsQuery,
- InsightActorsQueryOptions,
- SessionsTimelineQuery,
- HogQuery,
- HogQLQuery,
- HogQLMetadata,
- HogQLAutocomplete,
- WebOverviewQuery,
- WebStatsTableQuery,
- WebExternalClicksTableQuery,
- WebGoalsQuery,
- SessionAttributionExplorerQuery,
- ErrorTrackingQuery,
- ExperimentFunnelsQuery,
- ExperimentTrendsQuery,
- RecordingsQuery,
- ]
- ] = Field(
- default=None,
- description='Query within which "expr" and "template" are validated. Defaults to "select * from events"',
- )
- variables: Optional[dict[str, HogQLVariable]] = Field(
- default=None, description="Variables to be subsituted into the query"
- )
-
-
-class QueryRequest(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- async_: Optional[bool] = Field(default=None, alias="async")
- client_query_id: Optional[str] = Field(
- default=None, description="Client provided query ID. Can be used to retrieve the status or cancel the query."
- )
- filters_override: Optional[DashboardFilter] = None
- query: Union[
- EventsNode,
- ActionsNode,
- PersonsNode,
- DataWarehouseNode,
- EventsQuery,
- ActorsQuery,
- InsightActorsQuery,
- InsightActorsQueryOptions,
- SessionsTimelineQuery,
- HogQuery,
- HogQLQuery,
- HogQLMetadata,
- HogQLAutocomplete,
- WebOverviewQuery,
- WebStatsTableQuery,
- WebExternalClicksTableQuery,
- WebGoalsQuery,
- SessionAttributionExplorerQuery,
- ErrorTrackingQuery,
- ExperimentFunnelsQuery,
- ExperimentTrendsQuery,
- DataVisualizationNode,
- DataTableNode,
- SavedInsightNode,
- InsightVizNode,
- TrendsQuery,
- FunnelsQuery,
- RetentionQuery,
- PathsQuery,
- StickinessQuery,
- LifecycleQuery,
- FunnelCorrelationQuery,
- DatabaseSchemaQuery,
- SuggestedQuestionsQuery,
- TeamTaxonomyQuery,
- EventTaxonomyQuery,
- ActorsPropertyTaxonomyQuery,
- ] = Field(
- ...,
- description=(
- "Submit a JSON string representing a query for PostHog data analysis, for example a HogQL query.\n\nExample"
- ' payload:\n\n```\n\n{"query": {"kind": "HogQLQuery", "query": "select * from events limit'
- ' 100"}}\n\n```\n\nFor more details on HogQL queries, see the [PostHog HogQL'
- " documentation](/docs/hogql#api-access)."
- ),
- discriminator="kind",
- )
- refresh: Optional[Union[bool, str]] = Field(
- default="blocking",
- description=(
- "Whether results should be calculated sync or async, and how much to rely on the cache:\n- `'blocking'` -"
- " calculate synchronously (returning only when the query is done), UNLESS there are very fresh results in"
- " the cache\n- `'async'` - kick off background calculation (returning immediately with a query status),"
- " UNLESS there are very fresh results in the cache\n- `'lazy_async'` - kick off background calculation,"
- " UNLESS there are somewhat fresh results in the cache\n- `'force_blocking'` - calculate synchronously,"
- " even if fresh results are already cached\n- `'force_async'` - kick off background calculation, even if"
- " fresh results are already cached\n- `'force_cache'` - return cached data or a cache miss; always"
- " completes immediately as it never calculates Background calculation can be tracked using the"
- " `query_status` response field."
- ),
- )
- variables_override: Optional[dict[str, dict[str, Any]]] = None
-
-
-class QuerySchemaRoot(
- RootModel[
- Union[
- EventsNode,
- ActionsNode,
- PersonsNode,
- DataWarehouseNode,
- EventsQuery,
- ActorsQuery,
- InsightActorsQuery,
- InsightActorsQueryOptions,
- SessionsTimelineQuery,
- HogQuery,
- HogQLQuery,
- HogQLMetadata,
- HogQLAutocomplete,
- WebOverviewQuery,
- WebStatsTableQuery,
- WebExternalClicksTableQuery,
- WebGoalsQuery,
- SessionAttributionExplorerQuery,
- ErrorTrackingQuery,
- ExperimentFunnelsQuery,
- ExperimentTrendsQuery,
- DataVisualizationNode,
- DataTableNode,
- SavedInsightNode,
- InsightVizNode,
- TrendsQuery,
- FunnelsQuery,
- RetentionQuery,
- PathsQuery,
- StickinessQuery,
- LifecycleQuery,
- FunnelCorrelationQuery,
- DatabaseSchemaQuery,
- SuggestedQuestionsQuery,
- TeamTaxonomyQuery,
- EventTaxonomyQuery,
- ActorsPropertyTaxonomyQuery,
- ]
- ]
-):
- root: Union[
- EventsNode,
- ActionsNode,
- PersonsNode,
- DataWarehouseNode,
- EventsQuery,
- ActorsQuery,
- InsightActorsQuery,
- InsightActorsQueryOptions,
- SessionsTimelineQuery,
- HogQuery,
- HogQLQuery,
- HogQLMetadata,
- HogQLAutocomplete,
- WebOverviewQuery,
- WebStatsTableQuery,
- WebExternalClicksTableQuery,
- WebGoalsQuery,
- SessionAttributionExplorerQuery,
- ErrorTrackingQuery,
- ExperimentFunnelsQuery,
- ExperimentTrendsQuery,
- DataVisualizationNode,
- DataTableNode,
- SavedInsightNode,
- InsightVizNode,
- TrendsQuery,
- FunnelsQuery,
- RetentionQuery,
- PathsQuery,
- StickinessQuery,
- LifecycleQuery,
- FunnelCorrelationQuery,
- DatabaseSchemaQuery,
- SuggestedQuestionsQuery,
- TeamTaxonomyQuery,
- EventTaxonomyQuery,
- ActorsPropertyTaxonomyQuery,
- ] = Field(..., discriminator="kind")
-
-
-PropertyGroupFilterValue.model_rebuild()
-QueryRequest.model_rebuild()
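
A minimal usage sketch (not part of the diff): it builds the example payload described in the `QueryRequest.query` docstring above using the generated Pydantic models, assuming they are importable as `posthog.schema` and that Pydantic v2's `model_dump` is available.

    from posthog.schema import HogQLQuery, QueryRequest

    # The docstring's example payload:
    # {"query": {"kind": "HogQLQuery", "query": "select * from events limit 100"}}
    request = QueryRequest(
        query=HogQLQuery(query="select * from events limit 100"),
        refresh="blocking",  # the default; the other refresh modes are listed in the field description
    )

    # by_alias=True serializes the `async_` field under its "async" alias;
    # exclude_none=True drops the many optional fields left at None.
    payload = request.model_dump(by_alias=True, exclude_none=True)
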
diff --git a/posthog/session_recordings/queries/session_recording_list_from_query.py b/posthog/session_recordings/queries/session_recording_list_from_query.py
index b1819c063f383..29348fd2e65fb 100644
--- a/posthog/session_recordings/queries/session_recording_list_from_query.py
+++ b/posthog/session_recordings/queries/session_recording_list_from_query.py
@@ -83,12 +83,12 @@ def _strip_person_and_event_and_cohort_properties(
return None
properties_to_keep = [
- g
- for g in properties
- if not is_event_property(g)
- and not is_person_property(g)
- and not is_group_property(g)
- and not is_cohort_property(g)
+ p
+ for p in properties
+ if not is_event_property(p)
+ and not is_person_property(p)
+ and not is_group_property(p)
+ and not is_cohort_property(p)
]
return properties_to_keep
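
A self-contained sketch of the filtering pattern in the hunk above, using stand-in predicates (the real `is_event_property`, `is_person_property`, `is_group_property` and `is_cohort_property` helpers live elsewhere in this module; the `type`-based check here is only an assumption for illustration).

    def _is(prop, kind: str) -> bool:
        # Stand-in for the real is_*_property helpers: inspect a hypothetical "type" field.
        return getattr(prop, "type", None) == kind

    def strip_person_and_event_and_cohort_properties(properties):
        if not properties:
            return None
        # Keep only filters that are none of: event, person, group or cohort.
        return [
            p
            for p in properties
            if not any(_is(p, kind) for kind in ("event", "person", "group", "cohort"))
        ]
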
diff --git a/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr b/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr
index df35cd7773af4..562e02922b074 100644
--- a/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr
+++ b/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr
@@ -798,6 +798,126 @@
allow_experimental_analyzer=0
'''
# ---
+# name: TestSessionRecordingsListFromQuery.test_can_filter_for_flags_0_session_1_matches_target_flag_is_True
+ '''
+ SELECT s.session_id AS session_id,
+ any(s.team_id),
+ any(s.distinct_id),
+ min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
+ max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
+ dateDiff('SECOND', start_time, end_time) AS duration,
+ argMinMerge(s.first_url) AS first_url,
+ sum(s.click_count) AS click_count,
+ sum(s.keypress_count) AS keypress_count,
+ sum(s.mouse_activity_count) AS mouse_activity_count,
+ divide(sum(s.active_milliseconds), 1000) AS active_seconds,
+ minus(duration, active_seconds) AS inactive_seconds,
+ sum(s.console_log_count) AS console_log_count,
+ sum(s.console_warn_count) AS console_warn_count,
+ sum(s.console_error_count) AS console_error_count,
+ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
+ round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
+ FROM session_replay_events AS s
+ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+ (SELECT events.`$session_id` AS session_id
+ FROM events
+ WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$feature/target-flag'), ''), 'null'), '^"|"$', ''), 'true'), 0))
+ GROUP BY events.`$session_id`
+ HAVING 1)))
+ GROUP BY s.session_id
+ HAVING 1
+ ORDER BY start_time DESC
+ LIMIT 51
+ OFFSET 0 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1,
+ format_csv_allow_double_quotes=0,
+ max_ast_elements=4000000,
+ max_expanded_ast_elements=4000000,
+ max_bytes_before_external_group_by=0,
+ allow_experimental_analyzer=0
+ '''
+# ---
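
The snapshots in this file exercise filtering recordings by the `$feature/target-flag` event property. A rough sketch of the corresponding query object, using the `RecordingsQuery` model shown earlier in this diff (the `EventPropertyFilter` field names used here, key/value/operator/type, are not shown in this section and are assumed):

    from posthog.schema import EventPropertyFilter, RecordingsQuery

    # Sessions whose events set $feature/target-flag to "true", over the default -3d window.
    query = RecordingsQuery(
        properties=[
            EventPropertyFilter(
                key="$feature/target-flag",
                value=["true"],
                operator="exact",  # assumed property operator value
                type="event",
            )
        ],
    )
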
+# name: TestSessionRecordingsListFromQuery.test_can_filter_for_flags_1_session_2_matches_target_flag_is_False
+ '''
+ SELECT s.session_id AS session_id,
+ any(s.team_id),
+ any(s.distinct_id),
+ min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
+ max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
+ dateDiff('SECOND', start_time, end_time) AS duration,
+ argMinMerge(s.first_url) AS first_url,
+ sum(s.click_count) AS click_count,
+ sum(s.keypress_count) AS keypress_count,
+ sum(s.mouse_activity_count) AS mouse_activity_count,
+ divide(sum(s.active_milliseconds), 1000) AS active_seconds,
+ minus(duration, active_seconds) AS inactive_seconds,
+ sum(s.console_log_count) AS console_log_count,
+ sum(s.console_warn_count) AS console_warn_count,
+ sum(s.console_error_count) AS console_error_count,
+ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
+ round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
+ FROM session_replay_events AS s
+ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+ (SELECT events.`$session_id` AS session_id
+ FROM events
+ WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$feature/target-flag'), ''), 'null'), '^"|"$', ''), 'false'), 0))
+ GROUP BY events.`$session_id`
+ HAVING 1)))
+ GROUP BY s.session_id
+ HAVING 1
+ ORDER BY start_time DESC
+ LIMIT 51
+ OFFSET 0 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1,
+ format_csv_allow_double_quotes=0,
+ max_ast_elements=4000000,
+ max_expanded_ast_elements=4000000,
+ max_bytes_before_external_group_by=0,
+ allow_experimental_analyzer=0
+ '''
+# ---
+# name: TestSessionRecordingsListFromQuery.test_can_filter_for_flags_2_sessions_1_and_2_match_target_flag_is_set
+ '''
+ SELECT s.session_id AS session_id,
+ any(s.team_id),
+ any(s.distinct_id),
+ min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
+ max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
+ dateDiff('SECOND', start_time, end_time) AS duration,
+ argMinMerge(s.first_url) AS first_url,
+ sum(s.click_count) AS click_count,
+ sum(s.keypress_count) AS keypress_count,
+ sum(s.mouse_activity_count) AS mouse_activity_count,
+ divide(sum(s.active_milliseconds), 1000) AS active_seconds,
+ minus(duration, active_seconds) AS inactive_seconds,
+ sum(s.console_log_count) AS console_log_count,
+ sum(s.console_warn_count) AS console_warn_count,
+ sum(s.console_error_count) AS console_error_count,
+ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
+ round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
+ FROM session_replay_events AS s
+ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+ (SELECT events.`$session_id` AS session_id
+ FROM events
+ WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), isNotNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$feature/target-flag'), ''), 'null'), '^"|"$', '')))
+ GROUP BY events.`$session_id`
+ HAVING 1)))
+ GROUP BY s.session_id
+ HAVING 1
+ ORDER BY start_time DESC
+ LIMIT 51
+ OFFSET 0 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1,
+ format_csv_allow_double_quotes=0,
+ max_ast_elements=4000000,
+ max_expanded_ast_elements=4000000,
+ max_bytes_before_external_group_by=0,
+ allow_experimental_analyzer=0
+ '''
+# ---
# name: TestSessionRecordingsListFromQuery.test_date_from_filter
'''
SELECT s.session_id AS session_id,
diff --git a/posthog/session_recordings/queries/test/test_session_recording_list_from_query.py b/posthog/session_recordings/queries/test/test_session_recording_list_from_query.py
index 2acf2a47d5642..366338ed0a526 100644
--- a/posthog/session_recordings/queries/test/test_session_recording_list_from_query.py
+++ b/posthog/session_recordings/queries/test/test_session_recording_list_from_query.py
@@ -4243,3 +4243,96 @@ def test_top_level_event_host_property_test_account_filter(self):
"ongoing": 1,
}
]
+
+ @parameterized.expand(
+ [
+ (
+ "session 1 matches target flag is True",
+ [{"type": "event", "key": "$feature/target-flag", "operator": "exact", "value": ["true"]}],
+ ["1"],
+ ),
+ (
+ "session 2 matches target flag is False",
+ [{"type": "event", "key": "$feature/target-flag", "operator": "exact", "value": ["false"]}],
+ ["2"],
+ ),
+ (
+ "sessions 1 and 2 match target flag is set",
+ [{"type": "event", "key": "$feature/target-flag", "operator": "is_set", "value": "is_set"}],
+ ["1", "2"],
+ ),
+ # FIXME: we don't handle negation correctly at all, let's fix that in a follow-up
+ # because setup adds an event with no flags to each session, every session matches this case: we check whether _any_ event fails to match, rather than requiring that _all_ events fail to match
+ # ("sessions 3 and 4 match target flag is not set", [{"type": "event", "key": "$feature/target-flag", "operator": "is_not_set", "value": "is_not_set"}], ["3", "4"]),
+ ]
+ )
+ @freeze_time("2021-01-21T20:00:00.000Z")
+ @snapshot_clickhouse_queries
+ def test_can_filter_for_flags(self, _name: str, properties: dict, expected: list[str]) -> None:
+ Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"})
+
+ produce_replay_summary(
+ distinct_id="user",
+ session_id="1",
+ first_timestamp=self.an_hour_ago,
+ team_id=self.team.id,
+ )
+ self.create_event(
+ "user",
+ self.an_hour_ago,
+ properties={
+ "$session_id": "1",
+ "$window_id": "1",
+ "$feature/target-flag": True,
+ },
+ )
+
+ produce_replay_summary(
+ distinct_id="user",
+ session_id="2",
+ first_timestamp=self.an_hour_ago,
+ team_id=self.team.id,
+ )
+ self.create_event(
+ "user",
+ self.an_hour_ago,
+ properties={
+ "$session_id": "2",
+ "$window_id": "1",
+ "$feature/target-flag": False,
+ },
+ )
+
+ produce_replay_summary(
+ distinct_id="user",
+ session_id="3",
+ first_timestamp=self.an_hour_ago,
+ team_id=self.team.id,
+ )
+ self.create_event(
+ "user",
+ self.an_hour_ago,
+ properties={
+ "$session_id": "3",
+ "$window_id": "1",
+ "$feature/flag-that-is-different": False,
+ },
+ )
+
+ produce_replay_summary(
+ distinct_id="user",
+ session_id="4",
+ first_timestamp=self.an_hour_ago,
+ team_id=self.team.id,
+ )
+ self.create_event(
+ "user",
+ self.an_hour_ago,
+ properties={
+ "$session_id": "4",
+ "$window_id": "1",
+ },
+ )
+
+ (session_recordings, _, _) = self._filter_recordings_by({"properties": properties})
+ assert sorted([sr["session_id"] for sr in session_recordings]) == expected
diff --git a/posthog/session_recordings/session_recording_api.py b/posthog/session_recordings/session_recording_api.py
index 979a4b1983967..fc716658d02de 100644
--- a/posthog/session_recordings/session_recording_api.py
+++ b/posthog/session_recordings/session_recording_api.py
@@ -15,7 +15,8 @@
from django.http import HttpResponse, JsonResponse
from drf_spectacular.utils import extend_schema
from prometheus_client import Counter, Histogram
-from rest_framework import exceptions, request, serializers, viewsets
+from pydantic import ValidationError
+from rest_framework import exceptions, request, serializers, viewsets, status
from rest_framework.mixins import UpdateModelMixin
from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
@@ -331,7 +332,15 @@ def list(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
data_dict = query_as_params_to_dict(request.GET.dict())
# we used to send `version` and it's not part of query, so we pop to make sure
data_dict.pop("version", None)
- query = RecordingsQuery.model_validate(data_dict)
+ # we used to send `hogql_filtering` and it's not part of query, so we pop to make sure
+ data_dict.pop("hogql_filtering", None)
+
+ try:
+ query = RecordingsQuery.model_validate(data_dict)
+ except ValidationError as pydantic_validation_error:
+ return Response(
+ {"validation_errors": json.loads(pydantic_validation_error.json())}, status=status.HTTP_400_BAD_REQUEST
+ )
self._maybe_report_recording_list_filters_changed(request, team=self.team)
return list_recordings_response(
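
The hunk above converts pydantic validation failures into a structured 400 response instead of letting them surface as a 500. A minimal, framework-free sketch of that pattern (the `RecordingsQuerySketch` model and its fields are illustrative stand-ins, not the real `RecordingsQuery` schema):

```python
import json

from pydantic import BaseModel, ConfigDict, ValidationError


class RecordingsQuerySketch(BaseModel):
    """Illustrative stand-in for the real RecordingsQuery model."""

    model_config = ConfigDict(extra="forbid")  # unknown query params become validation errors
    session_ids: list[str] | None = None


def validate_params(data: dict) -> tuple[int, dict]:
    """Return an (http_status, body) pair, mirroring the view's error handling."""
    try:
        query = RecordingsQuerySketch.model_validate(data)
    except ValidationError as e:
        # json.loads(e.json()) yields a list of {"type", "loc", "msg", "input", "url"} dicts,
        # which is the shape the API test below asserts against.
        return 400, {"validation_errors": json.loads(e.json())}
    return 200, query.model_dump()


print(validate_params({"session_ids": "invalid", "tomato": "potato"})[0])  # 400
```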
diff --git a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr
index 1f7ec450915fe..43351b8340128 100644
--- a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr
+++ b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr
@@ -455,6 +455,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -811,6 +812,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -938,6 +940,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -1580,6 +1583,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -1877,6 +1881,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -2025,6 +2030,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -2279,6 +2285,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -2391,6 +2398,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -2636,6 +2644,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -2783,6 +2792,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -3039,6 +3049,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -3331,6 +3342,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -3530,6 +3542,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -3788,6 +3801,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -4091,6 +4105,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -4237,6 +4252,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -4497,6 +4513,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -4594,6 +4611,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -4813,6 +4831,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -5005,6 +5024,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -5267,6 +5287,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -5619,6 +5640,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -5792,6 +5814,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -6053,6 +6076,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -6226,6 +6250,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -6472,6 +6497,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -6788,6 +6814,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -6941,6 +6968,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -7189,6 +7217,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -7486,6 +7515,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -7634,6 +7664,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -7884,6 +7915,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -7996,6 +8028,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -8241,6 +8274,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -8388,6 +8422,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -8640,6 +8675,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
diff --git a/posthog/session_recordings/test/test_session_recordings.py b/posthog/session_recordings/test/test_session_recordings.py
index 561d30ea83b02..2a501a5643772 100644
--- a/posthog/session_recordings/test/test_session_recordings.py
+++ b/posthog/session_recordings/test/test_session_recordings.py
@@ -1200,3 +1200,35 @@ def test_404_when_no_snapshots(self) -> None:
f"/api/projects/{self.team.id}/session_recordings/1/snapshots?",
)
assert response.status_code == status.HTTP_404_NOT_FOUND
+
+ def test_400_when_invalid_list_query(self) -> None:
+ query_params = "&".join(
+ [
+ 'session_ids="invalid"',
+ "hogql_filtering=1",
+ "tomato=potato",
+ "version=2",
+ ]
+ )
+ response = self.client.get(
+ f"/api/projects/{self.team.id}/session_recordings?{query_params}",
+ )
+ assert response.status_code == status.HTTP_400_BAD_REQUEST
+ assert response.json() == {
+ "validation_errors": [
+ {
+ "type": "list_type",
+ "loc": ["session_ids"],
+ "msg": "Input should be a valid list",
+ "input": "invalid",
+ "url": "https://errors.pydantic.dev/2.9/v/list_type",
+ },
+ {
+ "type": "extra_forbidden",
+ "loc": ["tomato"],
+ "msg": "Extra inputs are not permitted",
+ "input": "potato",
+ "url": "https://errors.pydantic.dev/2.9/v/extra_forbidden",
+ },
+ ],
+ }
diff --git a/posthog/settings/data_stores.py b/posthog/settings/data_stores.py
index 77bc535da8a08..2c662baa42e60 100644
--- a/posthog/settings/data_stores.py
+++ b/posthog/settings/data_stores.py
@@ -148,6 +148,7 @@ def postgres_config(host: str) -> dict:
CLICKHOUSE_OFFLINE_CLUSTER_HOST: str | None = os.getenv("CLICKHOUSE_OFFLINE_CLUSTER_HOST", None)
CLICKHOUSE_USER: str = os.getenv("CLICKHOUSE_USER", "default")
CLICKHOUSE_PASSWORD: str = os.getenv("CLICKHOUSE_PASSWORD", "")
+CLICKHOUSE_USE_HTTP: str = get_from_env("CLICKHOUSE_USE_HTTP", False, type_cast=str_to_bool)
CLICKHOUSE_DATABASE: str = CLICKHOUSE_TEST_DB if TEST else os.getenv("CLICKHOUSE_DATABASE", "default")
CLICKHOUSE_CLUSTER: str = os.getenv("CLICKHOUSE_CLUSTER", "posthog")
CLICKHOUSE_CA: str | None = os.getenv("CLICKHOUSE_CA", None)
diff --git a/posthog/settings/temporal.py b/posthog/settings/temporal.py
index e168d12c46d84..2010a15e8ecab 100644
--- a/posthog/settings/temporal.py
+++ b/posthog/settings/temporal.py
@@ -19,16 +19,27 @@
BATCH_EXPORT_S3_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES: int = get_from_env(
"BATCH_EXPORT_S3_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES", 0, type_cast=int
)
+
BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES: int = 1024 * 1024 * 100 # 100MB
+BATCH_EXPORT_SNOWFLAKE_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES: int = get_from_env(
+ "BATCH_EXPORT_SNOWFLAKE_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES", 1024 * 1024 * 300, type_cast=int
+)
+
BATCH_EXPORT_POSTGRES_UPLOAD_CHUNK_SIZE_BYTES: int = 1024 * 1024 * 50 # 50MB
+BATCH_EXPORT_POSTGRES_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES: int = get_from_env(
+ "BATCH_EXPORT_POSTGRES_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES", 1024 * 1024 * 300, type_cast=int
+)
+
BATCH_EXPORT_BIGQUERY_UPLOAD_CHUNK_SIZE_BYTES: int = 1024 * 1024 * 100 # 100MB
BATCH_EXPORT_BIGQUERY_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES: int = get_from_env(
"BATCH_EXPORT_BIGQUERY_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES", 0, type_cast=int
)
+
BATCH_EXPORT_REDSHIFT_UPLOAD_CHUNK_SIZE_BYTES: int = 1024 * 1024 * 8 # 8MB
BATCH_EXPORT_REDSHIFT_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES: int = get_from_env(
"BATCH_EXPORT_REDSHIFT_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES", 1024 * 1024 * 300, type_cast=int
)
+
BATCH_EXPORT_HTTP_UPLOAD_CHUNK_SIZE_BYTES: int = 1024 * 1024 * 50 # 50MB
BATCH_EXPORT_HTTP_BATCH_SIZE: int = 5000
BATCH_EXPORT_BUFFER_QUEUE_MAX_SIZE_BYTES: int = 1024 * 1024 * 300 # 300MB
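
The new per-destination queue size limits follow the existing `get_from_env(name, default, type_cast=...)` pattern used throughout these settings. A rough sketch of what such a helper does, purely for illustration (this is an assumed simplification, not PostHog's actual implementation):

```python
import os
from typing import Any, Callable


def get_from_env_sketch(key: str, default: Any, *, type_cast: Callable[[str], Any] = str) -> Any:
    """Illustrative helper: read an env var and cast it, falling back to the default."""
    raw = os.getenv(key)
    if raw is None or raw == "":
        return default
    return type_cast(raw)


# e.g. override the 300MB default record batch queue size for Snowflake batch exports
queue_max_bytes = get_from_env_sketch(
    "BATCH_EXPORT_SNOWFLAKE_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES", 1024 * 1024 * 300, type_cast=int
)
```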
diff --git a/posthog/tasks/email.py b/posthog/tasks/email.py
index df13b60dcf286..4a8588a2c5210 100644
--- a/posthog/tasks/email.py
+++ b/posthog/tasks/email.py
@@ -1,6 +1,7 @@
import uuid
from datetime import datetime
-from typing import Optional
+from enum import Enum
+from typing import Literal, Optional
import posthoganalytics
import structlog
@@ -27,6 +28,14 @@
logger = structlog.get_logger(__name__)
+class NotificationSetting(Enum):
+ WEEKLY_PROJECT_DIGEST = "weekly_project_digest"
+ PLUGIN_DISABLED = "plugin_disabled"
+
+
+NotificationSettingType = Literal["weekly_project_digest", "plugin_disabled"]
+
+
def send_message_to_all_staff_users(message: EmailMessage) -> None:
for user in User.objects.filter(is_active=True, is_staff=True):
message.add_recipient(email=user.email, name=user.first_name)
@@ -54,6 +63,46 @@ def get_members_to_notify(team: Team, notification_setting: str) -> list[Organiz
return memberships_to_email
+def should_send_notification(
+ user: User,
+ notification_type: NotificationSettingType,
+ team_id: Optional[int] = None,
+) -> bool:
+ """
+ Determines if a notification should be sent to a user based on their notification settings.
+
+ Args:
+ user: The user to check settings for
+ notification_type: The type of notification being sent. It must be the enum member's value!
+ team_id: Optional team ID for team-specific notifications
+
+ Returns:
+ bool: True if the notification should be sent, False otherwise
+ """
+ settings = user.notification_settings
+
+ if notification_type == NotificationSetting.WEEKLY_PROJECT_DIGEST.value:
+ # First check global digest setting
+ if settings.get("all_weekly_digest_disabled", False):
+ return False
+
+ # Then check project-specific setting if team_id provided
+ if team_id is not None:
+ project_settings = settings.get("project_weekly_digest_disabled", {})
+ team_disabled = project_settings.get(str(team_id), False)
+ return not team_disabled
+
+ return True
+
+ elif notification_type == NotificationSetting.PLUGIN_DISABLED.value:
+ return not settings.get("plugin_disabled", True) # Default to True (disabled) if not set
+
+ # The type error below is ignored because we currently handle all notification
+ # types above, so technically this line is unreachable. However, if another type is
+ # added but not handled in this function, we want this as a fallback.
+ return True # type: ignore
+
+
@shared_task(**EMAIL_TASK_KWARGS)
def send_invite(invite_id: str) -> None:
campaign_key: str = f"invite_email_{invite_id}"
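
The weekly-digest branch of `should_send_notification` is driven entirely by the user's `notification_settings` dict, so its behaviour can be sketched without a real `User` row. A simplified, standalone restatement of just that branch (not the production helper itself):

```python
def weekly_digest_allowed(notification_settings: dict, team_id: int) -> bool:
    """Simplified restatement of the WEEKLY_PROJECT_DIGEST branch above."""
    # Global opt-out wins over everything else.
    if notification_settings.get("all_weekly_digest_disabled", False):
        return False
    # Otherwise, a per-project opt-out keyed by stringified team id applies.
    per_project = notification_settings.get("project_weekly_digest_disabled", {})
    return not per_project.get(str(team_id), False)


assert weekly_digest_allowed({}, team_id=1) is True
assert weekly_digest_allowed({"project_weekly_digest_disabled": {"2": True}}, team_id=1) is True
assert weekly_digest_allowed({"project_weekly_digest_disabled": {"2": True}}, team_id=2) is False
assert weekly_digest_allowed({"all_weekly_digest_disabled": True}, team_id=1) is False
```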
diff --git a/posthog/tasks/periodic_digest.py b/posthog/tasks/periodic_digest.py
index 829410be4e82d..5825260283423 100644
--- a/posthog/tasks/periodic_digest.py
+++ b/posthog/tasks/periodic_digest.py
@@ -8,6 +8,7 @@
from dateutil import parser
from django.db.models import QuerySet
from django.utils import timezone
+from posthoganalytics.client import Client
from sentry_sdk import capture_exception
from posthog.models.dashboard import Dashboard
@@ -16,15 +17,18 @@
from posthog.models.feature_flag import FeatureFlag
from posthog.models.feedback.survey import Survey
from posthog.models.messaging import MessagingRecord
+from posthog.models.organization import OrganizationMembership
from posthog.models.team.team import Team
from posthog.session_recordings.models.session_recording_playlist import (
SessionRecordingPlaylist,
)
-from posthog.tasks.usage_report import (
- USAGE_REPORT_TASK_KWARGS,
- capture_report,
- get_instance_metadata,
+from posthog.tasks.email import (
+ NotificationSetting,
+ NotificationSettingType,
+ should_send_notification,
)
+from posthog.tasks.report_utils import capture_event
+from posthog.tasks.usage_report import USAGE_REPORT_TASK_KWARGS, get_instance_metadata
from posthog.tasks.utils import CeleryQueue
from posthog.warehouse.models.external_data_source import ExternalDataSource
@@ -77,7 +81,7 @@ def get_teams_with_new_playlists(end: datetime, begin: datetime) -> QuerySet:
)
.exclude(
name="",
- derived_name="",
+ derived_name=None,
)
.values("team_id", "name", "short_id", "derived_name")
)
@@ -242,11 +246,12 @@ def send_periodic_digest_report(
**instance_metadata,
}
- capture_report.delay(
- capture_event_name="transactional email",
+ send_digest_notifications(
team_id=team_id,
- full_report_dict=full_report_dict,
- send_for_all_members=True,
+ organization_id=None, # Will be derived from team
+ event_name="transactional email",
+ properties=full_report_dict,
+ notification_type=NotificationSetting.WEEKLY_PROJECT_DIGEST.value,
)
# Mark as sent
@@ -293,3 +298,47 @@ def send_all_periodic_digest_reports(
except Exception as err:
capture_exception(err)
raise
+
+
+def send_digest_notifications(
+ *,
+ team_id: int,
+ organization_id: Optional[str],
+ event_name: str,
+ properties: dict[str, Any],
+ notification_type: NotificationSettingType,
+ timestamp: Optional[datetime] = None,
+) -> None:
+ """
+ Determines eligible recipients and sends individual notifications for digest reports.
+ """
+ pha_client = Client("sTMFPsFhdP1Ssg")
+
+ team = Team.objects.get(id=team_id) if not organization_id else None
+ organization_id = organization_id or str(team.organization_id)
+
+ users = (
+ [
+ membership.user
+ for membership in OrganizationMembership.objects.filter(organization_id=organization_id).select_related(
+ "user"
+ )
+ ]
+ if organization_id
+ else team.all_users_with_access()
+ )
+
+ eligible_users = [user for user in users if should_send_notification(user, notification_type, team_id)]
+ # Send individual events for each eligible user
+ for user in eligible_users:
+ capture_event(
+ pha_client=pha_client,
+ name=event_name,
+ organization_id=organization_id,
+ team_id=team_id,
+ properties=properties,
+ timestamp=timestamp,
+ distinct_id=user.distinct_id,
+ )
+
+ pha_client.group_identify("organization", organization_id, properties)
diff --git a/posthog/tasks/report_utils.py b/posthog/tasks/report_utils.py
new file mode 100644
index 0000000000000..75cd8a3fdc99b
--- /dev/null
+++ b/posthog/tasks/report_utils.py
@@ -0,0 +1,83 @@
+from datetime import datetime
+from typing import Any, Optional, Union, cast
+
+import structlog
+from dateutil import parser
+from django.conf import settings
+from posthoganalytics.client import Client
+from sentry_sdk import capture_exception
+
+from posthog.cloud_utils import is_cloud
+from posthog.models.organization import OrganizationMembership
+from posthog.models.team.team import Team
+from posthog.models.user import User
+from posthog.utils import get_machine_id
+
+logger = structlog.get_logger(__name__)
+
+
+def get_org_owner_or_first_user(organization_id: str) -> Optional[User]:
+ # Find the membership object for the org owner
+ user = None
+ membership = OrganizationMembership.objects.filter(
+ organization_id=organization_id, level=OrganizationMembership.Level.OWNER
+ ).first()
+ if not membership:
+ # If no owner membership is present, pick the first membership association we can find
+ membership = OrganizationMembership.objects.filter(organization_id=organization_id).first()
+ if hasattr(membership, "user"):
+ membership = cast(OrganizationMembership, membership)
+ user = membership.user
+ else:
+ capture_exception(
+ Exception("No user found for org while generating report"),
+ {"org": {"organization_id": organization_id}},
+ )
+ return user
+
+
+def capture_event(
+ *,
+ pha_client: Client,
+ name: str,
+ organization_id: Optional[str] = None,
+ team_id: Optional[int] = None,
+ properties: dict[str, Any],
+ timestamp: Optional[Union[datetime, str]] = None,
+ distinct_id: Optional[str] = None,
+) -> None:
+ """
+ Captures a single event.
+ """
+ if timestamp and isinstance(timestamp, str):
+ try:
+ timestamp = parser.isoparse(timestamp)
+ except ValueError:
+ timestamp = None
+
+ if not organization_id and not team_id:
+ raise ValueError("Either organization_id or team_id must be provided")
+
+ if not distinct_id:
+ if not organization_id:
+ team = Team.objects.get(id=team_id)
+ organization_id = str(team.organization_id)
+ org_owner = get_org_owner_or_first_user(organization_id)
+ distinct_id = org_owner.distinct_id if org_owner and org_owner.distinct_id else f"org-{organization_id}"
+
+ if is_cloud():
+ pha_client.capture(
+ distinct_id,
+ name,
+ {**properties, "scope": "user"},
+ groups={"organization": organization_id, "instance": settings.SITE_URL},
+ timestamp=timestamp,
+ )
+ else:
+ pha_client.capture(
+ get_machine_id(),
+ name,
+ {**properties, "scope": "machine"},
+ groups={"instance": settings.SITE_URL},
+ timestamp=timestamp,
+ )
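
`capture_event` accepts timestamps either as `datetime` objects or ISO-8601 strings and silently drops values it cannot parse. A standalone sketch of just that coercion step (assuming `python-dateutil` is installed, as elsewhere in this codebase):

```python
from datetime import datetime
from typing import Optional, Union

from dateutil import parser


def coerce_timestamp(timestamp: Optional[Union[datetime, str]]) -> Optional[datetime]:
    """Mirror of the coercion at the top of capture_event: ISO strings become datetimes,
    unparseable strings become None, datetimes and None pass through unchanged."""
    if timestamp and isinstance(timestamp, str):
        try:
            return parser.isoparse(timestamp)
        except ValueError:
            return None
    return timestamp


print(coerce_timestamp("2024-01-20T00:01:00Z"))  # 2024-01-20 00:01:00+00:00
print(coerce_timestamp("not a timestamp"))       # None
```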
diff --git a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr
index a16e3b07ba1ab..0a4f22903ccf0 100644
--- a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr
+++ b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr
@@ -108,6 +108,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -557,7 +558,8 @@
"posthog_experiment"."type",
"posthog_experiment"."variants",
"posthog_experiment"."metrics",
- "posthog_experiment"."metrics_secondary"
+ "posthog_experiment"."metrics_secondary",
+ "posthog_experiment"."stats_config"
FROM "posthog_experiment"
WHERE "posthog_experiment"."feature_flag_id" = 99999
'''
@@ -677,6 +679,7 @@
"posthog_user"."has_seen_product_intro_for",
"posthog_user"."strapi_id",
"posthog_user"."is_active",
+ "posthog_user"."role_at_organization",
"posthog_user"."theme_mode",
"posthog_user"."partial_notification_settings",
"posthog_user"."anonymize_data",
@@ -1186,7 +1189,8 @@
"posthog_experiment"."type",
"posthog_experiment"."variants",
"posthog_experiment"."metrics",
- "posthog_experiment"."metrics_secondary"
+ "posthog_experiment"."metrics_secondary",
+ "posthog_experiment"."stats_config"
FROM "posthog_experiment"
WHERE "posthog_experiment"."feature_flag_id" = 99999
'''
diff --git a/posthog/tasks/test/test_periodic_digest.py b/posthog/tasks/test/test_periodic_digest.py
index a93f96129106f..40ed11f154423 100644
--- a/posthog/tasks/test/test_periodic_digest.py
+++ b/posthog/tasks/test/test_periodic_digest.py
@@ -22,7 +22,7 @@ def setUp(self) -> None:
self.distinct_id = str(uuid4())
@freeze_time("2024-01-20T00:01:00Z")
- @patch("posthog.tasks.periodic_digest.capture_report")
+ @patch("posthog.tasks.periodic_digest.capture_event")
def test_periodic_digest_report(self, mock_capture: MagicMock) -> None:
# Create test data from "last week"
with freeze_time("2024-01-15T00:01:00Z"):
@@ -211,19 +211,22 @@ def test_periodic_digest_report(self, mock_capture: MagicMock) -> None:
"digest_items_with_data": 8,
}
- mock_capture.delay.assert_called_once_with(
- capture_event_name="transactional email",
+ mock_capture.assert_called_once_with(
+ pha_client=ANY,
+ distinct_id=str(self.user.distinct_id),
+ organization_id=str(self.team.organization_id),
+ name="transactional email",
team_id=self.team.id,
- full_report_dict=expected_properties,
- send_for_all_members=True,
+ properties=expected_properties,
+ timestamp=None,
)
- @patch("posthog.tasks.periodic_digest.capture_report")
+ @patch("posthog.tasks.periodic_digest.capture_event")
def test_periodic_digest_report_dry_run(self, mock_capture: MagicMock) -> None:
send_all_periodic_digest_reports(dry_run=True)
- mock_capture.delay.assert_not_called()
+ mock_capture.assert_not_called()
- @patch("posthog.tasks.periodic_digest.capture_report")
+ @patch("posthog.tasks.periodic_digest.capture_event")
def test_periodic_digest_report_custom_dates(self, mock_capture: MagicMock) -> None:
# Create test data
with freeze_time("2024-01-15T00:01:00Z"):
@@ -284,15 +287,18 @@ def test_periodic_digest_report_custom_dates(self, mock_capture: MagicMock) -> N
"digest_items_with_data": 1,
}
- mock_capture.delay.assert_called_once_with(
- capture_event_name="transactional email",
+ mock_capture.assert_called_once_with(
+ pha_client=ANY,
+ distinct_id=str(self.user.distinct_id),
+ organization_id=str(self.team.organization_id),
+ name="transactional email",
team_id=self.team.id,
- full_report_dict=expected_properties,
- send_for_all_members=True,
+ properties=expected_properties,
+ timestamp=None,
)
@freeze_time("2024-01-20T00:01:00Z")
- @patch("posthog.tasks.periodic_digest.capture_report")
+ @patch("posthog.tasks.periodic_digest.capture_event")
def test_periodic_digest_report_idempotency(self, mock_capture: MagicMock) -> None:
# Create test data
with freeze_time("2024-01-15T00:01:00Z"):
@@ -305,8 +311,8 @@ def test_periodic_digest_report_idempotency(self, mock_capture: MagicMock) -> No
send_all_periodic_digest_reports()
# Verify first call
- mock_capture.delay.assert_called_once()
- mock_capture.delay.reset_mock()
+ mock_capture.assert_called_once()
+ mock_capture.reset_mock()
# Check that messaging record was created
record = MessagingRecord.objects.get( # type: ignore
@@ -316,13 +322,13 @@ def test_periodic_digest_report_idempotency(self, mock_capture: MagicMock) -> No
# Second run - should not send the digest again
send_all_periodic_digest_reports()
- mock_capture.delay.assert_not_called()
+ mock_capture.assert_not_called()
# Verify only one record exists
self.assertEqual(MessagingRecord.objects.count(), 1)
@freeze_time("2024-01-20T00:01:00Z")
- @patch("posthog.tasks.periodic_digest.capture_report")
+ @patch("posthog.tasks.periodic_digest.capture_event")
def test_periodic_digest_different_periods(self, mock_capture: MagicMock) -> None:
# Create test data
with freeze_time("2024-01-15T00:01:00Z"):
@@ -333,14 +339,14 @@ def test_periodic_digest_different_periods(self, mock_capture: MagicMock) -> Non
# Send weekly digest
send_all_periodic_digest_reports()
- mock_capture.delay.assert_called_once()
- mock_capture.delay.reset_mock()
+ mock_capture.assert_called_once()
+ mock_capture.reset_mock()
# Send monthly digest (different period length)
send_all_periodic_digest_reports(
begin_date=(datetime.now() - timedelta(days=30)).isoformat(), end_date=datetime.now().isoformat()
)
- mock_capture.delay.assert_called_once()
+ mock_capture.assert_called_once()
# Verify two different records exist
records = MessagingRecord.objects.filter(raw_email=f"team_{self.team.id}") # type: ignore
@@ -349,17 +355,17 @@ def test_periodic_digest_different_periods(self, mock_capture: MagicMock) -> Non
self.assertEqual(campaign_keys, ["periodic_digest_2024-01-20_30d", "periodic_digest_2024-01-20_7d"])
@freeze_time("2024-01-20T00:01:00Z")
- @patch("posthog.tasks.periodic_digest.capture_report")
+ @patch("posthog.tasks.periodic_digest.capture_event")
def test_periodic_digest_empty_report_no_record(self, mock_capture: MagicMock) -> None:
# Run without any data (empty digest)
send_all_periodic_digest_reports()
# Verify no capture call and no messaging record
- mock_capture.delay.assert_not_called()
+ mock_capture.assert_not_called()
self.assertEqual(MessagingRecord.objects.count(), 0)
@freeze_time("2024-01-20T00:01:00Z")
- @patch("posthog.tasks.periodic_digest.capture_report")
+ @patch("posthog.tasks.periodic_digest.capture_event")
def test_periodic_digest_dry_run_no_record(self, mock_capture: MagicMock) -> None:
# Create test data
Dashboard.objects.create(
@@ -371,5 +377,71 @@ def test_periodic_digest_dry_run_no_record(self, mock_capture: MagicMock) -> Non
send_all_periodic_digest_reports(dry_run=True)
# Verify no capture call and no messaging record
- mock_capture.delay.assert_not_called()
+ mock_capture.assert_not_called()
self.assertEqual(MessagingRecord.objects.count(), 0)
+
+ @freeze_time("2024-01-20T00:01:00Z")
+ @patch("posthog.tasks.periodic_digest.capture_event")
+ def test_periodic_digest_excludes_playlists_without_names_and_derived_names(self, mock_capture: MagicMock) -> None:
+ # Create test data from "last week"
+ with freeze_time("2024-01-15T00:01:00Z"):
+ # Create playlists with various name states
+ valid_playlist = SessionRecordingPlaylist.objects.create(
+ team=self.team,
+ name="Valid Playlist",
+ derived_name="Derived Playlist",
+ )
+ SessionRecordingPlaylist.objects.create(
+ team=self.team,
+ name=None, # Null name should be excluded
+ derived_name=None,
+ )
+ SessionRecordingPlaylist.objects.create(
+ team=self.team,
+ name="", # Empty string name should be excluded
+ derived_name=None,
+ )
+
+ # Run the periodic digest report task
+ send_all_periodic_digest_reports()
+
+ # Extract the playlists from the capture call
+ call_args = mock_capture.call_args
+ self.assertIsNotNone(call_args)
+ properties = call_args[1]["properties"]
+ playlists = properties["new_playlists"]
+
+ # Verify only the valid playlist is included
+ self.assertEqual(len(playlists), 1)
+ self.assertEqual(playlists[0]["name"], "Valid Playlist")
+ self.assertEqual(playlists[0]["id"], valid_playlist.short_id)
+
+ @freeze_time("2024-01-20T00:01:00Z")
+ @patch("posthog.tasks.periodic_digest.capture_event")
+ def test_periodic_digest_respects_team_notification_settings(self, mock_capture: MagicMock) -> None:
+ # Create test data
+ with freeze_time("2024-01-15T00:01:00Z"):
+ Dashboard.objects.create(
+ team=self.team,
+ name="Test Dashboard",
+ )
+
+ # Create a second user who has disabled notifications for this team
+ user_with_disabled_notifications = self._create_user("test2@posthog.com")
+ user_with_disabled_notifications.partial_notification_settings = {
+ "project_weekly_digest_disabled": {str(self.team.id): True} # Disable notifications for this team
+ }
+ user_with_disabled_notifications.save()
+
+ # Add both users to the organization
+ self.organization.members.add(user_with_disabled_notifications)
+
+ # Run the periodic digest report task
+ send_all_periodic_digest_reports()
+
+ # Verify capture_event was only called once (for the original user)
+ mock_capture.assert_called_once()
+
+ # Verify the call was for the original user and not the one with disabled notifications
+ call_args = mock_capture.call_args[1]
+ self.assertEqual(call_args["distinct_id"], str(self.user.distinct_id))
diff --git a/posthog/tasks/usage_report.py b/posthog/tasks/usage_report.py
index 6e99b069d754d..3ec1917f17826 100644
--- a/posthog/tasks/usage_report.py
+++ b/posthog/tasks/usage_report.py
@@ -3,7 +3,7 @@
from collections import Counter
from collections.abc import Sequence
from datetime import datetime
-from typing import Any, Literal, Optional, TypedDict, Union, cast
+from typing import Any, Literal, Optional, TypedDict, Union
import requests
import structlog
@@ -20,7 +20,7 @@
from posthog import version_requirement
from posthog.clickhouse.client.connection import Workload
from posthog.client import sync_execute
-from posthog.cloud_utils import get_cached_instance_license, is_cloud
+from posthog.cloud_utils import get_cached_instance_license
from posthog.constants import FlagRequestType
from posthog.logging.timing import timed_log
from posthog.models import GroupTypeMapping, OrganizationMembership, User
@@ -32,12 +32,12 @@
from posthog.models.team.team import Team
from posthog.models.utils import namedtuplefetchall
from posthog.settings import CLICKHOUSE_CLUSTER, INSTANCE_TAG
+from posthog.tasks.report_utils import capture_event
from posthog.tasks.utils import CeleryQueue
from posthog.utils import (
get_helm_info_env,
get_instance_realm,
get_instance_region,
- get_machine_id,
get_previous_day,
)
from posthog.warehouse.models import ExternalDataJob
@@ -277,26 +277,6 @@ def get_org_user_count(organization_id: str) -> int:
return OrganizationMembership.objects.filter(organization_id=organization_id).count()
-def get_org_owner_or_first_user(organization_id: str) -> Optional[User]:
- # Find the membership object for the org owner
- user = None
- membership = OrganizationMembership.objects.filter(
- organization_id=organization_id, level=OrganizationMembership.Level.OWNER
- ).first()
- if not membership:
- # If no owner membership is present, pick the first membership association we can find
- membership = OrganizationMembership.objects.filter(organization_id=organization_id).first()
- if hasattr(membership, "user"):
- membership = cast(OrganizationMembership, membership)
- user = membership.user
- else:
- capture_exception(
- Exception("No user found for org while generating report"),
- {"org": {"organization_id": organization_id}},
- )
- return user
-
-
@shared_task(**USAGE_REPORT_TASK_KWARGS, max_retries=3, rate_limit="10/s")
def send_report_to_billing_service(org_id: str, report: dict[str, Any]) -> None:
if not settings.EE_AVAILABLE:
@@ -344,67 +324,6 @@ def send_report_to_billing_service(org_id: str, report: dict[str, Any]) -> None:
raise
-def capture_event(
- *,
- pha_client: Client,
- name: str,
- organization_id: Optional[str] = None,
- team_id: Optional[int] = None,
- properties: dict[str, Any],
- timestamp: Optional[Union[datetime, str]] = None,
- send_for_all_members: bool = False,
-) -> None:
- if timestamp and isinstance(timestamp, str):
- try:
- timestamp = parser.isoparse(timestamp)
- except ValueError:
- timestamp = None
-
- if not organization_id and not team_id:
- raise ValueError("Either organization_id or team_id must be provided")
-
- if is_cloud():
- distinct_ids = []
- if send_for_all_members:
- if organization_id:
- distinct_ids = list(
- OrganizationMembership.objects.filter(organization_id=organization_id).values_list(
- "user__distinct_id", flat=True
- )
- )
- elif team_id:
- team = Team.objects.get(id=team_id)
- distinct_ids = [user.distinct_id for user in team.all_users_with_access()]
- organization_id = str(team.organization_id)
- else:
- if not organization_id:
- team = Team.objects.get(id=team_id)
- organization_id = str(team.organization_id)
- org_owner = get_org_owner_or_first_user(organization_id) if organization_id else None
- distinct_ids.append(
- org_owner.distinct_id if org_owner and org_owner.distinct_id else f"org-{organization_id}"
- )
-
- for distinct_id in distinct_ids:
- pha_client.capture(
- distinct_id,
- name,
- {**properties, "scope": "user"},
- groups={"organization": organization_id, "instance": settings.SITE_URL},
- timestamp=timestamp,
- )
- pha_client.group_identify("organization", organization_id, properties)
- else:
- pha_client.capture(
- get_machine_id(),
- name,
- {**properties, "scope": "machine"},
- groups={"instance": settings.SITE_URL},
- timestamp=timestamp,
- )
- pha_client.group_identify("instance", settings.SITE_URL, properties)
-
-
@timed_log()
@retry(tries=QUERY_RETRIES, delay=QUERY_RETRY_DELAY, backoff=QUERY_RETRY_BACKOFF)
def get_teams_with_billable_event_count_in_period(
@@ -756,7 +675,6 @@ def capture_report(
team_id=team_id,
properties=full_report_dict,
timestamp=at_date,
- send_for_all_members=send_for_all_members,
)
logger.info(f"UsageReport sent to PostHog for organization {org_id}")
except Exception as err:
@@ -769,7 +687,6 @@ def capture_report(
organization_id=org_id,
team_id=team_id,
properties={"error": str(err)},
- send_for_all_members=send_for_all_members,
)
pha_client.flush()
diff --git a/posthog/temporal/batch_exports/batch_exports.py b/posthog/temporal/batch_exports/batch_exports.py
index e9f9288688dc2..344cc3899c256 100644
--- a/posthog/temporal/batch_exports/batch_exports.py
+++ b/posthog/temporal/batch_exports/batch_exports.py
@@ -1177,7 +1177,7 @@ async def execute_batch_export_insert_activity(
_, value, unit = interval.split(" ")
kwargs = {unit: int(value)}
# TODO: Consider removing this 20 minute minimum once we are more confident about hitting 5 minute or lower SLAs.
- start_to_close_timeout = max(dt.timedelta(minutes=20), dt.timedelta(**kwargs))
+ start_to_close_timeout = max(dt.timedelta(hours=12), dt.timedelta(**kwargs))
else:
raise ValueError(f"Unsupported interval: '{interval}'")
diff --git a/posthog/temporal/batch_exports/bigquery_batch_export.py b/posthog/temporal/batch_exports/bigquery_batch_export.py
index 5aa3965b5a8bd..91b0885c932da 100644
--- a/posthog/temporal/batch_exports/bigquery_batch_export.py
+++ b/posthog/temporal/batch_exports/bigquery_batch_export.py
@@ -563,6 +563,7 @@ async def flush(
self.rows_exported_counter.add(records_since_last_flush)
self.bytes_exported_counter.add(bytes_since_last_flush)
+ self.heartbeat_details.records_completed += records_since_last_flush
self.heartbeat_details.track_done_range(last_date_range, self.data_interval_start)
@@ -639,7 +640,7 @@ async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs) -> Records
record_batch_schema = await wait_for_schema_or_producer(queue, producer_task)
if record_batch_schema is None:
- return 0
+ return details.records_completed
record_batch_schema = pa.schema(
# NOTE: For some reason, some batches set non-nullable fields as non-nullable, whereas other
@@ -716,7 +717,7 @@ async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs) -> Records
bigquery_table=bigquery_stage_table if can_perform_merge else bigquery_table,
table_schema=stage_schema if can_perform_merge else schema,
)
- records_completed = await run_consumer(
+ await run_consumer(
consumer=consumer,
queue=queue,
producer_task=producer_task,
@@ -740,7 +741,7 @@ async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs) -> Records
stage_fields_cast_to_json=json_columns,
)
- return records_completed
+ return details.records_completed
@workflow.defn(name="bigquery-export", failure_exception_types=[workflow.NondeterminismError])
diff --git a/posthog/temporal/batch_exports/heartbeat.py b/posthog/temporal/batch_exports/heartbeat.py
index fdd21d0613eee..a873507be2be9 100644
--- a/posthog/temporal/batch_exports/heartbeat.py
+++ b/posthog/temporal/batch_exports/heartbeat.py
@@ -1,14 +1,14 @@
-import typing
-import datetime as dt
import collections.abc
import dataclasses
+import datetime as dt
+import typing
import structlog
from posthog.temporal.common.heartbeat import (
+ EmptyHeartbeatError,
HeartbeatDetails,
HeartbeatParseError,
- EmptyHeartbeatError,
NotEnoughHeartbeatValuesError,
)
@@ -27,6 +27,7 @@ class BatchExportRangeHeartbeatDetails(HeartbeatDetails):
"""
done_ranges: list[DateRange] = dataclasses.field(default_factory=list)
+ records_completed: int = 0
_remaining: collections.abc.Sequence[typing.Any] = dataclasses.field(default_factory=tuple)
@classmethod
@@ -37,10 +38,11 @@ def deserialize_details(cls, details: collections.abc.Sequence[typing.Any]) -> d
values. Moreover, we expect datetime values to be ISO-formatted strings.
"""
done_ranges: list[DateRange] = []
+ records_completed = 0
remaining = super().deserialize_details(details)
if len(remaining["_remaining"]) == 0:
- return {"done_ranges": done_ranges, **remaining}
+ return {"done_ranges": done_ranges, "records_completed": records_completed, **remaining}
first_detail = remaining["_remaining"][0]
remaining["_remaining"] = remaining["_remaining"][1:]
@@ -57,7 +59,18 @@ def deserialize_details(cls, details: collections.abc.Sequence[typing.Any]) -> d
done_ranges.append(datetime_bounds)
- return {"done_ranges": done_ranges, **remaining}
+ if len(remaining["_remaining"]) == 0:
+ return {"done_ranges": done_ranges, "records_completed": records_completed, **remaining}
+
+ next_detail = remaining["_remaining"][0]
+ remaining["_remaining"] = remaining["_remaining"][1:]
+
+ try:
+ records_completed = int(next_detail)
+ except (TypeError, ValueError) as e:
+ raise HeartbeatParseError("records_completed") from e
+
+ return {"done_ranges": done_ranges, "records_completed": records_completed, **remaining}
def serialize_details(self) -> tuple[typing.Any, ...]:
"""Serialize this into a tuple.
@@ -69,7 +82,7 @@ def serialize_details(self) -> tuple[typing.Any, ...]:
(start.isoformat() if start is not None else start, end.isoformat()) for (start, end) in self.done_ranges
]
serialized_parent_details = super().serialize_details()
- return (*serialized_parent_details[:-1], serialized_done_ranges, self._remaining)
+ return (*serialized_parent_details[:-1], serialized_done_ranges, self.records_completed, self._remaining)
@property
def empty(self) -> bool:
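
After this change the serialized heartbeat tuple carries `records_completed` between the done ranges and the opaque `_remaining` tail, and `deserialize_details` pops values off in the same order. A simplified, self-contained illustration of that round trip (a toy dataclass, not the real `BatchExportRangeHeartbeatDetails`):

```python
import dataclasses
import datetime as dt
import typing


@dataclasses.dataclass
class ToyHeartbeatDetails:
    """Toy model of the layout: (done_ranges, records_completed, remaining)."""

    done_ranges: list[tuple[dt.datetime | None, dt.datetime]] = dataclasses.field(default_factory=list)
    records_completed: int = 0
    _remaining: tuple[typing.Any, ...] = ()

    def serialize(self) -> tuple[typing.Any, ...]:
        # Datetimes are serialized as ISO strings, with None allowed for an open start.
        serialized_ranges = [
            (start.isoformat() if start is not None else None, end.isoformat())
            for start, end in self.done_ranges
        ]
        return (serialized_ranges, self.records_completed, self._remaining)

    @classmethod
    def deserialize(cls, details: tuple[typing.Any, ...]) -> "ToyHeartbeatDetails":
        serialized_ranges, records_completed, remaining = details
        done_ranges = [
            (dt.datetime.fromisoformat(start) if start is not None else None, dt.datetime.fromisoformat(end))
            for start, end in serialized_ranges
        ]
        return cls(done_ranges=done_ranges, records_completed=int(records_completed), _remaining=tuple(remaining))


original = ToyHeartbeatDetails(
    done_ranges=[(dt.datetime(2024, 1, 1), dt.datetime(2024, 1, 2))],
    records_completed=1200,
)
assert ToyHeartbeatDetails.deserialize(original.serialize()) == original
```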
diff --git a/posthog/temporal/batch_exports/postgres_batch_export.py b/posthog/temporal/batch_exports/postgres_batch_export.py
index d71219265d623..3c70e5b747dea 100644
--- a/posthog/temporal/batch_exports/postgres_batch_export.py
+++ b/posthog/temporal/batch_exports/postgres_batch_export.py
@@ -29,18 +29,26 @@
default_fields,
execute_batch_export_insert_activity,
get_data_interval,
- iter_model_records,
start_batch_export_run,
)
-from posthog.temporal.batch_exports.metrics import (
- get_bytes_exported_metric,
- get_rows_exported_metric,
+from posthog.temporal.batch_exports.heartbeat import (
+ BatchExportRangeHeartbeatDetails,
+ DateRange,
+ should_resume_from_activity_heartbeat,
+)
+from posthog.temporal.batch_exports.spmc import (
+ Consumer,
+ Producer,
+ RecordBatchQueue,
+ run_consumer,
+ wait_for_schema_or_producer,
+)
+from posthog.temporal.batch_exports.temporary_file import (
+ BatchExportTemporaryFile,
+ WriterFormat,
)
-from posthog.temporal.batch_exports.temporary_file import CSVBatchExportWriter
from posthog.temporal.batch_exports.utils import (
JsonType,
- apeek_first_and_rewind,
- cast_record_batch_json_columns,
make_retryable_with_exponential_backoff,
set_status_to_running_task,
)
@@ -466,6 +474,70 @@ def get_postgres_fields_from_record_schema(
return pg_schema
+@dataclasses.dataclass
+class PostgreSQLHeartbeatDetails(BatchExportRangeHeartbeatDetails):
+ """The PostgreSQL batch export details included in every heartbeat."""
+
+ pass
+
+
+class PostgreSQLConsumer(Consumer):
+ def __init__(
+ self,
+ heartbeater: Heartbeater,
+ heartbeat_details: PostgreSQLHeartbeatDetails,
+ data_interval_start: dt.datetime | str | None,
+ data_interval_end: dt.datetime | str,
+ writer_format: WriterFormat,
+ postgresql_client: PostgreSQLClient,
+ postgresql_table: str,
+ postgresql_table_schema: str,
+ postgresql_table_fields: list[str],
+ ):
+ super().__init__(
+ heartbeater=heartbeater,
+ heartbeat_details=heartbeat_details,
+ data_interval_start=data_interval_start,
+ data_interval_end=data_interval_end,
+ writer_format=writer_format,
+ )
+ self.heartbeat_details: PostgreSQLHeartbeatDetails = heartbeat_details
+ self.postgresql_table = postgresql_table
+ self.postgresql_table_schema = postgresql_table_schema
+ self.postgresql_table_fields = postgresql_table_fields
+ self.postgresql_client = postgresql_client
+
+ async def flush(
+ self,
+ batch_export_file: BatchExportTemporaryFile,
+ records_since_last_flush: int,
+ bytes_since_last_flush: int,
+ flush_counter: int,
+ last_date_range: DateRange,
+ is_last: bool,
+ error: Exception | None,
+ ):
+ await self.logger.adebug(
+ "Copying %s records of size %s bytes",
+ records_since_last_flush,
+ bytes_since_last_flush,
+ )
+
+ await self.postgresql_client.copy_tsv_to_postgres(
+ batch_export_file,
+ self.postgresql_table_schema,
+ self.postgresql_table,
+ self.postgresql_table_fields,
+ )
+
+ await self.logger.ainfo("Copied %s to PostgreSQL table '%s'", records_since_last_flush, self.postgresql_table)
+ self.rows_exported_counter.add(records_since_last_flush)
+ self.bytes_exported_counter.add(bytes_since_last_flush)
+
+ self.heartbeat_details.records_completed += records_since_last_flush
+ self.heartbeat_details.track_done_range(last_date_range, self.data_interval_start)
+
+
@activity.defn
async def insert_into_postgres_activity(inputs: PostgresInsertInputs) -> RecordsCompleted:
"""Activity streams data from ClickHouse to Postgres."""
@@ -480,35 +552,67 @@ async def insert_into_postgres_activity(inputs: PostgresInsertInputs) -> Records
)
async with (
- Heartbeater(),
+ Heartbeater() as heartbeater,
set_status_to_running_task(run_id=inputs.run_id, logger=logger),
get_client(team_id=inputs.team_id) as client,
):
if not await client.is_alive():
raise ConnectionError("Cannot establish connection to ClickHouse")
+ _, details = await should_resume_from_activity_heartbeat(activity, PostgreSQLHeartbeatDetails)
+ if details is None:
+ details = PostgreSQLHeartbeatDetails()
+
+ done_ranges: list[DateRange] = details.done_ranges
+
model: BatchExportModel | BatchExportSchema | None = None
if inputs.batch_export_schema is None and "batch_export_model" in {
field.name for field in dataclasses.fields(inputs)
}:
model = inputs.batch_export_model
+ if model is not None:
+ model_name = model.name
+ extra_query_parameters = model.schema["values"] if model.schema is not None else None
+ fields = model.schema["fields"] if model.schema is not None else None
+ else:
+ model_name = "events"
+ extra_query_parameters = None
+ fields = None
else:
model = inputs.batch_export_schema
+ model_name = "custom"
+ extra_query_parameters = model["values"] if model is not None else {}
+ fields = model["fields"] if model is not None else None
- record_batch_iterator = iter_model_records(
- client=client,
- model=model,
+ data_interval_start = (
+ dt.datetime.fromisoformat(inputs.data_interval_start) if inputs.data_interval_start else None
+ )
+ data_interval_end = dt.datetime.fromisoformat(inputs.data_interval_end)
+ full_range = (data_interval_start, data_interval_end)
+
+ queue = RecordBatchQueue(max_size_bytes=settings.BATCH_EXPORT_POSTGRES_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES)
+ producer = Producer(clickhouse_client=client)
+ producer_task = producer.start(
+ queue=queue,
+ model_name=model_name,
+ is_backfill=inputs.is_backfill,
team_id=inputs.team_id,
- interval_start=inputs.data_interval_start,
- interval_end=inputs.data_interval_end,
+ full_range=full_range,
+ done_ranges=done_ranges,
+ fields=fields,
+ destination_default_fields=postgres_default_fields(),
exclude_events=inputs.exclude_events,
include_events=inputs.include_events,
- destination_default_fields=postgres_default_fields(),
- is_backfill=inputs.is_backfill,
+ extra_query_parameters=extra_query_parameters,
+ )
+
+ record_batch_schema = await wait_for_schema_or_producer(queue, producer_task)
+ if record_batch_schema is None:
+ return details.records_completed
+
+ record_batch_schema = pa.schema(
+ [field.with_nullable(True) for field in record_batch_schema if field.name != "_inserted_at"]
)
- first_record_batch, record_batch_iterator = await apeek_first_and_rewind(record_batch_iterator)
- if first_record_batch is None:
- return 0
if model is None or (isinstance(model, BatchExportModel) and model.name == "events"):
table_fields: Fields = [
@@ -526,17 +630,13 @@ async def insert_into_postgres_activity(inputs: PostgresInsertInputs) -> Records
]
else:
- column_names = [column for column in first_record_batch.schema.names if column != "_inserted_at"]
- record_schema = first_record_batch.select(column_names).schema
table_fields = get_postgres_fields_from_record_schema(
- record_schema, known_json_columns=["properties", "set", "set_once", "person_properties"]
+ record_batch_schema,
+ known_json_columns=["properties", "set", "set_once", "person_properties"],
)
schema_columns = [field[0] for field in table_fields]
- rows_exported = get_rows_exported_metric()
- bytes_exported = get_bytes_exported_metric()
-
requires_merge = (
isinstance(inputs.batch_export_model, BatchExportModel) and inputs.batch_export_model.name == "persons"
)
@@ -564,47 +664,33 @@ async def insert_into_postgres_activity(inputs: PostgresInsertInputs) -> Records
primary_key=primary_key,
) as pg_stage_table,
):
-
- async def flush_to_postgres(
- local_results_file,
- records_since_last_flush,
- bytes_since_last_flush,
- flush_counter: int,
- last_inserted_at,
- last: bool,
- error: Exception | None,
- ):
- await logger.adebug(
- "Copying %s records of size %s bytes",
- records_since_last_flush,
- bytes_since_last_flush,
- )
-
- table = pg_stage_table if requires_merge else pg_table
- await pg_client.copy_tsv_to_postgres(
- local_results_file,
- inputs.schema,
- table,
- schema_columns,
- )
- rows_exported.add(records_since_last_flush)
- bytes_exported.add(bytes_since_last_flush)
-
- writer = CSVBatchExportWriter(
+ consumer = PostgreSQLConsumer(
+ heartbeater=heartbeater,
+ heartbeat_details=details,
+ data_interval_end=data_interval_end,
+ data_interval_start=data_interval_start,
+ writer_format=WriterFormat.CSV,
+ postgresql_client=pg_client,
+ postgresql_table=pg_stage_table if requires_merge else pg_table,
+ postgresql_table_schema=inputs.schema,
+ postgresql_table_fields=schema_columns,
+ )
+ await run_consumer(
+ consumer=consumer,
+ queue=queue,
+ producer_task=producer_task,
+ schema=record_batch_schema,
max_bytes=settings.BATCH_EXPORT_POSTGRES_UPLOAD_CHUNK_SIZE_BYTES,
- flush_callable=flush_to_postgres,
- field_names=schema_columns,
- delimiter="\t",
- quoting=csv.QUOTE_MINIMAL,
- escape_char=None,
+ json_columns=(),
+ writer_file_kwargs={
+ "delimiter": "\t",
+ "quoting": csv.QUOTE_MINIMAL,
+ "escape_char": None,
+ "field_names": schema_columns,
+ },
+ multiple_files=True,
)
- async with writer.open_temporary_file():
- async for record_batch in record_batch_iterator:
- record_batch = cast_record_batch_json_columns(record_batch, json_columns=())
-
- await writer.write_record_batch(record_batch)
-
if requires_merge:
merge_key: Fields = (
("team_id", "INT"),
@@ -619,7 +705,7 @@ async def flush_to_postgres(
merge_key=merge_key,
)
- return writer.records_total
+ return details.records_completed
@workflow.defn(name="postgres-export", failure_exception_types=[workflow.NondeterminismError])
@@ -722,6 +808,12 @@ async def run(self, inputs: PostgresBatchExportInputs):
# Raised when the database doesn't support a particular feature we use.
# Generally, we have seen this when the database is read-only.
"FeatureNotSupported",
+ # A check constraint has been violated.
+ # We do not create any ourselves, so this is generally a user-managed check
+ # that we should not retry.
+ "CheckViolation",
+ # We do not create foreign keys, so this is a user-managed constraint we have violated.
+ "ForeignKeyViolation",
],
finish_inputs=finish_inputs,
)
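
For context on the resumption pattern used throughout these activities: the `done_ranges` recovered from the heartbeat tell the producer which parts of `full_range` were already exported, so only the gaps are queried again. Below is a minimal sketch of that bookkeeping, assuming sorted, non-overlapping done ranges; the real range arithmetic lives in `generate_query_ranges` in spmc.py, shown further down.

```python
import datetime as dt


# Illustrative only: not the actual generate_query_ranges implementation.
def remaining_ranges(
    full_range: tuple[dt.datetime | None, dt.datetime],
    done_ranges: list[tuple[dt.datetime, dt.datetime]],
) -> list[tuple[dt.datetime | None, dt.datetime]]:
    """Return the sub-ranges of `full_range` that still need to be queried."""
    start, end = full_range
    remaining: list[tuple[dt.datetime | None, dt.datetime]] = []
    cursor = start

    for done_start, done_end in done_ranges:
        if cursor is None or cursor < done_start:
            remaining.append((cursor, done_start))
        cursor = done_end

    if cursor is None or cursor < end:
        remaining.append((cursor, end))

    return remaining
```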
diff --git a/posthog/temporal/batch_exports/redshift_batch_export.py b/posthog/temporal/batch_exports/redshift_batch_export.py
index 827bfa684c143..7abbbc885e09f 100644
--- a/posthog/temporal/batch_exports/redshift_batch_export.py
+++ b/posthog/temporal/batch_exports/redshift_batch_export.py
@@ -306,6 +306,7 @@ async def flush(
self.rows_exported_counter.add(records_since_last_flush)
self.bytes_exported_counter.add(bytes_since_last_flush)
+ self.heartbeat_details.records_completed += records_since_last_flush
self.heartbeat_details.track_done_range(last_date_range, self.data_interval_start)
@@ -352,7 +353,7 @@ async def insert_into_redshift_activity(inputs: RedshiftInsertInputs) -> Records
async with (
Heartbeater() as heartbeater,
set_status_to_running_task(run_id=inputs.run_id, logger=logger),
- get_client(team_id=inputs.team_id, max_block_size=10) as client,
+ get_client(team_id=inputs.team_id) as client,
):
if not await client.is_alive():
raise ConnectionError("Cannot establish connection to ClickHouse")
@@ -402,10 +403,12 @@ async def insert_into_redshift_activity(inputs: RedshiftInsertInputs) -> Records
exclude_events=inputs.exclude_events,
include_events=inputs.include_events,
extra_query_parameters=extra_query_parameters,
+ max_record_batch_size_bytes=1024 * 1024 * 2, # 2MB
)
+
record_batch_schema = await wait_for_schema_or_producer(queue, producer_task)
if record_batch_schema is None:
- return 0
+ return details.records_completed
record_batch_schema = pa.schema(
[field.with_nullable(True) for field in record_batch_schema if field.name != "_inserted_at"]
@@ -473,7 +476,7 @@ async def insert_into_redshift_activity(inputs: RedshiftInsertInputs) -> Records
redshift_client=redshift_client,
redshift_table=redshift_stage_table if requires_merge else redshift_table,
)
- records_completed = await run_consumer(
+ await run_consumer(
consumer=consumer,
queue=queue,
producer_task=producer_task,
@@ -503,7 +506,7 @@ async def insert_into_redshift_activity(inputs: RedshiftInsertInputs) -> Records
merge_key=merge_key,
)
- return records_completed
+ return details.records_completed
@workflow.defn(name="redshift-export", failure_exception_types=[workflow.NondeterminismError])
diff --git a/posthog/temporal/batch_exports/s3_batch_export.py b/posthog/temporal/batch_exports/s3_batch_export.py
index 8246db39eb6ad..812ac9adc68a1 100644
--- a/posthog/temporal/batch_exports/s3_batch_export.py
+++ b/posthog/temporal/batch_exports/s3_batch_export.py
@@ -55,7 +55,10 @@
from posthog.temporal.batch_exports.utils import set_status_to_running_task
from posthog.temporal.common.clickhouse import get_client
from posthog.temporal.common.heartbeat import Heartbeater
-from posthog.temporal.common.logger import bind_temporal_worker_logger
+from posthog.temporal.common.logger import (
+ bind_temporal_worker_logger,
+ get_internal_logger,
+)
NON_RETRYABLE_ERROR_TYPES = [
# S3 parameter validation failed.
@@ -90,6 +93,38 @@
}
+@dataclasses.dataclass
+class S3InsertInputs:
+ """Inputs for S3 exports."""
+
+ # TODO: do _not_ store credentials in temporal inputs. It makes it very hard
+ # to keep track of where credentials are being stored and increases the
+ # attack surface for credential leaks.
+
+ bucket_name: str
+ region: str
+ prefix: str
+ team_id: int
+ data_interval_start: str | None
+ data_interval_end: str
+ aws_access_key_id: str | None = None
+ aws_secret_access_key: str | None = None
+ compression: str | None = None
+ exclude_events: list[str] | None = None
+ include_events: list[str] | None = None
+ encryption: str | None = None
+ kms_key_id: str | None = None
+ endpoint_url: str | None = None
+ # TODO: In Python 3.11, this could be an enum.StrEnum.
+ file_format: str = "JSONLines"
+ max_file_size_mb: int | None = None
+ run_id: str | None = None
+ is_backfill: bool = False
+ batch_export_model: BatchExportModel | None = None
+ # TODO: Remove after updating existing batch exports
+ batch_export_schema: BatchExportSchema | None = None
+
+
def get_allowed_template_variables(inputs) -> dict[str, str]:
"""Derive from inputs a dictionary of supported template variables for the S3 key prefix."""
export_datetime = dt.datetime.fromisoformat(inputs.data_interval_end)
@@ -106,7 +141,7 @@ def get_allowed_template_variables(inputs) -> dict[str, str]:
}
-def get_s3_key(inputs) -> str:
+def get_s3_key(inputs: S3InsertInputs, file_number: int = 0) -> str:
"""Return an S3 key given S3InsertInputs."""
template_variables = get_allowed_template_variables(inputs)
key_prefix = inputs.prefix.format(**template_variables)
@@ -117,6 +152,9 @@ def get_s3_key(inputs) -> str:
raise UnsupportedFileFormatError(inputs.file_format, "S3")
base_file_name = f"{inputs.data_interval_start}-{inputs.data_interval_end}"
+ # Only append a file number when exporting multiple files, to keep the old naming scheme for single-file exports.
+ if inputs.max_file_size_mb is not None:
+ base_file_name = f"{base_file_name}-{file_number}"
if inputs.compression is not None:
file_name = base_file_name + f".{file_extension}.{COMPRESSION_EXTENSIONS[inputs.compression]}"
else:
@@ -222,6 +260,8 @@ def __init__(
if self.endpoint_url == "":
raise InvalidS3EndpointError("Endpoint URL is empty.")
+ self.logger = get_internal_logger()
+
def to_state(self) -> S3MultiPartUploadState:
"""Produce state tuple that can be used to resume this S3MultiPartUpload."""
# The second predicate is trivial but required by type-checking.
@@ -279,20 +319,20 @@ async def start(self) -> str:
upload_id: str = multipart_response["UploadId"]
self.upload_id = upload_id
-
+ await self.logger.adebug("Started multipart upload for key %s with upload id %s", self.key, upload_id)
return upload_id
- def continue_from_state(self, state: S3MultiPartUploadState):
+ async def continue_from_state(self, state: S3MultiPartUploadState):
"""Continue this S3MultiPartUpload from a previous state.
This method is intended to be used with the state found in an Activity heartbeat.
"""
self.upload_id = state.upload_id
self.parts = state.parts
-
+ await self.logger.adebug("Resuming multipart upload for key %s with upload id %s", self.key, self.upload_id)
return self.upload_id
- async def complete(self) -> str:
+ async def complete(self) -> str | None:
if self.is_upload_in_progress() is False:
raise NoUploadInProgressError()
@@ -308,7 +348,7 @@ async def complete(self) -> str:
self.upload_id = None
self.parts = []
- return response["Location"]
+ return response.get("Key")
async def abort(self):
"""Abort this S3 multi-part upload."""
@@ -394,6 +434,10 @@ async def upload_part_retryable(
error_code = err.response.get("Error", {}).get("Code", None)
attempt += 1
+ await self.logger.ainfo(
+ "Caught ClientError while uploading part %s: %s", next_part_number, error_code
+ )
+
if error_code is not None and error_code == "RequestTimeout":
if attempt >= max_attempts:
raise IntermittentUploadPartTimeoutError(part_number=next_part_number) from err
@@ -429,36 +473,48 @@ class S3HeartbeatDetails(BatchExportRangeHeartbeatDetails):
Attributes:
upload_state: State to continue a S3MultiPartUpload when activity execution resumes.
+ files_uploaded: The number of files we have uploaded so far
+ (a single activity may perform several multi-part uploads).
"""
upload_state: S3MultiPartUploadState | None = None
+ files_uploaded: int = 0
@classmethod
def deserialize_details(cls, details: collections.abc.Sequence[typing.Any]) -> dict[str, typing.Any]:
"""Attempt to initialize HeartbeatDetails from an activity's details."""
upload_state = None
+ files_uploaded = 0
remaining = super().deserialize_details(details)
if len(remaining["_remaining"]) == 0:
- return {"upload_state": upload_state, **remaining}
+ return {"upload_state": upload_state, "files_uploaded": files_uploaded, **remaining}
first_detail = remaining["_remaining"][0]
remaining["_remaining"] = remaining["_remaining"][1:]
if first_detail is None:
- return {"upload_state": None, **remaining}
+ upload_state = None
+ else:
+ try:
+ upload_state = S3MultiPartUploadState(*first_detail)
+ except (TypeError, ValueError) as e:
+ raise HeartbeatParseError("upload_state") from e
+
+ second_detail = remaining["_remaining"][0]
+ remaining["_remaining"] = remaining["_remaining"][1:]
try:
- upload_state = S3MultiPartUploadState(*first_detail)
+ files_uploaded = int(second_detail)
except (TypeError, ValueError) as e:
- raise HeartbeatParseError("upload_state") from e
+ raise HeartbeatParseError("files_uploaded") from e
- return {"upload_state": upload_state, **remaining}
+ return {"upload_state": upload_state, "files_uploaded": files_uploaded, **remaining}
def serialize_details(self) -> tuple[typing.Any, ...]:
"""Attempt to initialize HeartbeatDetails from an activity's details."""
serialized_parent_details = super().serialize_details()
- return (*serialized_parent_details[:-1], self.upload_state, self._remaining)
+ return (*serialized_parent_details[:-1], self.upload_state, self.files_uploaded, self._remaining)
def append_upload_state(self, upload_state: S3MultiPartUploadState):
if self.upload_state is None:
@@ -469,6 +525,10 @@ def append_upload_state(self, upload_state: S3MultiPartUploadState):
if part["PartNumber"] not in current_parts:
self.upload_state.parts.append(part)
+ def mark_file_upload_as_complete(self):
+ self.files_uploaded += 1
+ self.upload_state = None
+
class S3Consumer(Consumer):
def __init__(
@@ -479,6 +539,7 @@ def __init__(
data_interval_end: dt.datetime | str,
writer_format: WriterFormat,
s3_upload: S3MultiPartUpload,
+ s3_inputs: S3InsertInputs,
):
super().__init__(
heartbeater=heartbeater,
@@ -488,7 +549,9 @@ def __init__(
writer_format=writer_format,
)
self.heartbeat_details: S3HeartbeatDetails = heartbeat_details
- self.s3_upload = s3_upload
+ self.s3_upload: S3MultiPartUpload | None = s3_upload
+ self.s3_inputs = s3_inputs
+ self.file_number = 0
async def flush(
self,
@@ -501,6 +564,8 @@ async def flush(
error: Exception | None,
):
if error is not None:
+ if not self.s3_upload:
+ return
await self.logger.adebug("Error while writing part %d", self.s3_upload.part_number + 1, exc_info=error)
await self.logger.awarning(
"An error was detected while writing part %d. Partial part will not be uploaded in case it can be retried.",
@@ -508,51 +573,46 @@ async def flush(
)
return
- await self.logger.adebug(
- "Uploading part %s containing %s records with size %s bytes",
- self.s3_upload.part_number + 1,
- records_since_last_flush,
- bytes_since_last_flush,
- )
-
- await self.s3_upload.upload_part(batch_export_file)
+ if self.s3_upload is None:
+ self.s3_upload = initialize_upload(self.s3_inputs, self.file_number)
+
+ async with self.s3_upload as s3_upload:
+ await self.logger.adebug(
+ "Uploading file number %s part %s with upload id %s containing %s records with size %s bytes",
+ self.file_number,
+ s3_upload.part_number + 1,
+ s3_upload.upload_id,
+ records_since_last_flush,
+ bytes_since_last_flush,
+ )
+ await s3_upload.upload_part(batch_export_file)
- self.rows_exported_counter.add(records_since_last_flush)
- self.bytes_exported_counter.add(bytes_since_last_flush)
+ self.rows_exported_counter.add(records_since_last_flush)
+ self.bytes_exported_counter.add(bytes_since_last_flush)
- self.heartbeat_details.track_done_range(last_date_range, self.data_interval_start)
- self.heartbeat_details.append_upload_state(self.s3_upload.to_state())
+ if is_last:
+ await self.logger.adebug(
+ "Completing multipart upload %s for file number %s", s3_upload.upload_id, self.file_number
+ )
+ await s3_upload.complete()
+ if is_last:
+ self.s3_upload = None
+ self.heartbeat_details.mark_file_upload_as_complete()
+ self.file_number += 1
+ else:
+ self.heartbeat_details.append_upload_state(self.s3_upload.to_state())
-@dataclasses.dataclass
-class S3InsertInputs:
- """Inputs for S3 exports."""
-
- # TODO: do _not_ store credentials in temporal inputs. It makes it very hard
- # to keep track of where credentials are being stored and increases the
- # attach surface for credential leaks.
+ self.heartbeat_details.records_completed += records_since_last_flush
+ self.heartbeat_details.track_done_range(last_date_range, self.data_interval_start)
- bucket_name: str
- region: str
- prefix: str
- team_id: int
- data_interval_start: str | None
- data_interval_end: str
- aws_access_key_id: str | None = None
- aws_secret_access_key: str | None = None
- compression: str | None = None
- exclude_events: list[str] | None = None
- include_events: list[str] | None = None
- encryption: str | None = None
- kms_key_id: str | None = None
- endpoint_url: str | None = None
- # TODO: In Python 3.11, this could be a enum.StrEnum.
- file_format: str = "JSONLines"
- run_id: str | None = None
- is_backfill: bool = False
- batch_export_model: BatchExportModel | None = None
- # TODO: Remove after updating existing batch exports
- batch_export_schema: BatchExportSchema | None = None
+ async def close(self):
+ if self.s3_upload is not None:
+ await self.logger.adebug(
+ "Completing multipart upload %s for file number %s", self.s3_upload.upload_id, self.file_number
+ )
+ await self.s3_upload.complete()
+ self.heartbeat_details.mark_file_upload_as_complete()
async def initialize_and_resume_multipart_upload(
@@ -561,28 +621,17 @@ async def initialize_and_resume_multipart_upload(
"""Initialize a S3MultiPartUpload and resume it from a hearbeat state if available."""
logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="S3")
- try:
- key = get_s3_key(inputs)
- except Exception as e:
- raise InvalidS3Key(e) from e
-
- s3_upload = S3MultiPartUpload(
- bucket_name=inputs.bucket_name,
- key=key,
- encryption=inputs.encryption,
- kms_key_id=inputs.kms_key_id,
- region_name=inputs.region,
- aws_access_key_id=inputs.aws_access_key_id,
- aws_secret_access_key=inputs.aws_secret_access_key,
- endpoint_url=inputs.endpoint_url or None,
- )
-
_, details = await should_resume_from_activity_heartbeat(activity, S3HeartbeatDetails)
if details is None:
details = S3HeartbeatDetails()
+ files_uploaded = details.files_uploaded or 0
+ file_number = files_uploaded
+
+ s3_upload = initialize_upload(inputs, file_number)
+
if details.upload_state:
- s3_upload.continue_from_state(details.upload_state)
+ await s3_upload.continue_from_state(details.upload_state)
if inputs.compression == "brotli":
# Even if we receive details we cannot resume a brotli compressed upload as
@@ -598,6 +647,26 @@ async def initialize_and_resume_multipart_upload(
return s3_upload, details
+def initialize_upload(inputs: S3InsertInputs, file_number: int) -> S3MultiPartUpload:
+ """Initialize a S3MultiPartUpload."""
+
+ try:
+ key = get_s3_key(inputs, file_number)
+ except Exception as e:
+ raise InvalidS3Key(e) from e
+
+ return S3MultiPartUpload(
+ bucket_name=inputs.bucket_name,
+ key=key,
+ encryption=inputs.encryption,
+ kms_key_id=inputs.kms_key_id,
+ region_name=inputs.region,
+ aws_access_key_id=inputs.aws_access_key_id,
+ aws_secret_access_key=inputs.aws_secret_access_key,
+ endpoint_url=inputs.endpoint_url or None,
+ )
+
+
def s3_default_fields() -> list[BatchExportField]:
"""Default fields for an S3 batch export.
@@ -619,7 +688,8 @@ def s3_default_fields() -> list[BatchExportField]:
async def insert_into_s3_activity(inputs: S3InsertInputs) -> RecordsCompleted:
"""Activity to batch export data from PostHog's ClickHouse to S3.
- It currently only creates a single file per run, and uploads as a multipart upload.
+ It will upload multiple files if `max_file_size_mb` is set; otherwise it
+ will upload a single file. All files are uploaded as multipart uploads.
TODO: this implementation currently tries to export as one run, but it could
be a very big date range and time consuming, better to split into multiple
@@ -700,12 +770,13 @@ async def insert_into_s3_activity(inputs: S3InsertInputs) -> RecordsCompleted:
exclude_events=inputs.exclude_events,
include_events=inputs.include_events,
extra_query_parameters=extra_query_parameters,
+ max_record_batch_size_bytes=1024 * 1024 * 10, # 10MB
+ use_latest_schema=True,
)
- records_completed = 0
record_batch_schema = await wait_for_schema_or_producer(queue, producer_task)
if record_batch_schema is None:
- return records_completed
+ return details.records_completed
record_batch_schema = pa.schema(
# NOTE: For some reason, some batches set non-nullable fields as non-nullable, whereas other
@@ -716,28 +787,27 @@ async def insert_into_s3_activity(inputs: S3InsertInputs) -> RecordsCompleted:
[field.with_nullable(True) for field in record_batch_schema]
)
- async with s3_upload as s3_upload:
- consumer = S3Consumer(
- heartbeater=heartbeater,
- heartbeat_details=details,
- data_interval_end=data_interval_end,
- data_interval_start=data_interval_start,
- writer_format=WriterFormat.from_str(inputs.file_format, "S3"),
- s3_upload=s3_upload,
- )
- records_completed = await run_consumer(
- consumer=consumer,
- queue=queue,
- producer_task=producer_task,
- schema=record_batch_schema,
- max_bytes=settings.BATCH_EXPORT_S3_UPLOAD_CHUNK_SIZE_BYTES,
- include_inserted_at=True,
- writer_file_kwargs={"compression": inputs.compression},
- )
-
- await s3_upload.complete()
+ consumer = S3Consumer(
+ heartbeater=heartbeater,
+ heartbeat_details=details,
+ data_interval_end=data_interval_end,
+ data_interval_start=data_interval_start,
+ writer_format=WriterFormat.from_str(inputs.file_format, "S3"),
+ s3_upload=s3_upload,
+ s3_inputs=inputs,
+ )
+ await run_consumer(
+ consumer=consumer,
+ queue=queue,
+ producer_task=producer_task,
+ schema=record_batch_schema,
+ max_bytes=settings.BATCH_EXPORT_S3_UPLOAD_CHUNK_SIZE_BYTES,
+ include_inserted_at=True,
+ writer_file_kwargs={"compression": inputs.compression},
+ max_file_size_bytes=inputs.max_file_size_mb * 1024 * 1024 if inputs.max_file_size_mb else 0,
+ )
- return records_completed
+ return details.records_completed
@workflow.defn(name="s3-export", failure_exception_types=[workflow.NondeterminismError])
@@ -805,6 +875,7 @@ async def run(self, inputs: S3BatchExportInputs):
encryption=inputs.encryption,
kms_key_id=inputs.kms_key_id,
file_format=inputs.file_format,
+ max_file_size_mb=inputs.max_file_size_mb,
run_id=run_id,
is_backfill=inputs.is_backfill,
batch_export_model=inputs.batch_export_model,
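
To summarise the multi-file behaviour added above: `max_file_size_mb` becomes the writer's hard-flush threshold in bytes, and the heartbeat's `files_uploaded` counter doubles as the file number to resume from, which is what `get_s3_key` appends to the key. A small sketch of that arithmetic, using hypothetical values:

```python
# Illustrative only: how the new inputs map onto writer and key parameters.
max_file_size_mb: int | None = 50  # hypothetical S3InsertInputs.max_file_size_mb

# 0 disables hard flushing, i.e. the whole run stays in a single multipart upload.
max_file_size_bytes = max_file_size_mb * 1024 * 1024 if max_file_size_mb else 0

# On resume, the heartbeat's files_uploaded counter is the next file number.
files_uploaded = 2  # hypothetical value recovered from S3HeartbeatDetails
file_number = files_uploaded

base_file_name = "2024-01-01T00:00:00-2024-01-01T01:00:00"
if max_file_size_mb is not None:
    # Matches get_s3_key: only multi-file exports get the numeric suffix,
    # so single-file exports keep their original key names.
    base_file_name = f"{base_file_name}-{file_number}"
```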
diff --git a/posthog/temporal/batch_exports/snowflake_batch_export.py b/posthog/temporal/batch_exports/snowflake_batch_export.py
index 8887fcd52a317..ebdea7749e014 100644
--- a/posthog/temporal/batch_exports/snowflake_batch_export.py
+++ b/posthog/temporal/batch_exports/snowflake_batch_export.py
@@ -12,7 +12,7 @@
import snowflake.connector
from django.conf import settings
from snowflake.connector.connection import SnowflakeConnection
-from snowflake.connector.errors import OperationalError, InterfaceError
+from snowflake.connector.errors import InterfaceError, OperationalError
from temporalio import activity, workflow
from temporalio.common import RetryPolicy
@@ -31,32 +31,43 @@
default_fields,
execute_batch_export_insert_activity,
get_data_interval,
- iter_model_records,
start_batch_export_run,
)
-from posthog.temporal.batch_exports.metrics import (
- get_bytes_exported_metric,
- get_rows_exported_metric,
+from posthog.temporal.batch_exports.heartbeat import (
+ BatchExportRangeHeartbeatDetails,
+ DateRange,
+ should_resume_from_activity_heartbeat,
+)
+from posthog.temporal.batch_exports.spmc import (
+ Consumer,
+ Producer,
+ RecordBatchQueue,
+ run_consumer,
+ wait_for_schema_or_producer,
)
from posthog.temporal.batch_exports.temporary_file import (
BatchExportTemporaryFile,
- JSONLBatchExportWriter,
+ WriterFormat,
)
from posthog.temporal.batch_exports.utils import (
JsonType,
- apeek_first_and_rewind,
- cast_record_batch_json_columns,
set_status_to_running_task,
)
from posthog.temporal.common.clickhouse import get_client
from posthog.temporal.common.heartbeat import Heartbeater
from posthog.temporal.common.logger import bind_temporal_worker_logger
-from posthog.temporal.batch_exports.heartbeat import (
- BatchExportRangeHeartbeatDetails,
- DateRange,
- HeartbeatParseError,
- should_resume_from_activity_heartbeat,
-)
+
+NON_RETRYABLE_ERROR_TYPES = [
+ # Raised when we cannot connect to Snowflake.
+ "DatabaseError",
+ # Raised by Snowflake when a query cannot be compiled.
+ # Usually this means we don't have table permissions or something doesn't exist (db, schema).
+ "ProgrammingError",
+ # Raised by Snowflake with an incorrect account name.
+ "ForbiddenError",
+ # Our own exception when we can't connect to Snowflake, usually due to invalid parameters.
+ "SnowflakeConnectionError",
+]
class SnowflakeFileNotUploadedError(Exception):
@@ -91,37 +102,9 @@ class SnowflakeRetryableConnectionError(Exception):
@dataclasses.dataclass
class SnowflakeHeartbeatDetails(BatchExportRangeHeartbeatDetails):
- """The Snowflake batch export details included in every heartbeat.
-
- Attributes:
- file_no: The file number of the last file we managed to upload.
- """
-
- file_no: int = 0
-
- @classmethod
- def deserialize_details(cls, details: collections.abc.Sequence[typing.Any]) -> dict[str, typing.Any]:
- """Attempt to initialize HeartbeatDetails from an activity's details."""
- file_no = 0
- remaining = super().deserialize_details(details)
-
- if len(remaining["_remaining"]) == 0:
- return {"file_no": 0, **remaining}
-
- first_detail = remaining["_remaining"][0]
- remaining["_remaining"] = remaining["_remaining"][1:]
-
- try:
- file_no = int(first_detail)
- except (TypeError, ValueError) as e:
- raise HeartbeatParseError("file_no") from e
-
- return {"file_no": file_no, **remaining}
+ """The Snowflake batch export details included in every heartbeat."""
- def serialize_details(self) -> tuple[typing.Any, ...]:
- """Attempt to initialize HeartbeatDetails from an activity's details."""
- serialized_parent_details = super().serialize_details()
- return (*serialized_parent_details[:-1], self.file_no, self._remaining)
+ pass
@dataclasses.dataclass
@@ -344,7 +327,6 @@ async def put_file_to_snowflake_table(
file: BatchExportTemporaryFile,
table_stage_prefix: str,
table_name: str,
- file_no: int,
):
"""Executes a PUT query using the provided cursor to the provided table_name.
@@ -352,14 +334,9 @@ async def put_file_to_snowflake_table(
call to run_in_executor: Since execute ends up boiling down to blocking IO (HTTP request),
the event loop should not be locked up.
- We add a file_no to the file_name when executing PUT as Snowflake will reject any files with the same
- name. Since batch exports re-use the same file, our name does not change, but we don't want Snowflake
- to reject or overwrite our new data.
-
Args:
file: The name of the local file to PUT.
table_name: The name of the Snowflake table where to PUT the file.
- file_no: An int to identify which file number this is.
Raises:
TypeError: If we don't get a tuple back from Snowflake (should never happen).
@@ -371,7 +348,7 @@ async def put_file_to_snowflake_table(
# So we ask mypy to be nice with us.
reader = io.BufferedReader(file) # type: ignore
query = f"""
- PUT file://{file.name}_{file_no}.jsonl '@%"{table_name}"/{table_stage_prefix}'
+ PUT file://{file.name} '@%"{table_name}"/{table_stage_prefix}'
"""
with self.connection.cursor() as cursor:
@@ -518,6 +495,61 @@ def snowflake_default_fields() -> list[BatchExportField]:
return batch_export_fields
+class SnowflakeConsumer(Consumer):
+ def __init__(
+ self,
+ heartbeater: Heartbeater,
+ heartbeat_details: SnowflakeHeartbeatDetails,
+ data_interval_start: dt.datetime | str | None,
+ data_interval_end: dt.datetime | str,
+ writer_format: WriterFormat,
+ snowflake_client: SnowflakeClient,
+ snowflake_table: str,
+ snowflake_table_stage_prefix: str,
+ ):
+ super().__init__(
+ heartbeater=heartbeater,
+ heartbeat_details=heartbeat_details,
+ data_interval_start=data_interval_start,
+ data_interval_end=data_interval_end,
+ writer_format=writer_format,
+ )
+ self.heartbeat_details: SnowflakeHeartbeatDetails = heartbeat_details
+ self.snowflake_table = snowflake_table
+ self.snowflake_client = snowflake_client
+ self.snowflake_table_stage_prefix = snowflake_table_stage_prefix
+
+ async def flush(
+ self,
+ batch_export_file: BatchExportTemporaryFile,
+ records_since_last_flush: int,
+ bytes_since_last_flush: int,
+ flush_counter: int,
+ last_date_range: DateRange,
+ is_last: bool,
+ error: Exception | None,
+ ):
+ await self.logger.ainfo(
+ "Putting file %s containing %s records with size %s bytes",
+ flush_counter,
+ records_since_last_flush,
+ bytes_since_last_flush,
+ )
+
+ await self.snowflake_client.put_file_to_snowflake_table(
+ batch_export_file,
+ self.snowflake_table_stage_prefix,
+ self.snowflake_table,
+ )
+
+ await self.logger.adebug("Loaded %s to Snowflake table '%s'", records_since_last_flush, self.snowflake_table)
+ self.rows_exported_counter.add(records_since_last_flush)
+ self.bytes_exported_counter.add(bytes_since_last_flush)
+
+ self.heartbeat_details.records_completed += records_since_last_flush
+ self.heartbeat_details.track_done_range(last_date_range, self.data_interval_start)
+
+
def get_snowflake_fields_from_record_schema(
record_schema: pa.Schema, known_variant_columns: list[str]
) -> list[SnowflakeField]:
@@ -594,42 +626,63 @@ async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs) -> Recor
details = SnowflakeHeartbeatDetails()
done_ranges: list[DateRange] = details.done_ranges
- if done_ranges:
- data_interval_start: str | None = done_ranges[-1][1].isoformat()
- else:
- data_interval_start = inputs.data_interval_start
-
- current_flush_counter = details.file_no
-
- rows_exported = get_rows_exported_metric()
- bytes_exported = get_bytes_exported_metric()
model: BatchExportModel | BatchExportSchema | None = None
if inputs.batch_export_schema is None and "batch_export_model" in {
field.name for field in dataclasses.fields(inputs)
}:
model = inputs.batch_export_model
+ if model is not None:
+ model_name = model.name
+ extra_query_parameters = model.schema["values"] if model.schema is not None else None
+ fields = model.schema["fields"] if model.schema is not None else None
+ else:
+ model_name = "events"
+ extra_query_parameters = None
+ fields = None
else:
model = inputs.batch_export_schema
+ model_name = "custom"
+ extra_query_parameters = model["values"] if model is not None else {}
+ fields = model["fields"] if model is not None else None
- records_iterator = iter_model_records(
- client=client,
- model=model,
+ data_interval_start = (
+ dt.datetime.fromisoformat(inputs.data_interval_start) if inputs.data_interval_start else None
+ )
+ data_interval_end = dt.datetime.fromisoformat(inputs.data_interval_end)
+ full_range = (data_interval_start, data_interval_end)
+
+ queue = RecordBatchQueue(max_size_bytes=settings.BATCH_EXPORT_SNOWFLAKE_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES)
+ producer = Producer(clickhouse_client=client)
+ producer_task = producer.start(
+ queue=queue,
+ model_name=model_name,
+ is_backfill=inputs.is_backfill,
team_id=inputs.team_id,
- interval_start=data_interval_start,
- interval_end=inputs.data_interval_end,
+ full_range=full_range,
+ done_ranges=done_ranges,
+ fields=fields,
+ destination_default_fields=snowflake_default_fields(),
exclude_events=inputs.exclude_events,
include_events=inputs.include_events,
- destination_default_fields=snowflake_default_fields(),
- is_backfill=inputs.is_backfill,
+ extra_query_parameters=extra_query_parameters,
+ )
+ records_completed = 0
+
+ record_batch_schema = await wait_for_schema_or_producer(queue, producer_task)
+ if record_batch_schema is None:
+ return records_completed
+
+ record_batch_schema = pa.schema(
+ # NOTE: For some reason, some record batches set their fields as non-nullable, whereas other
+ # record batches have them as nullable.
+ # Until we figure it out, we set all fields to nullable. There are some fields we know
+ # are not nullable, but I'm opting for the more flexible option until we figure out why schemas differ
+ # between batches.
+ [field.with_nullable(True) for field in record_batch_schema if field.name != "_inserted_at"]
)
- first_record_batch, records_iterator = await apeek_first_and_rewind(records_iterator)
-
- if first_record_batch is None:
- return 0
known_variant_columns = ["properties", "people_set", "people_set_once", "person_properties"]
- first_record_batch = cast_record_batch_json_columns(first_record_batch, json_columns=known_variant_columns)
if model is None or (isinstance(model, BatchExportModel) and model.name == "events"):
table_fields = [
@@ -647,10 +700,8 @@ async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs) -> Recor
]
else:
- column_names = [column for column in first_record_batch.schema.names if column != "_inserted_at"]
- record_schema = first_record_batch.select(column_names).schema
table_fields = get_snowflake_fields_from_record_schema(
- record_schema,
+ record_batch_schema,
known_variant_columns=known_variant_columns,
)
@@ -671,57 +722,30 @@ async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs) -> Recor
stagle_table_name, data_interval_end_str, table_fields, create=requires_merge, delete=requires_merge
) as snow_stage_table,
):
- record_columns = [field[0] for field in table_fields]
- record_schema = pa.schema(
- [field.with_nullable(True) for field in first_record_batch.select(record_columns).schema]
+ consumer = SnowflakeConsumer(
+ heartbeater=heartbeater,
+ heartbeat_details=details,
+ data_interval_end=data_interval_end,
+ data_interval_start=data_interval_start,
+ writer_format=WriterFormat.JSONL,
+ snowflake_client=snow_client,
+ snowflake_table=snow_stage_table if requires_merge else snow_table,
+ snowflake_table_stage_prefix=data_interval_end_str,
)
-
- async def flush_to_snowflake(
- local_results_file,
- records_since_last_flush,
- bytes_since_last_flush,
- flush_counter: int,
- last_date_range: DateRange,
- last: bool,
- error: Exception | None,
- ):
- logger.info(
- "Putting %sfile %s containing %s records with size %s bytes",
- "last " if last else "",
- flush_counter,
- records_since_last_flush,
- bytes_since_last_flush,
- )
-
- table = snow_stage_table if requires_merge else snow_table
-
- await snow_client.put_file_to_snowflake_table(
- local_results_file, data_interval_end_str, table, flush_counter
- )
- rows_exported.add(records_since_last_flush)
- bytes_exported.add(bytes_since_last_flush)
-
- details.track_done_range(last_date_range, data_interval_start)
- details.file_no = flush_counter
- heartbeater.set_from_heartbeat_details(details)
-
- writer = JSONLBatchExportWriter(
+ records_completed = await run_consumer(
+ consumer=consumer,
+ queue=queue,
+ producer_task=producer_task,
+ schema=record_batch_schema,
max_bytes=settings.BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES,
- flush_callable=flush_to_snowflake,
+ json_columns=known_variant_columns,
+ multiple_files=True,
)
- async with writer.open_temporary_file(current_flush_counter):
- async for record_batch in records_iterator:
- record_batch = cast_record_batch_json_columns(record_batch, json_columns=known_variant_columns)
-
- await writer.write_record_batch(record_batch)
-
- details.complete_done_ranges(inputs.data_interval_end)
- heartbeater.set_from_heartbeat_details(details)
-
await snow_client.copy_loaded_files_to_snowflake_table(
snow_stage_table if requires_merge else snow_table, data_interval_end_str
)
+
if requires_merge:
merge_key = (
("team_id", "INT64"),
@@ -734,7 +758,7 @@ async def flush_to_snowflake(
merge_key=merge_key,
)
- return writer.records_total
+ return records_completed
@workflow.defn(name="snowflake-export", failure_exception_types=[workflow.NondeterminismError])
@@ -811,16 +835,6 @@ async def run(self, inputs: SnowflakeBatchExportInputs):
insert_into_snowflake_activity,
insert_inputs,
interval=inputs.interval,
- non_retryable_error_types=[
- # Raised when we cannot connect to Snowflake.
- "DatabaseError",
- # Raised by Snowflake when a query cannot be compiled.
- # Usually this means we don't have table permissions or something doesn't exist (db, schema).
- "ProgrammingError",
- # Raised by Snowflake with an incorrect account name.
- "ForbiddenError",
- # Our own exception when we can't connect to Snowflake, usually due to invalid parameters.
- "SnowflakeConnectionError",
- ],
+ non_retryable_error_types=NON_RETRYABLE_ERROR_TYPES,
finish_inputs=finish_inputs,
)
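
The `file_no` removal above works because the consumer now runs with `multiple_files=True`: every flush closes the current temporary file and creates a fresh one, and (assuming `BatchExportTemporaryFile` is backed by Python's `tempfile` module, as its name suggests) each file gets a unique name, so the plain `PUT file://{file.name}` can no longer collide in the table stage. A standalone illustration of that property, not PostHog code:

```python
import tempfile

# Two temporary files created back to back never share a name, so consecutive
# PUTs to the Snowflake stage refer to distinct files without a manual suffix.
with tempfile.NamedTemporaryFile() as first, tempfile.NamedTemporaryFile() as second:
    assert first.name != second.name
```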
diff --git a/posthog/temporal/batch_exports/spmc.py b/posthog/temporal/batch_exports/spmc.py
index f2388a4ed5fa1..d4f8156406bb3 100644
--- a/posthog/temporal/batch_exports/spmc.py
+++ b/posthog/temporal/batch_exports/spmc.py
@@ -7,7 +7,6 @@
import uuid
import pyarrow as pa
-import structlog
import temporalio.common
from django.conf import settings
@@ -42,8 +41,7 @@
)
from posthog.temporal.common.clickhouse import ClickHouseClient
from posthog.temporal.common.heartbeat import Heartbeater
-
-logger = structlog.get_logger()
+from posthog.temporal.common.logger import get_internal_logger
class RecordBatchQueue(asyncio.Queue):
@@ -123,6 +121,7 @@ async def raise_on_task_failure(task: asyncio.Task) -> None:
return
exc = task.exception()
+ logger = get_internal_logger()
await logger.aexception("%s task failed", task.get_name(), exc_info=exc)
raise RecordBatchTaskError() from exc
@@ -187,7 +186,7 @@ def __init__(
self.data_interval_start = data_interval_start
self.data_interval_end = data_interval_end
self.writer_format = writer_format
- self.logger = logger
+ self.logger = get_internal_logger()
@property
def rows_exported_counter(self) -> temporalio.common.MetricCounter:
@@ -209,6 +208,7 @@ def create_consumer_task(
multiple_files: bool = False,
include_inserted_at: bool = False,
task_name: str = "record_batch_consumer",
+ max_file_size_bytes: int = 0,
**kwargs,
) -> asyncio.Task:
"""Create a record batch consumer task."""
@@ -221,6 +221,7 @@ def create_consumer_task(
json_columns=json_columns,
multiple_files=multiple_files,
include_inserted_at=include_inserted_at,
+ max_file_size_bytes=max_file_size_bytes,
**kwargs,
),
name=task_name,
@@ -264,6 +265,7 @@ async def start(
json_columns: collections.abc.Sequence[str],
multiple_files: bool = False,
include_inserted_at: bool = False,
+ max_file_size_bytes: int = 0,
**kwargs,
) -> int:
"""Start consuming record batches from queue.
@@ -281,7 +283,14 @@ async def start(
Total number of records in all consumed record batches.
"""
schema = cast_record_batch_schema_json_columns(schema, json_columns=json_columns)
- writer = get_batch_export_writer(self.writer_format, self.flush, schema=schema, max_bytes=max_bytes, **kwargs)
+ writer = get_batch_export_writer(
+ self.writer_format,
+ self.flush,
+ schema=schema,
+ max_bytes=max_bytes,
+ max_file_size_bytes=max_file_size_bytes,
+ **kwargs,
+ )
record_batches_count = 0
record_batches_count_total = 0
@@ -298,16 +307,15 @@ async def start(
await writer.write_record_batch(record_batch, flush=False, include_inserted_at=include_inserted_at)
- if writer.should_flush():
+ if writer.should_flush() or writer.should_hard_flush():
await self.logger.adebug(
"Flushing %s records from %s record batches", writer.records_since_last_flush, record_batches_count
)
records_count += writer.records_since_last_flush
- if multiple_files:
- await writer.close_temporary_file()
- writer._batch_export_file = await asyncio.to_thread(writer.create_temporary_file)
+ if multiple_files or writer.should_hard_flush():
+ await writer.hard_flush()
else:
await writer.flush()
@@ -315,6 +323,8 @@ async def start(
queue.task_done()
record_batches_count = 0
+ self.heartbeater.set_from_heartbeat_details(self.heartbeat_details)
+
records_count += writer.records_since_last_flush
await self.logger.adebug(
@@ -324,10 +334,15 @@ async def start(
)
await writer.close_temporary_file()
+ await self.close()
self.heartbeater.set_from_heartbeat_details(self.heartbeat_details)
return records_count
+ async def close(self):
+ """This method can be overridden by subclasses to perform any additional cleanup."""
+ pass
+
async def generate_record_batches_from_queue(
self,
queue: RecordBatchQueue,
@@ -379,6 +394,7 @@ async def run_consumer(
multiple_files: bool = False,
writer_file_kwargs: collections.abc.Mapping[str, typing.Any] | None = None,
include_inserted_at: bool = False,
+ max_file_size_bytes: int = 0,
**kwargs,
) -> int:
"""Run one record batch consumer.
@@ -418,7 +434,7 @@ def consumer_done_callback(task: asyncio.Task):
consumer_tasks_pending.remove(task)
consumer_tasks_done.add(task)
- await logger.adebug("Starting record batch consumer")
+ await consumer.logger.adebug("Starting record batch consumer")
consumer_task = consumer.create_consumer_task(
queue=queue,
@@ -428,6 +444,7 @@ def consumer_done_callback(task: asyncio.Task):
json_columns=json_columns,
multiple_files=multiple_files,
include_inserted_at=include_inserted_at,
+ max_file_size_bytes=max_file_size_bytes,
**writer_file_kwargs or {},
)
consumer_tasks_pending.add(consumer_task)
@@ -442,7 +459,7 @@ def consumer_done_callback(task: asyncio.Task):
raise consumer_task_exception
await raise_on_task_failure(producer_task)
- await logger.adebug("Successfully finished record batch consumer")
+ await consumer.logger.adebug("Successfully finished record batch consumer")
consumer.complete_heartbeat()
@@ -491,6 +508,7 @@ class Producer:
def __init__(self, clickhouse_client: ClickHouseClient):
self.clickhouse_client = clickhouse_client
self._task: asyncio.Task | None = None
+ self.logger = get_internal_logger()
@property
def task(self) -> asyncio.Task:
@@ -509,6 +527,8 @@ def start(
fields: list[BatchExportField] | None = None,
destination_default_fields: list[BatchExportField] | None = None,
use_latest_schema: bool = False,
+ max_record_batch_size_bytes: int = 0,
+ min_records_per_batch: int = 100,
**parameters,
) -> asyncio.Task:
if fields is None:
@@ -575,7 +595,13 @@ def start(
self._task = asyncio.create_task(
self.produce_batch_export_record_batches_from_range(
- query=query, full_range=full_range, done_ranges=done_ranges, queue=queue, query_parameters=parameters
+ query=query,
+ full_range=full_range,
+ done_ranges=done_ranges,
+ queue=queue,
+ query_parameters=parameters,
+ max_record_batch_size_bytes=max_record_batch_size_bytes,
+ min_records_per_batch=min_records_per_batch,
),
name="record_batch_producer",
)
@@ -589,16 +615,84 @@ async def produce_batch_export_record_batches_from_range(
done_ranges: collections.abc.Sequence[tuple[dt.datetime, dt.datetime]],
queue: RecordBatchQueue,
query_parameters: dict[str, typing.Any],
+ max_record_batch_size_bytes: int = 0,
+ min_records_per_batch: int = 100,
):
+ """Produce Arrow record batches for a given date range into `queue`.
+
+ Arguments:
+ query: The ClickHouse query used to obtain record batches. The query should have a
+ `FORMAT ArrowStream` clause, although we do not enforce this.
+ full_range: The full date range of record batches to produce.
+ done_ranges: Date ranges of record batches that have already been exported, and thus
+ should be skipped.
+ queue: The queue where to produce record batches.
+ query_parameters: Additional query parameters.
+ max_record_batch_size_bytes: The max size in bytes of a record batch to insert in `queue`.
+ If a record batch is larger than this, `slice_record_batch` will be used to slice it
+ into smaller record batches.
+ min_records_per_batch: If slicing a record batch, each slice should contain at least
+ this number of records.
+ """
for interval_start, interval_end in generate_query_ranges(full_range, done_ranges):
if interval_start is not None:
query_parameters["interval_start"] = interval_start.strftime("%Y-%m-%d %H:%M:%S.%f")
query_parameters["interval_end"] = interval_end.strftime("%Y-%m-%d %H:%M:%S.%f")
query_id = uuid.uuid4()
- await self.clickhouse_client.aproduce_query_as_arrow_record_batches(
- query, queue=queue, query_parameters=query_parameters, query_id=str(query_id)
- )
+ try:
+ async for record_batch in self.clickhouse_client.astream_query_as_arrow(
+ query, query_parameters=query_parameters, query_id=str(query_id)
+ ):
+ for record_batch_slice in slice_record_batch(
+ record_batch, max_record_batch_size_bytes, min_records_per_batch
+ ):
+ await queue.put(record_batch_slice)
+
+ except Exception as e:
+ await self.logger.aexception("Unexpected error occurred while producing record batches", exc_info=e)
+ raise
+
+
+def slice_record_batch(
+ record_batch: pa.RecordBatch, max_record_batch_size_bytes: int = 0, min_records_per_batch: int = 100
+) -> typing.Iterator[pa.RecordBatch]:
+ """Slice a large Arrow record batch into one or more record batches.
+
+ The underlying call to `pa.RecordBatch.slice` is a zero-copy operation, so the
+ memory footprint of slicing is very low, beyond some additional metadata
+ required for the slice.
+
+ Arguments:
+ record_batch: The record batch to slice.
+ max_record_batch_size_bytes: The max size in bytes of a record batch to
+ yield. If the provided `record_batch` is larger than this, then it
+ will be sliced into multiple record batches.
+ min_records_per_batch: Each slice yielded should contain at least
+ this number of records.
+ """
+ total_rows = record_batch.num_rows
+ yielded_rows = 0
+ offset = 0
+ length = total_rows
+
+ if max_record_batch_size_bytes <= 0 or max_record_batch_size_bytes > record_batch.nbytes:
+ yield record_batch
+ return
+
+ while yielded_rows < total_rows:
+ sliced_record_batch = record_batch.slice(offset=offset, length=length)
+ current_rows = sliced_record_batch.num_rows
+
+ if max_record_batch_size_bytes < sliced_record_batch.nbytes and min_records_per_batch < current_rows:
+ length -= 1
+ continue
+
+ yield sliced_record_batch
+
+ yielded_rows += current_rows
+ offset = offset + length
+ length = total_rows - yielded_rows
def generate_query_ranges(
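
A short usage sketch for `slice_record_batch`, assuming it is importable from the module above; the values are illustrative:

```python
import pyarrow as pa

from posthog.temporal.batch_exports.spmc import slice_record_batch

batch = pa.RecordBatch.from_pydict(
    {"event": ["$pageview"] * 1_000, "value": list(range(1_000))}
)

# Ask for roughly 1 KiB slices, but never fewer than 100 rows per slice.
slices = list(
    slice_record_batch(batch, max_record_batch_size_bytes=1024, min_records_per_batch=100)
)

# Slicing is zero-copy and never drops rows.
assert sum(s.num_rows for s in slices) == batch.num_rows
```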
diff --git a/posthog/temporal/batch_exports/temporary_file.py b/posthog/temporal/batch_exports/temporary_file.py
index de04733eb80c1..bafda0173eaf2 100644
--- a/posthog/temporal/batch_exports/temporary_file.py
+++ b/posthog/temporal/batch_exports/temporary_file.py
@@ -291,6 +291,10 @@ class BatchExportWriter(abc.ABC):
upon reaching or surpassing this threshold. Keep in mind we write on a RecordBatch
per RecordBatch basis, which means the threshold will be surpassed by at most the
size of a RecordBatch before a flush occurs.
+ max_file_size_bytes: Flush the temporary file with the provided `flush_callable`
+ upon reaching or surpassing this threshold. This results in a 'hard flush' of the
+ temporary file, which means the file will be closed and a new one will be created.
+ If set to 0, this will be ignored.
flush_callable: A callback to flush the temporary file when `max_bytes` is reached.
The temporary file will be reset after calling `flush_callable`. When calling
`flush_callable` the following positional arguments will be passed: The temporary file
@@ -311,10 +315,12 @@ def __init__(
self,
flush_callable: FlushCallable,
max_bytes: int,
+ max_file_size_bytes: int = 0,
file_kwargs: collections.abc.Mapping[str, typing.Any] | None = None,
):
self.flush_callable = flush_callable
self.max_bytes = max_bytes
+ self.max_file_size_bytes = max_file_size_bytes
self.file_kwargs: collections.abc.Mapping[str, typing.Any] = file_kwargs or {}
self._batch_export_file: BatchExportTemporaryFile | None = None
@@ -447,6 +453,9 @@ async def write_record_batch(
def should_flush(self) -> bool:
return self.bytes_since_last_flush >= self.max_bytes
+ def should_hard_flush(self) -> bool:
+ return self.max_file_size_bytes > 0 and self.bytes_total >= self.max_file_size_bytes
+
async def flush(self, is_last: bool = False) -> None:
"""Call the provided `flush_callable` and reset underlying file.
@@ -477,6 +486,15 @@ async def flush(self, is_last: bool = False) -> None:
self.start_at_since_last_flush = None
self.end_at_since_last_flush = None
+ async def hard_flush(self):
+ """Flush the underlying file by closing the temporary file and creating a new one.
+
+ This is useful if we want to write a whole file rather than, for example,
+ flushing only part of it.
+ """
+ await self.close_temporary_file()
+ self._batch_export_file = await asyncio.to_thread(self.create_temporary_file)
+
class WriterFormat(enum.StrEnum):
JSONL = enum.auto()
@@ -499,12 +517,15 @@ def from_str(format_str: str, destination: str):
raise UnsupportedFileFormatError(format_str, destination)
-def get_batch_export_writer(writer_format: WriterFormat, flush_callable: FlushCallable, max_bytes: int, **kwargs):
+def get_batch_export_writer(
+ writer_format: WriterFormat, flush_callable: FlushCallable, max_bytes: int, max_file_size_bytes: int = 0, **kwargs
+):
match writer_format:
case WriterFormat.CSV:
return CSVBatchExportWriter(
max_bytes=max_bytes,
flush_callable=flush_callable,
+ max_file_size_bytes=max_file_size_bytes,
**kwargs,
)
@@ -512,6 +533,7 @@ def get_batch_export_writer(writer_format: WriterFormat, flush_callable: FlushCa
return JSONLBatchExportWriter(
max_bytes=max_bytes,
flush_callable=flush_callable,
+ max_file_size_bytes=max_file_size_bytes,
**kwargs,
)
@@ -519,6 +541,7 @@ def get_batch_export_writer(writer_format: WriterFormat, flush_callable: FlushCa
return ParquetBatchExportWriter(
max_bytes=max_bytes,
flush_callable=flush_callable,
+ max_file_size_bytes=max_file_size_bytes,
**kwargs,
)
@@ -545,11 +568,13 @@ def __init__(
schema: pa.Schema | None = None,
compression: None | str = None,
default: typing.Callable = str,
+ max_file_size_bytes: int = 0,
):
super().__init__(
max_bytes=max_bytes,
flush_callable=flush_callable,
file_kwargs={"compression": compression},
+ max_file_size_bytes=max_file_size_bytes,
)
self.default = default
@@ -622,11 +647,13 @@ def __init__(
line_terminator: str = "\n",
quoting=csv.QUOTE_NONE,
compression: str | None = None,
+ max_file_size_bytes: int = 0,
):
super().__init__(
max_bytes=max_bytes,
flush_callable=flush_callable,
file_kwargs={"compression": compression},
+ max_file_size_bytes=max_file_size_bytes,
)
self.field_names = field_names
self.extras_action: typing.Literal["raise", "ignore"] = extras_action
@@ -684,16 +711,17 @@ def __init__(
schema: pa.Schema,
compression: str | None = "snappy",
compression_level: int | None = None,
+ max_file_size_bytes: int = 0,
):
super().__init__(
max_bytes=max_bytes,
flush_callable=flush_callable,
file_kwargs={"compression": None}, # ParquetWriter handles compression
+ max_file_size_bytes=max_file_size_bytes,
)
self.schema = schema
self.compression = compression
self.compression_level = compression_level
-
self._parquet_writer: pq.ParquetWriter | None = None
@property
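
For clarity on how the two thresholds interact: `max_bytes` controls how often the flush callback runs, while `max_file_size_bytes` bounds the whole temporary file and triggers a hard flush (close the file, start a new one). A condensed sketch of the decision, mirroring `should_flush`/`should_hard_flush` above with hypothetical counters:

```python
# Illustrative only: the writer's flush decision with hypothetical counters.
max_bytes = 50 * 1024 * 1024              # per-flush threshold
max_file_size_bytes = 512 * 1024 * 1024   # per-file threshold; 0 disables hard flushes

bytes_since_last_flush = 64 * 1024 * 1024
bytes_total = 600 * 1024 * 1024

should_flush = bytes_since_last_flush >= max_bytes
should_hard_flush = max_file_size_bytes > 0 and bytes_total >= max_file_size_bytes

if should_hard_flush:
    print("hard flush: close the temporary file and create a new one")
elif should_flush:
    print("flush: run the flush callback and keep writing to the same file")
```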
diff --git a/posthog/temporal/common/logger.py b/posthog/temporal/common/logger.py
index db3767848770a..0b25c072fe31c 100644
--- a/posthog/temporal/common/logger.py
+++ b/posthog/temporal/common/logger.py
@@ -1,18 +1,18 @@
import asyncio
-from contextvars import copy_context
import json
import logging
+import queue as sync_queue
import ssl
import threading
import uuid
-from kafka import KafkaProducer
-import queue as sync_queue
+from contextvars import copy_context
import aiokafka
import structlog
import temporalio.activity
import temporalio.workflow
from django.conf import settings
+from kafka import KafkaProducer
from structlog.processors import EventRenamer
from structlog.typing import FilteringBoundLogger
@@ -21,6 +21,18 @@
BACKGROUND_LOGGER_TASKS = set()
+def get_internal_logger():
+ """Return a logger for internal use, where logs do not get sent to Kafka.
+
+ We attach the Temporal context to the logger for easier debugging (for
+ example, we can track things like the workflow id across log entries).
+ """
+ logger = structlog.get_logger()
+ temporal_context = get_temporal_context()
+
+ return logger.new(**temporal_context)
+
+
async def bind_temporal_worker_logger(team_id: int, destination: str | None = None) -> FilteringBoundLogger:
"""Return a bound logger for Temporal Workers."""
if not structlog.is_configured():
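
A minimal usage sketch for the internal logger (the helper name is hypothetical; `adebug` is the async logging method already used throughout these modules):

```python
from posthog.temporal.common.logger import get_internal_logger


async def log_pending_items(pending: int) -> None:
    # Hypothetical helper: the logger is bound to the current Temporal context,
    # so entries carry e.g. the workflow id, and are not shipped to Kafka.
    logger = get_internal_logger()
    await logger.adebug("There are %s pending items", pending)
```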
diff --git a/posthog/temporal/common/posthog_client.py b/posthog/temporal/common/posthog_client.py
new file mode 100644
index 0000000000000..1cac853ea849d
--- /dev/null
+++ b/posthog/temporal/common/posthog_client.py
@@ -0,0 +1,53 @@
+from typing import Any, Optional
+from posthoganalytics.client import Client
+from temporalio.worker import (
+ ActivityInboundInterceptor,
+ ExecuteActivityInput,
+ ExecuteWorkflowInput,
+ Interceptor,
+ WorkflowInboundInterceptor,
+ WorkflowInterceptorClassInput,
+)
+
+
+class _PostHogClientActivityInboundInterceptor(ActivityInboundInterceptor):
+ async def execute_activity(self, input: ExecuteActivityInput) -> Any:
+ ph_client = Client(api_key="sTMFPsFhdP1Ssg", enable_exception_autocapture=True)
+
+ try:
+ activity_result = await super().execute_activity(input)
+ except:
+ raise
+ finally:
+ ph_client.flush()
+
+ return activity_result
+
+
+class _PostHogClientWorkflowInterceptor(WorkflowInboundInterceptor):
+ async def execute_workflow(self, input: ExecuteWorkflowInput) -> Any:
+ ph_client = Client(api_key="sTMFPsFhdP1Ssg", enable_exception_autocapture=True)
+
+ try:
+ workflow_result = await super().execute_workflow(input)
+ except:
+ raise
+ finally:
+ ph_client.flush()
+
+ return workflow_result
+
+
+class PostHogClientInterceptor(Interceptor):
+ """PostHog Interceptor class which will report workflow & activity exceptions to PostHog"""
+
+ def intercept_activity(self, next: ActivityInboundInterceptor) -> ActivityInboundInterceptor:
+ """Implementation of
+ :py:meth:`temporalio.worker.Interceptor.intercept_activity`.
+ """
+ return _PostHogClientActivityInboundInterceptor(super().intercept_activity(next))
+
+ def workflow_interceptor_class(
+ self, input: WorkflowInterceptorClassInput
+ ) -> Optional[type[WorkflowInboundInterceptor]]:
+ return _PostHogClientWorkflowInterceptor
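
Note that this patch does not yet register the interceptor anywhere (the worker change below still only passes `SentryInterceptor`), so the wiring would presumably look something like this hypothetical sketch:

```python
from temporalio.client import Client
from temporalio.worker import Worker

from posthog.temporal.common.posthog_client import PostHogClientInterceptor
from posthog.temporal.common.sentry import SentryInterceptor


def create_worker(client: Client, task_queue: str, workflows: list, activities: list) -> Worker:
    # Hypothetical wiring: Worker accepts a list of interceptors, so the PostHog
    # exception-autocapture interceptor can sit alongside the Sentry one.
    return Worker(
        client,
        task_queue=task_queue,
        workflows=workflows,
        activities=activities,
        interceptors=[SentryInterceptor(), PostHogClientInterceptor()],
    )
```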
diff --git a/posthog/temporal/common/worker.py b/posthog/temporal/common/worker.py
index f5db3d6b0417d..0abd4c19127e6 100644
--- a/posthog/temporal/common/worker.py
+++ b/posthog/temporal/common/worker.py
@@ -6,7 +6,6 @@
from temporalio.runtime import PrometheusConfig, Runtime, TelemetryConfig
from temporalio.worker import UnsandboxedWorkflowRunner, Worker
-from posthog.constants import DATA_WAREHOUSE_TASK_QUEUE_V2
from posthog.temporal.common.client import connect
from posthog.temporal.common.sentry import SentryInterceptor
@@ -35,35 +34,19 @@ async def start_worker(
client_key,
runtime=runtime,
)
- if task_queue == DATA_WAREHOUSE_TASK_QUEUE_V2:
- worker = Worker(
- client,
- task_queue=task_queue,
- workflows=workflows,
- activities=activities,
- workflow_runner=UnsandboxedWorkflowRunner(),
- graceful_shutdown_timeout=timedelta(minutes=5),
- interceptors=[SentryInterceptor()],
- activity_executor=ThreadPoolExecutor(max_workers=max_concurrent_activities or 50),
- # Only run one workflow at a time
- max_concurrent_activities=1,
- max_concurrent_workflow_task_polls=1,
- max_concurrent_workflow_tasks=1,
- max_cached_workflows=0,
- )
- else:
- worker = Worker(
- client,
- task_queue=task_queue,
- workflows=workflows,
- activities=activities,
- workflow_runner=UnsandboxedWorkflowRunner(),
- graceful_shutdown_timeout=timedelta(minutes=5),
- interceptors=[SentryInterceptor()],
- activity_executor=ThreadPoolExecutor(max_workers=max_concurrent_activities or 50),
- max_concurrent_activities=max_concurrent_activities or 50,
- max_concurrent_workflow_tasks=max_concurrent_workflow_tasks,
- )
+
+ worker = Worker(
+ client,
+ task_queue=task_queue,
+ workflows=workflows,
+ activities=activities,
+ workflow_runner=UnsandboxedWorkflowRunner(),
+ graceful_shutdown_timeout=timedelta(minutes=5),
+ interceptors=[SentryInterceptor()],
+ activity_executor=ThreadPoolExecutor(max_workers=max_concurrent_activities or 50),
+ max_concurrent_activities=max_concurrent_activities or 50,
+ max_concurrent_workflow_tasks=max_concurrent_workflow_tasks,
+ )
# catch the TERM signal, and stop the worker gracefully
# https://github.com/temporalio/sdk-python#worker-shutdown
diff --git a/posthog/temporal/data_imports/pipelines/pipeline/delta_table_subprocess.py b/posthog/temporal/data_imports/pipelines/pipeline/delta_table_subprocess.py
new file mode 100644
index 0000000000000..1c93343f6a3c3
--- /dev/null
+++ b/posthog/temporal/data_imports/pipelines/pipeline/delta_table_subprocess.py
@@ -0,0 +1,48 @@
+import argparse
+import os
+import sys
+from deltalake import DeltaTable
+
+
+def _get_credentials():
+ is_test = (
+ "test" in sys.argv
+ or sys.argv[0].endswith("pytest")
+ or os.getenv("TEST", "false").lower() in ("y", "yes", "t", "true", "on", "1")
+ )
+
+ if is_test:
+ return {
+ "endpoint_url": os.getenv("OBJECT_STORAGE_ENDPOINT", "http://localhost:19000"),
+ "aws_access_key_id": os.getenv("AIRBYTE_BUCKET_KEY", None),
+ "aws_secret_access_key": os.getenv("AIRBYTE_BUCKET_SECRET", None),
+ "region_name": os.getenv("AIRBYTE_BUCKET_REGION", None),
+ "AWS_DEFAULT_REGION": os.getenv("AIRBYTE_BUCKET_REGION", None),
+ "AWS_ALLOW_HTTP": "true",
+ "AWS_S3_ALLOW_UNSAFE_RENAME": "true",
+ }
+
+ return {
+ "aws_access_key_id": os.getenv("AIRBYTE_BUCKET_KEY", None),
+ "aws_secret_access_key": os.getenv("AIRBYTE_BUCKET_SECRET", None),
+ "region_name": os.getenv("AIRBYTE_BUCKET_REGION", None),
+ "AWS_DEFAULT_REGION": os.getenv("AIRBYTE_BUCKET_REGION", None),
+ "AWS_S3_ALLOW_UNSAFE_RENAME": "true",
+ }
+
+
+def run_operations(table_uri: str) -> None:
+ storage_options = _get_credentials()
+
+ delta_table = DeltaTable(table_uri=table_uri, storage_options=storage_options)
+ delta_table.optimize.compact()
+ delta_table.vacuum(retention_hours=24, enforce_retention_duration=False, dry_run=False)
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description="Run Delta operations")
+ parser.add_argument("--table_uri", required=True, help="S3 table_uri for the delta table")
+
+ args = parser.parse_args()
+
+ run_operations(args.table_uri)
diff --git a/posthog/temporal/data_imports/pipelines/pipeline/pipeline.py b/posthog/temporal/data_imports/pipelines/pipeline/pipeline.py
index f61e1637b9d88..49106164d7ad2 100644
--- a/posthog/temporal/data_imports/pipelines/pipeline/pipeline.py
+++ b/posthog/temporal/data_imports/pipelines/pipeline/pipeline.py
@@ -1,7 +1,9 @@
import gc
import time
from typing import Any
+import os
import pyarrow as pa
+import subprocess
from dlt.sources import DltSource, DltResource
import deltalake as deltalake
from posthog.temporal.common.logger import FilteringBoundLogger
@@ -137,10 +139,21 @@ def _post_run_operations(self, row_count: int):
self._logger.debug("No deltalake table, not continuing with post-run ops")
return
- self._logger.debug("Skipping compact and vacuuming")
- # self._logger.info("Compacting delta table")
- # delta_table.optimize.compact()
- # delta_table.vacuum(retention_hours=24, enforce_retention_duration=False, dry_run=False)
+ self._logger.debug("Spawning new process for deltatable compact and vacuuming")
+ process = subprocess.Popen(
+ [
+ "python",
+ f"{os.getcwd()}/posthog/temporal/data_imports/pipelines/pipeline/delta_table_subprocess.py",
+ "--table_uri",
+ self._delta_table_helper._get_delta_table_uri(),
+ ],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
+ stdout, stderr = process.communicate()
+
+ if process.returncode != 0:
+ raise Exception(f"Delta subprocess failed: {stderr.decode()}")
file_uris = delta_table.file_uris()
self._logger.info(f"Preparing S3 files - total parquet files: {len(file_uris)}")
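The `Popen` + `communicate()` + return-code check above can also be expressed with `subprocess.run`, which bundles the three steps into one call. A hedged sketch, with `table_uri` standing in for the value returned by `self._delta_table_helper._get_delta_table_uri()`:

    import subprocess
    import sys

    result = subprocess.run(
        [
            sys.executable,
            "posthog/temporal/data_imports/pipelines/pipeline/delta_table_subprocess.py",
            "--table_uri",
            table_uri,  # assumption: the delta table URI computed by the helper
        ],
        capture_output=True,
    )
    if result.returncode != 0:
        raise Exception(f"Delta subprocess failed: {result.stderr.decode()}")

Using `sys.executable` instead of a bare "python" pins the child process to the interpreter running the worker, which can matter in virtualenv-based deployments.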
diff --git a/posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py b/posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py
index 03c966491f650..5fef253e74501 100644
--- a/posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py
+++ b/posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py
@@ -1,4 +1,6 @@
+import decimal
import json
+import math
from typing import Any, Optional
from collections.abc import Sequence
@@ -94,6 +96,11 @@ def row_tuples_to_arrow(rows: Sequence[RowAny], columns: TTableSchemaColumns, tz
)
json_str_array = pa.array([None if s is None else json_dumps(s) for s in columnar_known_types[field.name]])
columnar_known_types[field.name] = json_str_array
+ if issubclass(py_type, decimal.Decimal):
+ # Remove any NaN values from decimal columns
+ columnar_known_types[field.name] = np.array(
+ [None if x is not None and math.isnan(x) else x for x in columnar_known_types[field.name]]
+ )
# If there are unknown type columns, first create a table to infer their types
if columnar_unknown_types:
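The `decimal.Decimal` branch above exists because a PostgreSQL `numeric` column can hold `NaN`, which does not convert cleanly into an Arrow decimal array. A small illustration of the cleanup using the same imports:

    import decimal
    import math

    import numpy as np

    values = [decimal.Decimal("1.5"), decimal.Decimal("NaN"), None]
    cleaned = np.array([None if v is not None and math.isnan(v) else v for v in values])
    # cleaned -> array([Decimal('1.5'), None, None], dtype=object)

`math.isnan` accepts `Decimal` values, so no explicit float conversion is needed before the check.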
diff --git a/posthog/temporal/data_imports/workflow_activities/import_data_sync.py b/posthog/temporal/data_imports/workflow_activities/import_data_sync.py
index a4696cf453410..3f166ec0049a9 100644
--- a/posthog/temporal/data_imports/workflow_activities/import_data_sync.py
+++ b/posthog/temporal/data_imports/workflow_activities/import_data_sync.py
@@ -51,6 +51,21 @@ def process_incremental_last_value(value: Any | None, field_type: IncrementalFie
return parser.parse(value).date()
+def _trim_source_job_inputs(source: ExternalDataSource) -> None:
+ if not source.job_inputs:
+ return
+
+ did_update_inputs = False
+ for key, value in source.job_inputs.items():
+ if isinstance(value, str):
+ if value.startswith(" ") or value.endswith(" "):
+ source.job_inputs[key] = value.strip()
+ did_update_inputs = True
+
+ if did_update_inputs:
+ source.save()
+
+
@activity.defn
def import_data_activity_sync(inputs: ImportDataActivityInputs):
logger = bind_temporal_worker_logger_sync(team_id=inputs.team_id)
@@ -73,6 +88,8 @@ def import_data_activity_sync(inputs: ImportDataActivityInputs):
dataset_name=model.folder_path(),
)
+ _trim_source_job_inputs(model.pipeline)
+
reset_pipeline = model.pipeline.job_inputs.get("reset_pipeline", "False") == "True"
schema = (
@@ -526,4 +543,5 @@ def _run(
source = ExternalDataSource.objects.get(id=inputs.source_id)
source.job_inputs.pop("reset_pipeline", None)
+
source.save()
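A quick illustration of `_trim_source_job_inputs`, assuming `source` is an already-saved `ExternalDataSource` whose inputs contain padded strings; only string values with leading or trailing spaces are rewritten, and `save()` is only called when something changed:

    source.job_inputs = {"host": " host.com ", "port": 5432, "user": "svc "}
    _trim_source_job_inputs(source)

    assert source.job_inputs["host"] == "host.com"
    assert source.job_inputs["user"] == "svc"
    assert source.job_inputs["port"] == 5432  # non-strings are left untouched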
diff --git a/posthog/temporal/tests/batch_exports/conftest.py b/posthog/temporal/tests/batch_exports/conftest.py
index 027c9c40dbdf5..2c1d6f7793d15 100644
--- a/posthog/temporal/tests/batch_exports/conftest.py
+++ b/posthog/temporal/tests/batch_exports/conftest.py
@@ -5,8 +5,10 @@
import psycopg
import pytest
import pytest_asyncio
+import temporalio.worker
from psycopg import sql
+from posthog import constants
from posthog.temporal.tests.utils.events import generate_test_events_in_clickhouse
from posthog.temporal.tests.utils.persons import (
generate_test_person_distinct_id2_in_clickhouse,
@@ -147,6 +149,24 @@ async def setup_postgres_test_db(postgres_config):
await connection.close()
+@pytest_asyncio.fixture
+async def temporal_worker(temporal_client, workflows, activities):
+ worker = temporalio.worker.Worker(
+ temporal_client,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
+ workflows=workflows,
+ activities=activities,
+ workflow_runner=temporalio.worker.UnsandboxedWorkflowRunner(),
+ )
+
+ worker_run = asyncio.create_task(worker.run())
+
+ yield worker
+
+ worker_run.cancel()
+ await asyncio.wait([worker_run])
+
+
@pytest_asyncio.fixture(scope="module", autouse=True)
async def create_clickhouse_tables_and_views(clickhouse_client, django_db_setup):
from posthog.batch_exports.sql import (
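A hedged sketch of how a test module can use the new `temporal_worker` fixture: it overrides the `workflows` and `activities` fixtures the worker depends on and then submits a workflow to the same task queue (`MyWorkflow` and `my_activity` are hypothetical names):

    import pytest
    import temporalio.common

    from posthog import constants


    @pytest.fixture
    def workflows():
        return [MyWorkflow]  # hypothetical workflow class under test


    @pytest.fixture
    def activities():
        return [my_activity]  # hypothetical activity used by MyWorkflow


    async def test_my_workflow(temporal_worker, temporal_client):
        await temporal_client.execute_workflow(
            MyWorkflow.run,
            id="my-workflow-test-run",
            task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
            retry_policy=temporalio.common.RetryPolicy(maximum_attempts=1),
        )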
diff --git a/posthog/temporal/tests/batch_exports/test_backfill_batch_export.py b/posthog/temporal/tests/batch_exports/test_backfill_batch_export.py
index e422dc5088e4c..9c9dcc618759f 100644
--- a/posthog/temporal/tests/batch_exports/test_backfill_batch_export.py
+++ b/posthog/temporal/tests/batch_exports/test_backfill_batch_export.py
@@ -16,6 +16,7 @@
from asgiref.sync import sync_to_async
from django.conf import settings
+from posthog import constants
from posthog.models import Team
from posthog.temporal.batch_exports.backfill_batch_export import (
BackfillBatchExportInputs,
@@ -298,7 +299,7 @@ async def test_backfill_batch_export_workflow(temporal_worker, temporal_schedule
BackfillBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
execution_timeout=dt.timedelta(minutes=1),
retry_policy=temporalio.common.RetryPolicy(maximum_attempts=1),
)
@@ -379,7 +380,7 @@ async def test_backfill_batch_export_workflow_no_end_at(
BackfillBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
execution_timeout=dt.timedelta(minutes=1),
retry_policy=temporalio.common.RetryPolicy(maximum_attempts=1),
)
@@ -455,7 +456,7 @@ async def test_backfill_batch_export_workflow_fails_when_schedule_deleted(
BackfillBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
execution_timeout=dt.timedelta(seconds=20),
retry_policy=temporalio.common.RetryPolicy(maximum_attempts=1),
)
@@ -497,7 +498,7 @@ async def test_backfill_batch_export_workflow_fails_when_schedule_deleted_after_
BackfillBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
execution_timeout=dt.timedelta(seconds=20),
retry_policy=temporalio.common.RetryPolicy(maximum_attempts=1),
)
@@ -583,7 +584,7 @@ async def test_backfill_batch_export_workflow_is_cancelled_on_repeated_failures(
BackfillBatchExportWorkflow.run,
inputs,
id=backfill_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
execution_timeout=dt.timedelta(minutes=2),
retry_policy=temporalio.common.RetryPolicy(maximum_attempts=1),
)
@@ -653,7 +654,7 @@ async def test_backfill_utc_batch_export_workflow_with_timezone_aware_bounds(
BackfillBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
execution_timeout=dt.timedelta(minutes=1),
retry_policy=temporalio.common.RetryPolicy(maximum_attempts=1),
)
@@ -748,7 +749,7 @@ async def test_backfill_aware_batch_export_workflow_with_timezone_aware_bounds(
BackfillBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
execution_timeout=dt.timedelta(minutes=1),
retry_policy=temporalio.common.RetryPolicy(maximum_attempts=1),
)
@@ -819,7 +820,7 @@ async def test_backfill_batch_export_workflow_no_start_at(temporal_worker, tempo
BackfillBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
execution_timeout=dt.timedelta(minutes=1),
retry_policy=temporalio.common.RetryPolicy(maximum_attempts=1),
)
diff --git a/posthog/temporal/tests/batch_exports/test_import_data.py b/posthog/temporal/tests/batch_exports/test_import_data.py
index abf9bb56b094e..7d484ad4b159b 100644
--- a/posthog/temporal/tests/batch_exports/test_import_data.py
+++ b/posthog/temporal/tests/batch_exports/test_import_data.py
@@ -54,6 +54,45 @@ def _setup(team: Team, job_inputs: dict[Any, Any]) -> ImportDataActivityInputs:
return ImportDataActivityInputs(team_id=team.pk, schema_id=schema.pk, source_id=source.pk, run_id=str(job.pk))
+@pytest.mark.django_db(transaction=True)
+def test_job_inputs_with_whitespace(activity_environment, team, **kwargs):
+ job_inputs = {
+ "host": " host.com ",
+ "port": 5432,
+ "user": "Username ",
+ "password": " password",
+ "database": " database",
+ "schema": "schema ",
+ }
+
+ activity_inputs = _setup(team, job_inputs)
+
+ with (
+ mock.patch(
+ "posthog.temporal.data_imports.pipelines.sql_database_v2.sql_source_for_type"
+ ) as sql_source_for_type,
+ mock.patch("posthog.temporal.data_imports.workflow_activities.import_data_sync._run"),
+ ):
+ activity_environment.run(import_data_activity_sync, activity_inputs)
+
+ sql_source_for_type.assert_called_once_with(
+ source_type=ExternalDataSource.Type.POSTGRES,
+ host="host.com",
+ port="5432",
+ user="Username",
+ password="password",
+ database="database",
+ sslmode="prefer",
+ schema="schema",
+ table_names=["table_1"],
+ incremental_field=None,
+ incremental_field_type=None,
+ db_incremental_field_last_value=None,
+ team_id=team.id,
+ using_ssl=True,
+ )
+
+
@pytest.mark.django_db(transaction=True)
def test_postgres_source_without_ssh_tunnel(activity_environment, team, **kwargs):
job_inputs = {
diff --git a/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py
index 65c95d8cd0bd0..db2022b464e0b 100644
--- a/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py
+++ b/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py
@@ -16,6 +16,7 @@
from temporalio.testing import WorkflowEnvironment
from temporalio.worker import UnsandboxedWorkflowRunner, Worker
+from posthog import constants
from posthog.batch_exports.service import BatchExportModel, BatchExportSchema
from posthog.temporal.batch_exports.batch_exports import (
finish_batch_export_run,
@@ -513,7 +514,7 @@ async def test_postgres_export_workflow(
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[PostgresBatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -527,7 +528,7 @@ async def test_postgres_export_workflow(
PostgresBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
execution_timeout=dt.timedelta(seconds=10),
)
@@ -601,7 +602,7 @@ async def test_postgres_export_workflow_without_events(
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[PostgresBatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -615,7 +616,7 @@ async def test_postgres_export_workflow_without_events(
PostgresBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
execution_timeout=dt.timedelta(seconds=10),
)
@@ -677,7 +678,7 @@ async def test_postgres_export_workflow_backfill_earliest_persons(
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[PostgresBatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -690,7 +691,7 @@ async def test_postgres_export_workflow_backfill_earliest_persons(
PostgresBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
execution_timeout=dt.timedelta(minutes=10),
)
@@ -736,7 +737,7 @@ async def insert_into_postgres_activity_mocked(_: PostgresInsertInputs) -> str:
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[PostgresBatchExportWorkflow],
activities=[
mocked_start_batch_export_run,
@@ -750,7 +751,7 @@ async def insert_into_postgres_activity_mocked(_: PostgresInsertInputs) -> str:
PostgresBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
)
@@ -788,7 +789,7 @@ class InsufficientPrivilege(Exception):
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[PostgresBatchExportWorkflow],
activities=[
mocked_start_batch_export_run,
@@ -802,7 +803,7 @@ class InsufficientPrivilege(Exception):
PostgresBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
)
@@ -837,7 +838,7 @@ async def never_finish_activity(_: PostgresInsertInputs) -> str:
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[PostgresBatchExportWorkflow],
activities=[
mocked_start_batch_export_run,
@@ -850,7 +851,7 @@ async def never_finish_activity(_: PostgresInsertInputs) -> str:
PostgresBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
)
await asyncio.sleep(5)
diff --git a/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py
index 20c38545490b9..cbb75d8948301 100644
--- a/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py
+++ b/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py
@@ -17,6 +17,7 @@
from temporalio.testing import WorkflowEnvironment
from temporalio.worker import UnsandboxedWorkflowRunner, Worker
+from posthog import constants
from posthog.batch_exports.service import BatchExportModel, BatchExportSchema
from posthog.temporal.batch_exports.batch_exports import (
finish_batch_export_run,
@@ -675,7 +676,7 @@ async def test_redshift_export_workflow(
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[RedshiftBatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -689,7 +690,7 @@ async def test_redshift_export_workflow(
RedshiftBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
execution_timeout=dt.timedelta(seconds=10),
)
@@ -756,7 +757,7 @@ async def insert_into_redshift_activity_mocked(_: RedshiftInsertInputs) -> str:
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[RedshiftBatchExportWorkflow],
activities=[
mocked_start_batch_export_run,
@@ -770,7 +771,7 @@ async def insert_into_redshift_activity_mocked(_: RedshiftInsertInputs) -> str:
RedshiftBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
execution_timeout=dt.timedelta(seconds=20),
)
@@ -809,7 +810,7 @@ class InsufficientPrivilege(Exception):
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[RedshiftBatchExportWorkflow],
activities=[
mocked_start_batch_export_run,
@@ -823,7 +824,7 @@ class InsufficientPrivilege(Exception):
RedshiftBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
)
diff --git a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py
index 85594e7ad4fa0..2195725a5d8ed 100644
--- a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py
+++ b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py
@@ -6,6 +6,7 @@
import json
import os
import uuid
+from dataclasses import asdict
from unittest import mock
import aioboto3
@@ -20,6 +21,7 @@
from temporalio.testing import WorkflowEnvironment
from temporalio.worker import UnsandboxedWorkflowRunner, Worker
+from posthog import constants
from posthog.batch_exports.service import BatchExportModel, BatchExportSchema
from posthog.temporal.batch_exports.batch_exports import (
finish_batch_export_run,
@@ -27,6 +29,7 @@
start_batch_export_run,
)
from posthog.temporal.batch_exports.s3_batch_export import (
+ COMPRESSION_EXTENSIONS,
FILE_FORMAT_EXTENSIONS,
IntermittentUploadPartTimeoutError,
InvalidS3EndpointError,
@@ -155,25 +158,38 @@ async def minio_client(bucket_name):
await minio_client.delete_bucket(Bucket=bucket_name)
-async def assert_file_in_s3(s3_compatible_client, bucket_name, key_prefix, file_format, compression, json_columns):
- """Assert a file is in S3 and return its contents."""
+async def assert_files_in_s3(s3_compatible_client, bucket_name, key_prefix, file_format, compression, json_columns):
+ """Assert that there are files in S3 under key_prefix and return the combined contents, and the keys of files found."""
objects = await s3_compatible_client.list_objects_v2(Bucket=bucket_name, Prefix=key_prefix)
- assert len(objects.get("Contents", [])) == 1
-
- key = objects["Contents"][0].get("Key")
- assert key
+ s3_data = []
+ keys = []
+ assert objects.get("KeyCount", 0) > 0
+ assert "Contents" in objects
+ for obj in objects["Contents"]:
+ key = obj.get("Key")
+ assert key
+ keys.append(key)
+
+ if file_format == "Parquet":
+ s3_data.extend(await read_parquet_from_s3(bucket_name, key, json_columns))
+
+ elif file_format == "JSONLines":
+ s3_object = await s3_compatible_client.get_object(Bucket=bucket_name, Key=key)
+ data = await s3_object["Body"].read()
+ s3_data.extend(read_s3_data_as_json(data, compression))
+ else:
+ raise ValueError(f"Unsupported file format: {file_format}")
- if file_format == "Parquet":
- s3_data = await read_parquet_from_s3(bucket_name, key, json_columns)
+ return s3_data, keys
- elif file_format == "JSONLines":
- s3_object = await s3_compatible_client.get_object(Bucket=bucket_name, Key=key)
- data = await s3_object["Body"].read()
- s3_data = read_s3_data_as_json(data, compression)
- else:
- raise ValueError(f"Unsupported file format: {file_format}")
+async def assert_file_in_s3(s3_compatible_client, bucket_name, key_prefix, file_format, compression, json_columns):
+ """Assert a file is in S3 and return its contents."""
+ s3_data, keys = await assert_files_in_s3(
+ s3_compatible_client, bucket_name, key_prefix, file_format, compression, json_columns
+ )
+ assert len(keys) == 1
return s3_data
@@ -191,6 +207,7 @@ async def assert_clickhouse_records_in_s3(
compression: str | None = None,
file_format: str = "JSONLines",
is_backfill: bool = False,
+ allow_duplicates: bool = False,
):
"""Assert ClickHouse records are written to JSON in key_prefix in S3 bucket_name.
@@ -235,6 +252,7 @@ async def assert_clickhouse_records_in_s3(
"person_version",
"person_distinct_id_version",
"_inserted_at",
+ "created_at",
]
expected_records = []
@@ -248,6 +266,7 @@ async def assert_clickhouse_records_in_s3(
include_events=include_events,
destination_default_fields=s3_default_fields(),
is_backfill=is_backfill,
+ use_latest_schema=True,
):
for record in record_batch.to_pylist():
expected_record = {}
@@ -266,6 +285,9 @@ async def assert_clickhouse_records_in_s3(
assert all(record["team_id"] == team_id for record in s3_data)
assert s3_data[0] == expected_records[0]
+ if allow_duplicates:
+ # de-duplicate based on uuid
+ s3_data = list({record["uuid"]: record for record in s3_data}.values())
assert len(s3_data) == len(expected_records)
assert s3_data == expected_records
@@ -383,6 +405,165 @@ async def test_insert_into_s3_activity_puts_data_into_s3(
)
+@pytest.mark.parametrize("compression", [None, "gzip", "brotli"], indirect=True)
+@pytest.mark.parametrize("model", [BatchExportModel(name="events", schema=None)])
+@pytest.mark.parametrize("file_format", FILE_FORMAT_EXTENSIONS.keys())
+# Use None to test that the file is not split up, and 6MB since this is slightly
+# larger than the default 5MB chunk size for multipart uploads.
+@pytest.mark.parametrize("max_file_size_mb", [None, 6])
+async def test_insert_into_s3_activity_puts_splitted_files_into_s3(
+ clickhouse_client,
+ bucket_name,
+ minio_client,
+ activity_environment,
+ compression,
+ max_file_size_mb,
+ exclude_events,
+ file_format,
+ data_interval_start,
+ data_interval_end,
+ model: BatchExportModel,
+ ateam,
+):
+ """Test that the insert_into_s3_activity function splits up large files into
+ multiple parts based on the max file size configuration.
+
+ If max file size is not set (None), the file should not be split up.
+
+ This test needs to generate a lot of data to ensure that the file is large enough to be split up.
+ """
+
+ if file_format == "JSONLines" and compression is not None:
+ pytest.skip("Compressing large JSONLines files takes too long to run; skipping for now")
+
+ prefix = str(uuid.uuid4())
+
+ events_1, _, _ = await generate_test_events_in_clickhouse(
+ client=clickhouse_client,
+ team_id=ateam.pk,
+ start_time=data_interval_start,
+ end_time=data_interval_end,
+ count=100000,
+ count_outside_range=0,
+ count_other_team=0,
+ duplicate=False,
+ properties={"$prop1": 123},
+ )
+
+ events_2, _, _ = await generate_test_events_in_clickhouse(
+ client=clickhouse_client,
+ team_id=ateam.pk,
+ start_time=data_interval_start,
+ end_time=data_interval_end,
+ count=100000,
+ count_outside_range=0,
+ count_other_team=0,
+ duplicate=False,
+ properties={"$prop1": 123},
+ )
+
+ events_to_export_created = events_1 + events_2
+
+ heartbeat_details: list[S3HeartbeatDetails] = []
+
+ def track_hearbeat_details(*details):
+ """Record heartbeat details received."""
+ nonlocal heartbeat_details
+
+ s3_details = S3HeartbeatDetails.from_activity_details(details)
+ heartbeat_details.append(s3_details)
+
+ activity_environment.on_heartbeat = track_hearbeat_details
+
+ insert_inputs = S3InsertInputs(
+ bucket_name=bucket_name,
+ region="us-east-1",
+ prefix=prefix,
+ team_id=ateam.pk,
+ data_interval_start=data_interval_start.isoformat(),
+ data_interval_end=data_interval_end.isoformat(),
+ aws_access_key_id="object_storage_root_user",
+ aws_secret_access_key="object_storage_root_password",
+ endpoint_url=settings.OBJECT_STORAGE_ENDPOINT,
+ compression=compression,
+ exclude_events=exclude_events,
+ file_format=file_format,
+ max_file_size_mb=max_file_size_mb,
+ batch_export_schema=None,
+ batch_export_model=model,
+ )
+
+ with override_settings(
+ # 5MB, the minimum for Multipart uploads
+ BATCH_EXPORT_S3_UPLOAD_CHUNK_SIZE_BYTES=5 * 1024**2,
+ ):
+ records_exported = await activity_environment.run(insert_into_s3_activity, insert_inputs)
+
+ assert records_exported == len(events_to_export_created)
+
+ # Takes a long time to re-read this data from ClickHouse, so we just make sure that:
+ # 1. The files exist in S3.
+ # 2. We can read them (so, they are valid files).
+ # 3. Combined, they have the same length as the events we have created.
+ s3_data, s3_keys = await assert_files_in_s3(
+ s3_compatible_client=minio_client,
+ bucket_name=bucket_name,
+ key_prefix=prefix,
+ file_format=file_format,
+ compression=compression,
+ json_columns=("properties", "person_properties", "set", "set_once"),
+ )
+
+ assert len(s3_data) == len(events_to_export_created)
+ num_files = len(s3_keys)
+
+ def expected_s3_key(
+ file_number: int,
+ data_interval_start: dt.datetime,
+ data_interval_end: dt.datetime,
+ file_format: str,
+ compression: str,
+ max_file_size_mb: int | None,
+ ):
+ file_extension = FILE_FORMAT_EXTENSIONS[file_format]
+ base_key_name = f"{prefix}/{data_interval_start.isoformat()}-{data_interval_end.isoformat()}"
+ # for backwards compatibility with the old file naming scheme
+ if max_file_size_mb is None:
+ key_name = base_key_name
+ else:
+ key_name = f"{base_key_name}-{file_number}"
+ key_name = f"{key_name}.{file_extension}"
+ if compression:
+ compression_extension = COMPRESSION_EXTENSIONS[compression]
+ key_name = f"{key_name}.{compression_extension}"
+ return key_name
+
+ if max_file_size_mb is None:
+ # we only expect 1 file
+ assert num_files == 1
+ else:
+ assert num_files > 1
+
+ for i in range(num_files):
+ assert (
+ expected_s3_key(
+ file_number=i,
+ data_interval_start=data_interval_start,
+ data_interval_end=data_interval_end,
+ file_format=file_format,
+ compression=compression,
+ max_file_size_mb=max_file_size_mb,
+ )
+ in s3_keys
+ )
+
+ # check heartbeat details
+ assert len(heartbeat_details) > 0
+ detail = heartbeat_details[-1]
+ assert detail.files_uploaded == num_files
+ assert detail.upload_state is None
+
+
@pytest.mark.parametrize("compression", [None, "gzip"], indirect=True)
@pytest.mark.parametrize("model", [BatchExportModel(name="events", schema=None)])
@pytest.mark.parametrize("file_format", ["Parquet"])
@@ -709,7 +890,7 @@ async def test_s3_export_workflow_with_minio_bucket(
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[S3BatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -722,7 +903,7 @@ async def test_s3_export_workflow_with_minio_bucket(
S3BatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
execution_timeout=dt.timedelta(minutes=10),
)
@@ -799,7 +980,7 @@ async def test_s3_export_workflow_backfill_earliest_persons_with_minio_bucket(
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[S3BatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -812,7 +993,7 @@ async def test_s3_export_workflow_backfill_earliest_persons_with_minio_bucket(
S3BatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
execution_timeout=dt.timedelta(minutes=10),
)
@@ -882,7 +1063,7 @@ async def test_s3_export_workflow_with_minio_bucket_without_events(
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[S3BatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -895,7 +1076,7 @@ async def test_s3_export_workflow_with_minio_bucket_without_events(
S3BatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
execution_timeout=dt.timedelta(minutes=10),
)
@@ -997,7 +1178,7 @@ async def test_s3_export_workflow_with_s3_bucket(
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[S3BatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -1010,7 +1191,7 @@ async def test_s3_export_workflow_with_s3_bucket(
S3BatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
execution_timeout=dt.timedelta(seconds=10),
)
@@ -1090,7 +1271,7 @@ async def test_s3_export_workflow_with_minio_bucket_and_custom_key_prefix(
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[S3BatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -1103,7 +1284,7 @@ async def test_s3_export_workflow_with_minio_bucket_and_custom_key_prefix(
S3BatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
execution_timeout=dt.timedelta(seconds=10),
)
@@ -1176,7 +1357,7 @@ async def insert_into_s3_activity_mocked(_: S3InsertInputs) -> str:
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[S3BatchExportWorkflow],
activities=[
mocked_start_batch_export_run,
@@ -1190,7 +1371,7 @@ async def insert_into_s3_activity_mocked(_: S3InsertInputs) -> str:
S3BatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
)
@@ -1229,7 +1410,7 @@ class ParamValidationError(Exception):
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[S3BatchExportWorkflow],
activities=[
mocked_start_batch_export_run,
@@ -1243,7 +1424,7 @@ class ParamValidationError(Exception):
S3BatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
)
@@ -1281,7 +1462,7 @@ async def never_finish_activity(_: S3InsertInputs) -> str:
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[S3BatchExportWorkflow],
activities=[
mocked_start_batch_export_run,
@@ -1294,7 +1475,7 @@ async def never_finish_activity(_: S3InsertInputs) -> str:
S3BatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
)
await asyncio.sleep(5)
@@ -1493,6 +1674,16 @@ async def never_finish_activity(_: S3InsertInputs) -> str:
),
"nested/prefix/2023-01-01 00:00:00-2023-01-01 01:00:00.parquet.br",
),
+ (
+ S3InsertInputs(
+ prefix="/",
+ data_interval_start="2023-01-01 00:00:00",
+ data_interval_end="2023-01-01 01:00:00",
+ max_file_size_mb=1,
+ **base_inputs, # type: ignore
+ ),
+ "2023-01-01 00:00:00-2023-01-01 01:00:00-0.jsonl",
+ ),
],
)
def test_get_s3_key(inputs, expected):
@@ -1560,10 +1751,144 @@ def track_hearbeat_details(*details):
detail = heartbeat_details[-1]
+ # we've uploaded 1 file so we expect the files_uploaded to be 1 and the upload_state to be None
+ assert detail.files_uploaded == 1
+ assert detail.upload_state is None
+
+ assert len(detail.done_ranges) == 1
+ assert detail.done_ranges[0] == (data_interval_start, data_interval_end)
+
+ await assert_clickhouse_records_in_s3(
+ s3_compatible_client=minio_client,
+ clickhouse_client=clickhouse_client,
+ bucket_name=bucket_name,
+ key_prefix=s3_key_prefix,
+ team_id=ateam.pk,
+ data_interval_start=data_interval_start,
+ data_interval_end=data_interval_end,
+ )
+
+
+async def test_insert_into_s3_activity_resumes_from_heartbeat(
+ clickhouse_client, ateam, bucket_name, s3_batch_export, minio_client, activity_environment, s3_key_prefix
+):
+ """
+ Test that if the insert_into_s3_activity activity fails, it can resume from a heartbeat.
+
+ We mock the upload_part method to raise a `RequestTimeout` error after the first part has been uploaded.
+ We then resume from the heartbeat and expect the activity to resume from where it left off.
+ """
+ data_interval_end = dt.datetime.fromisoformat("2023-04-20T14:30:00.000000+00:00")
+ data_interval_start = data_interval_end - s3_batch_export.interval_time_delta
+
+ n_expected_parts = 3
+
+ for i in range(1, n_expected_parts + 1):
+ part_inserted_at = data_interval_end - s3_batch_export.interval_time_delta / i
+
+ await generate_test_events_in_clickhouse(
+ client=clickhouse_client,
+ team_id=ateam.pk,
+ start_time=data_interval_start,
+ end_time=data_interval_end,
+ count=1,
+ count_outside_range=0,
+ count_other_team=0,
+ duplicate=False,
+ # We need at least 5MB for a multi-part upload which is what we are testing.
+ properties={"$chonky": ("a" * 5 * 2048**2)},
+ inserted_at=part_inserted_at,
+ )
+
+ attempt = 0
+
+ class FakeSession(aioboto3.Session):
+ @contextlib.asynccontextmanager
+ async def client(self, *args, **kwargs):
+ client = self._session.create_client(*args, **kwargs)
+
+ async with client as client:
+ original_upload_part = client.upload_part
+
+ async def faulty_upload_part(*args, **kwargs):
+ nonlocal attempt
+
+ attempt = attempt + 1
+
+ if attempt >= 2:
+ raise botocore.exceptions.ClientError(
+ error_response={
+ "Error": {"Code": "RequestTimeout", "Message": "Oh no!"},
+ "ResponseMetadata": {"MaxAttemptsReached": True, "RetryAttempts": 2}, # type: ignore
+ },
+ operation_name="UploadPart",
+ )
+ else:
+ return await original_upload_part(*args, **kwargs)
+
+ client.upload_part = faulty_upload_part
+
+ yield client
+
+ heartbeat_details: list[S3HeartbeatDetails] = []
+
+ def track_hearbeat_details(*details):
+ """Record heartbeat details received."""
+ nonlocal heartbeat_details
+
+ s3_details = S3HeartbeatDetails.from_activity_details(details)
+ heartbeat_details.append(s3_details)
+
+ activity_environment.on_heartbeat = track_hearbeat_details
+
+ insert_inputs = S3InsertInputs(
+ bucket_name=bucket_name,
+ region="us-east-1",
+ prefix=s3_key_prefix,
+ team_id=ateam.pk,
+ data_interval_start=data_interval_start.isoformat(),
+ data_interval_end=data_interval_end.isoformat(),
+ aws_access_key_id="object_storage_root_user",
+ aws_secret_access_key="object_storage_root_password",
+ endpoint_url=settings.OBJECT_STORAGE_ENDPOINT,
+ )
+
+ with (
+ override_settings(BATCH_EXPORT_S3_UPLOAD_CHUNK_SIZE_BYTES=1, CLICKHOUSE_MAX_BLOCK_SIZE_DEFAULT=1),
+ mock.patch("posthog.temporal.batch_exports.s3_batch_export.aioboto3.Session", FakeSession),
+ ):
+ with pytest.raises(IntermittentUploadPartTimeoutError):
+ # we expect this to raise an exception
+ await activity_environment.run(insert_into_s3_activity, insert_inputs)
+
+ assert len(heartbeat_details) > 0
+
+ detail = heartbeat_details[-1]
+
+ # we expect to have uploaded only part 1 of the first file
+ assert detail.files_uploaded == 0
assert detail.upload_state is not None
- assert len(detail.upload_state.parts) == 3
+ assert detail.upload_state.upload_id is not None
+ assert len(detail.upload_state.parts) == 1
+
assert len(detail.done_ranges) == 1
+ # now we resume from the heartbeat
+ previous_info = asdict(activity_environment.info)
+ previous_info["heartbeat_details"] = detail.serialize_details()
+ new_info = activity.Info(
+ **previous_info,
+ )
+ activity_environment.info = new_info
+ with override_settings(BATCH_EXPORT_S3_UPLOAD_CHUNK_SIZE_BYTES=1, CLICKHOUSE_MAX_BLOCK_SIZE_DEFAULT=1):
+ await activity_environment.run(insert_into_s3_activity, insert_inputs)
+
+ assert len(heartbeat_details) > 0
+ detail = heartbeat_details[-1]
+ # we expect to have uploaded the file now
+ assert detail.files_uploaded == 1
+ assert detail.upload_state is None
+ assert len(detail.done_ranges) == 1
assert detail.done_ranges[0] == (data_interval_start, data_interval_end)
await assert_clickhouse_records_in_s3(
@@ -1574,6 +1899,8 @@ def track_hearbeat_details(*details):
team_id=ateam.pk,
data_interval_start=data_interval_start,
data_interval_end=data_interval_end,
+ # When we resume from a heartbeat, we expect duplicates (the last done range will be re-exported)
+ allow_duplicates=True,
)
@@ -1709,7 +2036,7 @@ def __init__(self, *args, **kwargs):
await WorkflowEnvironment.start_time_skipping() as activity_environment,
Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[S3BatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -1727,7 +2054,7 @@ def __init__(self, *args, **kwargs):
S3BatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=2),
execution_timeout=dt.timedelta(minutes=2),
)
diff --git a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py
index e99ef3f1ca350..cb6f352cb1d6f 100644
--- a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py
+++ b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py
@@ -15,7 +15,6 @@
import pytest_asyncio
import responses
import snowflake.connector
-from django.conf import settings
from django.test import override_settings
from requests.models import PreparedRequest
from temporalio import activity
@@ -25,6 +24,7 @@
from temporalio.testing import WorkflowEnvironment
from temporalio.worker import UnsandboxedWorkflowRunner, Worker
+from posthog import constants
from posthog.batch_exports.service import BatchExportModel, BatchExportSchema
from posthog.temporal.batch_exports.batch_exports import (
finish_batch_export_run,
@@ -423,7 +423,7 @@ async def test_snowflake_export_workflow_exports_events(
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[SnowflakeBatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -446,7 +446,7 @@ async def test_snowflake_export_workflow_exports_events(
inputs,
id=workflow_id,
execution_timeout=dt.timedelta(seconds=10),
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
)
@@ -467,7 +467,6 @@ async def test_snowflake_export_workflow_exports_events(
]
assert all(query.startswith("PUT") for query in execute_calls[0:9])
- assert all(f"_{n}.jsonl" in query for n, query in enumerate(execute_calls[0:9]))
assert execute_async_calls[3].startswith(f'CREATE TABLE IF NOT EXISTS "{table_name}"')
assert execute_async_calls[4].startswith(f"""REMOVE '@%"{table_name}"/{data_interval_end_str}'""")
@@ -495,7 +494,7 @@ async def test_snowflake_export_workflow_without_events(ateam, snowflake_batch_e
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[SnowflakeBatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -516,7 +515,7 @@ async def test_snowflake_export_workflow_without_events(ateam, snowflake_batch_e
SnowflakeBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
)
@@ -581,7 +580,7 @@ async def test_snowflake_export_workflow_raises_error_on_put_fail(
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[SnowflakeBatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -604,7 +603,7 @@ def __init__(self, *args, **kwargs):
SnowflakeBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
)
@@ -647,7 +646,7 @@ async def test_snowflake_export_workflow_raises_error_on_copy_fail(
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[SnowflakeBatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -670,7 +669,7 @@ def __init__(self, *args, **kwargs):
SnowflakeBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
)
@@ -698,7 +697,7 @@ async def insert_into_snowflake_activity_mocked(_: SnowflakeInsertInputs) -> str
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[SnowflakeBatchExportWorkflow],
activities=[
mocked_start_batch_export_run,
@@ -712,7 +711,7 @@ async def insert_into_snowflake_activity_mocked(_: SnowflakeInsertInputs) -> str
SnowflakeBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
)
@@ -745,7 +744,7 @@ class ForbiddenError(Exception):
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[SnowflakeBatchExportWorkflow],
activities=[
mocked_start_batch_export_run,
@@ -759,7 +758,7 @@ class ForbiddenError(Exception):
SnowflakeBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
)
@@ -794,7 +793,7 @@ async def never_finish_activity(_: SnowflakeInsertInputs) -> str:
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[SnowflakeBatchExportWorkflow],
activities=[
mocked_start_batch_export_run,
@@ -807,7 +806,7 @@ async def never_finish_activity(_: SnowflakeInsertInputs) -> str:
SnowflakeBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
)
await asyncio.sleep(5)
@@ -1287,7 +1286,7 @@ async def test_snowflake_export_workflow(
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[SnowflakeBatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -1300,7 +1299,7 @@ async def test_snowflake_export_workflow(
SnowflakeBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
execution_timeout=dt.timedelta(minutes=2),
)
@@ -1371,7 +1370,7 @@ async def test_snowflake_export_workflow_with_many_files(
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[SnowflakeBatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -1385,7 +1384,7 @@ async def test_snowflake_export_workflow_with_many_files(
SnowflakeBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
execution_timeout=dt.timedelta(minutes=2),
)
@@ -1456,7 +1455,7 @@ async def test_snowflake_export_workflow_backfill_earliest_persons(
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[SnowflakeBatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -1470,7 +1469,7 @@ async def test_snowflake_export_workflow_backfill_earliest_persons(
SnowflakeBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
execution_timeout=dt.timedelta(minutes=10),
)
@@ -1527,7 +1526,7 @@ async def test_snowflake_export_workflow_handles_cancellation(
async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
async with Worker(
activity_environment.client,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
workflows=[SnowflakeBatchExportWorkflow],
activities=[
start_batch_export_run,
@@ -1542,7 +1541,7 @@ async def test_snowflake_export_workflow_handles_cancellation(
SnowflakeBatchExportWorkflow.run,
inputs,
id=workflow_id,
- task_queue=settings.TEMPORAL_TASK_QUEUE,
+ task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
retry_policy=RetryPolicy(maximum_attempts=1),
)
@@ -1634,8 +1633,11 @@ def capture_heartbeat_details(*details):
@pytest.mark.parametrize(
"details",
[
- ([(dt.datetime.now().isoformat(), dt.datetime.now().isoformat())], 1),
- ([(dt.datetime.now().isoformat(), dt.datetime.now().isoformat())],),
+ ([(dt.datetime.now().isoformat(), dt.datetime.now().isoformat())], 10, 1),
+ (
+ [(dt.datetime.now().isoformat(), dt.datetime.now().isoformat())],
+ 10,
+ ),
],
)
def test_snowflake_heartbeat_details_parses_from_tuple(details):
@@ -1656,8 +1658,3 @@ def __init__(self):
dt.datetime.fromisoformat(expected_done_ranges[0][1]),
)
]
-
- if len(details) >= 2:
- assert snowflake_details.file_no == details[1]
- else:
- assert snowflake_details.file_no == 0
diff --git a/posthog/temporal/tests/batch_exports/test_spmc.py b/posthog/temporal/tests/batch_exports/test_spmc.py
index 7fd41dc15de28..566e15b4470aa 100644
--- a/posthog/temporal/tests/batch_exports/test_spmc.py
+++ b/posthog/temporal/tests/batch_exports/test_spmc.py
@@ -5,7 +5,7 @@
import pyarrow as pa
import pytest
-from posthog.temporal.batch_exports.spmc import Producer, RecordBatchQueue
+from posthog.temporal.batch_exports.spmc import Producer, RecordBatchQueue, slice_record_batch
from posthog.temporal.tests.utils.events import generate_test_events_in_clickhouse
pytestmark = [pytest.mark.asyncio, pytest.mark.django_db]
@@ -129,3 +129,36 @@ async def test_record_batch_producer_uses_extra_query_parameters(clickhouse_clie
raise ValueError("Empty properties")
assert record["custom_prop"] == expected["properties"]["custom"]
+
+
+def test_slice_record_batch_into_single_record_slices():
+ """Test we slice a record batch into slices with a single record."""
+ n_legs = pa.array([2, 2, 4, 4, 5, 100])
+ animals = pa.array(["Flamingo", "Parrot", "Dog", "Horse", "Brittle stars", "Centipede"])
+ batch = pa.RecordBatch.from_arrays([n_legs, animals], names=["n_legs", "animals"])
+
+ slices = list(slice_record_batch(batch, max_record_batch_size_bytes=1, min_records_per_batch=1))
+ assert len(slices) == 6
+ assert all(slice.num_rows == 1 for slice in slices)
+
+
+def test_slice_record_batch_into_one_batch():
+ """Test we do not slice a record batch without a bytes limit."""
+ n_legs = pa.array([2, 2, 4, 4, 5, 100])
+ animals = pa.array(["Flamingo", "Parrot", "Dog", "Horse", "Brittle stars", "Centipede"])
+ batch = pa.RecordBatch.from_arrays([n_legs, animals], names=["n_legs", "animals"])
+
+ slices = list(slice_record_batch(batch, max_record_batch_size_bytes=0))
+ assert len(slices) == 1
+ assert all(slice.num_rows == 6 for slice in slices)
+
+
+def test_slice_record_batch_in_half():
+ """Test we can slice a record batch into half size."""
+ n_legs = pa.array([4] * 6)
+ animals = pa.array(["Dog"] * 6)
+ batch = pa.RecordBatch.from_arrays([n_legs, animals], names=["n_legs", "animals"])
+
+ slices = list(slice_record_batch(batch, max_record_batch_size_bytes=batch.nbytes // 2, min_records_per_batch=1))
+ assert len(slices) == 2
+ assert all(slice.num_rows == 3 for slice in slices)
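The real `slice_record_batch` lives in `posthog.temporal.batch_exports.spmc`; the three tests above pin down its contract. A minimal sketch consistent with those tests, assuming the same parameter names (the default for `min_records_per_batch` and the sizing heuristic are assumptions):

    from collections.abc import Iterator

    import pyarrow as pa


    def slice_record_batch(
        batch: pa.RecordBatch, max_record_batch_size_bytes: int = 0, min_records_per_batch: int = 100
    ) -> Iterator[pa.RecordBatch]:
        """Yield zero-copy slices of `batch` no larger than the byte limit (0 disables slicing)."""
        if max_record_batch_size_bytes <= 0 or batch.nbytes <= max_record_batch_size_bytes:
            yield batch
            return

        # Estimate how many rows fit under the limit from the average row size,
        # but never emit slices smaller than min_records_per_batch rows.
        avg_row_bytes = max(batch.nbytes // max(batch.num_rows, 1), 1)
        rows_per_slice = max(max_record_batch_size_bytes // avg_row_bytes, min_records_per_batch)

        for offset in range(0, batch.num_rows, rows_per_slice):
            yield batch.slice(offset, rows_per_slice)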
diff --git a/posthog/temporal/tests/data_imports/test_end_to_end.py b/posthog/temporal/tests/data_imports/test_end_to_end.py
index 5c20304085832..d87ae72861fe1 100644
--- a/posthog/temporal/tests/data_imports/test_end_to_end.py
+++ b/posthog/temporal/tests/data_imports/test_end_to_end.py
@@ -1,5 +1,6 @@
from concurrent.futures import ThreadPoolExecutor
import functools
+import os
import uuid
from typing import Any, Optional, cast
from unittest import mock
@@ -219,6 +220,17 @@ def mock_to_object_store_rs_credentials(class_self):
AIRBYTE_BUCKET_REGION="us-east-1",
AIRBYTE_BUCKET_DOMAIN="objectstorage:19000",
),
+ # Mock os.environ for the deltalake subprocess
+ mock.patch.dict(
+ os.environ,
+ {
+ "BUCKET_URL": f"s3://{BUCKET_NAME}",
+ "AIRBYTE_BUCKET_KEY": settings.OBJECT_STORAGE_ACCESS_KEY_ID,
+ "AIRBYTE_BUCKET_SECRET": settings.OBJECT_STORAGE_SECRET_ACCESS_KEY,
+ "AIRBYTE_BUCKET_REGION": "us-east-1",
+ "AIRBYTE_BUCKET_DOMAIN": "objectstorage:19000",
+ },
+ ),
mock.patch.object(AwsCredentials, "to_session_credentials", mock_to_session_credentials),
mock.patch.object(AwsCredentials, "to_object_store_rs_credentials", mock_to_object_store_rs_credentials),
mock.patch("posthog.temporal.data_imports.external_data_job.trigger_pipeline_v2"),
@@ -1174,3 +1186,50 @@ async def test_missing_source(team, stripe_balance_transaction):
assert exc.value.cause.cause.message == "Source or schema no longer exists - deleted temporal schedule"
mock_delete_external_data_schedule.assert_called()
+
+
+@pytest.mark.django_db(transaction=True)
+@pytest.mark.asyncio
+async def test_postgres_nan_numerical_values(team, postgres_config, postgres_connection):
+ await postgres_connection.execute(
+ "CREATE TABLE IF NOT EXISTS {schema}.numerical_nan (id integer, nan_column numeric)".format(
+ schema=postgres_config["schema"]
+ )
+ )
+ await postgres_connection.execute(
+ "INSERT INTO {schema}.numerical_nan (id, nan_column) VALUES (1, 'NaN'::numeric)".format(
+ schema=postgres_config["schema"]
+ )
+ )
+ await postgres_connection.commit()
+
+ await _run(
+ team=team,
+ schema_name="numerical_nan",
+ table_name="postgres_numerical_nan",
+ source_type="Postgres",
+ job_inputs={
+ "host": postgres_config["host"],
+ "port": postgres_config["port"],
+ "database": postgres_config["database"],
+ "user": postgres_config["user"],
+ "password": postgres_config["password"],
+ "schema": postgres_config["schema"],
+ "ssh_tunnel_enabled": "False",
+ },
+ mock_data_response=[],
+ )
+
+ if settings.TEMPORAL_TASK_QUEUE == DATA_WAREHOUSE_TASK_QUEUE:
+ res = await sync_to_async(execute_hogql_query)(f"SELECT * FROM postgres_numerical_nan", team)
+ columns = res.columns
+ results = res.results
+
+ assert columns is not None
+ assert len(columns) == 2
+ assert columns[0] == "id"
+ assert columns[1] == "nan_column"
+
+ assert results is not None
+ assert len(results) == 1
+ assert results[0] == (1, None)
diff --git a/posthog/warehouse/api/external_data_source.py b/posthog/warehouse/api/external_data_source.py
index 28b1ebda1bf2e..47cf23d80b38c 100644
--- a/posthog/warehouse/api/external_data_source.py
+++ b/posthog/warehouse/api/external_data_source.py
@@ -299,6 +299,13 @@ def create(self, request: Request, *args: Any, **kwargs: Any) -> Response:
data={"message": "Monthly sync limit reached. Please increase your billing limit to resume syncing."},
)
+ # Strip leading and trailing whitespace
+ payload = request.data["payload"]
+ if payload is not None:
+ for key, value in payload.items():
+ if isinstance(value, str):
+ payload[key] = value.strip()
+
# TODO: remove dummy vars
if source_type == ExternalDataSource.Type.STRIPE:
new_source_model = self._handle_stripe_source(request, *args, **kwargs)
diff --git a/posthog/warehouse/api/test/test_external_data_source.py b/posthog/warehouse/api/test/test_external_data_source.py
index 3fede72455ebd..bc60e36459f7a 100644
--- a/posthog/warehouse/api/test/test_external_data_source.py
+++ b/posthog/warehouse/api/test/test_external_data_source.py
@@ -810,3 +810,27 @@ def test_source_jobs_pagination(self):
assert response.status_code, status.HTTP_200_OK
assert len(data) == 1
assert data[0]["id"] == str(job3.pk)
+
+ def test_trimming_payload(self):
+ response = self.client.post(
+ f"/api/projects/{self.team.pk}/external_data_sources/",
+ data={
+ "source_type": "Stripe",
+ "payload": {
+ "client_secret": " sk_test_123 ",
+ "account_id": " blah ",
+ "schemas": [
+ {"name": "BalanceTransaction", "should_sync": True, "sync_type": "full_refresh"},
+ ],
+ },
+ },
+ )
+ payload = response.json()
+
+ assert response.status_code == 201
+
+ source = ExternalDataSource.objects.get(id=payload["id"])
+ assert source.job_inputs is not None
+
+ assert source.job_inputs["stripe_secret_key"] == "sk_test_123"
+ assert source.job_inputs["stripe_account_id"] == "blah"
diff --git a/requirements-dev.txt b/requirements-dev.txt
index d005fd81fb9f7..cc0e410715cd7 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -167,6 +167,10 @@ googleapis-common-protos==1.60.0
# via
# -c requirements.txt
# opentelemetry-exporter-otlp-proto-grpc
+greenlet==3.1.1
+ # via
+ # -c requirements.txt
+ # sqlalchemy
grpcio==1.63.2
# via
# -c requirements.txt
diff --git a/requirements.in b/requirements.in
index c4564b0f1ecaf..5ba89d26f6b5c 100644
--- a/requirements.in
+++ b/requirements.in
@@ -12,6 +12,7 @@ boto3==1.28.16
brotli==1.1.0
celery==5.3.4
celery-redbeat==2.1.1
+clickhouse-connect==0.8.11
clickhouse-driver==0.2.7
clickhouse-pool==0.5.3
conditional-cache==1.2
diff --git a/requirements.txt b/requirements.txt
index ec758cf47f4ca..1c9c3e14c7ceb 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -80,6 +80,7 @@ celery-redbeat==2.1.1
# via -r requirements.in
certifi==2019.11.28
# via
+ # clickhouse-connect
# httpcore
# httpx
# requests
@@ -110,6 +111,8 @@ click-plugins==1.1.1
# via celery
click-repl==0.3.0
# via celery
+clickhouse-connect==0.8.11
+ # via -r requirements.in
clickhouse-driver==0.2.7
# via
# -r requirements.in
@@ -270,6 +273,8 @@ googleapis-common-protos==1.60.0
# via
# google-api-core
# grpcio-status
+greenlet==3.1.1
+ # via sqlalchemy
grpcio==1.63.2
# via
# -r requirements.in
@@ -378,6 +383,8 @@ lxml==4.9.4
# toronado
# xmlsec
# zeep
+lz4==4.3.3
+ # via clickhouse-connect
lzstring==1.0.4
# via -r requirements.in
makefun==1.15.2
@@ -562,6 +569,7 @@ python3-saml==1.12.0
pytz==2023.3
# via
# -r requirements.in
+ # clickhouse-connect
# clickhouse-driver
# dlt
# infi-clickhouse-orm
@@ -776,6 +784,7 @@ uritemplate==4.1.1
urllib3==1.26.18
# via
# botocore
+ # clickhouse-connect
# geoip2
# google-auth
# pdpyras
@@ -811,6 +820,8 @@ yarl==1.18.3
# via aiohttp
zeep==4.2.1
# via simple-salesforce
+zstandard==0.23.0
+ # via clickhouse-connect
zstd==1.5.5.1
# via -r requirements.in
zxcvbn==4.4.28
diff --git a/rust/.sqlx/query-04abdef9c07ae1a30bb6f22abcfb4dcdf2e218e48e0fd8a247e1b7ae0f04aee3.json b/rust/.sqlx/query-04abdef9c07ae1a30bb6f22abcfb4dcdf2e218e48e0fd8a247e1b7ae0f04aee3.json
new file mode 100644
index 0000000000000..5c8b96e695c28
--- /dev/null
+++ b/rust/.sqlx/query-04abdef9c07ae1a30bb6f22abcfb4dcdf2e218e48e0fd8a247e1b7ae0f04aee3.json
@@ -0,0 +1,21 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n INSERT INTO posthog_propertydefinition (id, name, type, group_type_index, is_numerical, volume_30_day, query_usage_30_day, team_id, project_id, property_type)\n VALUES ($1, $2, $3, $4, $5, NULL, NULL, $6, $7, $8)\n ON CONFLICT (team_id, name, type, coalesce(group_type_index, -1))\n DO UPDATE SET property_type=EXCLUDED.property_type WHERE posthog_propertydefinition.property_type IS NULL\n ",
+ "describe": {
+ "columns": [],
+ "parameters": {
+ "Left": [
+ "Uuid",
+ "Varchar",
+ "Int2",
+ "Int2",
+ "Bool",
+ "Int4",
+ "Int8",
+ "Varchar"
+ ]
+ },
+ "nullable": []
+ },
+ "hash": "04abdef9c07ae1a30bb6f22abcfb4dcdf2e218e48e0fd8a247e1b7ae0f04aee3"
+}
diff --git a/rust/.sqlx/query-2b9a8c4b8d323e1673d805125b4073799ecba84594ca04cfb24481cffbf6f6ca.json b/rust/.sqlx/query-2b9a8c4b8d323e1673d805125b4073799ecba84594ca04cfb24481cffbf6f6ca.json
new file mode 100644
index 0000000000000..785a13a6d1ce7
--- /dev/null
+++ b/rust/.sqlx/query-2b9a8c4b8d323e1673d805125b4073799ecba84594ca04cfb24481cffbf6f6ca.json
@@ -0,0 +1,18 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n INSERT INTO posthog_eventdefinition (id, name, volume_30_day, query_usage_30_day, team_id, project_id, last_seen_at, created_at)\n VALUES ($1, $2, NULL, NULL, $3, $4, $5, NOW()) ON CONFLICT\n ON CONSTRAINT posthog_eventdefinition_team_id_name_80fa0b87_uniq\n DO UPDATE SET last_seen_at = $5\n ",
+ "describe": {
+ "columns": [],
+ "parameters": {
+ "Left": [
+ "Uuid",
+ "Varchar",
+ "Int4",
+ "Int8",
+ "Timestamptz"
+ ]
+ },
+ "nullable": []
+ },
+ "hash": "2b9a8c4b8d323e1673d805125b4073799ecba84594ca04cfb24481cffbf6f6ca"
+}
diff --git a/rust/.sqlx/query-917e3d14c15558a1e0bb1d7015ed687eb545ee9d4ccbb8b69c958a357d49f687.json b/rust/.sqlx/query-917e3d14c15558a1e0bb1d7015ed687eb545ee9d4ccbb8b69c958a357d49f687.json
deleted file mode 100644
index 6f3e42a0a8b9b..0000000000000
--- a/rust/.sqlx/query-917e3d14c15558a1e0bb1d7015ed687eb545ee9d4ccbb8b69c958a357d49f687.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
- "db_name": "PostgreSQL",
- "query": "\n INSERT INTO posthog_eventdefinition (id, name, volume_30_day, query_usage_30_day, team_id, last_seen_at, created_at)\n VALUES ($1, $2, NULL, NULL, $3, $4, NOW()) ON CONFLICT\n ON CONSTRAINT posthog_eventdefinition_team_id_name_80fa0b87_uniq\n DO UPDATE SET last_seen_at = $4\n ",
- "describe": {
- "columns": [],
- "parameters": {
- "Left": [
- "Uuid",
- "Varchar",
- "Int4",
- "Timestamptz"
- ]
- },
- "nullable": []
- },
- "hash": "917e3d14c15558a1e0bb1d7015ed687eb545ee9d4ccbb8b69c958a357d49f687"
-}
diff --git a/rust/.sqlx/query-42e393046a686e6a69daa920dc2ab521aa6f393027c399a0c40139f5f8a0a45e.json b/rust/.sqlx/query-9e0e25b9966a23792427c27a80888a75efdb8abe195339e0a1676ebed6fc61ef.json
similarity index 57%
rename from rust/.sqlx/query-42e393046a686e6a69daa920dc2ab521aa6f393027c399a0c40139f5f8a0a45e.json
rename to rust/.sqlx/query-9e0e25b9966a23792427c27a80888a75efdb8abe195339e0a1676ebed6fc61ef.json
index 890675aa24d0d..f2582dca5c9b4 100644
--- a/rust/.sqlx/query-42e393046a686e6a69daa920dc2ab521aa6f393027c399a0c40139f5f8a0a45e.json
+++ b/rust/.sqlx/query-9e0e25b9966a23792427c27a80888a75efdb8abe195339e0a1676ebed6fc61ef.json
@@ -1,16 +1,17 @@
{
"db_name": "PostgreSQL",
- "query": "INSERT INTO posthog_eventproperty (event, property, team_id) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING",
+ "query": "INSERT INTO posthog_eventproperty (event, property, team_id, project_id) VALUES ($1, $2, $3, $4) ON CONFLICT DO NOTHING",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Varchar",
"Varchar",
- "Int4"
+ "Int4",
+ "Int8"
]
},
"nullable": []
},
- "hash": "42e393046a686e6a69daa920dc2ab521aa6f393027c399a0c40139f5f8a0a45e"
+ "hash": "9e0e25b9966a23792427c27a80888a75efdb8abe195339e0a1676ebed6fc61ef"
}
diff --git a/rust/.sqlx/query-eecef0ce664dfe65dff4452d92a29c948a291ea8218bbbb4e25cd1ad36dbe9f4.json b/rust/.sqlx/query-eecef0ce664dfe65dff4452d92a29c948a291ea8218bbbb4e25cd1ad36dbe9f4.json
deleted file mode 100644
index a54bb9565ea4f..0000000000000
--- a/rust/.sqlx/query-eecef0ce664dfe65dff4452d92a29c948a291ea8218bbbb4e25cd1ad36dbe9f4.json
+++ /dev/null
@@ -1,20 +0,0 @@
-{
- "db_name": "PostgreSQL",
- "query": "\n INSERT INTO posthog_propertydefinition (id, name, type, group_type_index, is_numerical, volume_30_day, query_usage_30_day, team_id, property_type)\n VALUES ($1, $2, $3, $4, $5, NULL, NULL, $6, $7)\n ON CONFLICT (team_id, name, type, coalesce(group_type_index, -1))\n DO UPDATE SET property_type=EXCLUDED.property_type WHERE posthog_propertydefinition.property_type IS NULL\n ",
- "describe": {
- "columns": [],
- "parameters": {
- "Left": [
- "Uuid",
- "Varchar",
- "Int2",
- "Int2",
- "Bool",
- "Int4",
- "Varchar"
- ]
- },
- "nullable": []
- },
- "hash": "eecef0ce664dfe65dff4452d92a29c948a291ea8218bbbb4e25cd1ad36dbe9f4"
-}
diff --git a/rust/common/kafka/src/config.rs b/rust/common/kafka/src/config.rs
index 81ef7402ab429..8096efce9d6f4 100644
--- a/rust/common/kafka/src/config.rs
+++ b/rust/common/kafka/src/config.rs
@@ -25,6 +25,10 @@ pub struct KafkaConfig {
pub struct ConsumerConfig {
pub kafka_consumer_group: String,
pub kafka_consumer_topic: String,
+
+ // We default to "earliest" for this, but if you're bringing up a new service, you probably want "latest"
+ #[envconfig(default = "earliest")]
+ pub kafka_consumer_offset_reset: String, // earliest, latest
}
impl ConsumerConfig {
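The comment above implies the offset-reset behaviour is now configuration-driven rather than hard-coded. A minimal sketch of how a newly deployed service might opt into "latest", assuming envconfig's usual mapping of a field name to an upper-cased environment variable (struct and variable names here are illustrative, not the exact ones used elsewhere in the repo):

```rust
use envconfig::Envconfig;

// Illustrative config mirroring the fields added above.
#[derive(Envconfig)]
struct ConsumerConfig {
    #[envconfig(default = "example-group")]
    kafka_consumer_group: String,
    #[envconfig(default = "example_topic")]
    kafka_consumer_topic: String,
    // Defaults to "earliest"; a brand-new service would typically export
    // KAFKA_CONSUMER_OFFSET_RESET=latest so it does not replay the topic's history.
    #[envconfig(default = "earliest")]
    kafka_consumer_offset_reset: String,
}

fn main() {
    let config = ConsumerConfig::init_from_env().expect("config from env");
    println!(
        "group={} topic={} auto.offset.reset={}",
        config.kafka_consumer_group, config.kafka_consumer_topic, config.kafka_consumer_offset_reset
    );
}
```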
diff --git a/rust/common/kafka/src/kafka_consumer.rs b/rust/common/kafka/src/kafka_consumer.rs
index 79c3be7f986d5..1dcbf21a206a1 100644
--- a/rust/common/kafka/src/kafka_consumer.rs
+++ b/rust/common/kafka/src/kafka_consumer.rs
@@ -47,7 +47,11 @@ impl SingleTopicConsumer {
client_config
.set("bootstrap.servers", &common_config.kafka_hosts)
.set("statistics.interval.ms", "10000")
- .set("group.id", consumer_config.kafka_consumer_group);
+ .set("group.id", consumer_config.kafka_consumer_group)
+ .set(
+ "auto.offset.reset",
+ &consumer_config.kafka_consumer_offset_reset,
+ );
client_config.set("enable.auto.offset.store", "false");
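For context, `auto.offset.reset` only takes effect when the consumer group has no committed offset (or the committed offset is out of range): "earliest" replays from the start of retained data, "latest" starts from new messages only. A hedged sketch of the same setting with the rdkafka crate, using placeholder broker, group, and topic names:

```rust
use rdkafka::config::ClientConfig;
use rdkafka::consumer::{Consumer, StreamConsumer};

fn build_consumer(offset_reset: &str) -> StreamConsumer {
    // Placeholder broker/group/topic values; the auto.offset.reset line is the
    // point of this sketch.
    let consumer: StreamConsumer = ClientConfig::new()
        .set("bootstrap.servers", "localhost:9092")
        .set("group.id", "example-group")
        // "earliest" replays retained data when the group has no committed
        // offset; "latest" starts from new messages only.
        .set("auto.offset.reset", offset_reset)
        .set("enable.auto.offset.store", "false")
        .create()
        .expect("consumer creation failed");

    consumer.subscribe(&["example_topic"]).expect("subscribe failed");
    consumer
}
```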
diff --git a/rust/cymbal/src/frames/resolver.rs b/rust/cymbal/src/frames/resolver.rs
index c1d4fc34633f4..01b065a038fa8 100644
--- a/rust/cymbal/src/frames/resolver.rs
+++ b/rust/cymbal/src/frames/resolver.rs
@@ -6,6 +6,7 @@ use sqlx::PgPool;
use crate::{
config::Config,
error::UnhandledError,
+ metric_consts::{FRAME_CACHE_HITS, FRAME_CACHE_MISSES, FRAME_DB_HITS},
symbol_store::{saving::SymbolSetRecord, Catalog},
};
@@ -35,13 +36,16 @@ impl Resolver {
catalog: &Catalog,
) -> Result<Frame, UnhandledError> {
if let Some(result) = self.cache.get(&frame.frame_id()) {
+ metrics::counter!(FRAME_CACHE_HITS).increment(1);
return Ok(result.contents);
}
+ metrics::counter!(FRAME_CACHE_MISSES).increment(1);
if let Some(result) =
ErrorTrackingStackFrame::load(pool, team_id, &frame.frame_id(), self.result_ttl).await?
{
self.cache.insert(frame.frame_id(), result.clone());
+ metrics::counter!(FRAME_DB_HITS).increment(1);
return Ok(result.contents);
}
diff --git a/rust/cymbal/src/hack/js_data.rs b/rust/cymbal/src/hack/js_data.rs
index cb5ed93187092..d698829bfcb8f 100644
--- a/rust/cymbal/src/hack/js_data.rs
+++ b/rust/cymbal/src/hack/js_data.rs
@@ -4,7 +4,7 @@ use thiserror::Error;
use crate::symbol_store::sourcemap::OwnedSourceMapCache;
-// NOTE: see psothog/api/error_tracking.py
+// NOTE: see posthog/api/error_tracking.py
pub struct JsData {
data: Vec<u8>,
// For legacy reasons, before we had this serialisation format,
diff --git a/rust/cymbal/src/hack/kafka.rs b/rust/cymbal/src/hack/kafka.rs
index cb26faede2165..977928af79b80 100644
--- a/rust/cymbal/src/hack/kafka.rs
+++ b/rust/cymbal/src/hack/kafka.rs
@@ -44,6 +44,10 @@ pub struct KafkaConfig {
pub struct ConsumerConfig {
pub kafka_consumer_group: String,
pub kafka_consumer_topic: String,
+
+ // We default to "earliest" for this, but if you're bringing up a new service, you probably want "latest"
+ #[envconfig(default = "earliest")]
+ pub kafka_consumer_offset_reset: String, // earliest, latest
}
impl ConsumerConfig {
@@ -97,7 +101,11 @@ impl SingleTopicConsumer {
client_config
.set("bootstrap.servers", &common_config.kafka_hosts)
.set("statistics.interval.ms", "10000")
- .set("group.id", consumer_config.kafka_consumer_group);
+ .set("group.id", consumer_config.kafka_consumer_group)
+ .set(
+ "auto.offset.reset",
+ &consumer_config.kafka_consumer_offset_reset,
+ );
client_config.set("enable.auto.offset.store", "false");
diff --git a/rust/cymbal/src/issue_resolution.rs b/rust/cymbal/src/issue_resolution.rs
index b80c8d17d9b36..e1656ffb367a0 100644
--- a/rust/cymbal/src/issue_resolution.rs
+++ b/rust/cymbal/src/issue_resolution.rs
@@ -3,6 +3,7 @@ use uuid::Uuid;
use crate::{
error::UnhandledError,
+ metric_consts::ISSUE_CREATED,
types::{FingerprintedErrProps, OutputErrProps},
};
@@ -74,12 +75,17 @@ impl Issue {
self.description
)
.fetch_one(executor)
- .await?;
-
+ .await?
// TODO - I'm fairly sure the Option here is a bug in sqlx, so the unwrap will
// never be hit, but nonetheless I'm not 100% sure the "no rows" case actually
// means the insert was not done.
- Ok(did_insert.unwrap_or(false))
+ .unwrap_or(false);
+
+ if did_insert {
+ metrics::counter!(ISSUE_CREATED).increment(1);
+ }
+
+ Ok(did_insert)
}
}
diff --git a/rust/cymbal/src/lib.rs b/rust/cymbal/src/lib.rs
index ed5720148fc9c..ba3067359d3ad 100644
--- a/rust/cymbal/src/lib.rs
+++ b/rust/cymbal/src/lib.rs
@@ -5,6 +5,7 @@ use common_types::ClickHouseEvent;
use error::{EventError, UnhandledError};
use fingerprinting::generate_fingerprint;
use issue_resolution::resolve_issue;
+use metric_consts::FRAME_RESOLUTION;
use tracing::warn;
use types::{Exception, RawErrProps, Stacktrace};
@@ -114,6 +115,7 @@ async fn process_exception(
// thrown at the wall), with some cross-group concurrency.
handles.push(tokio::spawn(async move {
context.worker_liveness.report_healthy().await;
+ metrics::counter!(FRAME_RESOLUTION).increment(1);
let res = context
.resolver
.resolve(&frame, team_id, &context.pool, &context.catalog)
diff --git a/rust/cymbal/src/main.rs b/rust/cymbal/src/main.rs
index 2146c4f46ee6d..ce494b72f05fd 100644
--- a/rust/cymbal/src/main.rs
+++ b/rust/cymbal/src/main.rs
@@ -8,7 +8,7 @@ use cymbal::{
config::Config,
hack::kafka::{send_keyed_iter_to_kafka, RecvErr},
handle_event,
- metric_consts::{ERRORS, EVENT_RECEIVED, MAIN_LOOP_TIME, STACK_PROCESSED},
+ metric_consts::{ERRORS, EVENT_PROCESSED, EVENT_RECEIVED, MAIN_LOOP_TIME},
};
use envconfig::Envconfig;
use tokio::task::JoinHandle;
@@ -100,6 +100,8 @@ async fn main() {
}
};
+ metrics::counter!(EVENT_PROCESSED).increment(1);
+
output.push(event);
offsets.push(offset);
}
@@ -117,7 +119,6 @@ async fn main() {
offset.store().unwrap();
}
- metrics::counter!(STACK_PROCESSED).increment(1);
whole_loop.label("finished", "true").fin();
}
}
diff --git a/rust/cymbal/src/metric_consts.rs b/rust/cymbal/src/metric_consts.rs
index d0ec3cdab31b3..51fece1237680 100644
--- a/rust/cymbal/src/metric_consts.rs
+++ b/rust/cymbal/src/metric_consts.rs
@@ -1,7 +1,6 @@
pub const ERRORS: &str = "cymbal_errors";
pub const EVENT_RECEIVED: &str = "cymbal_event_received";
-pub const STACK_PROCESSED: &str = "cymbal_stack_track_processed";
-pub const BASIC_FETCHES: &str = "cymbal_basic_fetches";
+pub const EVENT_PROCESSED: &str = "cymbal_event_processed";
pub const SOURCEMAP_HEADER_FOUND: &str = "cymbal_sourcemap_header_found";
pub const SOURCEMAP_BODY_REF_FOUND: &str = "cymbal_sourcemap_body_ref_found";
pub const SOURCEMAP_NOT_FOUND: &str = "cymbal_sourcemap_not_found";
@@ -9,21 +8,26 @@ pub const SOURCEMAP_BODY_FETCHES: &str = "cymbal_sourcemap_body_fetches";
pub const STORE_CACHE_HITS: &str = "cymbal_store_cache_hits";
pub const STORE_CACHE_MISSES: &str = "cymbal_store_cache_misses";
pub const STORE_CACHED_BYTES: &str = "cymbal_store_cached_bytes";
-pub const STORE_CACHE_SIZE: &str = "cymbal_store_cache_size";
pub const STORE_CACHE_EVICTIONS: &str = "cymbal_store_cache_evictions";
pub const STORE_CACHE_EVICTION_RUNS: &str = "cymbal_store_cache_eviction_runs";
pub const MAIN_LOOP_TIME: &str = "cymbal_main_loop_time";
pub const PER_FRAME_TIME: &str = "cymbal_per_frame_time";
-pub const PER_STACK_TIME: &str = "cymbal_per_stack_time";
-pub const PER_FRAME_GROUP_TIME: &str = "cymbal_per_frame_group_time";
+pub const SYMBOL_SET_DB_FETCHES: &str = "cymbal_symbol_set_db_fetches";
+pub const SYMBOL_SET_DB_HITS: &str = "cymbal_symbol_set_db_hits";
+pub const SYMBOL_SET_DB_MISSES: &str = "cymbal_symbol_set_db_misses";
pub const SYMBOL_SET_SAVED: &str = "cymbal_symbol_set_saved";
pub const SAVED_SYMBOL_SET_LOADED: &str = "cymbal_saved_symbol_set_loaded";
pub const SAVED_SYMBOL_SET_ERROR_RETURNED: &str = "cymbal_saved_symbol_set_error_returned";
pub const SYMBOL_SET_FETCH_RETRY: &str = "cymbal_symbol_set_fetch_retry";
+pub const FRAME_RESOLUTION: &str = "cymbal_frame_resolution";
pub const FRAME_RESOLVED: &str = "cymbal_frame_resolved";
+pub const FRAME_CACHE_HITS: &str = "cymbal_frame_cache_hits";
+pub const FRAME_CACHE_MISSES: &str = "cymbal_frame_cache_misses";
+pub const FRAME_DB_HITS: &str = "cymbal_frame_db_hits";
pub const FRAME_NOT_RESOLVED: &str = "cymbal_frame_not_resolved";
pub const S3_FETCH: &str = "cymbal_s3_fetch";
pub const S3_PUT: &str = "cymbal_s3_put";
pub const SOURCEMAP_FETCH: &str = "cymbal_sourcemap_fetch";
pub const SAVE_SYMBOL_SET: &str = "cymbal_save_symbol_set";
pub const SOURCEMAP_PARSE: &str = "cymbal_sourcemap_parse";
+pub const ISSUE_CREATED: &str = "cymbal_issue_created";
diff --git a/rust/cymbal/src/symbol_store/saving.rs b/rust/cymbal/src/symbol_store/saving.rs
index eed2144669a0a..27cf9394b67ad 100644
--- a/rust/cymbal/src/symbol_store/saving.rs
+++ b/rust/cymbal/src/symbol_store/saving.rs
@@ -10,7 +10,8 @@ use crate::{
error::{Error, FrameError, UnhandledError},
metric_consts::{
SAVED_SYMBOL_SET_ERROR_RETURNED, SAVED_SYMBOL_SET_LOADED, SAVE_SYMBOL_SET,
- SYMBOL_SET_FETCH_RETRY, SYMBOL_SET_SAVED,
+ SYMBOL_SET_DB_FETCHES, SYMBOL_SET_DB_HITS, SYMBOL_SET_DB_MISSES, SYMBOL_SET_FETCH_RETRY,
+ SYMBOL_SET_SAVED,
},
};
@@ -136,7 +137,10 @@ where
async fn fetch(&self, team_id: i32, r: Self::Ref) -> Result<Vec<u8>, Error> {
let set_ref = r.to_string();
info!("Fetching symbol set data for {}", set_ref);
+ metrics::counter!(SYMBOL_SET_DB_FETCHES).increment(1);
+
if let Some(record) = SymbolSetRecord::load(&self.pool, team_id, &set_ref).await? {
+ metrics::counter!(SYMBOL_SET_DB_HITS).increment(1);
if let Some(storage_ptr) = record.storage_ptr {
info!("Found s3 saved symbol set data for {}", set_ref);
let data = self.s3_client.get(&self.bucket, &storage_ptr).await?;
@@ -171,6 +175,8 @@ where
metrics::counter!(SYMBOL_SET_FETCH_RETRY).increment(1);
}
+ metrics::counter!(SYMBOL_SET_DB_MISSES).increment(1);
+
match self.inner.fetch(team_id, r).await {
// NOTE: We don't save the data here, because we want to save it only after parsing
Ok(data) => {
diff --git a/rust/property-defs-rs/src/app_context.rs b/rust/property-defs-rs/src/app_context.rs
index 7492609bdeb55..f83484f9b3938 100644
--- a/rust/property-defs-rs/src/app_context.rs
+++ b/rust/property-defs-rs/src/app_context.rs
@@ -7,8 +7,8 @@ use tracing::warn;
use crate::{
config::Config,
metrics_consts::{
- CACHE_WARMING_STATE, GROUP_TYPE_READS, GROUP_TYPE_RESOLVE_TIME, UPDATES_ISSUED,
- UPDATE_TRANSACTION_TIME,
+ CACHE_WARMING_STATE, GROUP_TYPE_READS, GROUP_TYPE_RESOLVE_TIME, SINGLE_UPDATE_ISSUE_TIME,
+ UPDATES_SKIPPED, UPDATE_TRANSACTION_TIME,
},
types::{GroupType, Update},
};
@@ -61,8 +61,6 @@ impl AppContext {
metrics::gauge!(CACHE_WARMING_STATE, &[("state", "hot")]).set(1.0);
}
- let update_count = updates.len();
-
let group_type_resolve_time = common_metrics::timing_guard(GROUP_TYPE_RESOLVE_TIME, &[]);
self.resolve_group_types_indexes(updates).await?;
group_type_resolve_time.fin();
@@ -72,25 +70,30 @@ impl AppContext {
let mut tx = self.pool.begin().await?;
for update in updates {
+ let issue_time = common_metrics::timing_guard(SINGLE_UPDATE_ISSUE_TIME, &[]);
match update.issue(&mut *tx).await {
- Ok(_) => {}
+ Ok(_) => issue_time.label("outcome", "success"),
Err(sqlx::Error::Database(e)) if e.constraint().is_some() => {
// If we hit a constraint violation, we just skip the update. We see
// this in production for group-type-indexes not being resolved, and it's
// not worth aborting the whole batch for.
+ metrics::counter!(UPDATES_SKIPPED, &[("reason", "constraint_violation")])
+ .increment(1);
warn!("Failed to issue update: {:?}", e);
+ issue_time.label("outcome", "skipped")
}
Err(e) => {
tx.rollback().await?;
+ issue_time.label("outcome", "abort");
return Err(e);
}
}
+ .fin();
}
tx.commit().await?;
}
transaction_time.fin();
- metrics::counter!(UPDATES_ISSUED).increment(update_count as u64);
Ok(())
}
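The per-update timer above labels each sample with its outcome ("success", "skipped", "abort"), so slow constraint-violation paths can be separated from the happy path. Outside of the internal `common_metrics` timing_guard helper used here, the same idea can be expressed with the bare `metrics` crate; a sketch with an invented metric name and a stubbed-out update call:

```rust
use std::time::Instant;

// Hypothetical metric name for this sketch only.
const UPDATE_ISSUE_TIME: &str = "example_single_update_issue_time_ms";

fn issue_update_timed(update_ok: bool) {
    let start = Instant::now();

    // Stand-in for the real `update.issue(&mut *tx).await` call.
    let outcome: &'static str = if update_ok { "success" } else { "skipped" };

    let elapsed_ms = start.elapsed().as_secs_f64() * 1000.0;
    // One histogram sample per update, labelled by outcome, mirrors the
    // timing_guard + label + fin pattern in the hunk above.
    metrics::histogram!(UPDATE_ISSUE_TIME, &[("outcome", outcome)]).record(elapsed_ms);
}

fn main() {
    issue_update_timed(true);
    issue_update_timed(false);
}
```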
diff --git a/rust/property-defs-rs/src/metrics_consts.rs b/rust/property-defs-rs/src/metrics_consts.rs
index 4b29b615c4b47..d0dd725df0e7a 100644
--- a/rust/property-defs-rs/src/metrics_consts.rs
+++ b/rust/property-defs-rs/src/metrics_consts.rs
@@ -22,3 +22,4 @@ pub const SKIPPED_DUE_TO_TEAM_FILTER: &str = "prop_defs_skipped_due_to_team_filt
pub const ISSUE_FAILED: &str = "prop_defs_issue_failed";
pub const CHUNK_SIZE: &str = "prop_defs_chunk_size";
pub const DUPLICATES_IN_BATCH: &str = "prop_defs_duplicates_in_batch";
+pub const SINGLE_UPDATE_ISSUE_TIME: &str = "prop_defs_single_update_issue_time_ms";
diff --git a/rust/property-defs-rs/src/types.rs b/rust/property-defs-rs/src/types.rs
index d437c62849f0e..3b741add5f317 100644
--- a/rust/property-defs-rs/src/types.rs
+++ b/rust/property-defs-rs/src/types.rs
@@ -7,7 +7,7 @@ use sqlx::{Executor, Postgres};
use tracing::warn;
use uuid::Uuid;
-use crate::metrics_consts::{EVENTS_SKIPPED, UPDATES_SKIPPED};
+use crate::metrics_consts::{EVENTS_SKIPPED, UPDATES_ISSUED, UPDATES_SKIPPED};
// We skip updates for events we generate
pub const EVENTS_WITHOUT_PROPERTIES: [&str; 1] = ["$$plugin_metrics"];
@@ -424,7 +424,7 @@ impl EventDefinition {
where
E: Executor<'c, Database = Postgres>,
{
- sqlx::query!(
+ let res = sqlx::query!(
r#"
INSERT INTO posthog_eventdefinition (id, name, volume_30_day, query_usage_30_day, team_id, project_id, last_seen_at, created_at)
VALUES ($1, $2, NULL, NULL, $3, $4, $5, NOW()) ON CONFLICT
@@ -436,7 +436,11 @@ impl EventDefinition {
self.team_id,
self.project_id,
Utc::now() // We floor the update datetime to the nearest day for cache purposes, but can insert the exact time we see the event
- ).execute(executor).await.map(|_| ())
+ ).execute(executor).await.map(|_| ());
+
+ metrics::counter!(UPDATES_ISSUED, &[("type", "event_definition")]).increment(1);
+
+ res
}
}
@@ -468,7 +472,7 @@ impl PropertyDefinition {
return Ok(());
}
- sqlx::query!(
+ let res = sqlx::query!(
r#"
INSERT INTO posthog_propertydefinition (id, name, type, group_type_index, is_numerical, volume_30_day, query_usage_30_day, team_id, project_id, property_type)
VALUES ($1, $2, $3, $4, $5, NULL, NULL, $6, $7, $8)
@@ -483,7 +487,11 @@ impl PropertyDefinition {
self.team_id,
self.project_id,
self.property_type.as_ref().map(|t| t.to_string())
- ).execute(executor).await.map(|_| ())
+ ).execute(executor).await.map(|_| ());
+
+ metrics::counter!(UPDATES_ISSUED, &[("type", "property_definition")]).increment(1);
+
+ res
}
}
@@ -492,7 +500,7 @@ impl EventProperty {
where
E: Executor<'c, Database = Postgres>,
{
- sqlx::query!(
+ let res = sqlx::query!(
r#"INSERT INTO posthog_eventproperty (event, property, team_id, project_id) VALUES ($1, $2, $3, $4) ON CONFLICT DO NOTHING"#,
self.event,
self.property,
@@ -501,7 +509,11 @@ impl EventProperty {
)
.execute(executor)
.await
- .map(|_| ())
+ .map(|_| ());
+
+ metrics::counter!(UPDATES_ISSUED, &[("type", "event_property")]).increment(1);
+
+ res
}
}