
Commit

Merge branch 'master' into livestream-host
Yiidiir authored Feb 8, 2025
2 parents d98d405 + 92c68a3 commit d848ae0
Showing 2,185 changed files with 15,255 additions and 7,401 deletions.
424 changes: 204 additions & 220 deletions .flox/env/manifest.lock

Large diffs are not rendered by default.

59 changes: 57 additions & 2 deletions .github/actions/build-n-cache-image/action.yml
@@ -8,6 +8,26 @@ inputs:
required: false
default: 'false'
description: Whether to save the image in the Depot ephemeral registry after building it
push-image:
required: false
default: false
description: Whether to push the built image - requires aws-access-key and aws-access-secret
aws-access-key:
required: false
description: AWS key to log into ECR (required when push-image is true)
aws-access-secret:
required: false
description: AWS secret to log into ECR (required when push-image is true)
dockerhub-username:
required: false
description: Dockerhub username for pushing image
dockerhub-password:
required: false
description: Dockerhub password for pushing image
pr-number:
required: false
default: ''
description: PR number for tagging the image

outputs:
tag:
@@ -23,15 +43,49 @@ runs:
- name: Checkout
uses: actions/checkout@v4

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

- name: Set up QEMU
uses: docker/setup-qemu-action@v3

- name: Set up Depot CLI
uses: depot/setup-action@v1

- name: Configure AWS credentials
if: ${{ inputs.push-image == 'true' }}
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ inputs.aws-access-key }}
aws-secret-access-key: ${{ inputs.aws-access-secret }}
aws-region: us-east-1

- name: Login to DockerHub
if: ${{ inputs.push-image == 'true' }}
uses: docker/login-action@v3
with:
username: ${{ inputs.dockerhub-username }}
password: ${{ inputs.dockerhub-password }}

- name: Login to Amazon ECR
if: ${{ inputs.push-image == 'true' }}
id: aws-ecr
uses: aws-actions/amazon-ecr-login@v2

- name: Emit image tag
id: emit
shell: bash
run: echo "tag=posthog/posthog:${{ github.sha }}" >> $GITHUB_OUTPUT
run: |
TAGS="posthog/posthog:${{ github.sha }}"
if [[ "${{ inputs.push-image }}" == "true" ]]; then
TAGS="$TAGS,${{ steps.aws-ecr.outputs.registry }}/posthog-cloud:pr-commit-${{ github.sha }}"
fi
if [[ "${{ inputs.pr-number }}" != "" ]]; then
TAGS="$TAGS,${{ steps.aws-ecr.outputs.registry }}/posthog-cloud:pr-${{ inputs.pr-number }}"
fi
echo "tag=$TAGS" >> $GITHUB_OUTPUT
- name: Build image # We don't push this because we use Depot cache as the communication channel
- name: Build image
id: build
uses: depot/build-push-action@v1
with:
@@ -41,5 +95,6 @@ runs:
platforms: linux/amd64,linux/arm64
build-args: COMMIT_HASH=${{ github.sha }}
save: ${{ inputs.save }}
push: ${{ inputs.push-image }}
env:
ACTIONS_ID_TOKEN_REQUEST_URL: ${{ inputs.actions-id-token-request-url }}
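Note on the action changes above: the "Emit image tag" step now emits a comma-separated tag list instead of a single tag, adding an ECR pr-commit tag when push-image is true and a pr-<number> tag when a PR number is passed in. A minimal Python sketch of that tag-building logic, using hypothetical registry, SHA, and PR values (the action itself does this in inline bash):

# Illustrative sketch of the tag-list logic in the "Emit image tag" step.
# The registry, SHA, and PR number below are hypothetical placeholders;
# the action resolves them from the ECR login step and the GitHub context.
def build_tags(sha: str, push_image: bool, pr_number: str, ecr_registry: str) -> str:
    tags = [f"posthog/posthog:{sha}"]
    if push_image:
        tags.append(f"{ecr_registry}/posthog-cloud:pr-commit-{sha}")
    if pr_number:
        tags.append(f"{ecr_registry}/posthog-cloud:pr-{pr_number}")
    return ",".join(tags)

print(build_tags("d848ae0", True, "12345", "123456789012.dkr.ecr.us-east-1.amazonaws.com"))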
6 changes: 6 additions & 0 deletions .github/workflows/container-images-ci.yml
@@ -41,6 +41,12 @@ jobs:
uses: ./.github/actions/build-n-cache-image
with:
actions-id-token-request-url: ${{ env.ACTIONS_ID_TOKEN_REQUEST_URL }}
push-image: true
aws-access-key: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-access-secret: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
dockerhub-username: ${{ secrets.DOCKERHUB_USER }}
dockerhub-password: ${{ secrets.DOCKERHUB_TOKEN }}
pr-number: ${{ github.event.number }}

deploy_preview:
name: Deploy preview environment
2 changes: 1 addition & 1 deletion .github/workflows/rust.yml
@@ -114,7 +114,7 @@ jobs:
- name: Setup dependencies
if: needs.changes.outputs.rust == 'true' && matrix.package == 'others'
run: |
docker compose up kafka redis db echo_server -d --wait
docker compose up kafka redis db echo_server objectstorage -d --wait
docker compose up setup_test_db
echo "127.0.0.1 kafka" | sudo tee -a /etc/hosts
2 changes: 1 addition & 1 deletion .stylelintrc.js
@@ -52,4 +52,4 @@ module.exports = {
'order/order': ['dollar-variables', 'custom-properties', 'declarations', 'rules', 'at-rules'],
'color-function-notation': ['modern', { ignore: ['with-var-inside'] }],
},
}
}
12 changes: 8 additions & 4 deletions Dockerfile
@@ -41,7 +41,7 @@ RUN pnpm build
#
# ---------------------------------------------------------
#
FROM ghcr.io/posthog/rust-node-container:bookworm_rust_1.80.1-node_18.19.1 AS plugin-server-build
FROM ghcr.io/posthog/rust-node-container:bookworm_rust_1.82-node_18.19.1 AS plugin-server-build

# Compile and install system dependencies
RUN apt-get update && \
@@ -80,9 +80,13 @@ WORKDIR /code/plugin-server
# the cache hit ratio of the layers above.
COPY ./plugin-server/src/ ./src/
COPY ./plugin-server/tests/ ./tests/
RUN pnpm run build:cyclotron && pnpm build

# As the plugin-server is now built, let’s keep
# Build cyclotron first with increased memory
RUN NODE_OPTIONS="--max-old-space-size=4096" pnpm run build:cyclotron

# Then build the plugin server with increased memory
RUN NODE_OPTIONS="--max-old-space-size=4096" pnpm build

# only prod dependencies in the node_module folder
# as we will copy it to the last image.
RUN --mount=type=cache,id=pnpm,target=/tmp/pnpm-store \
@@ -243,4 +247,4 @@ EXPOSE 8000
EXPOSE 8001
COPY unit.json.tpl /docker-entrypoint.d/unit.json.tpl
USER root
CMD ["./bin/docker"]
CMD ["./bin/docker"]
3 changes: 2 additions & 1 deletion common/hogvm/__tests__/__snapshots__/lambdas.hoge
@@ -6,4 +6,5 @@
53, 0, 33, 2, 36, 3, 54, 1, 35, 32, "--------", 2, "print", 1, 35, 52, "lambda", 0, 0, 14, 32, "moo", 2, "print", 1, 35,
32, "cow", 2, "print", 1, 35, 31, 38, 53, 0, 36, 4, 54, 0, 35, 32, "-------- lambdas do not survive json --------", 2,
"print", 1, 35, 36, 0, 2, "print", 1, 35, 36, 0, 2, "jsonStringify", 1, 2, "print", 1, 35, 36, 0, 2, "jsonStringify", 1,
2, "jsonParse", 1, 36, 5, 2, "print", 1, 35, 35, 35, 35, 35, 35, 35]
2, "jsonParse", 1, 36, 5, 2, "print", 1, 35, 32, "--------", 2, "print", 1, 35, 52, "lambda", 1, 0, 7, 32, "a", 36, 0,
42, 1, 38, 53, 0, 33, 1, 33, 2, 33, 3, 43, 3, 2, "arrayMap", 2, 36, 6, 2, "print", 1, 35, 35, 35, 35, 35, 35, 35, 35]
5 changes: 5 additions & 0 deletions common/hogvm/__tests__/__snapshots__/lambdas.js
@@ -37,6 +37,8 @@ function jsonParse (str) {
const obj = {}; for (const key in x) { obj[key] = convert(x[key]) }; return obj }
return x }
return convert(JSON.parse(str)) }
function arrayMap (func, arr) { let result = []; for (let i = 0; i < arr.length; i++) { result = arrayPushBack(result, func(arr[i])) } return result }
function arrayPushBack (arr, item) { if (!Array.isArray(arr)) { return [item] } return [...arr, item] }
function __toHogDateTime(timestamp, zone) {
if (__isHogDate(timestamp)) {
const date = new Date(Date.UTC(timestamp.year, timestamp.month - 1, timestamp.day));
@@ -124,3 +126,6 @@ print(b);
print(jsonStringify(b));
let c = jsonParse(jsonStringify(b));
print(c);
print("--------");
let arrayMapObjects = arrayMap(__lambda((a) => ({"a": a})), [1, 2, 3]);
print(arrayMapObjects);
2 changes: 2 additions & 0 deletions common/hogvm/__tests__/__snapshots__/lambdas.stdout
@@ -16,3 +16,5 @@ cow
fn<lambda(1)>
"fn<lambda(1)>"
fn<lambda(1)>
--------
[{'a': 1}, {'a': 2}, {'a': 3}]
5 changes: 5 additions & 0 deletions common/hogvm/__tests__/lambdas.hog
@@ -35,3 +35,8 @@ print(b)
print(jsonStringify(b)) // just a json string "<lambda:0>"
let c := jsonParse(jsonStringify(b))
print(c) // prints a string, can't be called

print('--------')

let arrayMapObjects := arrayMap(a -> ({'a': a}), [1, 2, 3])
print(arrayMapObjects)
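The new Hog test above exercises lambdas that return object literals inside arrayMap. For reference only, the expected behaviour corresponds to this Python sketch (not part of the change):

# Python equivalent of arrayMap(a -> ({'a': a}), [1, 2, 3]) in Hog,
# shown only to illustrate the expected output in lambdas.stdout.
array_map_objects = [{"a": a} for a in [1, 2, 3]]
print(array_map_objects)  # [{'a': 1}, {'a': 2}, {'a': 3}]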
37 changes: 37 additions & 0 deletions ee/api/core_memory.py
@@ -0,0 +1,37 @@
from django.db import IntegrityError
from rest_framework import mixins, serializers
from rest_framework.viewsets import GenericViewSet

from ee.models.assistant import CoreMemory
from posthog.api.routing import TeamAndOrgViewSetMixin
from posthog.exceptions import Conflict


class MaxCoreMemorySerializer(serializers.ModelSerializer):
class Meta:
model = CoreMemory
fields = ["id", "text"]

text = serializers.CharField(allow_blank=True)

def create(self, validated_data):
try:
validated_data["team"] = self.context["get_team"]()
validated_data["initial_text"] = validated_data["text"]
validated_data["scraping_status"] = CoreMemory.ScrapingStatus.COMPLETED
return super().create(validated_data)
except IntegrityError:
raise Conflict("Core memory already exists for this environment.")


class MaxCoreMemoryViewSet(
TeamAndOrgViewSetMixin,
mixins.CreateModelMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
GenericViewSet,
):
scope_object = "INTERNAL"
serializer_class = MaxCoreMemorySerializer
queryset = CoreMemory.objects.all()
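The viewset above only declares the serializer, queryset, and allowed mixins; its URL registration happens elsewhere in the codebase and is not among the files shown here. A hypothetical plain-DRF registration sketch, for illustration only; PostHog's real routing uses its own nested environments router:

# Hypothetical registration using a plain DRF router. The actual PostHog
# registration (not in this diff) uses the nested environments router,
# so the path and basename here are assumptions.
from rest_framework.routers import DefaultRouter

from ee.api.core_memory import MaxCoreMemoryViewSet

router = DefaultRouter()
router.register(r"core_memory", MaxCoreMemoryViewSet, basename="core_memory")

urlpatterns = router.urls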
124 changes: 124 additions & 0 deletions ee/api/test/test_core_memory.py
@@ -0,0 +1,124 @@
from uuid import uuid4

from django.db import transaction
from rest_framework import status

from ee.models.assistant import CoreMemory
from posthog.models.team.team import Team
from posthog.test.base import APIBaseTest


class TestCoreMemoryAPI(APIBaseTest):
def setUp(self):
super().setUp()
self.core_memory = CoreMemory.objects.create(team=self.team, text="Initial memory")
self.other_team = Team.objects.create(organization=self.organization, name="other team")
self.other_core_memory = CoreMemory.objects.create(team=self.other_team, text="Other team memory")

def test_list_core_memories(self):
response = self.client.get(f"/api/environments/{self.team.pk}/core_memory")
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.json()["count"], 1)
self.assertEqual(response.json()["results"][0]["text"], "Initial memory")
self.assertEqual(response.json()["results"][0]["id"], str(self.core_memory.id))

def test_retrieve_core_memory(self):
response = self.client.get(f"/api/environments/{self.team.pk}/core_memory/{self.core_memory.pk}")
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.json()["text"], "Initial memory")
self.assertEqual(response.json()["id"], str(self.core_memory.id))

def test_create_core_memory(self):
self.core_memory.delete()

response = self.client.post(f"/api/environments/{self.team.pk}/core_memory", {"text": "New memory"})
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(response.json()["text"], "New memory")

created_memory = CoreMemory.objects.get(team=self.team, text="New memory")
self.assertTrue(created_memory)
self.assertEqual(created_memory.initial_text, "New memory")
self.assertEqual(created_memory.scraping_status, "completed")

def test_cannot_create_duplicate_core_memory(self):
count = CoreMemory.objects.count()
with transaction.atomic():
response = self.client.post(f"/api/environments/{self.team.pk}/core_memory", {"text": "Initial memory"})
self.assertEqual(response.status_code, status.HTTP_409_CONFLICT)
self.assertEqual(response.json()["detail"], "Core memory already exists for this environment.")
self.assertEqual(CoreMemory.objects.count(), count)

def test_patch_core_memory(self):
response = self.client.patch(
f"/api/environments/{self.team.pk}/core_memory/{self.core_memory.pk}", {"text": "Updated memory"}
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.json()["text"], "Updated memory")
self.core_memory.refresh_from_db()
self.assertEqual(self.core_memory.text, "Updated memory")

def test_patch_core_memory_id_is_immutable(self):
pk = self.core_memory.pk
response = self.client.patch(
f"/api/environments/{self.team.pk}/core_memory/{self.core_memory.pk}",
{"text": "Updated memory", "id": uuid4()},
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.json()["id"], str(pk))
self.core_memory.refresh_from_db()
self.assertEqual(self.core_memory.pk, pk)

def test_patch_blank_memory(self):
response = self.client.patch(
f"/api/environments/{self.team.pk}/core_memory/{self.core_memory.pk}", {"text": ""}
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.json()["text"], "")

def test_cannot_patch_null_memory(self):
response = self.client.patch(
f"/api/environments/{self.team.pk}/core_memory/{self.core_memory.pk}", {"text": None}
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

def test_post_blank_memory(self):
self.core_memory.delete()
response = self.client.post(f"/api/environments/{self.team.pk}/core_memory", {"text": ""})
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(response.json()["text"], "")

def test_cannot_post_null_memory(self):
self.core_memory.delete()
response = self.client.post(f"/api/environments/{self.team.pk}/core_memory", {"text": None})
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

def test_cannot_retrieve_other_team_memory(self):
response = self.client.get(f"/api/environments/{self.team.pk}/core_memory/{self.other_core_memory.pk}")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

def test_cannot_update_other_team_memory(self):
response = self.client.patch(
f"/api/environments/{self.team.pk}/core_memory/{self.other_core_memory.pk}",
{"text": "Trying to update other team's memory"},
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.other_core_memory.refresh_from_db()
self.assertEqual(self.other_core_memory.text, "Other team memory")

def test_cannot_edit_fields_except_text(self):
response = self.client.patch(
f"/api/environments/{self.team.pk}/core_memory/{self.core_memory.pk}",
{
"text": "Valid update",
"team": self.other_team.pk, # Attempting to change team
"initial_text": "Trying to change initial text",
"scraping_status": "completed",
},
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.core_memory.refresh_from_db()
# Only text should be updated
self.assertEqual(self.core_memory.text, "Valid update")
self.assertEqual(self.core_memory.team, self.team)
self.assertEqual(self.core_memory.initial_text, "")
self.assertIsNone(self.core_memory.scraping_status)
2 changes: 1 addition & 1 deletion ee/billing/billing_manager.py
@@ -11,7 +11,7 @@
from sentry_sdk import capture_message
from requests import JSONDecodeError # type: ignore[attr-defined]
from rest_framework.exceptions import NotAuthenticated
from sentry_sdk import capture_exception
from posthog.exceptions_capture import capture_exception

from ee.billing.billing_types import BillingStatus
from ee.billing.quota_limiting import set_org_usage_summary, update_org_billing_quotas
2 changes: 1 addition & 1 deletion ee/billing/quota_limiting.py
@@ -8,7 +8,7 @@
import posthoganalytics
from django.db.models import Q
from django.utils import timezone
from sentry_sdk import capture_exception
from posthog.exceptions_capture import capture_exception

from posthog.cache_utils import cache_for
from posthog.event_usage import report_organization_action
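Both billing files above make the same one-line swap: sentry_sdk.capture_exception is replaced with posthog.exceptions_capture.capture_exception, routing error reporting through a single in-house module. That module is not shown in this commit; a minimal sketch of the shape such a wrapper could take, purely as an assumption:

# Assumed shape of posthog/exceptions_capture.py; the module is not among
# the files shown here, and the real version may add tagging, sampling,
# or a different backend.
from sentry_sdk import capture_exception as sentry_capture_exception

def capture_exception(error=None, **kwargs):
    # Single indirection point so the reporting backend can change later.
    return sentry_capture_exception(error, **kwargs)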
(The remaining changed files in this commit are not rendered here.)
