diff --git a/helm-charts/common/lvm-uservice/templates/configmap.yaml b/helm-charts/common/lvm-uservice/templates/configmap.yaml index f65d5f494..616efe956 100644 --- a/helm-charts/common/lvm-uservice/templates/configmap.yaml +++ b/helm-charts/common/lvm-uservice/templates/configmap.yaml @@ -10,6 +10,7 @@ metadata: data: {{- if eq "TGI" .Values.LVM_BACKEND }} LVM_COMPONENT_NAME: "OPEA_TGI_LLAVA_LVM" + MAX_IMAGES: {{ .Values.MAX_IMAGES | default 1 | quote }} {{- if not .Values.LVM_ENDPOINT }} LVM_ENDPOINT: "http://{{ .Release.Name }}-tgi" {{- end }} @@ -19,10 +20,11 @@ data: {{- end }} {{- if eq "LLaVA" .Values.LVM_BACKEND }} LVM_COMPONENT_NAME: "OPEA_LLAVA_LVM" + MAX_IMAGES: {{ .Values.MAX_IMAGES | default 1 | quote }} {{- else if eq "VideoLlama" .Values.LVM_BACKEND }} LVM_COMPONENT_NAME: "OPEA_VIDEO_LLAMA_LVM" {{- else if eq "LlamaVision" .Values.LVM_BACKEND }} - LVM_COMPONENT_NAME: "OPEA_LLAVA_VISION_LVM" + LVM_COMPONENT_NAME: "OPEA_LLAMA_VISION_LVM" {{- else if eq "PredictionGuard" .Values.LVM_BACKEND }} LVM_COMPONENT_NAME: "OPEA_PREDICTION_GUARD_LVM" {{- else }} diff --git a/helm-charts/common/lvm-uservice/values.yaml b/helm-charts/common/lvm-uservice/values.yaml index ff1c16f72..988f19d9e 100644 --- a/helm-charts/common/lvm-uservice/values.yaml +++ b/helm-charts/common/lvm-uservice/values.yaml @@ -9,6 +9,8 @@ LOGFLAG: "" # backend inference engine to use, i.e. TGI, LLaVA, VideoLlama, LlamaVision, PredictionGuard LVM_BACKEND: "TGI" +# maximum number of images sent to the backend, only valid for TGI and LLaVA backends +MAX_IMAGES: 1 # inference engine service URL, e.g. http://tgi:80 LVM_ENDPOINT: ""