Skip to content

Commit

Permalink
Merge pull request #103 from MuleSoft-AI-Chain-Project/v1.2.0-revisit
Browse files Browse the repository at this point in the history
v1.2.0 release
  • Loading branch information
amirkhan-ak-sf authored Nov 21, 2024
2 parents 73d092c + 4de3045 commit 286db79
Show file tree
Hide file tree
Showing 14 changed files with 691 additions and 258 deletions.
5 changes: 2 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,12 @@ MuleSoft AI Chain is a MuleSoft custom connector (𝘣𝘢𝘴𝘦𝘥 on 𝘓
- The maximum supported Java version is JDK 17, which can be used only for running your application.
- Compilation must be done with JDK 8.


### Installation (using Cloud.Anypoint Dependency)

```xml
<dependency>
<groupId>cloud.anypoint</groupId>
<artifactId>mule-aichain-connector</artifactId>
<artifactId>mule4-aichain-connector</artifactId>
<version>1.0.0</version>
<classifier>mule-plugin</classifier>
</dependency>
Expand All @@ -32,7 +31,7 @@ Then add the following dependency to your application's `pom.xml`:
```xml
<dependency>
<groupId>com.mulesoft.connectors</groupId>
<artifactId>mule4-aichain-connector</artifactId>
<artifactId>mule-aichain-connector</artifactId>
<version>{version}</version>
<classifier>mule-plugin</classifier>
</dependency>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@
"header": [],
"body": {
"mode": "raw",
"raw": "{\n \"filePath\": \"<pdf_file>\",\n \"fileType\": \"pdf\"\n}",
"raw": "{\n \"filePath\": \"<pdf_file>\",\n \"fileType\": \"any\"\n}",
"options": {
"raw": {
"language": "json"
Expand Down Expand Up @@ -289,34 +289,6 @@
}
},
"response": []
},
{
"name": "Get Info Legacy",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\n \"question\": \"What is MuleChain\"\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "http://localhost:8081/embeddingInfoLegacy",
"protocol": "http",
"host": [
"localhost"
],
"port": "8081",
"path": [
"embeddingInfoLegacy"
]
}
},
"response": []
}
]
},
Expand Down Expand Up @@ -419,7 +391,7 @@
"header": [],
"body": {
"mode": "raw",
"raw": "\n{\n \"data\": \"Is Noise cancellation supported? \",\n \"filePath\":\"<some_pdf>\",\n \"fileType\": \"pdf\"\n}",
"raw": "\n{\n \"data\": \"Is Noise cancellation supported? \",\n \"filePath\":\"<some_pdf>\",\n \"fileType\": \"any\"\n}",
"options": {
"raw": {
"language": "json"
Expand Down Expand Up @@ -533,30 +505,37 @@
}
},
"response": []
},
}
]
},
{
"name": "Toxicity Detection",
"item": [
{
"name": "Legacy Chains",
"name": "Toxicity Detection",
"protocolProfileBehavior": {
"disableBodyPruning": true
},
"request": {
"method": "POST",
"method": "GET",
"header": [],
"body": {
"mode": "raw",
"raw": "{\n \"question\":\"Check Inventory for MULETEST0\"\n}",
"raw": "{\n \"prompt\": \"You don't know anything. don't act like a car washer, other wise i will kill you\"\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "http://localhost:8081/chains",
"protocol": "http",
"raw": "localhost:8081/toxictydetection",
"host": [
"localhost"
],
"port": "8081",
"path": [
"chains"
"toxictydetection"
]
}
},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ http://www.mulesoft.org/schema/mule/ee/core http://www.mulesoft.org/schema/mule/
<http:listener-connection host="0.0.0.0" port="8081" />
</http:listener-config>
<ms-aichain:config name="MISTRAL_AI" llmType="MISTRAL_AI" configType="Configuration Json" doc:name="MuleSoft AI Chain configuration" doc:id="bf1ef7ec-4aa1-41c8-a184-a13ca165c925" filePath='#[mule.home ++ "/apps/" ++ app.name ++ "/envVars.json"]' modelName="mistral-large-latest" temperature="0.1"/>
<ms-aichain:config name="OPENAI" llmType="OPENAI" configType="Configuration Json" doc:name="MuleSoft AI Chain configuration" doc:id="edb0d5a6-97c5-4d93-8098-4e197e563827" filePath='#[mule.home ++ "/apps/" ++ app.name ++ "/envVars.json"]' temperature="0.1" modelName="gpt-3.5-turbo"/>
<ms-aichain:config name="OPENAI" llmType="OPENAI" configType="Configuration Json" doc:name="MuleSoft AI Chain configuration" doc:id="edb0d5a6-97c5-4d93-8098-4e197e563827" filePath='#[mule.home ++ "/apps/" ++ app.name ++ "/envVars.json"]' temperature="0.1" modelName="gpt-4o-mini"/>
<ms-aichain:config name="OPENAI-GPT4-TURBO" llmType="OPENAI" configType="Configuration Json" doc:name="MuleSoft AI Chain configuration" doc:id="74b85066-1569-4f10-a06b-e49e854eeef2" filePath='#[mule.home ++ "/apps/" ++ app.name ++ "/envVars.json"]' modelName="gpt-4o" />
<flow name="PromptTemplate" doc:id="cff3a8ed-3799-424a-becf-9d7387729bd0" >
<http:listener doc:name="Listener" doc:id="dd18126e-81f5-48ef-8f35-9dd19afdfaf0" config-ref="HTTP_Listener_config" path="/agent">
Expand Down Expand Up @@ -150,24 +150,6 @@ output application/json
</ms-aichain:data>
</ms-aichain:embedding-get-info-from-store>
</flow>
<flow name="EmbeddingAdd5" doc:id="347caa3d-ed7b-4ba8-bf74-4f184e8727d3" >
<http:listener doc:name="Listener" doc:id="6720bf73-6b8c-4bc7-8153-f21a5df4f7d8" config-ref="HTTP_Listener_config" path="/embeddingInfoLegacy">
<http:response>
<http:body ><![CDATA[#[%dw 2.0
output application/json
---
{
payload: payload,
attributes: attributes
}]]]></http:body>
</http:response>
</http:listener>
<ms-aichain:embedding-get-info-from-store-legacy doc:name="Embedding get info from store legacy" doc:id="1f9e0410-4357-4ad8-bf17-29112f20e2f6" config-ref="OPENAI" storeName='#["embedding.store"]'>
<ms-aichain:data>
#[payload.question]
</ms-aichain:data>
</ms-aichain:embedding-get-info-from-store-legacy>
</flow>
<flow name="ImageGenerate" doc:id="d9dfb328-65d4-499b-8eaf-282e21a245cf" >
<http:listener doc:name="Listener" doc:id="ebd04ee3-0e40-456e-ad6e-8be37cd9f706" config-ref="HTTP_Listener_config" path="/image">
<http:response>
Expand Down Expand Up @@ -266,24 +248,6 @@ output application/json
</ms-aichain:data>
</ms-aichain:tools-use-ai-service>
</flow>
<flow name="ms-aichain-testingFlow1" doc:id="c5236fd3-d578-4f91-b1f9-16c334e1cd7c" >
<http:listener doc:name="Listener" doc:id="30727c05-9524-4011-9d3a-6658f4706d4e" config-ref="HTTP_Listener_config" path="/chains">
<http:response>
<http:body ><![CDATA[#[%dw 2.0
output application/json
---
{
payload: payload,
attributes: attributes
}]]]></http:body>
</http:response>
</http:listener>
<ms-aichain:tools-use-ai-service-legacy doc:name="Tools use ai service legacy" doc:id="51aa98dc-0ad2-42ad-9fc0-0883bdb766a4" config-ref="OPENAI-GPT4-TURBO" toolConfig='#[mule.home ++ "/apps/" ++ app.name ++ "/tools.config.json"]'>
<ms-aichain:data>
#[payload.question ++ ", dont assume things and invent answers. Answer with the most probable statement."]
</ms-aichain:data>
</ms-aichain:tools-use-ai-service-legacy>
</flow>
<flow name="ms-aichain-testingFlow2" doc:id="2f02e10e-6422-4b94-ab34-9584e5ee0ed6" >
<http:listener doc:name="Listener" doc:id="0f940ad8-5210-4433-9651-79af3fbd88c4" config-ref="HTTP_Listener_config" path="/scanned">
<http:response>
Expand All @@ -302,4 +266,16 @@ output application/json
</ms-aichain:data>
</ms-aichain:image-read-scanned-documents>
</flow>
<flow name="mulechain-testingFlow5" doc:id="cae40e72-55bd-4c51-9d8f-22337ff9b3a1" >
<http:listener doc:name="Listener" doc:id="93492920-a574-40a3-b1d0-31a9bd6e377f" config-ref="HTTP_Listener_config" path="/toxictydetection" />
<ms-aichain:toxicity-detection doc:name="Toxicity detection" doc:id="c7e47148-ef40-4ec4-9b54-65f5c0a03b15" config-ref="OPENAI" input="#[payload.prompt]"/>
<ee:transform doc:name="Transform Message" doc:id="cdc3042e-d7f7-46c1-9667-73ff8281dab2">
<ee:message>
<ee:set-payload><![CDATA[%dw 2.0
output application/json
---
read(payload.response default "{}","application/json")]]></ee:set-payload>
</ee:message>
</ee:transform>
</flow>
</mule>
52 changes: 50 additions & 2 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>com.mulesoft.connectors</groupId>
<artifactId>mule4-aichain-connector</artifactId>
<version>1.0.0</version>
<version>1.2.0</version>
<packaging>mule-extension</packaging>
<name>MuleSoft AI Chain Connector - Mule 4</name>
<description>The MuleSoft AI Chain Connector is designed to help developers easily build and manage AI-driven agents within the MuleSoft Anypoint Platform. It provides the tools and support needed to integrate Large Language Models (LLMs), embeddings, and other advanced AI services into MuleSoft applications.</description>
Expand Down Expand Up @@ -49,6 +49,48 @@
</developer>
</developers>

<description>The MuleSoft AI Chain Connector is designed to help developers easily build and manage AI-driven agents within the MuleSoft Anypoint Platform. It provides the tools and support needed to integrate Large Language Models (LLMs), embeddings, and other advanced AI services into MuleSoft applications.</description>
<url>https://github.com/anypointcloud/mulesoft-ai-chain-connector</url>
<licenses>
<license>
<name>MIT License</name>
<url>https://www.opensource.org/licenses/mit-license.php</url>
</license>
</licenses>
<developers>
<developer>
<name>Amir Khan</name>
<email>[email protected]</email>
<organization>Salesforce</organization>
<organizationUrl>https://www.mulesoft.com</organizationUrl>
</developer>
<developer>
<name>Mihael Bosnjak</name>
<email>[email protected]</email>
<organization>Salesforce</organization>
<organizationUrl>https://www.salesforce.com</organizationUrl>
</developer>
<developer>
<name>Arpit Gupta</name>
<email>[email protected]</email>
<organization>Salesforce</organization>
<organizationUrl>https://www.mulesoft.com</organizationUrl>
</developer>
<developer>
<name>Dipesh Kumar Dutta</name>
<email>[email protected]</email>
<organization>Salesforce</organization>
<organizationUrl>https://www.salesforce.com</organizationUrl>
</developer>
<developer>
<name>Ryan Hoegg</name>
<email>[email protected]</email>
<organization>Hoegg Software, Co.</organization>
<organizationUrl>https://hoegg.software</organizationUrl>
</developer>
</developers>


<parent>
<groupId>org.mule.extensions</groupId>
<artifactId>mule-modules-parent</artifactId>
Expand All @@ -62,7 +104,7 @@
<formatterGoal>validate</formatterGoal>
<slf4jApi.version>2.0.7</slf4jApi.version>
<mule.sdk.api.version>0.9.0-rc1</mule.sdk.api.version>
<langchain4j.version>0.34.0</langchain4j.version> <!-- upgrade to support tools in Ollama -->
<langchain4j.version>0.35.0</langchain4j.version> <!-- upgrade to support tools in Ollama -->
<mapdb.version>3.1.0</mapdb.version>
<json.version>20240303</json.version>
<munit.input.directory>src/test/munit</munit.input.directory>
Expand Down Expand Up @@ -240,6 +282,7 @@
<artifactId>langchain4j-mistral-ai</artifactId>
<version>${langchain4j.version}</version>
</dependency>

<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-hugging-face</artifactId>
Expand Down Expand Up @@ -287,6 +330,11 @@
<artifactId>langchain4j-azure-open-ai</artifactId>
<version>${langchain4j.version}</version>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-google-ai-gemini</artifactId>
<version>${langchain4j.version}</version>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-easy-rag</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,19 +68,25 @@ public class LangchainLLMConfiguration implements Initialisable {

@Parameter
@Placement(order = 6, tab = Placement.DEFAULT_TAB)
@Optional(defaultValue = "0.95")
private double topP = 0.95;


@Parameter
@Placement(order = 7, tab = Placement.DEFAULT_TAB)
@Optional(defaultValue = "60")
@DisplayName("LLM timeout")
private int llmTimeout = 60;

@Parameter
@Optional(defaultValue = "SECONDS")
@Placement(order = 7, tab = Placement.DEFAULT_TAB)
@Placement(order = 8, tab = Placement.DEFAULT_TAB)
@DisplayName("LLM timeout unit")
@Summary("Time unit to be used in the LLM Timeout")
private TimeUnit llmTimeoutUnit = TimeUnit.SECONDS;

@Parameter
@Placement(order = 8, tab = Placement.DEFAULT_TAB)
@Placement(order = 9, tab = Placement.DEFAULT_TAB)
@Expression(ExpressionSupport.SUPPORTED)
@Optional(defaultValue = "500")
private int maxTokens = 500;
Expand Down Expand Up @@ -109,6 +115,11 @@ public double getTemperature() {
return temperature;
}

public double getTopP() {
return topP;
}


public int getLlmTimeout() {
return llmTimeout;
}
Expand Down
Loading

0 comments on commit 286db79

Please sign in to comment.