ask.cs
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

using Microsoft.AspNetCore.Mvc;
using Microsoft.Azure.Functions.Worker;
using Microsoft.Azure.Functions.Worker.Extensions.OpenAI.TextCompletion;
using Microsoft.Azure.Functions.Worker.Http;
using Microsoft.Extensions.Logging;

namespace chatSample;

/// <summary>
/// These samples show how to use the OpenAI Chat Completions API for Text Completion. For more details on the Completions APIs, see
/// https://platform.openai.com/docs/guides/text-generation/chat-completions-vs-completions.
/// </summary>
public static class TextCompletions
{
    /// <summary>
    /// This sample demonstrates the "templating" pattern, where the function takes a parameter
    /// and embeds it into a text prompt, which is then sent to the OpenAI completions API.
    /// </summary>
    [Function("whois")]
    public static IActionResult WhoIs(
        [HttpTrigger(AuthorizationLevel.Function, Route = "whois/{name}")] HttpRequestData req,
        [TextCompletionInput("Who is {name}?", Model = "%CHAT_MODEL_DEPLOYMENT_NAME%")]
        TextCompletionResponse response
    )
    {
        return new OkObjectResult(response.Content);
    }
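
    // Example invocation (illustrative; assumes a default local run of the Functions host):
    //   GET http://localhost:7071/api/whois/Ada%20Lovelace
    // The {name} route value is substituted into the template, so the model receives the prompt
    // "Who is Ada Lovelace?" and its completion is returned as the HTTP response body.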
    /// <summary>
    /// This sample takes a prompt as input, sends it directly to the OpenAI completions API, and returns the
    /// response as the output.
    /// </summary>
    [Function("ask")]
    public static IActionResult GenericCompletion(
        [HttpTrigger(AuthorizationLevel.Function, "post")] HttpRequestData req,
        [TextCompletionInput("{Prompt}", Model = "%CHAT_MODEL_DEPLOYMENT_NAME%")]
        TextCompletionResponse response
    )
    {
        string text = response.Content;
        return new OkObjectResult(text);
    }
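
    // Example invocation (illustrative; the property name and local URL are assumptions):
    //   POST http://localhost:7071/api/ask
    //   { "Prompt": "Explain Azure Functions in one sentence." }
    // The "{Prompt}" binding expression is expected to be filled from the matching property of the
    // JSON request body, and the model's completion is returned as the HTTP response body.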
}
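
Both bindings resolve their model from the CHAT_MODEL_DEPLOYMENT_NAME app setting (the %...% syntax), so that setting must name a chat model deployment before either function can run. The sketch below is a minimal, hypothetical console client for exercising the two endpoints against a locally running function app; the base URL, route values, and prompt text are assumptions for illustration and are not part of the sample. When deployed, AuthorizationLevel.Function also requires a function key (for example via the x-functions-key header).

// Hypothetical client sketch for local testing; not part of the sample above.
using System;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;

internal static class SampleClient
{
    // Assumed default local Functions host and route prefix; adjust for your environment.
    private const string BaseUrl = "http://localhost:7071/api";

    private static async Task Main()
    {
        using var client = new HttpClient();

        // Templating endpoint: the route value becomes part of the prompt.
        string whois = await client.GetStringAsync($"{BaseUrl}/whois/Ada%20Lovelace");
        Console.WriteLine(whois);

        // Generic endpoint: the prompt is supplied in the JSON request body.
        using var body = new StringContent(
            "{\"Prompt\": \"Explain Azure Functions in one sentence.\"}",
            Encoding.UTF8,
            "application/json");
        using HttpResponseMessage response = await client.PostAsync($"{BaseUrl}/ask", body);
        Console.WriteLine(await response.Content.ReadAsStringAsync());
    }
}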