2 changes: 1 addition & 1 deletion README.md
@@ -11,7 +11,7 @@ Open Chat Playground (OCP) is a web UI that is able to connect virtually any LLM
- [x] [GitHub Models](https://docs.github.com/github-models/about-github-models)
- [ ] [Google Vertex AI](https://cloud.google.com/vertex-ai/docs)
- [ ] [Docker Model Runner](https://docs.docker.com/ai/model-runner)
- [ ] [Foundry Local](https://learn.microsoft.com/azure/ai-foundry/foundry-local/what-is-foundry-local)
- [x] [Foundry Local](https://learn.microsoft.com/azure/ai-foundry/foundry-local/what-is-foundry-local)
- [x] [Hugging Face](https://huggingface.co/docs)
- [ ] [Ollama](https://github.com/ollama/ollama/tree/main/docs)
- [ ] [Anthropic](https://docs.anthropic.com)
1 change: 1 addition & 0 deletions docs/README.md
@@ -2,6 +2,7 @@

- [Azure AI Foundry](azure-ai-foundry.md)
- [GitHub Models](github-models.md)
- [Foundry Local](foundry-local.md)
- [Hugging Face](hugging-face.md)
- [LG](lg.md)
- [OpenAI](openai.md)
77 changes: 77 additions & 0 deletions docs/foundry-local.md
@@ -0,0 +1,77 @@
# OpenChat Playground with Foundry Local

This page describes how to run OpenChat Playground (OCP) with Foundry Local integration.

## Get the repository root

1. Get the repository root.

```bash
# bash/zsh
REPOSITORY_ROOT=$(git rev-parse --show-toplevel)
```

```powershell
# PowerShell
$REPOSITORY_ROOT = git rev-parse --show-toplevel
```

## Run on local machine

1. Make sure the Foundry Local server is up and running.

```bash
foundry service start
```

1. Download the Foundry Local model. The default model OCP uses is `phi-4-mini`.

```bash
foundry model download phi-4-mini
```

Alternatively, if you want to run with a model other than the default, say `qwen2.5-7b`, download it first by running the following command.

```bash
foundry model download qwen2.5-7b
```

Make sure the model you choose appears in the list printed by `foundry model list`.

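You can check the available model aliases at any time by listing them:

```bash
foundry model list
```
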
1. Make sure you are at the repository root.

```bash
cd $REPOSITORY_ROOT
```

1. Run the app.

```bash
# bash/zsh
dotnet run --project $REPOSITORY_ROOT/src/OpenChat.PlaygroundApp -- \
--connector-type FoundryLocal
```

```powershell
# PowerShell
dotnet run --project $REPOSITORY_ROOT\src\OpenChat.PlaygroundApp -- `
--connector-type FoundryLocal
```

Alternatively, if you want to run with a different model, say `qwen2.5-7b`, make sure you've already downloaded the model by running the `foundry model download qwen2.5-7b` command.

```bash
# bash/zsh
dotnet run --project $REPOSITORY_ROOT/src/OpenChat.PlaygroundApp -- \
--connector-type FoundryLocal \
--alias qwen2.5-7b
```

```powershell
# PowerShell
dotnet run --project $REPOSITORY_ROOT\src\OpenChat.PlaygroundApp -- `
--connector-type FoundryLocal `
--alias qwen2.5-7b
```

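When the connector is created successfully, the app writes a console line such as `The FoundryLocal connector created with model: phi-4-mini` (emitted by the `FoundryLocalConnector` added later in this PR), with the model name reflecting the alias you passed.
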
1. Open your web browser, navigate to `http://localhost:5280`, and enter prompts.
@@ -38,6 +38,7 @@ public static async Task<IChatClient> CreateChatClientAsync(AppSettings settings
{
ConnectorType.AzureAIFoundry => new AzureAIFoundryConnector(settings),
ConnectorType.GitHubModels => new GitHubModelsConnector(settings),
ConnectorType.FoundryLocal => new FoundryLocalConnector(settings),
ConnectorType.HuggingFace => new HuggingFaceConnector(settings),
ConnectorType.LG => new LGConnector(settings),
ConnectorType.OpenAI => new OpenAIConnector(settings),
60 changes: 60 additions & 0 deletions src/OpenChat.PlaygroundApp/Connectors/FoundryLocalConnector.cs
@@ -0,0 +1,60 @@
using System.ClientModel;

using Microsoft.AI.Foundry.Local;
using Microsoft.Extensions.AI;

using OpenAI;

using OpenChat.PlaygroundApp.Abstractions;
using OpenChat.PlaygroundApp.Configurations;

namespace OpenChat.PlaygroundApp.Connectors;

/// <summary>
/// This represents the connector entity for Foundry Local.
/// </summary>
/// <param name="settings"><see cref="AppSettings"/> instance.</param>
public class FoundryLocalConnector(AppSettings settings) : LanguageModelConnector(settings.FoundryLocal)
{
private readonly AppSettings _appSettings = settings ?? throw new ArgumentNullException(nameof(settings));

/// <inheritdoc/>
public override bool EnsureLanguageModelSettingsValid()
{
if (this.Settings is not FoundryLocalSettings settings)
{
throw new InvalidOperationException("Missing configuration: FoundryLocal.");
}

if (string.IsNullOrWhiteSpace(settings.Alias))
{
throw new InvalidOperationException("Missing configuration: FoundryLocal:Alias.");
}

return true;
}

/// <inheritdoc/>
public override async Task<IChatClient> GetChatClientAsync()
{
var settings = this.Settings as FoundryLocalSettings;
var alias = settings!.Alias!;

var manager = await FoundryLocalManager.StartModelAsync(aliasOrModelId: alias).ConfigureAwait(false);
var model = await manager.GetModelInfoAsync(aliasOrModelId: alias).ConfigureAwait(false);

var credential = new ApiKeyCredential(manager.ApiKey);
var options = new OpenAIClientOptions()
{
Endpoint = manager.Endpoint,
};

var client = new OpenAIClient(credential, options);
var chatClient = client.GetChatClient(model?.ModelId)
.AsIChatClient();

Console.WriteLine($"The {this._appSettings.ConnectorType} connector created with model: {settings.Alias}");

return chatClient;
}
}
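
For reference, the flow above can be sketched as a small standalone console program. This is a minimal sketch, assuming the `Microsoft.AI.Foundry.Local`, `Microsoft.Extensions.AI`, and `OpenAI` packages used by the connector are installed; the final `GetResponseAsync` call is an assumption about the installed `Microsoft.Extensions.AI` version and may need adjusting.

```csharp
// Minimal sketch of the FoundryLocalConnector flow: start the model locally,
// discover the OpenAI-compatible endpoint and API key, then wrap it as an IChatClient.
using System.ClientModel;

using Microsoft.AI.Foundry.Local;
using Microsoft.Extensions.AI;

using OpenAI;

var alias = "phi-4-mini"; // default alias OCP uses

// Start (and load, if necessary) the model in the local Foundry service.
var manager = await FoundryLocalManager.StartModelAsync(aliasOrModelId: alias);
var model = await manager.GetModelInfoAsync(aliasOrModelId: alias);

// Foundry Local exposes an OpenAI-compatible endpoint secured by a local API key.
var credential = new ApiKeyCredential(manager.ApiKey);
var options = new OpenAIClientOptions { Endpoint = manager.Endpoint };

var chatClient = new OpenAIClient(credential, options)
    .GetChatClient(model?.ModelId)
    .AsIChatClient();

// Assumption: GetResponseAsync(string) is available in the installed
// Microsoft.Extensions.AI version; adjust to your package version if needed.
var response = await chatClient.GetResponseAsync("Say hello from Foundry Local.");
Console.WriteLine(response);
```
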
@@ -77,7 +77,6 @@ public void Given_Null_Settings_When_CreateChatClient_Invoked_Then_It_Should_Thr
[InlineData(ConnectorType.AmazonBedrock)]
[InlineData(ConnectorType.GoogleVertexAI)]
[InlineData(ConnectorType.DockerModelRunner)]
[InlineData(ConnectorType.FoundryLocal)]
[InlineData(ConnectorType.Ollama)]
[InlineData(ConnectorType.Anthropic)]
[InlineData(ConnectorType.Naver)]
@@ -93,4 +92,31 @@ public void Given_Unsupported_ConnectorType_When_CreateChatClient_Invoked_Then_I
func.ShouldThrow<NotSupportedException>()
.Message.ShouldContain($"Connector type '{connectorType}'");
}
}

[Trait("Category", "UnitTest")]
[Theory]
// [InlineData(typeof(AmazonBedrockConnector))]
// [InlineData(typeof(AzureAIFoundryConnector))]
[InlineData(typeof(GitHubModelsConnector))]
// [InlineData(typeof(GoogleVertexAIConnector))]
// [InlineData(typeof(DockerModelRunnerConnector))]
[InlineData(typeof(FoundryLocalConnector))]
[InlineData(typeof(HuggingFaceConnector))]
// [InlineData(typeof(OllamaConnector))]
// [InlineData(typeof(AnthropicConnector))]
// [InlineData(typeof(LGConnector))]
// [InlineData(typeof(NaverConnector))]
[InlineData(typeof(OpenAIConnector))]
// [InlineData(typeof(UpstageConnector))]
public void Given_Concrete_Connectors_When_Checking_Inheritance_Then_Should_Inherit_From_LanguageModelConnector(Type derivedType)
Member: Oh.. was this test method not there before?

{
// Arrange
var baseType = typeof(LanguageModelConnector);

// Act
var result = baseType.IsAssignableFrom(derivedType);

// Assert
result.ShouldBeTrue();
}
}