Connector Implementation & Inheritance Ollama #456
base: main
Changes from 3 commits
9598325
a1f9683
8add602
22eafbd
ae1f2a5
0d42437
8bb60fa
61b73b0
791da59
f7c1d84
eebb2cf
a3a109a
e78460d
52417c2
226b5fc
68302e9
@@ -0,0 +1,129 @@
# OpenChat Playground with Ollama

This page describes how to run OpenChat Playground (OCP) with Ollama integration.

## Get the repository root

1. Get the repository root.

    ```bash
    # bash/zsh
    REPOSITORY_ROOT=$(git rev-parse --show-toplevel)
    ```

    ```powershell
    # PowerShell
    $REPOSITORY_ROOT = git rev-parse --show-toplevel
    ```

## Run on local machine

1. Make sure you are at the repository root.

    ```bash
    cd $REPOSITORY_ROOT
    ```

1. Make sure Ollama is installed and running on your local machine. If not, install Ollama from [ollama.com](https://ollama.com/) and start the service.

    ```bash
    # Start Ollama service
    ollama serve
    ```
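
    To confirm the service is reachable before moving on, you can query Ollama's local API. This check assumes the default Ollama port, `11434`.

    ```bash
    # Verify Ollama is listening on the default port (11434)
    # and list the models it currently has available
    curl http://localhost:11434/api/tags
    ```
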
1. Pull the model you want to use. Replace `{{MODEL_NAME}}` with your desired model.

    ```bash
    # Example: Pull llama3.2 model
    ollama pull llama3.2

    # Or pull other models
    ollama pull mistral
    ollama pull phi3
    ```
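
    You can confirm which models have been pulled with `ollama list`.

    ```bash
    # List models available to the local Ollama instance
    ollama list
    ```
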
1. Run the app.

    ```bash
    dotnet run --project $REPOSITORY_ROOT/src/OpenChat.PlaygroundApp -- --connector-type Ollama
    ```
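
    If your Ollama instance is not at the default endpoint, or you want a model other than the default, the `--base-url` and `--model` options shown in the container example below can presumably be passed here as well. A hypothetical example:

    ```bash
    # Point the app at a specific Ollama endpoint and model
    # (endpoint and model values here are illustrative)
    dotnet run --project $REPOSITORY_ROOT/src/OpenChat.PlaygroundApp -- \
        --connector-type Ollama \
        --base-url http://localhost:11434 \
        --model llama3.2
    ```
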
1. Open your web browser, navigate to `http://localhost:5280`, and enter prompts.

## Run in local container

1. Make sure you are at the repository root.

    ```bash
    cd $REPOSITORY_ROOT
    ```

1. Build a container.

    ```bash
    docker build -f Dockerfile -t openchat-playground:latest .
    ```
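
    To double-check that the image was built, you can list it with `docker images`.

    ```bash
    # Confirm the locally built image exists
    docker images openchat-playground
    ```
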
1. Run the app.

    ```bash
    # From locally built container
    docker run -i --rm -p 8080:8080 openchat-playground:latest --connector-type Ollama --base-url http://host.docker.internal:11434 --model llama3.2
    ```

    > **NOTE**: Use `host.docker.internal:11434` to connect to Ollama running on the host machine from inside the container.
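
    On Linux, `host.docker.internal` may not resolve by default; adding a host-gateway mapping to the `docker run` command is a common workaround:

    ```bash
    # On Linux, map host.docker.internal to the host gateway explicitly
    docker run -i --rm -p 8080:8080 \
        --add-host host.docker.internal:host-gateway \
        openchat-playground:latest \
        --connector-type Ollama --base-url http://host.docker.internal:11434 --model llama3.2
    ```
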
1. Open your web browser, navigate to `http://localhost:8080`, and enter prompts.

## Run on Azure

1. Make sure you are at the repository root.

    ```bash
    cd $REPOSITORY_ROOT
    ```

1. Login to Azure.

    ```bash
    # Login to Azure Dev CLI
    azd auth login
    ```

1. Check login status.

    ```bash
    # Azure Dev CLI
    azd auth login --check-status
    ```
1. Initialize the `azd` template.

    ```bash
    azd init
    ```

    > **NOTE**: You will be asked to provide an environment name for provisioning.

1. Set the Ollama configuration as azd environment variables.

    ```bash
    # Set connector type to Ollama
    azd env set CONNECTOR_TYPE "Ollama"

    # Optionally, set a specific model (default is llama3.2)
    azd env set OLLAMA_MODEL "llama3.2"
    ```
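
    You can verify the values before deploying with `azd env get-values`.

    ```bash
    # Show the environment variables currently set for this azd environment
    azd env get-values
    ```
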
1. Run the following command to provision and deploy the app.

    ```bash
    azd up
    ```

    > **NOTE**: You will be asked to provide an Azure subscription and location for deployment.

1. Clean up all the resources.

    ```bash
    azd down --force --purge
    ```
@@ -17,6 +17,9 @@
    "githubModelsToken": {
      "value": "${GH_MODELS_TOKEN}"
    },
    "ollamaModel": {
      "value": "${OLLAMA_MODEL}"
    },
    "openchatPlaygroundappExists": {
      "value": "${SERVICE_OPENCHAT_PLAYGROUNDAPP_RESOURCE_EXISTS=false}"
    },
@@ -0,0 +1,53 @@
using Microsoft.Extensions.AI;
using OllamaSharp;

[tae0y marked this conversation as resolved.]

using OpenChat.PlaygroundApp.Abstractions;
using OpenChat.PlaygroundApp.Configurations;

namespace OpenChat.PlaygroundApp.Connectors;

/// <summary>
/// This represents the connector entity for Ollama.
/// </summary>
public class OllamaConnector(AppSettings settings) : LanguageModelConnector(settings.Ollama)
{
    /// <inheritdoc/>
    public override bool EnsureLanguageModelSettingsValid()
    {
        var settings = this.Settings as OllamaSettings;
        if (settings is null)
        {
            throw new InvalidOperationException("Missing configuration: Ollama.");
        }

        // Null-safe check: a missing value surfaces as a configuration error
        // instead of a NullReferenceException.
        if (string.IsNullOrWhiteSpace(settings.BaseUrl))
        {
            throw new InvalidOperationException("Missing configuration: Ollama:BaseUrl.");
        }

        if (string.IsNullOrWhiteSpace(settings.Model))
        {
            throw new InvalidOperationException("Missing configuration: Ollama:Model.");
        }

[tae0y marked this conversation as resolved.]

        return true;
    }

    /// <inheritdoc/>
    public override async Task<IChatClient> GetChatClientAsync()
    {
        var settings = this.Settings as OllamaSettings;
        var baseUrl = settings!.BaseUrl!;
        var model = settings!.Model!;

        var config = new OllamaApiClient.Configuration
        {
            Uri = new Uri(baseUrl),
            Model = model,
        };

        var chatClient = new OllamaApiClient(config);

[Reviewer comment, translated from Korean: "You need to add it in between here so that the model gets pulled automatically later." See open-chat-playground/src/OpenChat.PlaygroundApp/Connectors/HuggingFaceConnector.cs, lines 64 to 70 in 5ac25c0.]

        return await Task.FromResult(chatClient).ConfigureAwait(false);
    }
}