Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 1 addition & 5 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -79,12 +79,8 @@ apps/remixdesktop/log_input_signals_new.txt
logs
apps/remix-ide-e2e/src/extensions/chrome/metamask
apps/remix-ide-e2e/tmp/
apps/remix-ide-e2e/tmp/

# IDE - Cursor
<<<<<<< HEAD
.cursor/
=======
.cursor/
PR_MESSAGE.md
>>>>>>> master
apps/remix-ide-e2e/tmp/
4 changes: 3 additions & 1 deletion apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import * as packageJson from '../../../../../package.json'
import { Plugin } from '@remixproject/engine';
import { trackMatomoEvent } from '@remix-api'
import { IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, AssistantParams, CodeExplainAgent, SecurityAgent, CompletionParams, OllamaInferencer, isOllamaAvailable, getBestAvailableModel } from '@remix/remix-ai-core';
import { IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, AssistantParams, CodeExplainAgent, SecurityAgent, CompletionParams, OllamaInferencer, isOllamaAvailable, getBestAvailableModel, resetOllamaHostOnSettingsChange } from '@remix/remix-ai-core';
import { CodeCompletionAgent, ContractAgent, workspaceAgent, IContextType, mcpDefaultServersConfig } from '@remix/remix-ai-core';
import { MCPInferencer } from '@remix/remix-ai-core';
import { IMCPServer, IMCPConnectionStatus } from '@remix/remix-ai-core';
Expand Down Expand Up @@ -63,6 +63,8 @@ export class RemixAIPlugin extends Plugin {
}

onActivation(): void {
    // Reset the cached Ollama host on activation so a newly configured
    // settings endpoint is re-discovered (nothing is exposed globally here)
resetOllamaHostOnSettingsChange();

if (this.isOnDesktop) {
this.useRemoteInferencer = true
Expand Down
5 changes: 4 additions & 1 deletion apps/remix-ide/src/app/tabs/locales/en/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -69,5 +69,8 @@
"settings.mcpServerConfigurationDescription": "Connect to Model Context Protocol servers for enhanced AI context",
"settings.enableMCPEnhancement": "Enable MCP Integration",
"settings.enableMCPEnhancementDescription": "Manage your MCP server connections",
"settings.aiPrivacyPolicyDescription": "Understand how RemixAI processes your data."
"settings.aiPrivacyPolicyDescription": "Understand how RemixAI processes your data.",
"settings.ollamaConfig": "Ollama URL Configuration",
"settings.ollamaConfigDescription": "Configure Ollama endpoint for local AI model integration",
"settings.ollama-endpoint": "ENDPOINT URL"
}
4 changes: 2 additions & 2 deletions libs/remix-ai-core/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ import { RemoteInferencer } from './inferencers/remote/remoteInference'
import { OllamaInferencer } from './inferencers/local/ollamaInferencer'
import { MCPInferencer } from './inferencers/mcp/mcpInferencer'
import { RemixMCPServer, createRemixMCPServer } from './remix-mcp-server'
import { isOllamaAvailable, getBestAvailableModel, listModels, discoverOllamaHost } from './inferencers/local/ollama'
import { isOllamaAvailable, getBestAvailableModel, listModels, discoverOllamaHost, resetOllamaHostOnSettingsChange } from './inferencers/local/ollama'
import { FIMModelManager, FIMModelConfig, FIM_MODEL_CONFIGS } from './inferencers/local/fimModelConfig'
import { ChatHistory } from './prompts/chat'
import { downloadLatestReleaseExecutable } from './helpers/inferenceServerReleases'
Expand All @@ -18,7 +18,7 @@ import { mcpDefaultServersConfig } from './config/mcpDefaultServers'
export {
IModel, IModelResponse, ChatCommandParser,
ModelType, DefaultModels, ICompletions, IParams, IRemoteModel, buildChatPrompt,
RemoteInferencer, OllamaInferencer, MCPInferencer, RemixMCPServer, isOllamaAvailable, getBestAvailableModel, listModels, discoverOllamaHost,
RemoteInferencer, OllamaInferencer, MCPInferencer, RemixMCPServer, isOllamaAvailable, getBestAvailableModel, listModels, discoverOllamaHost, resetOllamaHostOnSettingsChange,
FIMModelManager, FIMModelConfig, FIM_MODEL_CONFIGS, createRemixMCPServer,
InsertionParams, CompletionParams, GenerationParams, AssistantParams,
ChatEntry, AIRequestType, ChatHistory, downloadLatestReleaseExecutable,
Expand Down
93 changes: 66 additions & 27 deletions libs/remix-ai-core/src/inferencers/local/ollama.ts
Original file line number Diff line number Diff line change
@@ -1,60 +1,89 @@
import axios from 'axios';

// Helper function to track events using MatomoManager instance
function trackMatomoEvent(category: string, action: string, name?: string) {
try {
if (typeof window !== 'undefined' && (window as any)._matomoManagerInstance) {
(window as any)._matomoManagerInstance.trackEvent(category, action, name)
}
} catch (error) {
// Silent fail for tracking
}
}
import { Registry } from '@remix-project/remix-lib';
import { trackMatomoEvent } from '@remix-api'

// default Ollama ports to check (11434 is the legacy/standard port)
const OLLAMA_PORTS = [11434, 11435, 11436];
const OLLAMA_BASE_HOST = 'http://localhost';
const DEFAULT_OLLAMA_HOST = 'http://localhost:11434';

let discoveredOllamaHost: string | null = null;

function getConfiguredOllamaEndpoint(): string | null {
const filemanager = Registry.getInstance().get('filemanager').api;
try {
const config = Registry.getInstance().get('config').api
const configuredEndpoint = config.get('settings/ollama-endpoint');
if (configuredEndpoint && configuredEndpoint !== DEFAULT_OLLAMA_HOST) {
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_using_configured_endpoint', value: configuredEndpoint });
return configuredEndpoint;
}
} catch (error) {
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_config_access_failed', value: error.message || 'unknown' });
}
return null;
}

export async function discoverOllamaHost(): Promise<string | null> {
const filemanager = Registry.getInstance().get('filemanager').api;
if (discoveredOllamaHost) {
trackMatomoEvent('ai', 'remixAI', `ollama_host_cache_hit:${discoveredOllamaHost}`);
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_host_cache_hit:${discoveredOllamaHost}` })
return discoveredOllamaHost;
}

// First, try to use the configured endpoint from settings
const configuredEndpoint = getConfiguredOllamaEndpoint();
if (configuredEndpoint) {
try {
const res = await axios.get(`${configuredEndpoint}/api/tags`, { timeout: 2000 });
if (res.status === 200) {
discoveredOllamaHost = configuredEndpoint;
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_configured_endpoint_success', value: configuredEndpoint });
return configuredEndpoint;
}
return null;
} catch (error) {
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_configured_endpoint_failed', value: `${configuredEndpoint}:${error.message || 'unknown'}` });
      // NOTE(review): no fallback actually happens — we return null below, so a
      // configured-but-unreachable endpoint disables discovery entirely. Confirm
      // whether the intent was to continue into port discovery instead.
return null;
}
}

// Fall back to port discovery if no configured endpoint
for (const port of OLLAMA_PORTS) {
const host = `${OLLAMA_BASE_HOST}:${port}`;
trackMatomoEvent('ai', 'remixAI', `ollama_port_check:${port}`);
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_port_check:${port}` });
try {
const res = await axios.get(`${host}/api/tags`, { timeout: 2000 });
if (res.status === 200) {
discoveredOllamaHost = host;
trackMatomoEvent('ai', 'remixAI', `ollama_host_discovered_success:${host}`);
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_host_discovered_success:${host}` });
return host;
}
} catch (error) {
trackMatomoEvent('ai', 'remixAI', `ollama_port_connection_failed:${port}:${error.message || 'unknown'}`);
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_port_connection_failed:${port}:${error.message || 'unknown'}` });
continue; // next port
}
}
trackMatomoEvent('ai', 'remixAI', 'ollama_host_discovery_failed:no_ports_available');
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_host_discovery_failed:no_ports_available' });
return null;
}

export async function isOllamaAvailable(): Promise<boolean> {
trackMatomoEvent('ai', 'remixAI', 'ollama_availability_check:checking');
const filemanager = Registry.getInstance().get('filemanager').api;
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_availability_check:checking' });
const host = await discoverOllamaHost();
const isAvailable = host !== null;
trackMatomoEvent('ai', 'remixAI', `ollama_availability_result:available:${isAvailable}`);
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_availability_result:available:${isAvailable}` });
return isAvailable;
}

export async function listModels(): Promise<string[]> {
trackMatomoEvent('ai', 'remixAI', 'ollama_list_models_start:fetching');
const filemanager = Registry.getInstance().get('filemanager').api;
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_list_models_start:fetching' });
const host = await discoverOllamaHost();
if (!host) {
trackMatomoEvent('ai', 'remixAI', 'ollama_list_models_failed:no_host');
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_list_models_failed:no_host' });
throw new Error('Ollama is not available');
}

Expand All @@ -71,26 +100,35 @@ export function getOllamaHost(): string | null {
}

export function resetOllamaHost(): void {
trackMatomoEvent('ai', 'remixAI', `ollama_reset_host:${discoveredOllamaHost || 'null'}`);
const fileManager = Registry.getInstance().get('filemanager').api;
trackMatomoEvent(fileManager, { category: 'ai', action: 'remixAI', name: `ollama_reset_host:${discoveredOllamaHost || 'null'}` });
discoveredOllamaHost = null;
}

export function resetOllamaHostOnSettingsChange(): void {
const fileManager = Registry.getInstance().get('filemanager').api;
// This function should be called when Ollama settings are updated
resetOllamaHost();
trackMatomoEvent(fileManager, { category: 'ai', action: 'remixAI', name: 'ollama_reset_on_settings_change' });
}

export async function pullModel(modelName: string): Promise<void> {
const filemanager = Registry.getInstance().get('filemanager').api;
// in case the user wants to pull a model from registry
trackMatomoEvent('ai', 'remixAI', `ollama_pull_model_start:${modelName}`);
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_pull_model_start:${modelName}` });
const host = await discoverOllamaHost();
if (!host) {
trackMatomoEvent('ai', 'remixAI', `ollama_pull_model_failed:${modelName}|no_host`);
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_pull_model_failed:${modelName}|no_host` });
throw new Error('Ollama is not available');
}

try {
const startTime = Date.now();
await axios.post(`${host}/api/pull`, { name: modelName });
const duration = Date.now() - startTime;
trackMatomoEvent('ai', 'remixAI', `ollama_pull_model_success:${modelName}|duration:${duration}ms`);
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_pull_model_success:${modelName}|duration:${duration}ms` });
} catch (error) {
trackMatomoEvent('ai', 'remixAI', `ollama_pull_model_error:${modelName}|${error.message || 'unknown'}`);
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_pull_model_error:${modelName}|${error.message || 'unknown'}` });
console.error('Error pulling model:', error);
throw new Error(`Failed to pull model: ${modelName}`);
}
Expand All @@ -106,7 +144,8 @@ export async function validateModel(modelName: string): Promise<boolean> {
}

export async function getBestAvailableModel(): Promise<string | null> {
trackMatomoEvent('ai', 'remixAI', 'ollama_get_best');
const filemanager = Registry.getInstance().get('filemanager').api;
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_get_best' });
try {
const models = await listModels();
if (models.length === 0) return null;
Expand All @@ -125,7 +164,7 @@ export async function getBestAvailableModel(): Promise<string | null> {
// TODO get model stats and get best model
return models[0];
} catch (error) {
trackMatomoEvent('ai', 'remixAI', `ollama_get_best_model_error:${error.message || 'unknown'}`);
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_get_best_model_error:${error.message || 'unknown'}` });
console.error('Error getting best available model:', error);
return null;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -645,7 +645,7 @@ export const RemixUiRemixAiAssistant = React.forwardRef<
setMessages(prev => [...prev, {
id: crypto.randomUUID(),
role: 'assistant',
content: '**Ollama is not available.**\n\nTo use Ollama with Remix IDE:\n\n1. **Install Ollama**: Visit [ollama.ai](https://ollama.ai) to download\n2. **Start Ollama**: Run `ollama serve` in your terminal\n3. **Install a model**: Run `ollama pull codestral:latest`\n4. **Configure CORS**: Set `OLLAMA_ORIGINS=https://remix.ethereum.org`\n\nSee the [Ollama Setup Guide](https://github.com/ethereum/remix-project/blob/master/OLLAMA_SETUP.md) for detailed instructions.\n\n*Switching back to previous model for now.*',
content: '**Ollama is not available.**\n\nTo use Ollama with Remix IDE:\n\n1. **Install Ollama**: Visit [ollama.ai](https://ollama.ai) to download\n2. **Start Ollama**: Run `ollama serve` in your terminal\n3. **Install a model**: Run `ollama pull codestral:latest`\n4. **Configure CORS**: e.g \`OLLAMA_ORIGINS=https://remix.ethereum.org ollama serve\`\n\nSee the [Ollama Setup Guide](https://github.com/ethereum/remix-project/blob/master/OLLAMA_SETUP.md) for detailed instructions.\n\n*Switching back to previous model for now.*',
timestamp: Date.now(),
sentiment: 'none'
}])
Expand All @@ -662,7 +662,7 @@ export const RemixUiRemixAiAssistant = React.forwardRef<
setMessages(prev => [...prev, {
id: crypto.randomUUID(),
role: 'assistant',
content: `**Failed to connect to Ollama.**\n\nError: ${error.message || 'Unknown error'}\n\nPlease ensure:\n- Ollama is running (\`ollama serve\`)\n- CORS is configured for Remix IDE\n- At least one model is installed\n\nSee the [Ollama Setup Guide](https://github.com/ethereum/remix-project/blob/master/OLLAMA_SETUP.md) for help.\n\n*Switching back to previous model.*`,
content: `**Failed to connect to Ollama.**\n\nError: ${error.message || 'Unknown error'}\n\nPlease ensure:\n- Ollama is running (\`ollama serve\`)\n- The ollama CORS setting is configured for Remix IDE. e.g \`OLLAMA_ORIGINS=https://remix.ethereum.org ollama serve\` Please see [Ollama Setup Guide](https://github.com/ethereum/remix-project/blob/master/OLLAMA_SETUP.md) for detailed instructions.\n- At least one model is installed\n\nSee the [Ollama Setup Guide](https://github.com/ethereum/remix-project/blob/master/OLLAMA_SETUP.md) for help.\n\n*Switching back to previous model.*`,
timestamp: Date.now(),
sentiment: 'none'
}])
Expand Down
10 changes: 10 additions & 0 deletions libs/remix-ui/settings/src/lib/remix-ui-settings.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,16 @@ const settingsSections: SettingsSection[] = [
action: 'link',
link: 'https://remix-ide.readthedocs.io/en/latest/ai.html'
}
},
{
name: 'ollama-config',
label: 'settings.ollamaConfig',
description: 'settings.ollamaConfigDescription',
type: 'toggle',
toggleUIOptions: [{
name: 'ollama-endpoint',
type: 'text'
}]
}]
},
...(mcpEnabled ? [{
Expand Down
25 changes: 24 additions & 1 deletion libs/remix-ui/settings/src/lib/settingsReducer.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { Registry } from '@remix-project/remix-lib'
import { SettingsActions, SettingsState } from '../types'

import { resetOllamaHostOnSettingsChange } from '@remix/remix-ai-core';
const config = Registry.getInstance().get('config').api
const settingsConfig = Registry.getInstance().get('settingsConfig').api
const defaultTheme = config.get('settings/theme') ? settingsConfig.themes.find((theme) => theme.name.toLowerCase() === config.get('settings/theme').toLowerCase()) : settingsConfig.themes[0]
Expand All @@ -18,12 +18,14 @@ const sindriAccessToken = config.get('settings/sindri-access-token') || ''
const etherscanAccessToken = config.get('settings/etherscan-access-token') || ''
const mcpServersEnable = config.get('settings/mcp/servers/enable') || false
const mcpServerManagement = config.get('settings/mcp-server-management') || false
const ollamaEndpoint = config.get('settings/ollama-endpoint') || 'http://localhost:11434'

let githubConfig = config.get('settings/github-config') || false
let ipfsConfig = config.get('settings/ipfs-config') || false
let swarmConfig = config.get('settings/swarm-config') || false
let sindriConfig = config.get('settings/sindri-config') || false
let etherscanConfig = config.get('settings/etherscan-config') || false
let ollamaConfig = config.get('settings/ollama-config') || false
let generateContractMetadata = config.get('settings/generate-contract-metadata')
let autoCompletion = config.get('settings/auto-completion')
let showGas = config.get('settings/show-gas')
Expand All @@ -50,6 +52,10 @@ if (!etherscanConfig && etherscanAccessToken) {
config.set('settings/etherscan-config', true)
etherscanConfig = true
}
if (!ollamaConfig && ollamaEndpoint !== 'http://localhost:11434') {
config.set('settings/ollama-config', true)
ollamaConfig = true
}
if (typeof generateContractMetadata !== 'boolean') {
config.set('settings/generate-contract-metadata', true)
generateContractMetadata = true
Expand Down Expand Up @@ -196,6 +202,14 @@ export const initialState: SettingsState = {
value: mcpServerManagement,
isLoading: false
},
'ollama-config': {
value: ollamaConfig,
isLoading: false
},
'ollama-endpoint': {
value: ollamaEndpoint,
isLoading: false
},
toaster: {
value: '',
isLoading: false
Expand All @@ -206,6 +220,15 @@ export const settingReducer = (state: SettingsState, action: SettingsActions): S
switch (action.type) {
case 'SET_VALUE':
config.set('settings/' + action.payload.name, action.payload.value)
// Reset Ollama host cache when endpoint is changed
if (action.payload.name === 'ollama-endpoint') {
try {
resetOllamaHostOnSettingsChange();
} catch (error) {
// Ignore errors - Ollama functionality is optional
}
}

return { ...state, [action.payload.name]: { ...state[action.payload.name], value: action.payload.value, isLoading: false } }
case 'SET_LOADING':
return { ...state, [action.payload.name]: { ...state[action.payload.name], isLoading: true } }
Expand Down
2 changes: 2 additions & 0 deletions libs/remix-ui/settings/src/types/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,8 @@ export interface SettingsState {
'ai-privacy-policy': ConfigState,
'mcp/servers/enable': ConfigState,
'mcp-server-management': ConfigState,
'ollama-config': ConfigState,
'ollama-endpoint': ConfigState,
toaster: ConfigState
}
export interface SettingsActionPayloadTypes {
Expand Down