Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion src/Cellm/AddIn/CellmAddIn.cs
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
using Cellm.Models.Providers.OpenAi;
using Cellm.Models.Providers.OpenAiCompatible;
using Cellm.Models.Providers.OpenRouter;
using Cellm.Models.Providers.Vertex;
using Cellm.Models.Resilience;
using Cellm.Tools;
using Cellm.Tools.FileReader;
Expand Down Expand Up @@ -84,6 +85,7 @@ private static ServiceCollection ConfigureServices(ServiceCollection services)
.Configure<OllamaConfiguration>(configuration.GetRequiredSection(nameof(OllamaConfiguration)))
.Configure<OpenAiConfiguration>(configuration.GetRequiredSection(nameof(OpenAiConfiguration)))
.Configure<OpenAiCompatibleConfiguration>(configuration.GetRequiredSection(nameof(OpenAiCompatibleConfiguration)))
.Configure<VertexConfiguration>(configuration.GetRequiredSection(nameof(VertexConfiguration)))
.Configure<OpenRouterConfiguration>(configuration.GetRequiredSection(nameof(OpenRouterConfiguration)))
.Configure<ResilienceConfiguration>(configuration.GetRequiredSection(nameof(ResilienceConfiguration)))
.Configure<SentryConfiguration>(configuration.GetRequiredSection(nameof(SentryConfiguration)));
Expand Down Expand Up @@ -149,7 +151,7 @@ private static ServiceCollection ConfigureServices(ServiceCollection services)
cfg.AddBehavior<UsageBehavior<ProviderRequest, ProviderResponse>>(ServiceLifetime.Singleton);
})
.AddSingleton<IProviderBehavior, AdditionalPropertiesBehavior>()
.AddSingleton<IProviderBehavior, GeminiTemperatureBehavior>()
.AddSingleton<IProviderBehavior, GoogleTemperatureBehavior>()
.AddSingleton<IProviderBehavior, OpenAiTemperatureBehavior>()
.AddSingleton<IProviderBehavior, MistralThinkingBehavior>();

Expand All @@ -169,6 +171,7 @@ private static ServiceCollection ConfigureServices(ServiceCollection services)
.AddResilientHttpClient(resilienceConfiguration, cellmAddInConfiguration, Provider.Gemini)
.AddResilientHttpClient(resilienceConfiguration, cellmAddInConfiguration, Provider.Mistral)
.AddResilientHttpClient(resilienceConfiguration, cellmAddInConfiguration, Provider.OpenAiCompatible)
.AddResilientHttpClient(resilienceConfiguration, cellmAddInConfiguration, Provider.Vertex)
.AddResilientHttpClient(resilienceConfiguration, cellmAddInConfiguration, Provider.OpenRouter);

#pragma warning disable EXTEXP0018 // Type is for evaluation purposes only and is subject to change or removal in future updates.
Expand All @@ -189,6 +192,7 @@ private static ServiceCollection ConfigureServices(ServiceCollection services)
.AddOllamaChatClient()
.AddOpenAiChatClient()
.AddOpenAiCompatibleChatClient()
.AddVertexChatClient()
.AddOpenRouterChatClient();

// Add tools
Expand Down Expand Up @@ -237,6 +241,7 @@ internal static IEnumerable<IProviderConfiguration> GetProviderConfigurations()
Services.GetRequiredService<IOptionsMonitor<OllamaConfiguration>>().CurrentValue,
Services.GetRequiredService<IOptionsMonitor<OpenAiConfiguration>>().CurrentValue,
Services.GetRequiredService<IOptionsMonitor<OpenAiCompatibleConfiguration>>().CurrentValue,
Services.GetRequiredService<IOptionsMonitor<VertexConfiguration>>().CurrentValue,
Services.GetRequiredService<IOptionsMonitor<OpenRouterConfiguration>>().CurrentValue
];
}
Expand Down
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
5 changes: 4 additions & 1 deletion src/Cellm/AddIn/UserInterface/Ribbon/RibbonModelGroup.cs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
using Cellm.Models.Providers.Ollama;
using Cellm.Models.Providers.OpenAi;
using Cellm.Models.Providers.OpenAiCompatible;
using Cellm.Models.Providers.Vertex;
using Cellm.Models.Providers.OpenRouter;
using Cellm.Users;
using ExcelDna.Integration.CustomUI;
Expand Down Expand Up @@ -495,6 +496,8 @@
case Provider.OpenAiCompatible:
currentBaseAddress = GetProviderConfiguration<OpenAiCompatibleConfiguration>()?.BaseAddress?.ToString() ?? "";
break;
case Provider.Vertex:
    currentBaseAddress = GetProviderConfiguration<VertexConfiguration>()?.BaseAddress?.ToString() ?? "";
    // break was missing, causing CS0163 ("control cannot fall through from one
    // case label to another") in the CI build; mirrors the sibling cases above.
    break;
case Provider.OpenRouter:
currentBaseAddress = GetProviderConfiguration<OpenRouterConfiguration>()?.BaseAddress?.ToString() ?? "";
break;
Expand Down Expand Up @@ -551,7 +554,7 @@
{
return provider switch
{
Provider.Azure or Provider.Aws or Provider.OpenAiCompatible => true,
Provider.Azure or Provider.Aws or Provider.OpenAiCompatible or Provider.Vertex => true,
_ => false
};
}
Expand Down
1 change: 1 addition & 0 deletions src/Cellm/Cellm.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -92,5 +92,6 @@
<EmbeddedResource Include="AddIn\UserInterface\Resources\Ollama.png" />
<EmbeddedResource Include="AddIn\UserInterface\Resources\OpenAi.png" />
<EmbeddedResource Include="AddIn\UserInterface\Resources\OpenRouter.svg" />
<EmbeddedResource Include="AddIn\UserInterface\Resources\Vertex.png" />
</ItemGroup>
</Project>
Original file line number Diff line number Diff line change
Expand Up @@ -4,24 +4,24 @@

namespace Cellm.Models.Providers.Behaviors;

internal class GeminiTemperatureBehavior(IOptionsMonitor<CellmAddInConfiguration> cellmAddinConfiguration) : IProviderBehavior
internal class GoogleTemperatureBehavior(IOptionsMonitor<CellmAddInConfiguration> cellmAddinConfiguration) : IProviderBehavior
{
private const float DefaultMinTemp = 0.0f;
private const float DefaultMaxTemp = 1.0f;
private const float GeminiMaxTemperature = 2.0f;
private const float GoogleMaxTemperature = 2.0f;

/// <summary>
/// Enables this behavior for both Google-backed providers: Gemini (AI Studio)
/// and Vertex AI, which share the same [0;2] temperature range.
/// </summary>
public bool IsEnabled(Provider provider)
{
    return provider == Provider.Gemini || provider == Provider.Vertex;
}

/// <summary>
/// Rescales the prompt temperature from Cellm's default [0;1] range to the
/// [0;2] range Google models accept, falling back to the add-in's configured
/// default temperature when the prompt does not set one.
/// </summary>
public void Before(Provider provider, Prompt prompt)
{
    var temperature = prompt.Options.Temperature ?? (float)cellmAddinConfiguration.CurrentValue.DefaultTemperature;

    // Scale temperature from [0;1] to [0;2]
    temperature = (temperature / DefaultMaxTemp) * GoogleMaxTemperature;
    prompt.Options.Temperature = Math.Clamp(temperature, DefaultMinTemp, GoogleMaxTemperature);
}

public void After(Provider Provider, Prompt prompt)
Expand All @@ -31,7 +31,7 @@ public void After(Provider Provider, Prompt prompt)
var temperature = prompt.Options.Temperature.Value;

// Scale temperature back from [0;2] to [0;1]
temperature = (temperature / GeminiMaxTemperature) * DefaultMaxTemp;
temperature = (temperature / GoogleMaxTemperature) * DefaultMaxTemp;
prompt.Options.Temperature = Math.Clamp(temperature, DefaultMinTemp, DefaultMaxTemp);
}
}
Expand Down
3 changes: 2 additions & 1 deletion src/Cellm/Models/Providers/Provider.cs
Original file line number Diff line number Diff line change
Expand Up @@ -12,5 +12,6 @@ public enum Provider
Ollama,
OpenAi,
OpenAiCompatible,
OpenRouter
OpenRouter,
Vertex
}
35 changes: 35 additions & 0 deletions src/Cellm/Models/Providers/Vertex/VertexConfiguration.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
using Cellm.Users;
using Microsoft.Extensions.AI;

namespace Cellm.Models.Providers.Vertex;

/// <summary>
/// Configuration for the Google Vertex AI provider, accessed through Vertex's
/// OpenAI-compatible chat endpoint. Bound from the "VertexConfiguration"
/// section of appsettings.json.
/// </summary>
internal class VertexConfiguration : IProviderConfiguration
{
// Stable identifier used to key chat clients, HTTP clients and behaviors for this provider.
public Provider Id { get => Provider.Vertex; }

// Human-readable provider name (shown in the UI).
public string Name { get => "Vertex AI"; }

// Entitlement the current account must hold to use this provider.
public Entitlement Entitlement { get => Entitlement.EnableVertexProvider; }

// Embedded-resource path of the provider icon (declared in Cellm.csproj).
public string Icon { get => $"AddIn/UserInterface/Resources/{nameof(Provider.Vertex)}.png"; }

// Default OpenAI-compatible Vertex endpoint. YOUR_PROJECT_ID is a placeholder
// the user must replace. NOTE(review): the region (us-central1) is hard-coded
// twice in this URL — confirm users with other regions can override it via config.
public Uri BaseAddress { get; init; } = new Uri("https://us-central1-aiplatform.googleapis.com/v1beta1/projects/YOUR_PROJECT_ID/locations/us-central1/endpoints/openapi");

// Model used when a prompt does not specify one. Empty by default; expected to
// be supplied via configuration (see appsettings.json defaults).
public string DefaultModel { get; init; } = string.Empty;

// API key for the endpoint. Empty by default; the chat-client factory throws
// a CellmException when this is left blank.
public string ApiKey { get; init; } = string.Empty;

// Named model tiers selectable from the UI.
public string SmallModel { get; init; } = string.Empty;

public string MediumModel { get; init; } = string.Empty;

public string LargeModel { get; init; } = string.Empty;

// Extra provider-specific options forwarded with requests; empty by default.
public AdditionalPropertiesDictionary? AdditionalProperties { get; init; } = [];

// Capability flags consumed by the request pipeline.
public bool SupportsJsonSchemaResponses { get; init; } = true;

public bool SupportsStructuredOutputWithTools { get; init; } = false;

// Disabled by default; user opts in via configuration.
public bool IsEnabled { get; init; } = false;
}
29 changes: 29 additions & 0 deletions src/Cellm/Models/ServiceCollectionExtensions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
using Cellm.Models.Providers.OpenAi;
using Cellm.Models.Providers.OpenAiCompatible;
using Cellm.Models.Providers.OpenRouter;
using Cellm.Models.Providers.Vertex;
using Cellm.Models.Resilience;
using Cellm.Users;
using Microsoft.Extensions.AI;
Expand Down Expand Up @@ -427,6 +428,34 @@
return services;
}

/// <summary>
/// Registers a keyed chat client for the Vertex AI provider, using Vertex's
/// OpenAI-compatible endpoint through the OpenAI client library.
/// </summary>
/// <param name="services">The service collection to register the client with.</param>
/// <returns>The same service collection, to allow fluent chaining.</returns>
/// <exception cref="CellmException">
/// Thrown (at resolution time) when the configured Vertex API key is empty.
/// </exception>
public static IServiceCollection AddVertexChatClient(this IServiceCollection services)
{
    services
        .AddKeyedChatClient(Provider.Vertex, serviceProvider =>
        {
            // Vertex access is gated behind an entitlement on the user's account.
            var account = serviceProvider.GetRequiredService<Account>();
            account.ThrowIfNotEntitled(Entitlement.EnableVertexProvider);

            var vertexConfiguration = serviceProvider.GetRequiredService<IOptionsMonitor<VertexConfiguration>>();
            var resilientHttpClient = serviceProvider.GetResilientHttpClient(Provider.Vertex);

            if (string.IsNullOrWhiteSpace(vertexConfiguration.CurrentValue.ApiKey))
            {
                throw new CellmException($"Empty {nameof(VertexConfiguration.ApiKey)} for {Provider.Vertex}. Please set your API key.");
            }

            // Vertex exposes an OpenAI-compatible surface, so reuse the OpenAI
            // client pointed at the Vertex base address with the resilient
            // (retrying/rate-limited) HTTP pipeline.
            var openAiClient = new OpenAIClient(
                new ApiKeyCredential(vertexConfiguration.CurrentValue.ApiKey),
                new OpenAIClientOptions
                {
                    Transport = new HttpClientPipelineTransport(resilientHttpClient),
                    Endpoint = vertexConfiguration.CurrentValue.BaseAddress
                });

            return openAiClient.GetChatClient(vertexConfiguration.CurrentValue.DefaultModel).AsIChatClient();
        });

    // Fixes CS0161 ("not all code paths return a value") reported by CI, and
    // matches the other Add*ChatClient extension methods in this file.
    return services;
}

public static IServiceCollection AddOpenRouterChatClient(this IServiceCollection services)
{
services
Expand Down
1 change: 1 addition & 0 deletions src/Cellm/Users/Entitlement.cs
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ public enum Entitlement
EnableOpenAiCompatibleProviderLocalModels,
EnableOpenAiCompatibleProviderHostedModels,
EnableOpenRouterProvider,
EnableVertexProvider,
EnableModelContextProtocol,
DisableTelemetry
}
1 change: 1 addition & 0 deletions src/Cellm/Users/Models/Entitlements.cs
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ internal class Entitlements()
Entitlement.EnableDeepSeekProvider,
Entitlement.EnableGeminiProvider,
Entitlement.EnableMistralProvider,
Entitlement.EnableModelContextProtocol,
Entitlement.EnableOllamaProvider,
Entitlement.EnableOpenAiProvider,
Entitlement.EnableOpenAiCompatibleProvider,
Expand Down
8 changes: 8 additions & 0 deletions src/Cellm/appsettings.json
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,14 @@
"LargeModel": "anthropic/claude-opus-4-5-20251101",
"IsEnabled": true
},
"VertexConfiguration": {
"BaseAddress": "https://us-central1-aiplatform.googleapis.com/v1beta1/projects/YOUR_PROJECT_ID/locations/us-central1/endpoints/openapi",
"DefaultModel": "gemini-2.5-flash",
"ApiKey": "",
"SmallModel": "gemini-2.5-flash-lite",
"MediumModel": "gemini-2.5-flash",
"LargeModel": "gemini-2.5-pro"
},
"ModelContextProtocolConfiguration": {
"StdioServers": [
{
Expand Down
Loading