5: #ifndef FORESTHUB_PROVIDER_REMOTE_GEMINI_HPP
6: #define FORESTHUB_PROVIDER_REMOTE_GEMINI_HPP
53: std::shared_ptr<core::HttpClient> http_;
55: std::string base_url_;
56: std::vector<core::ModelID> supported_models_;
57: core::HttpClient::Headers cached_headers_;
Extended LLM interface with health checks and model discovery.
Definition provider.hpp:32
GeminiProvider(const config::ProviderConfig &cfg, std::shared_ptr< core::HttpClient > http_client)
Construct a Gemini provider from configuration.
core::ProviderID ProviderId() const override
Returns "gemini" as the provider identifier.
bool SupportsModel(const core::ModelID &model) const override
Check if this provider handles the given model.
std::shared_ptr< core::ChatResponse > Chat(const core::ChatRequest &req) override
Send a chat request to the Gemini generateContent API.
std::string Health() const override
Check that the Gemini API is reachable.
Client and provider configuration types.
Abstract HTTP client interface and response type.
std::string ModelID
Unique identifier for an LLM model.
Definition model.hpp:23
std::string ProviderID
Unique identifier for an LLM provider.
Definition model.hpp:20
Remote provider implementations (ForestHub, OpenAI, Gemini, Anthropic).
LLM provider implementations.
Top-level namespace for the ForestHub SDK.
LLMClient and Provider interfaces for chat operations.
Shared configuration for any remote LLM provider.
Definition config.hpp:24
Chat completion request sent to an LLM provider.
Definition types.hpp:39