From 47653ef3ed1c1eec2bd4c886f45e3bfb81c08e83 Mon Sep 17 00:00:00 2001 From: Vemaster Date: Mon, 6 Oct 2025 11:37:24 +0200 Subject: [PATCH 1/2] xAI integration --- Source/Private/Core/N2CSettings.cpp | 16 +++++ Source/Private/LLM/N2CLLMModels.cpp | 33 ++++++++++ .../LLM/Providers/N2CxAIResponseParser.cpp | 62 +++++++++++++++++++ .../Private/LLM/Providers/N2CxAIService.cpp | 60 ++++++++++++++++++ Source/Public/Core/N2CSettings.h | 25 ++++++++ Source/Public/LLM/N2CLLMModels.h | 13 ++++ Source/Public/LLM/N2CLLMPricing.h | 19 ++++++ .../LLM/Providers/N2CxAIResponseParser.h | 23 +++++++ Source/Public/LLM/Providers/N2CxAIService.h | 33 ++++++++++ 9 files changed, 284 insertions(+) create mode 100644 Source/Private/LLM/Providers/N2CxAIResponseParser.cpp create mode 100644 Source/Private/LLM/Providers/N2CxAIService.cpp create mode 100644 Source/Public/LLM/Providers/N2CxAIResponseParser.h create mode 100644 Source/Public/LLM/Providers/N2CxAIService.h diff --git a/Source/Private/Core/N2CSettings.cpp b/Source/Private/Core/N2CSettings.cpp index c89f014..09119e3 100644 --- a/Source/Private/Core/N2CSettings.cpp +++ b/Source/Private/Core/N2CSettings.cpp @@ -44,6 +44,7 @@ UN2CSettings::UN2CSettings() Anthropic_API_Key_UI = UserSecrets->Anthropic_API_Key; Gemini_API_Key_UI = UserSecrets->Gemini_API_Key; DeepSeek_API_Key_UI = UserSecrets->DeepSeek_API_Key; + xAI_API_Key_UI = UserSecrets->xAI_API_Key; // Initialize token estimate EstimatedReferenceTokens = GetReferenceFilesTokenEstimate(); @@ -80,6 +81,8 @@ FString UN2CSettings::GetActiveApiKey() const return UserSecrets->Gemini_API_Key; case EN2CLLMProvider::DeepSeek: return UserSecrets->DeepSeek_API_Key; + case EN2CLLMProvider::xAI: + return UserSecrets->xAI_API_Key; case EN2CLLMProvider::LMStudio: return "lm-studio"; // LM Studio just requires a dummy API key for its OpenAI endpoint default: @@ -99,6 +102,8 @@ FString UN2CSettings::GetActiveModel() const return FN2CLLMModelUtils::GetGeminiModelValue(Gemini_Model); case 
EN2CLLMProvider::DeepSeek: return FN2CLLMModelUtils::GetDeepSeekModelValue(DeepSeekModel); + case EN2CLLMProvider::xAI: + return FN2CLLMModelUtils::GetxAIModelValue(xAI_Model); case EN2CLLMProvider::Ollama: return OllamaModel; case EN2CLLMProvider::LMStudio: @@ -269,6 +274,17 @@ void UN2CSettings::PostEditChangeProperty(FPropertyChangedEvent& PropertyChanged UserSecrets->SaveSecrets(); return; } + if (PropertyName == GET_MEMBER_NAME_CHECKED(UN2CSettings, xAI_API_Key_UI)) + { + if (!UserSecrets) + { + UserSecrets = NewObject<UN2CUserSecrets>(); + UserSecrets->LoadSecrets(); + } + UserSecrets->xAI_API_Key = xAI_API_Key_UI; + UserSecrets->SaveSecrets(); + return; + } // Update logger severity when MinSeverity changes if (PropertyName == GET_MEMBER_NAME_CHECKED(UN2CSettings, MinSeverity)) diff --git a/Source/Private/LLM/N2CLLMModels.cpp b/Source/Private/LLM/N2CLLMModels.cpp index 14d4248..01b51d9 100644 --- a/Source/Private/LLM/N2CLLMModels.cpp +++ b/Source/Private/LLM/N2CLLMModels.cpp @@ -40,6 +40,13 @@ const TMap<EN2CDeepSeekModel, FN2CDeepSeekPricing> FN2CLLMModelUtils::DeepSeekPr {EN2CDeepSeekModel::DeepSeek_V3, FN2CDeepSeekPricing(0.07f, 0.27f)} }; +const TMap<EN2CxAIModel, FN2CxAIPricing> FN2CLLMModelUtils::xAIPricing = { + {EN2CxAIModel::Grok_Code_Fast_1, FN2CxAIPricing(0.2f, 1.5f)}, + {EN2CxAIModel::Grok_4_Fast_Reasoning, FN2CxAIPricing(0.4f, 1.0f)}, + {EN2CxAIModel::Grok_4_Fast_NonReasoning, FN2CxAIPricing(0.4f, 1.0f)}, + {EN2CxAIModel::Grok_4, FN2CxAIPricing(6.0f, 30.0f)} +}; + FString FN2CLLMModelUtils::GetOpenAIModelValue(EN2COpenAIModel Model) { switch (Model) @@ -160,3 +167,29 @@ FN2CGeminiPricing FN2CLLMModelUtils::GetGeminiPricing(EN2CGeminiModel Model) } return FN2CGeminiPricing(); } + +FString FN2CLLMModelUtils::GetxAIModelValue(EN2CxAIModel Model) +{ + switch (Model) + { + case EN2CxAIModel::Grok_Code_Fast_1: + return TEXT("grok-code-fast"); + case EN2CxAIModel::Grok_4_Fast_Reasoning: + return TEXT("grok-4-fast-reasoning-latest"); + case EN2CxAIModel::Grok_4_Fast_NonReasoning: + return TEXT("grok-4-fast-non-reasoning-latest"); + case 
EN2CxAIModel::Grok_4: + return TEXT("grok-4-latest"); + default: + return TEXT("grok-code-fast"); + } +} + +FN2CxAIPricing FN2CLLMModelUtils::GetxAIPricing(EN2CxAIModel Model) +{ + if (const FN2CxAIPricing* Found = xAIPricing.Find(Model)) + { + return *Found; + } + return FN2CxAIPricing(); +} diff --git a/Source/Private/LLM/Providers/N2CxAIResponseParser.cpp b/Source/Private/LLM/Providers/N2CxAIResponseParser.cpp new file mode 100644 index 0000000..bf77706 --- /dev/null +++ b/Source/Private/LLM/Providers/N2CxAIResponseParser.cpp @@ -0,0 +1,62 @@ +// Copyright (c) 2025 Nick McClure (Protospatial). All Rights Reserved. + +#include "LLM/Providers/N2CxAIResponseParser.h" +#include "Utils/N2CLogger.h" +#include "Serialization/JsonSerializer.h" + +bool UN2CxAIResponseParser::ParseLLMResponse( + const FString& InJson, + FN2CTranslationResponse& OutResponse) +{ + // Parse JSON string + TSharedPtr<FJsonObject> JsonObject; + TSharedRef<TJsonReader<>> Reader = TJsonReaderFactory<>::Create(InJson); + + if (!FJsonSerializer::Deserialize(Reader, JsonObject) || !JsonObject.IsValid()) + { + FN2CLogger::Get().LogError( + FString::Printf(TEXT("Failed to parse xAI response JSON: %s"), *InJson), + TEXT("xAIResponseParser") + ); + return false; + } + + // Check for error response (xAI aligns with OpenAI shape) + FString ErrorMessage; + if (JsonObject->HasField(TEXT("error"))) + { + if (HandleCommonErrorResponse(JsonObject, TEXT("error"), ErrorMessage)) + { + FN2CLogger::Get().LogError(ErrorMessage, TEXT("xAIResponseParser")); + } + return false; + } + + // Extract message content from OpenAI-compatible format + FString MessageContent; + if (!ExtractStandardMessageContent(JsonObject, TEXT("choices"), TEXT("message"), TEXT("content"), MessageContent)) + { + FN2CLogger::Get().LogError(TEXT("Failed to extract message content from xAI response"), TEXT("xAIResponseParser")); + return false; + } + + // Extract usage information if available + const TSharedPtr<FJsonObject> UsageObject = JsonObject->GetObjectField(TEXT("usage")); + 
if (UsageObject.IsValid()) + { + int32 PromptTokens = 0; + int32 CompletionTokens = 0; + UsageObject->TryGetNumberField(TEXT("prompt_tokens"), PromptTokens); + UsageObject->TryGetNumberField(TEXT("completion_tokens"), CompletionTokens); + + OutResponse.Usage.InputTokens = PromptTokens; + OutResponse.Usage.OutputTokens = CompletionTokens; + + FN2CLogger::Get().Log(FString::Printf(TEXT("LLM Token Usage - Input: %d Output: %d"), PromptTokens, CompletionTokens), EN2CLogSeverity::Info); + } + + FN2CLogger::Get().Log(FString::Printf(TEXT("LLM Response Message Content: %s"), *MessageContent), EN2CLogSeverity::Debug); + + // Parse the extracted content as our expected JSON format + return Super::ParseLLMResponse(MessageContent, OutResponse); +} diff --git a/Source/Private/LLM/Providers/N2CxAIService.cpp b/Source/Private/LLM/Providers/N2CxAIService.cpp new file mode 100644 index 0000000..84051b8 --- /dev/null +++ b/Source/Private/LLM/Providers/N2CxAIService.cpp @@ -0,0 +1,60 @@ +// Copyright (c) 2025 Nick McClure (Protospatial). All Rights Reserved. + +#include "LLM/Providers/N2CxAIService.h" + +#include "LLM/N2CSystemPromptManager.h" +#include "LLM/N2CLLMPayloadBuilder.h" + +UN2CResponseParserBase* UN2CxAIService::CreateResponseParser() +{ + UN2CxAIResponseParser* Parser = NewObject<UN2CxAIResponseParser>(this); + return Parser; +} + +void UN2CxAIService::GetConfiguration( + FString& OutEndpoint, + FString& OutAuthToken, + bool& OutSupportsSystemPrompts) +{ + OutEndpoint = Config.ApiEndpoint.IsEmpty() ? 
GetDefaultEndpoint() : Config.ApiEndpoint; + OutAuthToken = Config.ApiKey; + + // xAI Grok chat completions support system prompts (OpenAI-compatible) + OutSupportsSystemPrompts = true; +} + +void UN2CxAIService::GetProviderHeaders(TMap<FString, FString>& OutHeaders) const +{ + OutHeaders.Add(TEXT("Authorization"), FString::Printf(TEXT("Bearer %s"), *Config.ApiKey)); + OutHeaders.Add(TEXT("Content-Type"), TEXT("application/json")); +} + +FString UN2CxAIService::FormatRequestPayload(const FString& UserMessage, const FString& SystemMessage) const +{ + // Create and configure payload builder (xAI is OpenAI-compatible) + UN2CLLMPayloadBuilder* PayloadBuilder = NewObject<UN2CLLMPayloadBuilder>(); + PayloadBuilder->Initialize(Config.Model); + PayloadBuilder->ConfigureForOpenAI(); + + // Set common parameters + PayloadBuilder->SetTemperature(0.0f); + PayloadBuilder->SetMaxTokens(8192); + + // xAI Grok supports JSON schema through the OpenAI-style response_format + PayloadBuilder->SetJsonResponseFormat(UN2CLLMPayloadBuilder::GetN2CResponseSchema()); + + // Prepare content (prepend source files paths/content if configured) + FString FinalContent = UserMessage; + if (PromptManager) + { + // Try prepending source files to the user message + PromptManager->PrependSourceFilesToUserMessage(FinalContent); + } + + // Add messages (xAI supports system role) + PayloadBuilder->AddSystemMessage(SystemMessage); + PayloadBuilder->AddUserMessage(FinalContent); + + // Build and return the payload + return PayloadBuilder->Build(); +} diff --git a/Source/Public/Core/N2CSettings.h b/Source/Public/Core/N2CSettings.h index baa3676..fc24eed 100644 --- a/Source/Public/Core/N2CSettings.h +++ b/Source/Public/Core/N2CSettings.h @@ -421,6 +421,15 @@ class NODETOCODE_API UN2CSettings : public UDeveloperSettings meta = (DisplayName = "API Key")) FString DeepSeek_API_Key_UI; + /** xAI Model Selection */ + UPROPERTY(Config, EditAnywhere, BlueprintReadOnly, Category = "Node to Code | LLM Services | xAI") + EN2CxAIModel xAI_Model = 
EN2CxAIModel::Grok_Code_Fast_1; + + /** xAI API Key - Stored separately in user secrets */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Node to Code | LLM Services | xAI", + meta = (DisplayName = "API Key")) + FString xAI_API_Key_UI; + /** Ollama configuration */ UPROPERTY(Config, EditAnywhere, BlueprintReadOnly, Category = "Node to Code | LLM Services | Ollama") FN2COllamaConfig OllamaConfig; @@ -461,6 +470,10 @@ class NODETOCODE_API UN2CSettings : public UDeveloperSettings /** DeepSeek Model Pricing */ UPROPERTY(Config, EditAnywhere, BlueprintReadOnly, Category = "Node to Code | LLM Services | Pricing | DeepSeek") TMap<EN2CDeepSeekModel, FN2CDeepSeekPricing> DeepSeekModelPricing; + + /** xAI Model Pricing */ + UPROPERTY(Config, EditAnywhere, BlueprintReadOnly, Category = "Node to Code | LLM Services | Pricing | xAI") + TMap<EN2CxAIModel, FN2CxAIPricing> xAIModelPricing; /** Target programming language for translation */ UPROPERTY(Config, EditAnywhere, BlueprintReadOnly, Category = "Node to Code | Code Generation", @@ -550,6 +563,12 @@ class NODETOCODE_API UN2CSettings : public UDeveloperSettings return Pricing->InputCost; } return FN2CLLMModelUtils::GetDeepSeekPricing(DeepSeekModel).InputCost; + case EN2CLLMProvider::xAI: + if (const FN2CxAIPricing* Pricing = xAIModelPricing.Find(xAI_Model)) + { + return Pricing->InputCost; + } + return FN2CLLMModelUtils::GetxAIPricing(xAI_Model).InputCost; case EN2CLLMProvider::Ollama: case EN2CLLMProvider::LMStudio: return 0.0f; // Local models are free @@ -582,6 +601,12 @@ class NODETOCODE_API UN2CSettings : public UDeveloperSettings return Pricing->OutputCost; } return FN2CLLMModelUtils::GetDeepSeekPricing(DeepSeekModel).OutputCost; + case EN2CLLMProvider::xAI: + if (const FN2CxAIPricing* Pricing = xAIModelPricing.Find(xAI_Model)) + { + return Pricing->OutputCost; + } + return FN2CLLMModelUtils::GetxAIPricing(xAI_Model).OutputCost; case EN2CLLMProvider::Ollama: case EN2CLLMProvider::LMStudio: return 0.0f; // Local models are free diff --git a/Source/Public/LLM/N2CLLMModels.h 
b/Source/Public/LLM/N2CLLMModels.h index b6b3c6c..a1ebb6d 100644 --- a/Source/Public/LLM/N2CLLMModels.h +++ b/Source/Public/LLM/N2CLLMModels.h @@ -56,6 +56,16 @@ enum class EN2CDeepSeekModel : uint8 DeepSeek_V3 UMETA(DisplayName = "DeepSeek V3", Value = "deepseek-chat"), }; +/** Available xAI models */ +UENUM(BlueprintType) +enum class EN2CxAIModel : uint8 +{ + Grok_Code_Fast_1 UMETA(DisplayName = "Grok Code Fast 1", Value = "grok-code-fast"), + Grok_4_Fast_Reasoning UMETA(DisplayName = "Grok 4 Fast Reasoning", Value = "grok-4-fast-reasoning-latest"), + Grok_4_Fast_NonReasoning UMETA(DisplayName = "Grok 4 Fast NonReasoning", Value = "grok-4-fast-non-reasoning-latest"), + Grok_4 UMETA(DisplayName = "Grok 4", Value = "grok-4-latest") +}; + /** Helper functions for model enums */ struct FN2CLLMModelUtils @@ -65,12 +75,14 @@ struct FN2CLLMModelUtils static FString GetAnthropicModelValue(EN2CAnthropicModel Model); static FString GetGeminiModelValue(EN2CGeminiModel Model); static FString GetDeepSeekModelValue(EN2CDeepSeekModel Model); + static FString GetxAIModelValue(EN2CxAIModel Model); /** Pricing getters */ static FN2COpenAIPricing GetOpenAIPricing(EN2COpenAIModel Model); static FN2CAnthropicPricing GetAnthropicPricing(EN2CAnthropicModel Model); static FN2CDeepSeekPricing GetDeepSeekPricing(EN2CDeepSeekModel Model); static FN2CGeminiPricing GetGeminiPricing(EN2CGeminiModel Model); + static FN2CxAIPricing GetxAIPricing(EN2CxAIModel Model); /** System prompt support checks */ static bool SupportsSystemPrompts(EN2COpenAIModel Model) @@ -94,4 +106,5 @@ struct FN2CLLMModelUtils static const TMap<EN2CAnthropicModel, FN2CAnthropicPricing> AnthropicPricing; static const TMap<EN2CDeepSeekModel, FN2CDeepSeekPricing> DeepSeekPricing; static const TMap<EN2CGeminiModel, FN2CGeminiPricing> GeminiPricing; + static const TMap<EN2CxAIModel, FN2CxAIPricing> xAIPricing; }; diff --git a/Source/Public/LLM/N2CLLMPricing.h b/Source/Public/LLM/N2CLLMPricing.h index 9182ee4..0169e06 100644 --- a/Source/Public/LLM/N2CLLMPricing.h +++ b/Source/Public/LLM/N2CLLMPricing.h @@ -82,3 +82,22 @@ struct FN2CDeepSeekPricing 
FN2CDeepSeekPricing(float InInput, float InOutput) : InputCost(InInput), OutputCost(InOutput) {} }; + +/** Pricing information for xAI models */ +USTRUCT(BlueprintType) +struct FN2CxAIPricing +{ + GENERATED_BODY() + + // Cost per 1M tokens + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Node to Code | LLM Pricing") + float InputCost = 0.0f; + + // Cost per 1M tokens + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Node to Code | LLM Pricing") + float OutputCost = 0.0f; + + FN2CxAIPricing() {} + FN2CxAIPricing(float InInput, float InOutput) + : InputCost(InInput), OutputCost(InOutput) {} +}; \ No newline at end of file diff --git a/Source/Public/LLM/Providers/N2CxAIResponseParser.h b/Source/Public/LLM/Providers/N2CxAIResponseParser.h new file mode 100644 index 0000000..03c7e84 --- /dev/null +++ b/Source/Public/LLM/Providers/N2CxAIResponseParser.h @@ -0,0 +1,23 @@ +// Copyright (c) 2025 Nick McClure (Protospatial). All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "LLM/N2CResponseParserBase.h" +#include "N2CxAIResponseParser.generated.h" + +/** + * @class UN2CxAIResponseParser + * @brief Parser for xAI Chat Completion API responses + */ +UCLASS() +class NODETOCODE_API UN2CxAIResponseParser : public UN2CResponseParserBase +{ + GENERATED_BODY() + +public: + /** Parse xAI-specific JSON response (OpenAI-compatible schema) */ + virtual bool ParseLLMResponse( + const FString& InJson, + FN2CTranslationResponse& OutResponse) override; +}; diff --git a/Source/Public/LLM/Providers/N2CxAIService.h b/Source/Public/LLM/Providers/N2CxAIService.h new file mode 100644 index 0000000..9f54758 --- /dev/null +++ b/Source/Public/LLM/Providers/N2CxAIService.h @@ -0,0 +1,33 @@ +// Copyright (c) 2025 Nick McClure (Protospatial). All Rights Reserved. 
+ +#pragma once + +#include "CoreMinimal.h" +#include "LLM/N2CBaseLLMService.h" +#include "N2CxAIResponseParser.h" +#include "N2CxAIService.generated.h" + +// Forward declarations +class UN2CSystemPromptManager; + +/** + * @class UN2CxAIService + * @brief Implementation of xAI Chat Completion API integration (OpenAI-compatible) + */ +UCLASS() +class NODETOCODE_API UN2CxAIService : public UN2CBaseLLMService +{ + GENERATED_BODY() + +public: + // Provider-specific implementations + virtual void GetConfiguration(FString& OutEndpoint, FString& OutAuthToken, bool& OutSupportsSystemPrompts) override; + virtual EN2CLLMProvider GetProviderType() const override { return EN2CLLMProvider::xAI; } + virtual void GetProviderHeaders(TMap<FString, FString>& OutHeaders) const override; + +protected: + // Provider-specific implementations + virtual FString FormatRequestPayload(const FString& UserMessage, const FString& SystemMessage) const override; + virtual UN2CResponseParserBase* CreateResponseParser() override; + virtual FString GetDefaultEndpoint() const override { return TEXT("https://api.x.ai/v1/chat/completions"); } +}; From 976327553a1641def9958b2ef467ec59646cba0c Mon Sep 17 00:00:00 2001 From: Vemaster Date: Mon, 6 Oct 2025 12:34:11 +0200 Subject: [PATCH 2/2] add xAI provider / api key field --- Source/Public/Core/N2CUserSecrets.h | 4 ++++ Source/Public/LLM/N2CLLMTypes.h | 3 ++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/Source/Public/Core/N2CUserSecrets.h b/Source/Public/Core/N2CUserSecrets.h index 0c007ca..8da0357 100644 --- a/Source/Public/Core/N2CUserSecrets.h +++ b/Source/Public/Core/N2CUserSecrets.h @@ -44,6 +44,10 @@ class NODETOCODE_API UN2CUserSecrets : public UObject /** DeepSeek API Key */ UPROPERTY(EditAnywhere, Category = "Node to Code | API Keys") FString DeepSeek_API_Key; + + /** xAI API Key */ + UPROPERTY(EditAnywhere, Category = "Node to Code | API Keys") + FString xAI_API_Key; private: /** Ensure the secrets directory exists */ diff --git 
a/Source/Public/LLM/N2CLLMTypes.h b/Source/Public/LLM/N2CLLMTypes.h index fe41ac8..cfcf804 100644 --- a/Source/Public/LLM/N2CLLMTypes.h +++ b/Source/Public/LLM/N2CLLMTypes.h @@ -23,7 +23,8 @@ enum class EN2CLLMProvider : uint8 Gemini UMETA(DisplayName = "Gemini"), Ollama UMETA(DisplayName = "Ollama"), DeepSeek UMETA(DisplayName = "DeepSeek"), - LMStudio UMETA(DisplayName = "LM Studio") + LMStudio UMETA(DisplayName = "LM Studio"), + xAI UMETA(DisplayName = "xAI"), }; /** Status of the Node to Code system */