Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions Source/Private/Core/N2CSettings.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ UN2CSettings::UN2CSettings()
Anthropic_API_Key_UI = UserSecrets->Anthropic_API_Key;
Gemini_API_Key_UI = UserSecrets->Gemini_API_Key;
DeepSeek_API_Key_UI = UserSecrets->DeepSeek_API_Key;
xAI_API_Key_UI = UserSecrets->xAI_API_Key;

// Initialize token estimate
EstimatedReferenceTokens = GetReferenceFilesTokenEstimate();
Expand Down Expand Up @@ -80,6 +81,8 @@ FString UN2CSettings::GetActiveApiKey() const
return UserSecrets->Gemini_API_Key;
case EN2CLLMProvider::DeepSeek:
return UserSecrets->DeepSeek_API_Key;
case EN2CLLMProvider::xAI:
return UserSecrets->xAI_API_Key;
case EN2CLLMProvider::LMStudio:
return "lm-studio"; // LM Studio just requires a dummy API key for its OpenAI endpoint
default:
Expand All @@ -99,6 +102,8 @@ FString UN2CSettings::GetActiveModel() const
return FN2CLLMModelUtils::GetGeminiModelValue(Gemini_Model);
case EN2CLLMProvider::DeepSeek:
return FN2CLLMModelUtils::GetDeepSeekModelValue(DeepSeekModel);
case EN2CLLMProvider::xAI:
return FN2CLLMModelUtils::GetxAIModelValue(xAI_Model);
case EN2CLLMProvider::Ollama:
return OllamaModel;
case EN2CLLMProvider::LMStudio:
Expand Down Expand Up @@ -269,6 +274,17 @@ void UN2CSettings::PostEditChangeProperty(FPropertyChangedEvent& PropertyChanged
UserSecrets->SaveSecrets();
return;
}
if (PropertyName == GET_MEMBER_NAME_CHECKED(UN2CSettings, xAI_API_Key_UI))
{
if (!UserSecrets)
{
UserSecrets = NewObject<UN2CUserSecrets>();
UserSecrets->LoadSecrets();
}
UserSecrets->xAI_API_Key = xAI_API_Key_UI;
UserSecrets->SaveSecrets();
return;
}

// Update logger severity when MinSeverity changes
if (PropertyName == GET_MEMBER_NAME_CHECKED(UN2CSettings, MinSeverity))
Expand Down
33 changes: 33 additions & 0 deletions Source/Private/LLM/N2CLLMModels.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,13 @@ const TMap<EN2CDeepSeekModel, FN2CDeepSeekPricing> FN2CLLMModelUtils::DeepSeekPr
{EN2CDeepSeekModel::DeepSeek_V3, FN2CDeepSeekPricing(0.07f, 0.27f)}
};

// xAI model pricing table: cost per 1M tokens (input, output) in USD.
// NOTE(review): these figures are hard-coded — confirm them against xAI's
// published pricing page before release (tiers can differ by context size,
// and prices change over time). Users can override via xAIModelPricing in
// settings; this table is only the fallback.
const TMap<EN2CxAIModel, FN2CxAIPricing> FN2CLLMModelUtils::xAIPricing = {
    {EN2CxAIModel::Grok_Code_Fast_1, FN2CxAIPricing(0.2f, 1.5f)},
    {EN2CxAIModel::Grok_4_Fast_Reasoning, FN2CxAIPricing(0.4f, 1.0f)},
    {EN2CxAIModel::Grok_4_Fast_NonReasoning, FN2CxAIPricing(0.4f, 1.0f)},
    {EN2CxAIModel::Grok_4, FN2CxAIPricing(6.0f, 30.0f)}
};

FString FN2CLLMModelUtils::GetOpenAIModelValue(EN2COpenAIModel Model)
{
switch (Model)
Expand Down Expand Up @@ -160,3 +167,29 @@ FN2CGeminiPricing FN2CLLMModelUtils::GetGeminiPricing(EN2CGeminiModel Model)
}
return FN2CGeminiPricing();
}

FString FN2CLLMModelUtils::GetxAIModelValue(EN2CxAIModel Model)
{
    // Translate the enum selection into the model identifier string sent to
    // the xAI API. Unknown values fall back to the cheapest coding model.
    const TCHAR* ModelId = TEXT("grok-code-fast");

    switch (Model)
    {
        case EN2CxAIModel::Grok_4_Fast_Reasoning:
            ModelId = TEXT("grok-4-fast-reasoning-latest");
            break;
        case EN2CxAIModel::Grok_4_Fast_NonReasoning:
            ModelId = TEXT("grok-4-fast-non-reasoning-latest");
            break;
        case EN2CxAIModel::Grok_4:
            ModelId = TEXT("grok-4-latest");
            break;
        case EN2CxAIModel::Grok_Code_Fast_1:
        default:
            // Already set to the fallback identifier above.
            break;
    }

    return FString(ModelId);
}

FN2CxAIPricing FN2CLLMModelUtils::GetxAIPricing(EN2CxAIModel Model)
{
    // Look up the model in the static pricing table; unknown models return a
    // default-constructed (zero-cost) pricing entry.
    const FN2CxAIPricing* Entry = xAIPricing.Find(Model);
    return Entry ? *Entry : FN2CxAIPricing();
}
62 changes: 62 additions & 0 deletions Source/Private/LLM/Providers/N2CxAIResponseParser.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
// Copyright (c) 2025 Nick McClure (Protospatial). All Rights Reserved.

#include "LLM/Providers/N2CxAIResponseParser.h"
#include "Utils/N2CLogger.h"
#include "Serialization/JsonSerializer.h"

/**
 * Parse a raw xAI chat-completion response into an FN2CTranslationResponse.
 *
 * xAI's endpoint is OpenAI-compatible: errors arrive under an "error" object,
 * the generated text under "choices[0].message.content", and token counts
 * under an optional "usage" object.
 *
 * @param InJson       Raw JSON body returned by the xAI API.
 * @param OutResponse  Populated with usage stats and the parsed translation.
 * @return true if the response was parsed into the expected N2C format.
 */
bool UN2CxAIResponseParser::ParseLLMResponse(
    const FString& InJson,
    FN2CTranslationResponse& OutResponse)
{
    // Parse JSON string
    TSharedPtr<FJsonObject> JsonObject;
    TSharedRef<TJsonReader<>> Reader = TJsonReaderFactory<>::Create(InJson);

    if (!FJsonSerializer::Deserialize(Reader, JsonObject) || !JsonObject.IsValid())
    {
        FN2CLogger::Get().LogError(
            FString::Printf(TEXT("Failed to parse xAI response JSON: %s"), *InJson),
            TEXT("xAIResponseParser")
        );
        return false;
    }

    // Check for error response (xAI aligns with OpenAI shape)
    FString ErrorMessage;
    if (JsonObject->HasField(TEXT("error")))
    {
        if (HandleCommonErrorResponse(JsonObject, TEXT("error"), ErrorMessage))
        {
            FN2CLogger::Get().LogError(ErrorMessage, TEXT("xAIResponseParser"));
        }
        return false;
    }

    // Extract message content from OpenAI-compatible format
    FString MessageContent;
    if (!ExtractStandardMessageContent(JsonObject, TEXT("choices"), TEXT("message"), TEXT("content"), MessageContent))
    {
        FN2CLogger::Get().LogError(TEXT("Failed to extract message content from xAI response"), TEXT("xAIResponseParser"));
        return false;
    }

    // Extract usage information if available. Use the checked TryGetObjectField
    // accessor: GetObjectField on a missing field routes through FJsonObject's
    // error logging, so a response without "usage" would spam the log.
    const TSharedPtr<FJsonObject>* UsageObject = nullptr;
    if (JsonObject->TryGetObjectField(TEXT("usage"), UsageObject) && UsageObject && UsageObject->IsValid())
    {
        int32 PromptTokens = 0;
        int32 CompletionTokens = 0;
        (*UsageObject)->TryGetNumberField(TEXT("prompt_tokens"), PromptTokens);
        (*UsageObject)->TryGetNumberField(TEXT("completion_tokens"), CompletionTokens);

        OutResponse.Usage.InputTokens = PromptTokens;
        OutResponse.Usage.OutputTokens = CompletionTokens;

        FN2CLogger::Get().Log(FString::Printf(TEXT("LLM Token Usage - Input: %d Output: %d"), PromptTokens, CompletionTokens), EN2CLogSeverity::Info);
    }

    FN2CLogger::Get().Log(FString::Printf(TEXT("LLM Response Message Content: %s"), *MessageContent), EN2CLogSeverity::Debug);

    // Parse the extracted content as our expected JSON format
    return Super::ParseLLMResponse(MessageContent, OutResponse);
}
60 changes: 60 additions & 0 deletions Source/Private/LLM/Providers/N2CxAIService.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
// Copyright (c) 2025 Nick McClure (Protospatial). All Rights Reserved.

#include "LLM/Providers/N2CxAIService.h"

#include "LLM/N2CSystemPromptManager.h"
#include "LLM/N2CLLMPayloadBuilder.h"

UN2CResponseParserBase* UN2CxAIService::CreateResponseParser()
{
    // Hand back a freshly created xAI-specific parser, outered to this service
    // so it shares the service's lifetime.
    return NewObject<UN2CxAIResponseParser>(this);
}

void UN2CxAIService::GetConfiguration(
    FString& OutEndpoint,
    FString& OutAuthToken,
    bool& OutSupportsSystemPrompts)
{
    // Use the user-configured endpoint when present; otherwise fall back to
    // the provider's default endpoint.
    if (Config.ApiEndpoint.IsEmpty())
    {
        OutEndpoint = GetDefaultEndpoint();
    }
    else
    {
        OutEndpoint = Config.ApiEndpoint;
    }

    OutAuthToken = Config.ApiKey;

    // xAI Grok chat completions support system prompts (OpenAI-compatible)
    OutSupportsSystemPrompts = true;
}

void UN2CxAIService::GetProviderHeaders(TMap<FString, FString>& OutHeaders) const
{
    // OpenAI-compatible bearer-token authentication plus a JSON content type.
    const FString AuthValue = FString::Printf(TEXT("Bearer %s"), *Config.ApiKey);
    OutHeaders.Add(TEXT("Authorization"), AuthValue);
    OutHeaders.Add(TEXT("Content-Type"), TEXT("application/json"));
}

FString UN2CxAIService::FormatRequestPayload(const FString& UserMessage, const FString& SystemMessage) const
{
    // xAI is OpenAI-compatible, so the generic payload builder is configured
    // for the OpenAI request shape.
    UN2CLLMPayloadBuilder* Builder = NewObject<UN2CLLMPayloadBuilder>();
    Builder->Initialize(Config.Model);
    Builder->ConfigureForOpenAI();

    // Deterministic output with a generous completion budget.
    Builder->SetTemperature(0.0f);
    Builder->SetMaxTokens(8192);

    // xAI Grok supports JSON schema through the OpenAI-style response_format
    Builder->SetJsonResponseFormat(UN2CLLMPayloadBuilder::GetN2CResponseSchema());

    // Optionally prepend configured source-file paths/content to the user message.
    FString UserContent = UserMessage;
    if (PromptManager)
    {
        PromptManager->PrependSourceFilesToUserMessage(UserContent);
    }

    // Add messages (xAI supports system role)
    Builder->AddSystemMessage(SystemMessage);
    Builder->AddUserMessage(UserContent);

    // Serialize the assembled request body.
    return Builder->Build();
}
25 changes: 25 additions & 0 deletions Source/Public/Core/N2CSettings.h
Original file line number Diff line number Diff line change
Expand Up @@ -421,6 +421,15 @@ class NODETOCODE_API UN2CSettings : public UDeveloperSettings
meta = (DisplayName = "API Key"))
FString DeepSeek_API_Key_UI;

/** xAI Model Selection */
UPROPERTY(Config, EditAnywhere, BlueprintReadOnly, Category = "Node to Code | LLM Services | xAI")
EN2CxAIModel xAI_Model = EN2CxAIModel::Grok_Code_Fast_1;

/** xAI API Key - Stored separately in user secrets */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Node to Code | LLM Services | xAI",
meta = (DisplayName = "API Key"))
FString xAI_API_Key_UI;

/** Ollama configuration */
UPROPERTY(Config, EditAnywhere, BlueprintReadOnly, Category = "Node to Code | LLM Services | Ollama")
FN2COllamaConfig OllamaConfig;
Expand Down Expand Up @@ -461,6 +470,10 @@ class NODETOCODE_API UN2CSettings : public UDeveloperSettings
/** DeepSeek Model Pricing */
UPROPERTY(Config, EditAnywhere, BlueprintReadOnly, Category = "Node to Code | LLM Services | Pricing | DeepSeek")
TMap<EN2CDeepSeekModel, FN2CDeepSeekPricing> DeepSeekModelPricing;

/** xAI Model Pricing */
UPROPERTY(Config, EditAnywhere, BlueprintReadOnly, Category = "Node to Code | LLM Services | Pricing | xAI")
TMap<EN2CxAIModel, FN2CxAIPricing> xAIModelPricing;

/** Target programming language for translation */
UPROPERTY(Config, EditAnywhere, BlueprintReadOnly, Category = "Node to Code | Code Generation",
Expand Down Expand Up @@ -550,6 +563,12 @@ class NODETOCODE_API UN2CSettings : public UDeveloperSettings
return Pricing->InputCost;
}
return FN2CLLMModelUtils::GetDeepSeekPricing(DeepSeekModel).InputCost;
case EN2CLLMProvider::xAI:
if (const FN2CxAIPricing* Pricing = xAIModelPricing.Find(xAI_Model))
{
return Pricing->InputCost;
}
return FN2CLLMModelUtils::GetxAIPricing(xAI_Model).InputCost;
case EN2CLLMProvider::Ollama:
case EN2CLLMProvider::LMStudio:
return 0.0f; // Local models are free
Expand Down Expand Up @@ -582,6 +601,12 @@ class NODETOCODE_API UN2CSettings : public UDeveloperSettings
return Pricing->OutputCost;
}
return FN2CLLMModelUtils::GetDeepSeekPricing(DeepSeekModel).OutputCost;
case EN2CLLMProvider::xAI:
if (const FN2CxAIPricing* Pricing = xAIModelPricing.Find(xAI_Model))
{
return Pricing->OutputCost;
}
return FN2CLLMModelUtils::GetxAIPricing(xAI_Model).OutputCost;
case EN2CLLMProvider::Ollama:
case EN2CLLMProvider::LMStudio:
return 0.0f; // Local models are free
Expand Down
4 changes: 4 additions & 0 deletions Source/Public/Core/N2CUserSecrets.h
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,10 @@ class NODETOCODE_API UN2CUserSecrets : public UObject
/** DeepSeek API Key */
UPROPERTY(EditAnywhere, Category = "Node to Code | API Keys")
FString DeepSeek_API_Key;

/** xAI API Key */
UPROPERTY(EditAnywhere, Category = "Node to Code | API Keys")
FString xAI_API_Key;

private:
/** Ensure the secrets directory exists */
Expand Down
13 changes: 13 additions & 0 deletions Source/Public/LLM/N2CLLMModels.h
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,16 @@ enum class EN2CDeepSeekModel : uint8
DeepSeek_V3 UMETA(DisplayName = "DeepSeek V3", Value = "deepseek-chat"),
};

/** Available xAI models */
// The "Value" metadata mirrors the model identifier returned by
// FN2CLLMModelUtils::GetxAIModelValue.
// NOTE(review): confirm these identifiers against xAI's published model list —
// e.g. verify "grok-code-fast" vs "grok-code-fast-1" and whether the
// "-latest" aliases are the intended pins.
UENUM(BlueprintType)
enum class EN2CxAIModel : uint8
{
    Grok_Code_Fast_1 UMETA(DisplayName = "Grok Code Fast 1", Value = "grok-code-fast"),
    Grok_4_Fast_Reasoning UMETA(DisplayName = "Grok 4 Fast Reasoning", Value = "grok-4-fast-reasoning-latest"),
    Grok_4_Fast_NonReasoning UMETA(DisplayName = "Grok 4 Fast NonReasoning", Value = "grok-4-fast-non-reasoning-latest"),
    Grok_4 UMETA(DisplayName = "Grok 4", Value = "grok-4-latest")
};


/** Helper functions for model enums */
struct FN2CLLMModelUtils
Expand All @@ -65,12 +75,14 @@ struct FN2CLLMModelUtils
static FString GetAnthropicModelValue(EN2CAnthropicModel Model);
static FString GetGeminiModelValue(EN2CGeminiModel Model);
static FString GetDeepSeekModelValue(EN2CDeepSeekModel Model);
static FString GetxAIModelValue(EN2CxAIModel Model);

/** Pricing getters */
static FN2COpenAIPricing GetOpenAIPricing(EN2COpenAIModel Model);
static FN2CAnthropicPricing GetAnthropicPricing(EN2CAnthropicModel Model);
static FN2CDeepSeekPricing GetDeepSeekPricing(EN2CDeepSeekModel Model);
static FN2CGeminiPricing GetGeminiPricing(EN2CGeminiModel Model);
static FN2CxAIPricing GetxAIPricing(EN2CxAIModel Model);

/** System prompt support checks */
static bool SupportsSystemPrompts(EN2COpenAIModel Model)
Expand All @@ -94,4 +106,5 @@ struct FN2CLLMModelUtils
static const TMap<EN2CAnthropicModel, FN2CAnthropicPricing> AnthropicPricing;
static const TMap<EN2CDeepSeekModel, FN2CDeepSeekPricing> DeepSeekPricing;
static const TMap<EN2CGeminiModel, FN2CGeminiPricing> GeminiPricing;
static const TMap<EN2CxAIModel, FN2CxAIPricing> xAIPricing;
};
19 changes: 19 additions & 0 deletions Source/Public/LLM/N2CLLMPricing.h
Original file line number Diff line number Diff line change
Expand Up @@ -82,3 +82,22 @@ struct FN2CDeepSeekPricing
FN2CDeepSeekPricing(float InInput, float InOutput)
: InputCost(InInput), OutputCost(InOutput) {}
};

/** Pricing information for xAI models.
 *  Mirrors the sibling per-provider pricing structs (e.g. FN2CDeepSeekPricing):
 *  a plain USD cost pair with a zero-cost default.
 */
USTRUCT(BlueprintType)
struct FN2CxAIPricing
{
    GENERATED_BODY()

    // Cost per 1M input (prompt) tokens, in USD
    UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Node to Code | LLM Pricing")
    float InputCost = 0.0f;

    // Cost per 1M output (completion) tokens, in USD
    UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Node to Code | LLM Pricing")
    float OutputCost = 0.0f;

    // Default: zero cost (used for unknown models and local providers)
    FN2CxAIPricing() {}
    FN2CxAIPricing(float InInput, float InOutput)
        : InputCost(InInput), OutputCost(InOutput) {}
};
3 changes: 2 additions & 1 deletion Source/Public/LLM/N2CLLMTypes.h
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,8 @@ enum class EN2CLLMProvider : uint8
Gemini UMETA(DisplayName = "Gemini"),
Ollama UMETA(DisplayName = "Ollama"),
DeepSeek UMETA(DisplayName = "DeepSeek"),
LMStudio UMETA(DisplayName = "LM Studio")
LMStudio UMETA(DisplayName = "LM Studio"),
xAI UMETA(DisplayName = "xAI"),
};

/** Status of the Node to Code system */
Expand Down
23 changes: 23 additions & 0 deletions Source/Public/LLM/Providers/N2CxAIResponseParser.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
// Copyright (c) 2025 Nick McClure (Protospatial). All Rights Reserved.

#pragma once

#include "CoreMinimal.h"
#include "LLM/N2CResponseParserBase.h"
#include "N2CxAIResponseParser.generated.h"

/**
 * @class UN2CxAIResponseParser
 * @brief Parser for xAI Chat Completion API responses
 *
 * xAI responses follow the OpenAI-compatible layout ("error" object on
 * failure, "choices[0].message.content" on success, optional "usage"),
 * so parsing delegates to the shared helpers on UN2CResponseParserBase.
 */
UCLASS()
class NODETOCODE_API UN2CxAIResponseParser : public UN2CResponseParserBase
{
    GENERATED_BODY()

public:
    /** Parse xAI-specific JSON response (OpenAI-compatible schema).
     *  @param InJson       Raw JSON body from the xAI API.
     *  @param OutResponse  Receives token usage and the parsed translation.
     *  @return true on successful parse of the expected N2C format.
     */
    virtual bool ParseLLMResponse(
        const FString& InJson,
        FN2CTranslationResponse& OutResponse) override;
};
Loading