From cb59ad026dd2c0fdde24b057ac4c06756bbe88d3 Mon Sep 17 00:00:00 2001 From: DawnDevelop Date: Tue, 18 Nov 2025 00:11:07 +0100 Subject: [PATCH 1/2] Added webhook enhancements --- CHANGELOG.md | 10 ++++++ .../Interfaces/IAnalyzerServiceResolver.cs | 8 ----- .../Application/Interfaces/IGitClient.cs | 4 ++- .../Analysis/ProcessAnalysisJobCommand.cs | 8 ++--- .../ProcessCommentWebhookEventCommand.cs | 18 +++++----- .../Models/AnalyzerServiceOptions.cs | 2 +- .../Models/PullRequestAnalysisReportModel.cs | 5 --- src/Lintellect.Api/ConfigureServices.cs | 33 ++++++++++--------- .../Domain/Entities/AnalysisJob.cs | 3 +- .../Services/AI/AnalyzerServiceResolver.cs | 14 -------- .../CSharp/InlineSuggestionsSystemPrompt.md | 8 +++-- .../Java/InlineSuggestionsSystemPrompt.md | 4 +++ .../InlineSuggestionsSystemPrompt.md | 4 +++ .../Python/InlineSuggestionsSystemPrompt.md | 4 +++ .../Templates/QuestionAnsweringPrompt.md | 2 +- .../InlineSuggestionsSystemPrompt.md | 4 +++ .../Services/AI/SemanticAnalyzerService.cs | 4 +-- .../AzureDevops/AzureDevopsClientService.cs | 21 +++++++----- .../Git/GitHub/GitHubClientService.cs | 2 +- .../Services/Git/PullRequestService.cs | 5 +-- src/Lintellect.AppHost/AppHost.cs | 5 ++- .../Lintellect.AppHost.csproj | 2 +- .../Commands/StaticAnalysisCommand.cs | 10 ------ .../Services/AnalyzerApiClientService.cs | 6 ++++ .../Models/AnalysisRequest.cs | 2 -- src/Lintellect.Shared/Models/EAnalyzers.cs | 7 ---- .../Mocks/AI/MockAnalyzerService.cs | 11 ------- .../Mocks/Git/MockGitHubClient.cs | 2 +- .../Setup/LintellectApiFixture.cs | 1 - 29 files changed, 94 insertions(+), 115 deletions(-) delete mode 100644 src/Lintellect.Api/Application/Interfaces/IAnalyzerServiceResolver.cs delete mode 100644 src/Lintellect.Api/Infrastructure/Services/AI/AnalyzerServiceResolver.cs delete mode 100644 src/Lintellect.Shared/Models/EAnalyzers.cs diff --git a/CHANGELOG.md b/CHANGELOG.md index 07cc96a..6e9c8e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,16 @@ and 
this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Added + +- Enhanced webhook comment processing for Azure DevOps PRs with question answering +- Thread context support for webhook comment responses + +### Changed + +- Simplified analyzer service architecture by removing IAnalyzerServiceResolver +- Updated prompt templates for better AI responses + ### Fixed - Docker build pipeline diff --git a/src/Lintellect.Api/Application/Interfaces/IAnalyzerServiceResolver.cs b/src/Lintellect.Api/Application/Interfaces/IAnalyzerServiceResolver.cs deleted file mode 100644 index 4f13ec7..0000000 --- a/src/Lintellect.Api/Application/Interfaces/IAnalyzerServiceResolver.cs +++ /dev/null @@ -1,8 +0,0 @@ -using Lintellect.Shared.Models; - -namespace Lintellect.Api.Application.Interfaces; - -public interface IAnalyzerServiceResolver -{ - IAnalyzerService GetAnalyzerService(EAnalyzers provider); -} diff --git a/src/Lintellect.Api/Application/Interfaces/IGitClient.cs b/src/Lintellect.Api/Application/Interfaces/IGitClient.cs index 43bb083..a3433a3 100644 --- a/src/Lintellect.Api/Application/Interfaces/IGitClient.cs +++ b/src/Lintellect.Api/Application/Interfaces/IGitClient.cs @@ -72,7 +72,8 @@ Task CreateCommentAsync( string projectName, string repositoryName, int pullRequestId, - string comment); + string comment, + int? threadId = null); /// /// Creates a new code change suggestion comment on a pull request. @@ -148,4 +149,5 @@ Task AddCodeOwnersToPr( /// the with details of the specified comment thread. 
/// Task GetPullRequestThreadContextAsync(string projectName, string repositoryName, int pullRequestId, int prCommentId); + } diff --git a/src/Lintellect.Api/Application/Messages/Commands/Analysis/ProcessAnalysisJobCommand.cs b/src/Lintellect.Api/Application/Messages/Commands/Analysis/ProcessAnalysisJobCommand.cs index c793aba..2665df7 100644 --- a/src/Lintellect.Api/Application/Messages/Commands/Analysis/ProcessAnalysisJobCommand.cs +++ b/src/Lintellect.Api/Application/Messages/Commands/Analysis/ProcessAnalysisJobCommand.cs @@ -22,7 +22,7 @@ public sealed record ProcessAnalysisJobCommand( public sealed class ProcessAnalysisJobCommandHandler( IApplicationDbContext context, PullRequestService prService, - IAnalyzerServiceResolver analyzerResolver) : IRequestHandler + IAnalyzerService analyzerService) : IRequestHandler { public async ValueTask Handle(ProcessAnalysisJobCommand request, CancellationToken cancellationToken) { @@ -42,7 +42,6 @@ public async ValueTask Handle(ProcessAnalysisJob LinesAdded = 0, LinesRemoved = 0 }, - AnalyzerUsed = analysisRequest.AIAnalyzer.ToString(), AnalyzedAt = DateTimeOffset.UtcNow, InlineSuggestions = null }; @@ -65,12 +64,11 @@ public async ValueTask Handle(ProcessAnalysisJob // Step 2: Prepare analyzer and custom instructions - var analyzer = analyzerResolver.GetAnalyzerService(analysisRequest.AIAnalyzer); var customInstructions = await prService.GetCustomInstructionsAsync(analysisRequest); var aiAnalyzerModel = new AnalyzerServiceModel(analysisRequest, customInstructions ?? 
string.Empty); // Step 3: Execute analysis tasks in parallel - var analysisResults = await ExecuteAnalysisTasksAsync(analyzer, aiAnalyzerModel, diffs, analysisRequest, cancellationToken); + var analysisResults = await ExecuteAnalysisTasksAsync(analyzerService, aiAnalyzerModel, diffs, analysisRequest, cancellationToken); // Step 4: Post results to PR await PostResultsToPullRequestAsync(prService, analysisRequest, analysisResults, cancellationToken); @@ -275,7 +273,6 @@ private static PullRequestAnalysisReportModel BuildAnalysisReport( Summary = results.Summary, DetailedAnalysis = results.DetailedAnalysis, DiffStatistics = BuildDiffStatistics(diffs), - AnalyzerUsed = analysisRequest.AIAnalyzer.ToString(), AnalyzedAt = DateTimeOffset.UtcNow, InlineSuggestions = results.InlineSuggestions.Count != 0 ? "Inline suggestions posted" : null }; @@ -342,6 +339,7 @@ private async Task CheckForDuplicateAnalysisAsync(AnalysisRequest analysis var existingJob = await context.AnalysisJobs .Where(job => job.AnalysisRequest != null && + job.Status == Domain.Enums.AnalysisStatus.Completed && job.AnalysisRequest.GitInfo != null && job.AnalysisRequest.GitInfo.PullRequestId == pullRequestId && job.AnalysisRequest.GitProvider == analysisRequest.GitProvider) diff --git a/src/Lintellect.Api/Application/Messages/Commands/Webhooks/ProcessCommentWebhookEventCommand.cs b/src/Lintellect.Api/Application/Messages/Commands/Webhooks/ProcessCommentWebhookEventCommand.cs index ee2a585..7c7c94e 100644 --- a/src/Lintellect.Api/Application/Messages/Commands/Webhooks/ProcessCommentWebhookEventCommand.cs +++ b/src/Lintellect.Api/Application/Messages/Commands/Webhooks/ProcessCommentWebhookEventCommand.cs @@ -21,7 +21,7 @@ public sealed record ProcessCommentWebhookEventCommand(WebhookEvent WebhookEvent public sealed class ProcessWebhookEventCommandHandler( ILogger logger, PullRequestService pullRequestService, - IAnalyzerServiceResolver analyzerResolver + IAnalyzerService analyzerService ) : IRequestHandler { 
@@ -92,7 +92,7 @@ private async Task HandleAzureDevOpsCommentAsync(WebhookEvent webhookEvent, Canc var context = BuildQuestionContext(question, [.. threadContext.Comments], customInstructions); // Answer the question - await AnswerQuestionAsync(analysisRequest, context, question, cancellationToken); + await AnswerQuestionAsync(analysisRequest, context, question, threadContext.Id, cancellationToken); } private Task HandleGitHubCommentAsync(WebhookEvent webhookEvent, CancellationToken cancellationToken) @@ -138,16 +138,13 @@ private static bool IsQuestion(string comment) var trimmed = comment.Trim(); // Check if bot is mentioned - if (trimmed.Contains("Lintellect", StringComparison.OrdinalIgnoreCase) || + if (trimmed.Contains("@lintellect", StringComparison.OrdinalIgnoreCase) || trimmed.Contains("lintellect", StringComparison.OrdinalIgnoreCase)) { return true; } - // Check if it starts with question words - var questionWords = new[] { "explain", "what", "how", "why", "can you", "help", "tell me", "?" }; - return questionWords.Any(word => trimmed.StartsWith(word, StringComparison.OrdinalIgnoreCase)) || - trimmed.EndsWith("?", StringComparison.OrdinalIgnoreCase); + return false; } /// @@ -175,6 +172,7 @@ public async Task AnswerQuestionAsync( AnalysisRequest request, string threadContext, string question, + int threadId, CancellationToken cancellationToken = default) { ArgumentNullException.ThrowIfNull(request); @@ -184,14 +182,14 @@ public async Task AnswerQuestionAsync( try { - var analyzer = analyzerResolver.GetAnalyzerService(request.AIAnalyzer); var instructions = await pullRequestService.GetCustomInstructionsAsync(request); var model = new AnalyzerServiceModel(request, instructions ?? 
string.Empty); - var answer = await analyzer.AnswerQuestionAsync(model, threadContext, question, cancellationToken); + + var answer = await analyzerService.AnswerQuestionAsync(model, threadContext, question, cancellationToken); // Post answer back to PR - await pullRequestService.AddCommentAsync(request, answer); + await pullRequestService.AddCommentAsync(request, answer, threadId); } catch (Exception ex) { diff --git a/src/Lintellect.Api/Application/Models/AnalyzerServiceOptions.cs b/src/Lintellect.Api/Application/Models/AnalyzerServiceOptions.cs index d9b126a..cbe35ef 100644 --- a/src/Lintellect.Api/Application/Models/AnalyzerServiceOptions.cs +++ b/src/Lintellect.Api/Application/Models/AnalyzerServiceOptions.cs @@ -50,7 +50,7 @@ public sealed class SemanticAnalyzerOptions /// /// The deployment name or model to use. /// - public string DeploymentName { get; set; } = "gpt-4o"; + public string? DeploymentName { get; set; } /// /// Maximum tokens for the response. diff --git a/src/Lintellect.Api/Application/Models/PullRequestAnalysisReportModel.cs b/src/Lintellect.Api/Application/Models/PullRequestAnalysisReportModel.cs index 78b5d33..011cd21 100644 --- a/src/Lintellect.Api/Application/Models/PullRequestAnalysisReportModel.cs +++ b/src/Lintellect.Api/Application/Models/PullRequestAnalysisReportModel.cs @@ -27,11 +27,6 @@ public sealed class PullRequestAnalysisReportModel /// public required DiffStatistics DiffStatistics { get; init; } - /// - /// The AI analyzer that was used. - /// - public required string AnalyzerUsed { get; init; } - /// /// When the analysis was performed. 
/// diff --git a/src/Lintellect.Api/ConfigureServices.cs b/src/Lintellect.Api/ConfigureServices.cs index 2916901..de42530 100644 --- a/src/Lintellect.Api/ConfigureServices.cs +++ b/src/Lintellect.Api/ConfigureServices.cs @@ -60,9 +60,8 @@ public static IServiceCollection AddAnalyzerServices( options.ApiKey ??= claudeApiKey; }); - services.AddKeyedScoped( - EAnalyzers.Claude, - (sp, key) => + services.AddScoped( + (sp) => { var options = sp.GetRequiredService>().Value; var mcpServiceResolver = sp.GetRequiredService(); @@ -70,26 +69,31 @@ public static IServiceCollection AddAnalyzerServices( return new ClaudeAnalyzerService(options, mcpServiceResolver); }); } + else + { + var semanticApiKey = configuration.GetValue("SEMANTIC_API_KEY") ?? + configuration.GetSection("SemanticAnalyzer:ApiKey").Value; - // Only register Semantic (AIFoundry) if configured - var semanticApiKey = configuration.GetValue("SEMANTIC_API_KEY") ?? - configuration.GetSection("SemanticAnalyzer:ApiKey").Value; - var semanticEndpoint = configuration.GetValue("SEMANTIC_ENDPOINT") ?? - configuration.GetSection("SemanticAnalyzer:Endpoint").Value; + var semanticEndpoint = configuration.GetValue("SEMANTIC_ENDPOINT") ?? + configuration.GetSection("SemanticAnalyzer:Endpoint").Value; + + var semanticDeploymentName = configuration.GetValue("SEMANTIC_DEPLOYMENT_NAME") ?? 
+ configuration.GetSection("SemanticAnalyzer:DeploymentName").Value; - if (!string.IsNullOrWhiteSpace(semanticApiKey) || !string.IsNullOrWhiteSpace(semanticEndpoint)) - { services.Configure(options => { configuration.GetSection("SemanticAnalyzer").Bind(options); configureSemanticOptions?.Invoke(options); + options.ApiKey ??= semanticApiKey; options.Endpoint ??= semanticEndpoint; + + options.DeploymentName ??= semanticDeploymentName; + options.DeploymentName ??= "gpt-4o"; //fallback }); - services.AddKeyedScoped( - EAnalyzers.AIFoundry, - (sp, key) => + services.AddScoped( + (sp) => { var options = sp.GetRequiredService>().Value; var mcpResolver = sp.GetRequiredService(); @@ -98,9 +102,6 @@ public static IServiceCollection AddAnalyzerServices( }); } - // Register the resolver that picks the right analyzer based on configuration - services.AddScoped(); - services.AddKeyedSingleton(EMcpServer.Context7); services.AddKeyedSingleton(EMcpServer.MicrosoftDocs); services.AddScoped(); diff --git a/src/Lintellect.Api/Domain/Entities/AnalysisJob.cs b/src/Lintellect.Api/Domain/Entities/AnalysisJob.cs index 21efbbc..bf6770c 100644 --- a/src/Lintellect.Api/Domain/Entities/AnalysisJob.cs +++ b/src/Lintellect.Api/Domain/Entities/AnalysisJob.cs @@ -29,7 +29,7 @@ public AnalysisJob(AnalysisRequest cliAnalysisResult) ArgumentNullException.ThrowIfNull(cliAnalysisResult); Status = AnalysisStatus.Pending; - AnalysisRequest = CloneAnalysisRequest(cliAnalysisResult); + AnalysisRequest = cliAnalysisResult; AddDomainEvent(new AnalysisJobCreatedEvent(Id, cliAnalysisResult.GitInfo?.ProjectName ?? "Unknown", @@ -123,7 +123,6 @@ private static AnalysisRequest CloneAnalysisRequest(AnalysisRequest request) EnableDescriptionSummary = request.EnableDescriptionSummary, EnableAzureDevopsCodeOwners = request.EnableAzureDevopsCodeOwners, McpServer = request.McpServer is null ? [] : [.. 
request.McpServer], - AIAnalyzer = request.AIAnalyzer }; } } diff --git a/src/Lintellect.Api/Infrastructure/Services/AI/AnalyzerServiceResolver.cs b/src/Lintellect.Api/Infrastructure/Services/AI/AnalyzerServiceResolver.cs deleted file mode 100644 index 62df1c1..0000000 --- a/src/Lintellect.Api/Infrastructure/Services/AI/AnalyzerServiceResolver.cs +++ /dev/null @@ -1,14 +0,0 @@ -using Lintellect.Api.Application.Interfaces; -using Lintellect.Shared.Models; - -namespace Lintellect.Api.Infrastructure.Services.AI; - -internal sealed class AnalyzerServiceResolver(IServiceProvider serviceProvider) : IAnalyzerServiceResolver -{ - public IAnalyzerService GetAnalyzerService(EAnalyzers provider) - { - var client = serviceProvider.GetKeyedService(provider); - - return client ?? throw new NotSupportedException($"Git provider '{provider}' is not supported"); - } -} diff --git a/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/CSharp/InlineSuggestionsSystemPrompt.md b/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/CSharp/InlineSuggestionsSystemPrompt.md index 2f44221..90cfee5 100644 --- a/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/CSharp/InlineSuggestionsSystemPrompt.md +++ b/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/CSharp/InlineSuggestionsSystemPrompt.md @@ -1,13 +1,17 @@ -You are an expert C# code reviewer providing inline code suggestions for {{gitProvider}} pull requests. +You are an expert C# code reviewer providing inline code suggestions for pull requests. ## Your Role: -You are NOT just a static analysis findings reporter. You are a COMPREHENSIVE C# code reviewer who: +You are a COMPREHENSIVE C# code reviewer who: 1. Reviews every line of changed C# code for issues beyond what static analyzers catch 2. Identifies security vulnerabilities, logic errors, performance issues, and bugs specific to C# 3. Suggests C# best practices and code quality improvements 4. 
Provides fixes for C# static analyzer findings (CA rules, compiler warnings) +5. ONLY make actionable suggestions with clear "what" and "how". +6. Avoid bikeshedding or subjective style preferences. +7. NEVER write a comment if there are no issues to address. +8. You don't need to summarize changes; focus on inline suggestions only. ## C# Specific Guidelines: diff --git a/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/Java/InlineSuggestionsSystemPrompt.md b/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/Java/InlineSuggestionsSystemPrompt.md index b5cd91a..037f94e 100644 --- a/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/Java/InlineSuggestionsSystemPrompt.md +++ b/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/Java/InlineSuggestionsSystemPrompt.md @@ -8,6 +8,10 @@ You are NOT just a static analysis findings reporter. You are a COMPREHENSIVE Ja 2. Identifies security vulnerabilities, logic errors, performance issues, and bugs specific to Java 3. Suggests Java best practices and code quality improvements 4. Provides fixes for Java static analyzer findings (SpotBugs, PMD, Checkstyle, SonarQube) +5. ONLY make actionable suggestions with clear "what" and "how". +6. Avoid bikeshedding or subjective style preferences. +7. NEVER write a comment if there are no issues to address. +8. You don't need to summarize changes; focus on inline suggestions only. ## Your Task: diff --git a/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/JavaScript/InlineSuggestionsSystemPrompt.md b/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/JavaScript/InlineSuggestionsSystemPrompt.md index 599f7f5..363473a 100644 --- a/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/JavaScript/InlineSuggestionsSystemPrompt.md +++ b/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/JavaScript/InlineSuggestionsSystemPrompt.md @@ -8,6 +8,10 @@ You are NOT just a static analysis findings reporter.
You are a COMPREHENSIVE Ja 2. Identifies security vulnerabilities, logic errors, performance issues, and bugs specific to JavaScript 3. Suggests JavaScript best practices and code quality improvements 4. Provides fixes for JavaScript static analyzer findings (ESLint, JSHint, etc.) +5. ONLY make actionable suggestions with clear "what" and "how". +6. Avoid bikeshedding or subjective style preferences. +7. NEVER write a comment if there are no issues to address. +8. You don't need to summarize changes; focus on inline suggestions only. ## Your Task: Generate inline code suggestions as structured JSON that can be posted as PR comments. diff --git a/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/Python/InlineSuggestionsSystemPrompt.md b/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/Python/InlineSuggestionsSystemPrompt.md index 27f1fab..b6ca42a 100644 --- a/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/Python/InlineSuggestionsSystemPrompt.md +++ b/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/Python/InlineSuggestionsSystemPrompt.md @@ -8,6 +8,10 @@ You are NOT just a static analysis findings reporter. You are a COMPREHENSIVE Py 2. Identifies security vulnerabilities, logic errors, performance issues, and bugs specific to Python 3. Suggests Python best practices and code quality improvements 4. Provides fixes for Python static analyzer findings (pylint, flake8, mypy, bandit) +5. ONLY make actionable suggestions with clear "what" and "how". +6. Avoid bikeshedding or subjective style preferences. +7. NEVER write a comment if there are no issues to address. +8. You don't need to summarize changes; focus on inline suggestions only.
## Your Task: diff --git a/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/QuestionAnsweringPrompt.md b/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/QuestionAnsweringPrompt.md index 25aeb95..53626c0 100644 --- a/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/QuestionAnsweringPrompt.md +++ b/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/QuestionAnsweringPrompt.md @@ -11,7 +11,6 @@ You are a helpful code review assistant answering questions about a pull request ## Context Available - **Question**: The specific question being asked - **Thread Context**: Previous comments in the conversation thread -- **Code Changes**: PR diffs showing what changed in the pull request - **Project Guidelines**: Custom instructions and guidelines for this project ## Response Format @@ -22,6 +21,7 @@ You are a helpful code review assistant answering questions about a pull request - File paths in backticks: `path/to/file.cs` - Line references: `file.cs:42-45` - If referencing multiple files or sections, organize clearly with headings +- NEVER end your sentence with a question mark. ## Thread Context {{threadContext}} diff --git a/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/TypeScript/InlineSuggestionsSystemPrompt.md b/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/TypeScript/InlineSuggestionsSystemPrompt.md index bbed187..55e0593 100644 --- a/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/TypeScript/InlineSuggestionsSystemPrompt.md +++ b/src/Lintellect.Api/Infrastructure/Services/AI/Prompts/Templates/TypeScript/InlineSuggestionsSystemPrompt.md @@ -8,6 +8,10 @@ You are NOT just a static analysis findings reporter. You are a COMPREHENSIVE Ty 2. Identifies security vulnerabilities, logic errors, performance issues, and bugs specific to TypeScript 3. Suggests TypeScript best practices and code quality improvements 4.
Provides fixes for TypeScript static analyzer findings (TSLint, ESLint, TypeScript compiler) +5. ONLY make actionable suggestions with clear "what" and "how". +6. Avoid bikeshedding or subjective style preferences. +7. NEVER write a comment if there are no issues to address. +8. You don't need to summarize changes; focus on inline suggestions only. ## Your Task: Generate inline code suggestions as structured JSON that can be posted as PR comments. diff --git a/src/Lintellect.Api/Infrastructure/Services/AI/SemanticAnalyzerService.cs b/src/Lintellect.Api/Infrastructure/Services/AI/SemanticAnalyzerService.cs index e38db43..de2aedc 100644 --- a/src/Lintellect.Api/Infrastructure/Services/AI/SemanticAnalyzerService.cs +++ b/src/Lintellect.Api/Infrastructure/Services/AI/SemanticAnalyzerService.cs @@ -291,7 +291,7 @@ private async Task CreateKernelAsync(SemanticAnalyzerOptions options, Li _logger.LogDebug("Using ApiKey authentication for Azure OpenAI chat completion"); builder.AddAzureOpenAIChatCompletion( - deploymentName: options.DeploymentName, + deploymentName: options.DeploymentName!, endpoint: options.Endpoint, apiKey: options.ApiKey); } @@ -304,7 +304,7 @@ private async Task CreateKernelAsync(SemanticAnalyzerOptions options, Li _logger.LogDebug("Using TokenCredential authentication for Azure OpenAI chat completion"); builder.AddAzureOpenAIChatCompletion( - deploymentName: options.DeploymentName, + deploymentName: options.DeploymentName!, endpoint: options.Endpoint!, options.TokenCredential); } diff --git a/src/Lintellect.Api/Infrastructure/Services/Git/AzureDevops/AzureDevopsClientService.cs b/src/Lintellect.Api/Infrastructure/Services/Git/AzureDevops/AzureDevopsClientService.cs index 41f32f2..8f9731d 100644 --- a/src/Lintellect.Api/Infrastructure/Services/Git/AzureDevops/AzureDevopsClientService.cs +++ b/src/Lintellect.Api/Infrastructure/Services/Git/AzureDevops/AzureDevopsClientService.cs @@ -434,7 +434,8 @@ public async Task CreateCommentAsync( string projectName,
string repositoryName, int pullRequestId, - string comment) + string comment, + int? threadId = null) { var gitClient = await GetHttpGitClient(); @@ -451,13 +452,15 @@ public async Task CreateCommentAsync( Status = Microsoft.TeamFoundation.SourceControl.WebApi.CommentThreadStatus.Active }; - var azureDevOpsThread = await gitClient.CreateThreadAsync( - thread, - projectName, - repositoryName, - pullRequestId); - - return MapToGenericCommentThread(azureDevOpsThread); + GitPullRequestCommentThread threadResult = threadId.HasValue + ? await gitClient.UpdateThreadAsync( + thread, projectName, repositoryName, pullRequestId, threadId.Value) + : await gitClient.CreateThreadAsync( + thread, + projectName, + repositoryName, + pullRequestId); + return MapToGenericCommentThread(threadResult); } public async Task CreateCodeChangeCommentAsync( @@ -806,7 +809,7 @@ public async Task GetPullRequestThreadContextAsync(str { var gitClient = await GetHttpGitClient(); - var thread = await gitClient.GetPullRequestThreadAsync(projectName, pullRequestId, prCommentId); + var thread = await gitClient.GetPullRequestThreadAsync(projectName, repositoryName, pullRequestId, prCommentId); return MapToGenericCommentThread(thread); } diff --git a/src/Lintellect.Api/Infrastructure/Services/Git/GitHub/GitHubClientService.cs b/src/Lintellect.Api/Infrastructure/Services/Git/GitHub/GitHubClientService.cs index 7b6360e..e51f8f6 100644 --- a/src/Lintellect.Api/Infrastructure/Services/Git/GitHub/GitHubClientService.cs +++ b/src/Lintellect.Api/Infrastructure/Services/Git/GitHub/GitHubClientService.cs @@ -180,7 +180,7 @@ public async Task CreateCommentAsync( string projectName, string repositoryName, int pullRequestId, - string comment) + string comment, int? 
threadId = null) { try { diff --git a/src/Lintellect.Api/Infrastructure/Services/Git/PullRequestService.cs b/src/Lintellect.Api/Infrastructure/Services/Git/PullRequestService.cs index b05c790..bce5c00 100644 --- a/src/Lintellect.Api/Infrastructure/Services/Git/PullRequestService.cs +++ b/src/Lintellect.Api/Infrastructure/Services/Git/PullRequestService.cs @@ -135,7 +135,7 @@ public async Task> GetCompactDiffsAsync( /// The created comment thread. public async Task AddCommentAsync( AnalysisRequest analysisResult, - string comment) + string comment, int? threadId = null) { var gitClient = _clientFactory.CreateClient(analysisResult); @@ -143,7 +143,8 @@ public async Task AddCommentAsync( analysisResult.GitInfo!.ProjectName!, analysisResult.GitInfo!.RepositoryName, analysisResult.GitInfo!.PullRequestId, - comment); + comment, + threadId); } /// diff --git a/src/Lintellect.AppHost/AppHost.cs b/src/Lintellect.AppHost/AppHost.cs index 64897d4..65f2e61 100644 --- a/src/Lintellect.AppHost/AppHost.cs +++ b/src/Lintellect.AppHost/AppHost.cs @@ -8,6 +8,7 @@ // Add PostgreSQL server var postgres = builder.AddPostgres("postgres") .WithDataVolume() + .WithHostPort(5432) .WithLifetime(ContainerLifetime.Persistent); // Add database @@ -18,16 +19,14 @@ // Add API project with database reference builder.AddProject("API") .WithExternalHttpEndpoints() - .WithHttpEndpoint(8080) .WithEnvironment("LINTELLECT_API_KEY", apiKey) .WithEnvironment("AZURE_DEVOPS_PAT", builder.Configuration.GetValue("AZURE_DEVOPS_PAT")) .WithEnvironment("AZURE_DEVOPS_ORG_URL", builder.Configuration.GetValue("AZURE_DEVOPS_ORG_URL")) .WithEnvironment("SEMANTIC_API_KEY", builder.Configuration.GetValue("SEMANTIC_API_KEY")) .WithEnvironment("SEMANTIC_ENDPOINT", builder.Configuration.GetValue("SEMANTIC_ENDPOINT")) - .WithHttpHealthCheck("/health") .WithReference(postgresDb) .WaitFor(postgres) .WithComputeEnvironment(compose); -await builder.Build().StartAsync(); +await builder.Build().RunAsync(); diff --git 
a/src/Lintellect.AppHost/Lintellect.AppHost.csproj b/src/Lintellect.AppHost/Lintellect.AppHost.csproj index 1305b91..ddb48bb 100644 --- a/src/Lintellect.AppHost/Lintellect.AppHost.csproj +++ b/src/Lintellect.AppHost/Lintellect.AppHost.csproj @@ -1,6 +1,6 @@ - + Exe diff --git a/src/Lintellect.Cli/Commands/StaticAnalysisCommand.cs b/src/Lintellect.Cli/Commands/StaticAnalysisCommand.cs index d216691..b91d698 100644 --- a/src/Lintellect.Cli/Commands/StaticAnalysisCommand.cs +++ b/src/Lintellect.Cli/Commands/StaticAnalysisCommand.cs @@ -111,14 +111,6 @@ public StaticAnalysisCommand() : base("analyze", "Run static analysis on code") AllowMultipleArgumentsPerToken = true }; - var aianalyzer = new Option("--ai-analyzer") - { - Description = "AI analyzer to use (default: AIFoundry)", - DefaultValueFactory = _ => EAnalyzers.AIFoundry, - Aliases = { "-ai", "-analyzer" }, - AllowMultipleArgumentsPerToken = false - }; - Options.Add(solution); Options.Add(serviceUrl); @@ -134,7 +126,6 @@ public StaticAnalysisCommand() : base("analyze", "Run static analysis on code") Options.Add(enableSemgrep); Options.Add(mcpServer); - Options.Add(aianalyzer); SetAction(async (parseResult) => { @@ -175,7 +166,6 @@ public StaticAnalysisCommand() : base("analyze", "Run static analysis on code") var mcpServers = mcpServerValue ?? []; analysisResult.McpServer = [.. 
mcpServers]; - analysisResult.AIAnalyzer = parseResult.GetValue(aianalyzer); Console.WriteLine(); Console.WriteLine($"Analysis completed: {analysisResult.Findings.Count} finding(s) detected"); diff --git a/src/Lintellect.Cli/Services/AnalyzerApiClientService.cs b/src/Lintellect.Cli/Services/AnalyzerApiClientService.cs index 0ddfd4f..c849694 100644 --- a/src/Lintellect.Cli/Services/AnalyzerApiClientService.cs +++ b/src/Lintellect.Cli/Services/AnalyzerApiClientService.cs @@ -37,6 +37,12 @@ public async Task StartAnalysisAsync( var jsonContent = JsonSerializer.Serialize(request); + Console.WriteLine($""" + Sending Post request to {_httpClient.BaseAddress}{StartAnalysisEndpoint}: + + {jsonContent} + """); + using StringContent content = new( jsonContent, Encoding.UTF8, diff --git a/src/Lintellect.Shared/Models/AnalysisRequest.cs b/src/Lintellect.Shared/Models/AnalysisRequest.cs index c0761b6..2a4dac9 100644 --- a/src/Lintellect.Shared/Models/AnalysisRequest.cs +++ b/src/Lintellect.Shared/Models/AnalysisRequest.cs @@ -19,6 +19,4 @@ public class AnalysisRequest public bool EnableAzureDevopsCodeOwners { get; set; } = false; public List McpServer { get; set; } = []; - - public EAnalyzers AIAnalyzer { get; set; } = EAnalyzers.AIFoundry; } diff --git a/src/Lintellect.Shared/Models/EAnalyzers.cs b/src/Lintellect.Shared/Models/EAnalyzers.cs deleted file mode 100644 index 0472e13..0000000 --- a/src/Lintellect.Shared/Models/EAnalyzers.cs +++ /dev/null @@ -1,7 +0,0 @@ -namespace Lintellect.Shared.Models; - -public enum EAnalyzers -{ - AIFoundry = 0, - Claude = 1 -} diff --git a/tests/Lintellect.Api.FunctionalTests/Mocks/AI/MockAnalyzerService.cs b/tests/Lintellect.Api.FunctionalTests/Mocks/AI/MockAnalyzerService.cs index bba2217..8ee0e83 100644 --- a/tests/Lintellect.Api.FunctionalTests/Mocks/AI/MockAnalyzerService.cs +++ b/tests/Lintellect.Api.FunctionalTests/Mocks/AI/MockAnalyzerService.cs @@ -1,17 +1,6 @@ namespace Lintellect.Api.FunctionalTests.Mocks.AI; -/// -/// Mock 
implementation of IAnalyzerServiceResolver for testing. -/// -public sealed class MockAnalyzerServiceResolver : IAnalyzerServiceResolver -{ - public IAnalyzerService GetAnalyzerService(EAnalyzers provider) - { - return new MockAnalyzerService(); - } -} - /// /// Mock implementation of IAnalyzerService for testing. /// diff --git a/tests/Lintellect.Api.FunctionalTests/Mocks/Git/MockGitHubClient.cs b/tests/Lintellect.Api.FunctionalTests/Mocks/Git/MockGitHubClient.cs index 6985922..355831e 100644 --- a/tests/Lintellect.Api.FunctionalTests/Mocks/Git/MockGitHubClient.cs +++ b/tests/Lintellect.Api.FunctionalTests/Mocks/Git/MockGitHubClient.cs @@ -50,7 +50,7 @@ public Task GetPullRequestAsync(string projectName, string reposito return Task.FromResult("Mock file content"); } - public Task CreateCommentAsync(string projectName, string repositoryName, int pullRequestId, string comment) + public Task CreateCommentAsync(string projectName, string repositoryName, int pullRequestId, string comment, int? 
threadId = null) { return Task.FromResult(new PullRequestCommentThread { diff --git a/tests/Lintellect.Api.FunctionalTests/Setup/LintellectApiFixture.cs b/tests/Lintellect.Api.FunctionalTests/Setup/LintellectApiFixture.cs index 11e87c6..6bec8b0 100644 --- a/tests/Lintellect.Api.FunctionalTests/Setup/LintellectApiFixture.cs +++ b/tests/Lintellect.Api.FunctionalTests/Setup/LintellectApiFixture.cs @@ -66,7 +66,6 @@ protected override IHost CreateHost(IHostBuilder builder) // Replace external services with mocks services.AddScoped(); - services.AddScoped(); // Remove background services for testing to avoid race conditions var analysisBackgroundService = services.FirstOrDefault(s => s.ImplementationType == typeof(AnalysisBackgroundService)); From 10ce58f41bfd7bfe811d9889e0cfb9d74f6950f9 Mon Sep 17 00:00:00 2001 From: DawnDevelop Date: Tue, 18 Nov 2025 00:14:10 +0100 Subject: [PATCH 2/2] fix workflow --- .github/workflows/release-api.yml | 50 +------------------------------ 1 file changed, 1 insertion(+), 49 deletions(-) diff --git a/.github/workflows/release-api.yml b/.github/workflows/release-api.yml index 494decb..42782e6 100644 --- a/.github/workflows/release-api.yml +++ b/.github/workflows/release-api.yml @@ -206,50 +206,10 @@ jobs: path: ./lintellect-api-${{ needs.extract-version.outputs.version }}-${{ matrix.platform }}.${{ matrix.extension }} retention-days: 30 - generate-release-notes: - name: Generate Release Notes - runs-on: ubuntu-latest - needs: [extract-version] - outputs: - release-notes: ${{ steps.notes.outputs.release-notes }} - - steps: - - name: Checkout code - uses: actions/checkout@v5 - with: - fetch-depth: 0 - - - name: Generate release notes - id: notes - run: | - # Extract API section from CHANGELOG.md - VERSION="${{ needs.extract-version.outputs.version }}" - - # Find the API section for this version - API_SECTION=$(awk "/## \[API v$VERSION\]/,/^## \[/ {if (!/^## \[/) print}" CHANGELOG.md) - - if [ -z "$API_SECTION" ]; then - echo "No API 
section found for version $VERSION in CHANGELOG.md" - API_SECTION="## API v$VERSION\n\nNo release notes available." - fi - - # Create release notes - RELEASE_NOTES="## Lintellect API v$VERSION\n\n$API_SECTION\n\n### Docker Images\n\n- \`ghcr.io/${{ github.repository_owner }}/lintellect-api:$VERSION\`\n- \`ghcr.io/${{ github.repository_owner }}/lintellect-api:latest\`\n\n### Installation\n\n\`\`\`bash\n# Pull the latest image\ndocker pull ghcr.io/${{ github.repository_owner }}/lintellect-api:$VERSION\n\n# Run the API\ndocker run -p 7000:7000 ghcr.io/${{ github.repository_owner }}/lintellect-api:$VERSION\n\`\`\`" - - echo "release-notes<> $GITHUB_OUTPUT - echo "$RELEASE_NOTES" >> $GITHUB_OUTPUT - echo "EOF" >> $GITHUB_OUTPUT - create-release: name: Create Release runs-on: ubuntu-latest - needs: - [ - extract-version, - build-docker-images, - build-binaries, - generate-release-notes, - ] + needs: [extract-version, build-docker-images, build-binaries] permissions: contents: write @@ -264,17 +224,10 @@ jobs: pattern: lintellect-api-* merge-multiple: false - - name: Write release notes to file - run: | - cat > release-notes.md << 'RELEASE_NOTES_EOF' - ${{ needs.generate-release-notes.outputs.release-notes }} - RELEASE_NOTES_EOF - - name: Create release with GitHub CLI run: | gh release create ${{ github.ref_name }} \ --title "Lintellect API v${{ needs.extract-version.outputs.version }}" \ - --notes-file release-notes.md \ ./binaries/lintellect-api-linux-x64/lintellect-api-${{ needs.extract-version.outputs.version }}-linux-x64.tar.gz \ ./binaries/lintellect-api-win-x64/lintellect-api-${{ needs.extract-version.outputs.version }}-win-x64.zip env: @@ -293,7 +246,6 @@ jobs: echo "✅ API v${{ needs.extract-version.outputs.version }} released successfully!" echo "🐳 Docker images pushed to GitHub Container Registry" echo "📦 Binary archives attached to GitHub Release" - echo "📝 Release notes published" - name: Notify failure if: needs.create-release.result == 'failure'