4 changes: 3 additions & 1 deletion src/action/review/poster.ts
@@ -173,9 +173,11 @@ export async function postTriggerReview(
}

try {
// Consolidate findings within this batch (intra-batch dedup)
// Cross-location merging is already done by runSkillTask() before findings
// reach the poster. No need to merge again here.
let findingsToPost = filteredFindings;

// Consolidate findings within this batch (intra-batch dedup)
if (findingsToPost.length > 1) {
const consolidateResult = await consolidateBatchFindings(findingsToPost, {
apiKey,
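Note on the resulting flow (a sketch, not part of the diff): cross-location merging is assumed to happen exactly once, inside runSkillTask(), so the poster only performs intra-batch consolidation. Only names that appear in this PR are used below.

// Assumed ordering, inferred from the comments above; not the actual implementation.
// In runSkillTask():
//   const unique = deduplicateFindings(allFindings);
//   const merged = await mergeCrossLocationFindings(unique, { apiKey, repoPath }); // single merge pass
// In postTriggerReview():
//   let findingsToPost = filteredFindings; // already merged upstream
//   if (findingsToPost.length > 1) {
//     await consolidateBatchFindings(findingsToPost, { apiKey }); // intra-batch dedup only
//   }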
17 changes: 14 additions & 3 deletions src/cli/output/tasks.ts
@@ -12,6 +12,7 @@ import {
aggregateUsage,
aggregateAuxiliaryUsage,
deduplicateFindings,
mergeCrossLocationFindings,
generateSummary,
type AuxiliaryUsageEntry,
type SkillRunnerOptions,
@@ -332,10 +333,20 @@ export async function runSkillTask(
const uniqueFindings = deduplicateFindings(allFindings);
emitDedupMetrics(allFindings.length, uniqueFindings.length);

// Merge findings that describe the same issue at different locations
const mergeResult = await mergeCrossLocationFindings(uniqueFindings, {
apiKey: runnerOptions.apiKey,
repoPath: context.repoPath,
});
const mergedFindings = mergeResult.findings;
if (mergeResult.usage) {
allAuxEntries.push({ agent: 'merge', usage: mergeResult.usage });
}

const report: SkillReport = {
skill: skill.name,
summary: generateSummary(skill.name, uniqueFindings),
findings: uniqueFindings,
summary: generateSummary(skill.name, mergedFindings),
findings: mergedFindings,
usage: aggregateUsage(allUsage),
durationMs: duration,
files: preparedFiles.map((file, i) => {
@@ -373,7 +384,7 @@ export async function runSkillTask(
callbacks.onSkillUpdate(name, {
status: 'done',
durationMs: duration,
findings: uniqueFindings,
findings: mergedFindings,
usage: report.usage,
});
callbacks.onSkillComplete(name, report);
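The call site above implies the shape runSkillTask() expects from mergeCrossLocationFindings. A sketch of that implied interface follows; the real declaration lives in the module that also exports deduplicateFindings and may differ, and the Usage type name is an assumption.

// Interface sketch inferred from the call site, not the actual declaration.
// Finding and Usage stand in for the project's own types.
interface MergeCrossLocationOptions {
  apiKey?: string;   // passed through from runnerOptions.apiKey
  repoPath: string;  // passed through from context.repoPath
}

interface MergeCrossLocationResult {
  findings: Finding[]; // survivors, with duplicate locations folded into additionalLocations
  usage?: Usage;       // when present, recorded as an auxiliary usage entry under agent 'merge'
}

declare function mergeCrossLocationFindings(
  findings: Finding[],
  options: MergeCrossLocationOptions,
): Promise<MergeCrossLocationResult>;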
67 changes: 67 additions & 0 deletions src/cli/terminal.test.ts
@@ -338,5 +338,72 @@ describe('renderTerminalReport', () => {

expect(output).not.toContain('WARN:');
});

it('renders additional locations in CI mode', () => {
const report = createReport({
durationMs: 5000,
findings: [
createFinding({
severity: 'high',
title: 'Missing null check',
location: { path: 'src/a.ts', startLine: 10 },
additionalLocations: [
{ path: 'src/b.ts', startLine: 20 },
{ path: 'src/c.ts', startLine: 30, endLine: 35 },
],
}),
],
});

const output = renderTerminalReport([report], ciMode);

expect(output).toContain('also at: src/b.ts:20, src/c.ts:30-35');
});
});

describe('TTY additionalLocations', () => {
it('shows additional locations in TTY mode', () => {
const report = createReport({
findings: [
createFinding({
location: { path: 'src/a.ts', startLine: 10 },
additionalLocations: [
{ path: 'src/b.ts', startLine: 20 },
],
}),
],
});

const output = renderTerminalReport([report], {
isTTY: true,
supportsColor: false,
columns: 80,
});

expect(output).toContain('+1 more location:');
expect(output).toContain('src/b.ts:20');
});

it('uses plural for multiple additional locations', () => {
const report = createReport({
findings: [
createFinding({
location: { path: 'src/a.ts', startLine: 10 },
additionalLocations: [
{ path: 'src/b.ts', startLine: 20 },
{ path: 'src/c.ts', startLine: 30 },
],
}),
],
});

const output = renderTerminalReport([report], {
isTTY: true,
supportsColor: false,
columns: 80,
});

expect(output).toContain('+2 more locations:');
});
});
});
19 changes: 19 additions & 0 deletions src/cli/terminal.ts
@@ -80,6 +80,16 @@ function formatFindingTTY(finding: Finding): string[] {
// For 'line_not_found', we silently skip - the line may not exist in this version
}

// Additional locations
if (finding.additionalLocations?.length) {
const count = finding.additionalLocations.length;
lines.push(` ${chalk.dim(`+${count} more ${pluralize(count, 'location')}:`)}`);
for (const loc of finding.additionalLocations) {
const range = loc.endLine ? `${loc.startLine}-${loc.endLine}` : `${loc.startLine}`;
lines.push(` ${chalk.dim(`${loc.path}:${range}`)}`);
}
}

// Blank line, then description
lines.push('');
lines.push(` ${chalk.dim(finding.description)}`);
@@ -127,6 +137,15 @@ function formatFindingCI(finding: Finding): string[] {
lines.push(` confidence: ${finding.confidence}`);
}

// Additional locations
if (finding.additionalLocations?.length) {
const locs = finding.additionalLocations.map((loc) => {
const range = loc.endLine ? `${loc.startLine}-${loc.endLine}` : `${loc.startLine}`;
return `${loc.path}:${range}`;
});
lines.push(` also at: ${locs.join(', ')}`);
}

// Description
lines.push(` ${finding.description}`);

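Both formatters render ranges as path:start or path:start-end, and the TTY branch calls a pluralize() helper that is not shown in this diff. A hypothetical definition that matches the '+1 more location:' / '+2 more locations:' expectations in terminal.test.ts:

// Hypothetical helper: pluralize() is referenced above but its definition is not part of this diff.
function pluralize(count: number, word: string): string {
  return count === 1 ? word : `${word}s`;
}

// Range rendering shared by the TTY and CI branches:
//   { path: 'src/b.ts', startLine: 20 }               -> "src/b.ts:20"
//   { path: 'src/c.ts', startLine: 30, endLine: 35 }  -> "src/c.ts:30-35"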
84 changes: 84 additions & 0 deletions src/output/dedup.test.ts
@@ -282,6 +282,62 @@ describe('deduplicateFindings', () => {
const result = await deduplicateFindings(findings, existingComments, {});
expect(result.newFindings).toHaveLength(1);
});

it('matches additional locations against Warden comments', async () => {
// Simulates winner-flip: finding primary is now at b.ts, but our old comment is at db.ts
const finding: Finding = {
id: 'f1',
severity: 'high',
title: 'SQL Injection',
description: 'User input passed to query',
location: { path: 'src/b.ts', startLine: 20 },
additionalLocations: [{ path: 'src/db.ts', startLine: 42 }],
};

const existingComments: ExistingComment[] = [
{
id: 1,
path: 'src/db.ts',
line: 42,
title: 'SQL Injection',
description: 'User input passed to query',
contentHash: generateContentHash('SQL Injection', 'User input passed to query'),
isWarden: true,
},
];

const result = await deduplicateFindings([finding], existingComments, { hashOnly: true });
expect(result.newFindings).toHaveLength(0);
expect(result.duplicateActions).toHaveLength(1);
expect(result.duplicateActions[0]!.type).toBe('update_warden');
});

it('does not match additional locations against external comments', async () => {
const finding: Finding = {
id: 'f1',
severity: 'high',
title: 'SQL Injection',
description: 'User input passed to query',
location: { path: 'src/b.ts', startLine: 20 },
additionalLocations: [{ path: 'src/db.ts', startLine: 42 }],
};

const existingComments: ExistingComment[] = [
{
id: 1,
path: 'src/db.ts',
line: 42,
title: 'SQL Injection',
description: 'User input passed to query',
contentHash: generateContentHash('SQL Injection', 'User input passed to query'),
isWarden: false,
},
];

const result = await deduplicateFindings([finding], existingComments, { hashOnly: true });
expect(result.newFindings).toHaveLength(1);
expect(result.duplicateActions).toHaveLength(0);
});
});

describe('parseWardenSkills', () => {
@@ -638,4 +694,32 @@ describe('consolidateBatchFindings', () => {
expect(result.findings).toHaveLength(2);
expect(result.removedCount).toBe(0);
});

it('preserves locations from losers via mergeGroup (hash dedup)', async () => {
// Two exact-duplicate findings at the same location: hash dedup should pick first
const finding1: Finding = {
id: 'f1',
severity: 'high',
title: 'SQL Injection',
description: 'User input passed to query',
location: { path: 'src/db.ts', startLine: 42 },
};

const finding2: Finding = {
id: 'f2',
severity: 'high',
title: 'SQL Injection',
description: 'User input passed to query',
location: { path: 'src/db.ts', startLine: 42 },
additionalLocations: [{ path: 'src/api.ts', startLine: 100 }],
};

// Hash dedup removes exact duplicates before LLM phase
// Since they have the same hash+line, finding2 is dropped
const result = await consolidateBatchFindings([finding1, finding2], { hashOnly: true });
expect(result.findings).toHaveLength(1);
// Hash dedup just drops duplicates (doesn't merge locations)
// That's expected: mergeGroup is used for LLM-grouped findings
expect(result.findings[0]).toBe(finding1);
});
});
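Taken together, the new deduplicateFindings tests encode a location-aware matching rule: a finding's additionalLocations may match one of Warden's own comments, while external comments are matched only at the primary location. An illustrative sketch of that rule (not the actual deduplicateFindings() internals; Finding, ExistingComment, and generateContentHash are the project's own exports):

function matchesExistingComment(finding: Finding, comment: ExistingComment): boolean {
  // Content must match regardless of location.
  if (comment.contentHash !== generateContentHash(finding.title, finding.description)) {
    return false;
  }
  // Warden's own comments may match at any known location (covers the winner-flip case);
  // external comments only at the primary location.
  const candidates = comment.isWarden
    ? [finding.location, ...(finding.additionalLocations ?? [])]
    : [finding.location];
  return candidates.some((loc) => loc.path === comment.path && loc.startLine === comment.line);
}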