Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 30 additions & 0 deletions src/Elastic.Markdown/DocumentationGenerator.cs
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,36 @@ public async Task ResolveDirectoryTree(Cancel ctx)
_logger.LogInformation("Resolving tree");
await DocumentationSet.Tree.Resolve(ctx);
_logger.LogInformation("Resolved tree");
ReportDuplicateTitles(DocumentationSet.Tree.ResolvedMarkdownFiles);
}

/// <summary>
/// Scans the resolved markdown files and emits a hint diagnostic for every
/// file whose title is shared (case-insensitively) with at least one other file.
/// Each offending file receives the full list of colliding files so its author
/// can coordinate a fix with the authors of the other pages.
/// </summary>
/// <param name="files">The flattened list of resolved markdown files to check.</param>
private void ReportDuplicateTitles(List<MarkdownFile> files)
{
	// Map each title (case-insensitive) to the files that use it.
	var titleMap = new Dictionary<string, List<MarkdownFile>>(StringComparer.OrdinalIgnoreCase);
	foreach (var file in files)
	{
		// Untitled files cannot meaningfully collide; skip them.
		// NOTE(review): a missing title is arguably a problem in its own right —
		// consider emitting a diagnostic for this case as well.
		if (string.IsNullOrWhiteSpace(file.Title))
			continue;

		// Single lookup: reuse the list returned by TryGetValue instead of
		// re-indexing the dictionary (the original did three lookups per file
		// and discarded the TryGetValue result).
		if (!titleMap.TryGetValue(file.Title, out var filesWithTitle))
		{
			filesWithTitle = [];
			titleMap[file.Title] = filesWithTitle;
		}
		filesWithTitle.Add(file);
	}

	// Report every title used by more than one file, once per offending file,
	// so each page's diagnostics include the complete set of colliding pages.
	foreach (var (title, documentFiles) in titleMap)
	{
		if (documentFiles.Count <= 1)
			continue;
		var fileList = string.Join(", ", documentFiles.Select(f => f.RelativePath));
		foreach (var documentFile in documentFiles)
			Context.Collector.EmitHint(documentFile.RelativePath,
				$"Duplicate titles found. The title '{title}' is used in files: {{{fileList}}}");
	}
}

public async Task<GenerationResult> GenerateAll(Cancel ctx)
Expand Down
21 changes: 18 additions & 3 deletions src/Elastic.Markdown/IO/Navigation/DocumentationGroup.cs
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,8 @@ public class DocumentationGroup : INodeNavigationItem<MarkdownFile, INavigationI

private readonly IRootNavigationItem<MarkdownFile, INavigationItem>? _root;

// Flattened list of this group's markdown files — the index, the group's own
// files, then descendant groups' indexes and files — populated by Resolve().
// Initialized to an empty list in the constructor; empty until Resolve() runs.
// NOTE(review): the public setter allows callers to replace the list wholesale;
// consider `{ get; private set; }` or exposing IReadOnlyList<MarkdownFile>.
public List<MarkdownFile> ResolvedMarkdownFiles { get; set; }

protected virtual IRootNavigationItem<MarkdownFile, INavigationItem> DefaultNavigation =>
_root ?? throw new InvalidOperationException("root navigation's model is not of type MarkdownFile");

Expand All @@ -70,6 +72,7 @@ protected DocumentationGroup(string folderName,
// We'll need to address this more structurally
// ReSharper disable VirtualMemberCallInConstructor
_root = toplevelTree;
ResolvedMarkdownFiles = [];
toplevelTree ??= DefaultNavigation;
if (parent?.Depth == 0)
toplevelTree = DefaultNavigation;
Expand Down Expand Up @@ -225,10 +228,22 @@ public async Task Resolve(Cancel ctx = default)
// Resolve is idempotent: bail out once the group has been resolved.
if (_resolved)
return;

// Flatten this group's entire subtree into ResolvedMarkdownFiles so the whole
// set can be minimally parsed in a single parallel pass below.
// First add the index file
ResolvedMarkdownFiles.Add(Index);
// Then add all the files in this group
ResolvedMarkdownFiles.AddRange(FilesInOrder);
// Then add all files in subgroups, breadth first
// NOTE(review): subgroups' own Resolve() is no longer awaited here (the
// previous code did `group.Resolve(token)` in parallel), so a nested group's
// _resolved flag stays false and its ResolvedMarkdownFiles stays empty —
// confirm no caller relies on resolving a subgroup directly.
// NOTE(review): if a group's Index also appears in its FilesInOrder this adds
// the same file twice — TODO confirm MinimalParseAsync is safe to run twice
// and that downstream consumers tolerate duplicates.
var treeGroups = new Queue<DocumentationGroup>(GroupsInOrder);
while (treeGroups.Count > 0)
{
var group = treeGroups.Dequeue();
ResolvedMarkdownFiles.Add(group.Index);
ResolvedMarkdownFiles.AddRange(group.FilesInOrder);
foreach (var subgroup in group.GroupsInOrder)
treeGroups.Enqueue(subgroup);
}

// Minimal-parse every collected file concurrently, honoring cancellation.
await Parallel.ForEachAsync(ResolvedMarkdownFiles, ctx, async (file, token) => await file.MinimalParseAsync(token));

// Mark the group resolved so subsequent calls are no-ops.
_resolved = true;
}
Expand Down
Loading