Commit

Merge branch 'main' of https://github.com/fern-api/fern into niels/csharp/editorconfig
Swimburger committed Mar 3, 2025
2 parents 9af7892 + d9fe062 commit 3ee2e9a
Showing 4 changed files with 47 additions and 18 deletions.
19 changes: 11 additions & 8 deletions packages/cli/cli/src/commands/validate/logViolations.ts
@@ -1,4 +1,5 @@
 import chalk from "chalk";
+import { eq } from "lodash-es";
 
 import { formatLog } from "@fern-api/cli-logger";
 import { assertNever } from "@fern-api/core-utils";
@@ -28,20 +29,21 @@ export function logViolations({
 }): LogViolationsResponse {
     // dedupe violations before processing
     const deduplicatedViolations: ValidationViolation[] = [];
-    const map = new Map<NodePath, ValidationViolation[]>();
+    const record: Record<string, ValidationViolation[]> = {};
     for (const violation of violations) {
-        const existingViolations = map.get(violation.nodePath) ?? [];
+        const key = JSON.stringify(violation.nodePath);
+        const existingViolations = record[key] ?? [];
         const isDuplicate = existingViolations.some(
             (existingViolation) =>
                 existingViolation.message === violation.message &&
                 existingViolation.nodePath.length === violation.nodePath.length &&
-                existingViolation.nodePath.every((item, index) => item === violation.nodePath[index]) &&
+                existingViolation.nodePath.every((item, index) => eq(item, violation.nodePath[index])) &&
                 existingViolation.relativeFilepath === violation.relativeFilepath &&
                 existingViolation.severity === violation.severity
         );
         if (!isDuplicate) {
             deduplicatedViolations.push(violation);
-            map.set(violation.nodePath, [...existingViolations, violation]);
+            record[key] = [...existingViolations, violation];
         }
     }
     violations = deduplicatedViolations;
@@ -70,12 +72,13 @@
 }
 
 function groupViolationsByNodePath(violations: ValidationViolation[]): Map<NodePath, ValidationViolation[]> {
-    const map = new Map<NodePath, ValidationViolation[]>();
+    const record: Record<string, ValidationViolation[]> = {};
     for (const violation of violations) {
-        const existingViolations = map.get(violation.nodePath) ?? [];
-        map.set(violation.nodePath, [...existingViolations, violation]);
+        const key = JSON.stringify(violation.nodePath);
+        const existingViolations = record[key] ?? [];
+        record[key] = [...existingViolations, violation];
     }
-    return map;
+    return new Map(Object.entries(record).map(([key, violations]) => [JSON.parse(key), violations]));
 }
 
 function logViolationsGroup({
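
Note on the logViolations.ts change above: a Map keyed by nodePath compares its array keys by reference, so two structurally identical node paths never land in the same bucket and duplicates slip through; keying a plain record by JSON.stringify(violation.nodePath) groups them as intended. A minimal standalone sketch of the difference, with NodePath simplified to string[] (an assumption; the real type may be richer):

    type NodePath = string[];

    // Map keyed by an array compares keys by reference, so a structurally
    // equal but distinct array never finds the earlier entry.
    const byReference = new Map<NodePath, number>();
    byReference.set(["docs", "getting-started"], 1);
    console.log(byReference.get(["docs", "getting-started"])); // undefined

    // A record keyed by the serialized path lets structurally equal paths share a bucket.
    const byValue: Record<string, number> = {};
    byValue[JSON.stringify(["docs", "getting-started"])] = 1;
    console.log(byValue[JSON.stringify(["docs", "getting-started"])]); // 1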
19 changes: 19 additions & 0 deletions packages/cli/cli/versions.yml
@@ -1,3 +1,22 @@
+- changelogEntry:
+    - summary: |
+        Re-releasing the Fern CLI to fix an issue with the published package.
+      type: fix
+  irVersion: 56
+  version: 0.56.1
+
+- changelogEntry:
+    - summary: |
+        Fixed several issues with broken link detection in docs:
+        - Fixed handling of redirects to ensure broken links aren't reported when valid redirects exist
+        - Added proper handling of relative paths from different slugs
+        - Improved URL validation and error messages
+        Running `fern docs broken-links` will now scan your docs site and log any broken internal links.
+      type: fix
+  irVersion: 56
+  version: 0.56.0
+
 - changelogEntry:
     - summary: |
         Fixed duplicate validation messages in docs validation by deduplicating violations
@@ -51,7 +51,14 @@ export async function checkIfPathnameExists({
     // if the pathname starts with `/`, it must either be a slug or a file in the current workspace
     if (pathname.startsWith("/")) {
         // only check slugs if the file is expected to be a markdown file
-        const redirectedPath = withRedirects(pathname, baseUrl, redirects);
+        let redirectedPath = withoutAnchors(withRedirects(pathname, baseUrl, redirects));
+        for (let redirectCount = 0; redirectCount < 5; ++redirectCount) {
+            const nextRedirectPath = withoutAnchors(withRedirects(redirectedPath, baseUrl, redirects));
+            if (redirectedPath === nextRedirectPath) {
+                break;
+            }
+            redirectedPath = nextRedirectPath;
+        }
 
         if (markdown && pageSlugs.has(removeLeadingSlash(redirectedPath))) {
             return true;
@@ -114,3 +121,11 @@ function withRedirects(
     }
     return result.redirect.destination;
 }
+
+function withoutAnchors(slug: string): string {
+    const hashIndex = slug.indexOf("#");
+    if (hashIndex === -1) {
+        return slug;
+    }
+    return slug.substring(0, hashIndex);
+}
@@ -83,14 +83,6 @@ export const ValidMarkdownLinks: Rule = {
         return [];
     }
 
-    let violationSeverity: "fatal" | "error" | "warning" = "error";
-
-    // If this file cannot be indexed (noindex=true, or hidden=true), then we report violations as less serious warnings
-    // since the broken links will not hurt SEO.
-    if (slugs.every((slug) => !collector.indexablePageSlugs.includes(slug))) {
-        violationSeverity = "warning";
-    }
-
     // Find all matches in the Markdown text
     const { pathnamesToCheck, violations } = collectPathnamesToCheck(content, {
         absoluteFilepath,
@@ -122,7 +114,7 @@
     });
     return {
         name: ValidMarkdownLinks.name,
-        severity: violationSeverity,
+        severity: "error" as const,
         message,
         relativeFilepath: relFilePath
     };
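
Note on the ValidMarkdownLinks change above: the rule no longer downgrades broken-link violations to warnings on non-indexable (noindex or hidden) pages; every broken internal link is now reported at "error" severity. A rough sketch of the violation shape this rule emits, using only the fields visible in this diff with made-up values:

    // Rough shape only — field names taken from this diff, values are hypothetical.
    interface ViolationSketch {
        name: string;
        severity: "fatal" | "error" | "warning";
        message: string;
        relativeFilepath: string;
    }

    const example: ViolationSketch = {
        name: "valid-markdown-links", // hypothetical rule name
        severity: "error",
        message: "Broken link: /docs/missing-page", // hypothetical message
        relativeFilepath: "pages/overview.mdx" // hypothetical path
    };

    console.log(example);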
