diff --git a/CLAUDE.md b/CLAUDE.md index 0a009ea..d0e259c 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -17,6 +17,12 @@ npm run build # tsc — outputs to dist/ CI runs on Node 20 and 22. CI checks: lint, format:check, typecheck, test:coverage, build. Build verifies `dist/mcp/server.js`, `dist/cli/index.js`, `dist/core/index.js` exist. +**Before every push, run this exact sequence locally:** +```bash +npm run format:check && npm run lint && npm run typecheck && npm test +``` +Do not skip any step. Do not assume "pre-existing errors" — compare the lint error count against main. If your branch has MORE errors than main, CI will fail. The pre-existing error count on main is ~39 lint errors (all in parsers/rag.ts). + ## Code Style & TypeScript - **Strict mode** with `noUncheckedIndexedAccess`, `exactOptionalPropertyTypes`, `noImplicitReturns` diff --git a/src/cli/index.ts b/src/cli/index.ts index ef71e9f..8eafbbb 100644 --- a/src/cli/index.ts +++ b/src/cli/index.ts @@ -335,6 +335,119 @@ async function handlePackConflict( return false; } +/** Try to resolve a pack from git registries; returns true if installed. */ +async function tryGitRegistryInstall( + nameOrPath: string, + opts: { fromRegistry?: string; version?: string; yes?: boolean }, + db: ReturnType, + provider: EmbeddingProvider, + installOpts: { + batchSize: number | undefined; + resumeFrom: number | undefined; + concurrency: number | undefined; + }, + reporter: ReturnType, +): Promise { + const isLocalFile = nameOrPath.endsWith(".json") || nameOrPath.endsWith(".json.gz"); + if (isLocalFile || loadRegistries().length === 0) return false; + + const { name: packName, version: specVersion } = parsePackSpecifier(nameOrPath); + const version = opts.version ?? specVersion; + + const { resolved, conflict, warnings } = resolvePackFromRegistries(packName, { + version, + registryName: opts.fromRegistry, + conflictResolution: opts.fromRegistry + ? { strategy: "explicit", registryName: opts.fromRegistry } + : opts.yes + ? 
{ strategy: "priority" } + : undefined, + }); + + for (const w of warnings) { + reporter.log(`Warning: ${w}`); + } + + if (conflict && !resolved) { + const handled = await handlePackConflict( + conflict, + packName, + version, + opts, + db, + provider, + installOpts, + reporter, + ); + if (handled) return true; + } + + if (resolved) { + await installResolvedPack(db, provider, resolved, installOpts, reporter); + return true; + } + + return false; +} + +/** Execute a pack install from git registry, URL, or local file. */ +async function executePackInstall( + nameOrPath: string, + opts: { + registry?: string; + fromRegistry?: string; + version?: string; + yes?: boolean; + batchSize?: string; + resumeFrom?: string; + concurrency?: string; + }, +): Promise { + const { db, provider } = initializeAppWithEmbedding(); + const globalOpts = program.opts(); + const reporter = createReporter(globalOpts.verbose); + + const batchSize = opts.batchSize ? parseIntOption(opts.batchSize, "--batch-size") : undefined; + const resumeFrom = opts.resumeFrom ? parseIntOption(opts.resumeFrom, "--resume-from") : undefined; + const concurrency = opts.concurrency + ? 
parseIntOption(opts.concurrency, "--concurrency") + : undefined; + + if (concurrency !== undefined && concurrency < 1) { + reporter.log('Error: "--concurrency" must be an integer greater than or equal to 1.'); + closeDatabase(); + process.exit(1); + return; + } + + const installOpts = { batchSize, resumeFrom, concurrency }; + + try { + const installed = await tryGitRegistryInstall( + nameOrPath, + opts, + db, + provider, + installOpts, + reporter, + ); + if (installed) return; + + const result = await installPack(db, provider, nameOrPath, { + registryUrl: opts.registry, + ...installOpts, + onProgress: (current, total, docTitle) => { + reporter.progress(current, total, docTitle); + }, + }); + + reporter.clearProgress(); + reportInstallResult(result, reporter); + } finally { + closeDatabase(); + } +} + const program = new Command(); program @@ -1891,86 +2004,7 @@ packCmd concurrency?: string; }, ) => { - const { db, provider } = initializeAppWithEmbedding(); - const globalOpts = program.opts(); - const reporter = createReporter(globalOpts.verbose); - - const batchSize = opts.batchSize ? parseIntOption(opts.batchSize, "--batch-size") : undefined; - const resumeFrom = opts.resumeFrom - ? parseIntOption(opts.resumeFrom, "--resume-from") - : undefined; - const concurrency = opts.concurrency - ? 
parseIntOption(opts.concurrency, "--concurrency") - : undefined; - - if (concurrency !== undefined && concurrency < 1) { - reporter.log('Error: "--concurrency" must be an integer greater than or equal to 1.'); - closeDatabase(); - process.exit(1); - return; - } - - const installOpts = { batchSize, resumeFrom, concurrency }; - - try { - // Check if this is a local file or URL-based registry install - const isLocalFile = nameOrPath.endsWith(".json") || nameOrPath.endsWith(".json.gz"); - - // Try git registry resolution if not a local file and we have registries configured - if (!isLocalFile && loadRegistries().length > 0) { - const { name: packName, version: specVersion } = parsePackSpecifier(nameOrPath); - const version = opts.version ?? specVersion; - - const { resolved, conflict, warnings } = resolvePackFromRegistries(packName, { - version, - registryName: opts.fromRegistry, - conflictResolution: opts.fromRegistry - ? { strategy: "explicit", registryName: opts.fromRegistry } - : opts.yes - ? 
{ strategy: "priority" } - : undefined, - }); - - for (const w of warnings) { - reporter.log(`Warning: ${w}`); - } - - if (conflict && !resolved) { - const handled = await handlePackConflict( - conflict, - packName, - version, - opts, - db, - provider, - installOpts, - reporter, - ); - if (handled) return; - } - - if (resolved) { - await installResolvedPack(db, provider, resolved, installOpts, reporter); - return; - } - - // Fall through to URL-based registry install if git registry resolution failed - } - - // Original URL-based or local file install - const result = await installPack(db, provider, nameOrPath, { - registryUrl: opts.registry, - ...installOpts, - onProgress: (current, total, docTitle) => { - reporter.progress(current, total, docTitle); - }, - }); - - reporter.clearProgress(); - reportInstallResult(result, reporter); - } finally { - closeDatabase(); - } + await executePackInstall(nameOrPath, opts); }, ); diff --git a/src/connectors/notion.ts b/src/connectors/notion.ts index 9d4b699..2b1ede8 100644 --- a/src/connectors/notion.ts +++ b/src/connectors/notion.ts @@ -328,37 +328,39 @@ function renderChildContent(children: NotionBlock[]): string | undefined { .join("\n"); } +/** Convert a single Notion block to markdown line(s), appending to the output array. 
*/ +function convertSingleBlock(block: NotionBlock, lines: string[]): void { + const text = getBlockText(block); + const children = (block as Record)["children"] as NotionBlock[] | undefined; + + // Handle table specially (children are inline rows) + if (block.type === "table" && children) { + lines.push(...renderTableRows(children)); + return; + } + + // Try special blocks first (ones needing type-specific data extraction) + const specialLine = convertSpecialBlock(block, text); + if (specialLine === undefined) { + const simpleLine = blockToMarkdownLine(block, text); + if (simpleLine !== undefined) lines.push(simpleLine); + } else { + lines.push(specialLine); + } + + // Render nested children (except table children which are handled above) + if (children && block.type !== "table") { + const indented = renderChildContent(children); + if (indented) lines.push(indented); + } +} + /** Convert an array of Notion blocks to markdown. */ export function convertNotionBlocks(blocks: NotionBlock[]): string { const lines: string[] = []; - for (const block of blocks) { - const text = getBlockText(block); - const children = (block as Record)["children"] as NotionBlock[] | undefined; - - // Handle table specially (children are inline rows) - if (block.type === "table" && children) { - lines.push(...renderTableRows(children)); - continue; - } - - // Try special blocks first (ones needing type-specific data extraction) - const specialLine = convertSpecialBlock(block, text); - if (specialLine === undefined) { - // Simple blocks that just need text formatting - const simpleLine = blockToMarkdownLine(block, text); - if (simpleLine !== undefined) lines.push(simpleLine); - } else { - lines.push(specialLine); - } - - // Render nested children (except table children which are handled above) - if (children && block.type !== "table") { - const indented = renderChildContent(children); - if (indented) lines.push(indented); - } + convertSingleBlock(block, lines); } - return lines.join("\n"); } 
@@ -480,6 +482,24 @@ async function syncNotionDatabase( log.debug({ id: item.id, title: dbTitle, rows: rows.length }, "Indexed Notion database"); } +/** Sync a single search result item (page or database). */ +async function syncNotionItem( + db: Database.Database, + provider: EmbeddingProvider, + token: string, + item: NotionSearchResult, + excludeSet: Set, + result: NotionSyncResult, +): Promise { + if (item.object === "page") { + const indexed = await syncNotionPage(db, provider, token, item); + if (indexed) result.pagesIndexed++; + } else if (item.object === "database") { + await syncNotionDatabase(db, provider, token, item, excludeSet); + result.databasesIndexed++; + } +} + /** Sync pages and databases from Notion into the knowledge base. */ export async function syncNotion( db: Database.Database, @@ -514,13 +534,7 @@ export async function syncNotion( } try { - if (item.object === "page") { - const indexed = await syncNotionPage(db, provider, config.token, item); - if (indexed) result.pagesIndexed++; - } else if (item.object === "database") { - await syncNotionDatabase(db, provider, config.token, item, excludeSet); - result.databasesIndexed++; - } + await syncNotionItem(db, provider, config.token, item, excludeSet, result); } catch (err) { const title = extractTitle(item); const message = err instanceof Error ? err.message : String(err); diff --git a/src/connectors/obsidian.ts b/src/connectors/obsidian.ts index 57dc563..3c59a6a 100644 --- a/src/connectors/obsidian.ts +++ b/src/connectors/obsidian.ts @@ -401,6 +401,61 @@ function deleteRemovedFiles( return deleted; } +/** Process a single vault file outcome and update tracking state. 
*/ +function applyVaultFileOutcome( + outcome: { entry: VaultFileEntry; isUpdate: boolean } | "unchanged", + relPath: string, + trackedFiles: Record, + newTrackedFiles: Record, + result: SyncResult, +): void { + if (outcome === "unchanged") { + newTrackedFiles[relPath] = trackedFiles[relPath]!; + return; + } + newTrackedFiles[relPath] = outcome.entry; + if (outcome.isUpdate) { + result.updated++; + } else { + result.added++; + } +} + +/** Sync all vault files, populating result and newTrackedFiles. */ +async function syncVaultFiles( + db: Database.Database, + provider: EmbeddingProvider, + config: ObsidianConfig, + vaultFiles: string[], + trackedFiles: Record, + newTrackedFiles: Record, + result: SyncResult, +): Promise { + const log = getLogger(); + for (const relPath of vaultFiles) { + try { + const outcome = await processVaultFile( + db, + provider, + config, + relPath, + vaultFiles, + trackedFiles[relPath], + log, + ); + applyVaultFileOutcome(outcome, relPath, trackedFiles, newTrackedFiles, result); + } catch (err) { + const errMsg = err instanceof Error ? 
err.message : String(err); + result.errors.push({ file: relPath, error: errMsg }); + log.warn({ file: relPath, err }, "Failed to sync file"); + const tracked = trackedFiles[relPath]; + if (tracked) { + newTrackedFiles[relPath] = tracked; + } + } + } +} + export async function syncObsidianVault( db: Database.Database, provider: EmbeddingProvider, @@ -431,37 +486,7 @@ export async function syncObsidianVault( const newTrackedFiles: Record = {}; const currentFileSet = new Set(vaultFiles); - for (const relPath of vaultFiles) { - try { - const outcome = await processVaultFile( - db, - provider, - config, - relPath, - vaultFiles, - trackedFiles[relPath], - log, - ); - - if (outcome === "unchanged") { - newTrackedFiles[relPath] = trackedFiles[relPath]!; - } else if (outcome.isUpdate) { - newTrackedFiles[relPath] = outcome.entry; - result.updated++; - } else { - newTrackedFiles[relPath] = outcome.entry; - result.added++; - } - } catch (err) { - const errMsg = err instanceof Error ? err.message : String(err); - result.errors.push({ file: relPath, error: errMsg }); - log.warn({ file: relPath, err }, "Failed to sync file"); - const tracked = trackedFiles[relPath]; - if (tracked) { - newTrackedFiles[relPath] = tracked; - } - } - } + await syncVaultFiles(db, provider, config, vaultFiles, trackedFiles, newTrackedFiles, result); result.deleted = deleteRemovedFiles(db, trackedFiles, currentFileSet); diff --git a/src/core/graph.ts b/src/core/graph.ts index 3f2d419..3761303 100644 --- a/src/core/graph.ts +++ b/src/core/graph.ts @@ -259,6 +259,29 @@ function computeAveragedEmbeddings(rows: ChunkEmbeddingRow[]): Map, + threshold: number, +): GraphEdge[] { + const edges: GraphEdge[] = []; + for (let j = startIndex; j < docIdList.length; j++) { + const idB = docIdList[j]; + if (!idB) continue; + const vecB = docEmbeddings.get(idB); + if (!vecB) continue; + const sim = cosineSimilarity(vecA, vecB); + if (sim >= threshold) { + edges.push({ source: idA, target: idB, type: "similar_to", 
weight: sim }); + } + } + return edges; +} + /** Compute pairwise similarity edges from averaged document embeddings. */ function computeSimilarityEdges( docEmbeddings: Map, @@ -271,16 +294,7 @@ function computeSimilarityEdges( if (!idA) continue; const vecA = docEmbeddings.get(idA); if (!vecA) continue; - for (let j = i + 1; j < docIdList.length; j++) { - const idB = docIdList[j]; - if (!idB) continue; - const vecB = docEmbeddings.get(idB); - if (!vecB) continue; - const sim = cosineSimilarity(vecA, vecB); - if (sim >= threshold) { - edges.push({ source: idA, target: idB, type: "similar_to", weight: sim }); - } - } + edges.push(...findEdgesForDocument(idA, vecA, docIdList, i + 1, docEmbeddings, threshold)); } return edges; } diff --git a/src/core/packs.ts b/src/core/packs.ts index 5dd65b1..04832dc 100644 --- a/src/core/packs.ts +++ b/src/core/packs.ts @@ -819,6 +819,72 @@ async function fetchUrlToPackDoc(url: string): Promise { return { title: fetched.title, content: fetched.content.trimEnd(), source: url, tags }; } +/** Process all file sources, collecting documents and errors. */ +async function processFileSources( + allFiles: string[], + totalCount: number, + onProgress: CreatePackFromSourceOptions["onProgress"], +): Promise<{ documents: PackDocument[]; errors: Array<{ source: string; error: string }> }> { + const log = getLogger(); + const documents: PackDocument[] = []; + const errors: Array<{ source: string; error: string }> = []; + + for (let i = 0; i < allFiles.length; i++) { + const filePath = allFiles[i]!; + onProgress?.({ file: filePath, index: i, total: totalCount }); + try { + const doc = await parseFileToPackDoc(filePath); + if (doc) documents.push(doc); + } catch (err) { + const msg = err instanceof Error ? 
err.message : String(err); + log.warn({ file: filePath, err: msg }, "Failed to parse file, skipping"); + errors.push({ source: filePath, error: msg }); + } + } + + return { documents, errors }; +} + +/** Process all URL sources, collecting documents and errors. */ +async function processUrlSources( + urls: string[], + fileOffset: number, + totalCount: number, + onProgress: CreatePackFromSourceOptions["onProgress"], +): Promise<{ documents: PackDocument[]; errors: Array<{ source: string; error: string }> }> { + const log = getLogger(); + const documents: PackDocument[] = []; + const errors: Array<{ source: string; error: string }> = []; + + for (let i = 0; i < urls.length; i++) { + const url = urls[i]!; + onProgress?.({ file: url, index: fileOffset + i, total: totalCount }); + try { + const doc = await fetchUrlToPackDoc(url); + if (doc) documents.push(doc); + } catch (err) { + const msg = err instanceof Error ? err.message : String(err); + log.warn({ url, err: msg }, "Failed to fetch URL, skipping"); + errors.push({ source: url, error: msg }); + } + } + + return { documents, errors }; +} + +/** Validate that at least one document was created, throwing if none. */ +function validateDocuments( + documents: PackDocument[], + errors: Array<{ source: string; error: string }>, +): void { + if (documents.length > 0) return; + const detail = + errors.length > 0 + ? ` (${errors.length} source(s) failed: ${errors.map((e) => e.source).join(", ")})` + : ""; + throw new ValidationError(`No documents could be created from the provided sources${detail}`); +} + /** Create a pack directly from filesystem paths and/or URLs (no database needed). */ export async function createPackFromSource( options: CreatePackFromSourceOptions, @@ -840,9 +906,6 @@ export async function createPackFromSource( const excludePatterns = options.exclude ?? []; const recursive = options.recursive ?? 
true; - const documents: PackDocument[] = []; - const errors: Array<{ source: string; error: string }> = []; - const urls: string[] = []; const fileSources: string[] = []; for (const src of options.from) { @@ -850,41 +913,15 @@ export async function createPackFromSource( } const allFiles = collectAllSourceFiles(fileSources, recursive, extensions, excludePatterns); - const totalCount = allFiles.length + urls.length; - for (let i = 0; i < allFiles.length; i++) { - const filePath = allFiles[i]!; - options.onProgress?.({ file: filePath, index: i, total: totalCount }); - try { - const doc = await parseFileToPackDoc(filePath); - if (doc) documents.push(doc); - } catch (err) { - const msg = err instanceof Error ? err.message : String(err); - log.warn({ file: filePath, err: msg }, "Failed to parse file, skipping"); - errors.push({ source: filePath, error: msg }); - } - } - for (let i = 0; i < urls.length; i++) { - const url = urls[i]!; - options.onProgress?.({ file: url, index: allFiles.length + i, total: totalCount }); - try { - const doc = await fetchUrlToPackDoc(url); - if (doc) documents.push(doc); - } catch (err) { - const msg = err instanceof Error ? err.message : String(err); - log.warn({ url, err: msg }, "Failed to fetch URL, skipping"); - errors.push({ source: url, error: msg }); - } - } + const fileResults = await processFileSources(allFiles, totalCount, options.onProgress); + const urlResults = await processUrlSources(urls, allFiles.length, totalCount, options.onProgress); - if (documents.length === 0) { - const detail = - errors.length > 0 - ? 
` (${errors.length} source(s) failed: ${errors.map((e) => e.source).join(", ")})` - : ""; - throw new ValidationError(`No documents could be created from the provided sources${detail}`); - } + const documents = [...fileResults.documents, ...urlResults.documents]; + const errors = [...fileResults.errors, ...urlResults.errors]; + + validateDocuments(documents, errors); if (errors.length > 0) { log.warn({ errorCount: errors.length, errors }, "Some sources failed during pack creation"); diff --git a/src/core/scheduler.ts b/src/core/scheduler.ts index 5ff4358..213c8a8 100644 --- a/src/core/scheduler.ts +++ b/src/core/scheduler.ts @@ -124,7 +124,7 @@ export class ConnectorScheduler { const log = getLogger(); const inFlight: Promise[] = []; for (const [key, job] of this.jobs) { - void job.task.stop(); // eslint: no-floating-promises requires void for fire-and-forget + void job.task.stop(); // NOSONAR — ESLint no-floating-promises requires void for fire-and-forget if (job.running && job.runPromise) { inFlight.push(job.runPromise); } diff --git a/src/core/spider.ts b/src/core/spider.ts index 5d11faa..d581d3b 100644 --- a/src/core/spider.ts +++ b/src/core/spider.ts @@ -398,6 +398,59 @@ function enqueueChildLinks( log.debug({ url: canonicalUrl, linksFound: links.length }, "Spider: extracted links"); } +/** Check if the spider has exceeded its total timeout. */ +function checkDeadline( + deadline: number, + stats: SpiderStats, + log: ReturnType, +): boolean { + if (Date.now() <= deadline) return false; + log.warn({ pagesFetched: stats.pagesFetched }, "Spider total timeout reached"); + stats.abortReason = "timeout"; + return true; +} + +/** Determine whether a non-seed URL should be skipped (robots, domain, path, exclude). 
 */
+async function shouldSkipNonSeedUrl(
+  url: string,
+  config: SpiderConfig,
+  robotsCache: Parameters<typeof ensureRobotsLoaded>[1],
+  stats: SpiderStats,
+  log: ReturnType<typeof getLogger>,
+): Promise<boolean> {
+  const urlOrigin = safeParseOrigin(url, config.seedOrigin);
+  const robotsRules = await ensureRobotsLoaded(urlOrigin, robotsCache, config.fetchOptions);
+  const skipReason = shouldSkipUrl(url, config, robotsRules);
+  if (!skipReason) return false;
+  log.debug({ url }, skipReason);
+  stats.pagesSkipped++;
+  return true;
+}
+
+/** Fetch a single page, returning the raw result or null on failure. */
+async function fetchSpiderPage(
+  url: string,
+  config: SpiderConfig,
+  stats: SpiderStats,
+  log: ReturnType<typeof getLogger>,
+  // NOTE(review): the original inline code logged the page's actual BFS depth;
+  // this helper hardcoded 0. Accept depth (defaulting to 0 so the existing
+  // 4-arg call site keeps compiling) and pass the queue entry's depth through.
+  depth = 0,
+): Promise<Awaited<ReturnType<typeof fetchRaw>> | null> {
+  if (stats.pagesCrawled > 0 && config.requestDelay > 0) {
+    await sleep(config.requestDelay);
+  }
+
+  log.info({ url, depth }, "Spider: fetching page");
+  stats.pagesCrawled++;
+
+  try {
+    return await fetchRaw(url, config.fetchOptions);
+  } catch (err) {
+    const msg = err instanceof Error ? err.message : String(err);
+    log.warn({ url, err: msg }, "Spider: fetch failed, skipping");
+    stats.errors.push({ url, error: msg });
+    return null;
+  }
+}
+
 /**
  * Spider a seed URL, yielding each successfully fetched page as a SpiderResult.
  * Performs BFS up to maxDepth hops and maxPages total.
@@ -432,25 +485,15 @@ export async function* spiderUrl( const deadline = Date.now() + HARD_TOTAL_TIMEOUT_MS; while (queue.length > 0 && stats.pagesFetched < config.maxPages) { - if (Date.now() > deadline) { - log.warn({ pagesFetched: stats.pagesFetched }, "Spider total timeout reached"); - stats.abortReason = "timeout"; - break; - } + if (checkDeadline(deadline, stats, log)) break; const { url, depth } = queue.shift()!; if (visited.has(url)) continue; visited.add(url); if (depth > 0) { - const urlOrigin = safeParseOrigin(url, config.seedOrigin); - const robotsRules = await ensureRobotsLoaded(urlOrigin, robotsCache, config.fetchOptions); - const skipReason = shouldSkipUrl(url, config, robotsRules); - if (skipReason) { - log.debug({ url }, skipReason); - stats.pagesSkipped++; - continue; - } + const skipped = await shouldSkipNonSeedUrl(url, config, robotsCache, stats, log); + if (skipped) continue; } if (stats.pagesFetched >= config.maxPages) { @@ -458,22 +501,8 @@ export async function* spiderUrl( break; } - if (stats.pagesCrawled > 0 && config.requestDelay > 0) { - await sleep(config.requestDelay); - } - - log.info({ url, depth }, "Spider: fetching page"); - stats.pagesCrawled++; - - let raw: Awaited>; - try { - raw = await fetchRaw(url, config.fetchOptions); - } catch (err) { - const msg = err instanceof Error ? 
err.message : String(err);
-      log.warn({ url, err: msg }, "Spider: fetch failed, skipping");
-      stats.errors.push({ url, error: msg });
-      continue;
-    }
+    const raw = await fetchSpiderPage(url, config, stats, log);
+    if (!raw) continue;
 
     const result = convertPage(raw, url, depth);
     if (result.url !== url) visited.add(result.url);
diff --git a/tests/fixtures/mock-provider.ts b/tests/fixtures/mock-provider.ts
index 1946c1c..143a617 100644
--- a/tests/fixtures/mock-provider.ts
+++ b/tests/fixtures/mock-provider.ts
@@ -11,12 +11,12 @@ export class MockEmbeddingProvider implements EmbeddingProvider {
   embedCallCount = 0;
   embedBatchCallCount = 0;
 
-  async embed(text: string): Promise<number[]> {
+  embed(text: string): Promise<number[]> {
     this.embedCallCount++;
     return Promise.resolve(this.hashToVector(text));
   }
 
-  async embedBatch(texts: string[]): Promise<number[][]> {
+  embedBatch(texts: string[]): Promise<number[][]> {
     this.embedBatchCallCount++;
     return Promise.resolve(texts.map((t) => this.hashToVector(t)));
   }
@@ -25,14 +25,16 @@ export class MockEmbeddingProvider implements EmbeddingProvider {
   private hashToVector(text: string): number[] {
     let hash = 0;
     for (let i = 0; i < text.length; i++) {
-      hash = (hash * 31 + text.charCodeAt(i)) | 0;
+      // Math.imul wraps the product to 32 bits each iteration; a plain `hash * 31`
+      // overflows to Infinity for strings longer than ~60 chars, turning every
+      // Math.sin(hash) component into NaN and producing NaN embedding vectors.
+      hash = Math.imul(hash, 31) + (text.codePointAt(i) ?? 0);
     }
     const a = Math.sin(hash) * 10000;
     const b = Math.sin(hash + 1) * 10000;
     const c = Math.sin(hash + 2) * 10000;
     const d = Math.sin(hash + 3) * 10000;
     // Normalize
-    const mag = Math.sqrt(a * a + b * b + c * c + d * d);
+    const mag = Math.hypot(a, b, c, d);
     return [a / mag, b / mag, c / mag, d / mag];
   }
 }