AI Safety Organizations (Overview)
safety-orgs-overview
Path: /knowledge-base/organizations/safety-orgs-overview/
E821
Entity ID (EID)
Page Record
database.json — merged from MDX frontmatter + Entity YAML + computed metrics at build time
{
"id": "safety-orgs-overview",
"numericId": "E821",
"path": "/knowledge-base/organizations/safety-orgs-overview/",
"filePath": "knowledge-base/organizations/safety-orgs-overview.mdx",
"title": "AI Safety Organizations (Overview)",
"quality": 48,
"readerImportance": 52.3,
"researchImportance": null,
"tacticalValue": 78,
"contentFormat": "article",
"tractability": null,
"neglectedness": null,
"uncertainty": null,
"causalLevel": null,
"lastUpdated": "2026-03-13",
"dateCreated": "2026-02-19",
"llmSummary": "A well-organized reference overview of ~20 AI safety organizations categorized by function (alignment research, policy, field-building), with a comparative budget/headcount table showing estimated annual budgets of $3-10M and cost-per-researcher of $143K-$400K across nine major orgs, all primarily funded by Coefficient Giving (formerly Open Philanthropy). The page is a competent compilation with useful quantitative estimates but offers little original analysis beyond organizing publicly available information.",
"description": "Overview of organizations focused on AI safety research, policy, and advocacy—from dedicated alignment labs to think tanks and field-building institutions working to reduce catastrophic and existential risks from advanced AI systems.",
"ratings": {
"focus": 8.5,
"novelty": 3.5,
"rigor": 4.5,
"completeness": 6,
"concreteness": 6.5,
"actionability": 4.5,
"objectivity": 6.5
},
"category": "organizations",
"subcategory": "safety-orgs",
"clusters": [
"ai-safety",
"community"
],
"metrics": {
"wordCount": 952,
"tableCount": 1,
"diagramCount": 0,
"internalLinks": 67,
"externalLinks": 0,
"footnoteCount": 0,
"bulletRatio": 0.44,
"sectionCount": 7,
"hasOverview": true,
"structuralScore": 7
},
"suggestedQuality": 47,
"updateFrequency": null,
"evergreen": true,
"wordCount": 952,
"unconvertedLinks": [],
"unconvertedLinkCount": 0,
"convertedLinkCount": 0,
"backlinkCount": 0,
"hallucinationRisk": {
"level": "medium",
"score": 60,
"factors": [
"no-citations",
"few-external-sources"
]
},
"redundancy": {
"maxSimilarity": 14,
"similarPages": [
{
"id": "cais",
"title": "CAIS (Center for AI Safety)",
"path": "/knowledge-base/organizations/cais/",
"similarity": 14
},
{
"id": "planning-for-frontier-lab-scaling",
"title": "Planning for Frontier Lab Scaling",
"path": "/knowledge-base/models/planning-for-frontier-lab-scaling/",
"similarity": 13
},
{
"id": "safety-research-allocation",
"title": "Safety Research Allocation Model",
"path": "/knowledge-base/models/safety-research-allocation/",
"similarity": 13
},
{
"id": "govai",
"title": "GovAI",
"path": "/knowledge-base/organizations/govai/",
"similarity": 13
},
{
"id": "lionheart-ventures",
"title": "Lionheart Ventures",
"path": "/knowledge-base/organizations/lionheart-ventures/",
"similarity": 13
}
]
},
"changeHistory": [
{
"date": "2026-02-19",
"branch": "claude/add-wiki-tables-VhyKT",
"title": "Add concrete shareable data tables to high-value pages",
"summary": "Added three concrete, screenshot-worthy data tables to high-value wiki pages: (1) OpenAI ownership/stakeholder table to openai.mdx showing the 2024-2025 PBC restructuring with Foundation ~26%, Microsoft transitioning from 49% profit share to ~2.5% equity, and Sam Altman's proposed 7% grant; (2) Budget and headcount comparison table to safety-orgs-overview.mdx covering MIRI, ARC, METR, Redwood Research, CAIS, Apollo Research, GovAI, Conjecture, and FAR AI with annual budgets, headcounts, and cost-per-researcher; (3) Per-company compensation comparison table to ai-talent-market-dynamics.mdx comparing Anthropic, OpenAI, Google DeepMind, xAI, Meta AI, and Microsoft Research by total comp range, base salary, equity type, and benefits including Anthropic's unique DAF matching program.",
"model": "sonnet-4",
"duration": "~45min"
},
{
"date": "2026-02-17",
"branch": "claude/clarify-overview-pages-ZQx72",
"title": "Clarify overview pages with new entity type",
"summary": "Added `overview` as a proper entity type throughout the system, migrated all 36 overview pages to `entityType: overview`, built overview-specific InfoBox rendering with child page links, created an OverviewBanner component, and added a knowledge-base-overview page template to Crux."
},
{
"date": "2026-02-16",
"branch": "claude/complete-new-pages-kawqG",
"title": "Fix conflicting numeric IDs + add integrity checks",
"summary": "Fixed all 9 overview pages from PR #118 which had numeric IDs (E687-E695) that conflicted with existing YAML entities. Reassigned to E710-E718. Then hardened the system to prevent recurrence:\n1. Added page-level numericId conflict detection to `build-data.mjs` (build now fails on conflicts)\n2. Created `numeric-id-integrity` global validation rule (cross-page uniqueness, format validation, entity conflict detection)\n3. Added `numericId` and `subcategory` to frontmatter Zod schema with format regex",
"pr": 168
}
],
"coverage": {
"passing": 4,
"total": 13,
"targets": {
"tables": 4,
"diagrams": 0,
"internalLinks": 8,
"externalLinks": 5,
"footnotes": 3,
"references": 3
},
"actuals": {
"tables": 1,
"diagrams": 0,
"internalLinks": 67,
"externalLinks": 0,
"footnotes": 0,
"references": 0,
"quotesWithQuotes": 0,
"quotesTotal": 0,
"accuracyChecked": 0,
"accuracyTotal": 0
},
"items": {
"llmSummary": "green",
"schedule": "red",
"entity": "red",
"editHistory": "green",
"overview": "green",
"tables": "amber",
"diagrams": "red",
"internalLinks": "green",
"externalLinks": "red",
"footnotes": "red",
"references": "red",
"quotes": "red",
"accuracy": "red"
},
"editHistoryCount": 3,
"ratingsString": "N:3.5 R:4.5 A:4.5 C:6"
},
"readerRank": 285,
"recommendedScore": 143.48
}
External Links
No external links
Backlinks (0)
No backlinks