Eliezer Yudkowsky: Track Record
eliezer-yudkowsky-predictions
Path: /knowledge-base/people/eliezer-yudkowsky-predictions/
Entity ID (EID): E643
Page Record: database.json — merged from MDX frontmatter + Entity YAML + computed metrics at build time
{
  "id": "eliezer-yudkowsky-predictions",
  "numericId": "E643",
  "path": "/knowledge-base/people/eliezer-yudkowsky-predictions/",
  "filePath": "knowledge-base/people/eliezer-yudkowsky-predictions.mdx",
  "title": "Eliezer Yudkowsky: Track Record",
  "quality": 61,
  "readerImportance": 26,
  "researchImportance": 43,
  "tacticalValue": null,
  "contentFormat": "article",
  "tractability": null,
  "neglectedness": null,
  "uncertainty": null,
  "causalLevel": null,
  "lastUpdated": "2026-03-13",
  "dateCreated": "2026-02-15",
  "llmSummary": "Comprehensive tracking of Eliezer Yudkowsky's predictions shows clear early errors (Singularity by 2021, nanotech timelines), vindication on AI generalization (2008 FOOM debate), and acknowledged updates on deep learning. Core doom predictions (99% p(doom)) remain unfalsifiable; IMO bet won against Christiano, but pattern shows overconfidence on capabilities timelines while maintaining extreme confidence on catastrophic outcomes.",
  "description": "Documenting Eliezer Yudkowsky's AI predictions and claims - assessing accuracy, patterns of over/underconfidence, and epistemic track record",
  "ratings": {
    "focus": 8.5,
    "novelty": 2.5,
    "rigor": 6.5,
    "completeness": 8,
    "concreteness": 7.5,
    "actionability": 1
  },
  "category": "people",
  "subcategory": "track-records",
  "clusters": [
    "community",
    "ai-safety"
  ],
  "metrics": {
    "wordCount": 4152,
    "tableCount": 24,
    "diagramCount": 0,
    "internalLinks": 19,
    "externalLinks": 45,
    "footnoteCount": 0,
    "bulletRatio": 0.28,
    "sectionCount": 36,
    "hasOverview": false,
    "structuralScore": 13
  },
  "suggestedQuality": 87,
  "updateFrequency": 90,
  "evergreen": true,
  "wordCount": 4152,
  "unconvertedLinks": [
    {
      "text": "MIRI",
      "url": "https://intelligence.org/ai-foom-debate/",
      "resourceId": "bfb6662776fe5f08",
      "resourceTitle": "The Hanson-Yudkowsky AI-Foom Debate"
    },
    {
      "text": "AI-FOOM Debate",
      "url": "https://intelligence.org/ai-foom-debate/",
      "resourceId": "bfb6662776fe5f08",
      "resourceTitle": "The Hanson-Yudkowsky AI-Foom Debate"
    },
    {
      "text": "TIME",
      "url": "https://time.com/6266923/ai-eliezer-yudkowsky-open-letter-not-enough/",
      "resourceId": "d0c81bbfe41efe44",
      "resourceTitle": "Pausing AI Development Isn't Enough. We Need to Shut it All Down"
    },
    {
      "text": "EA Forum",
      "url": "https://forum.effectivealtruism.org/posts/NBgpPaz5vYe3tH4ga/on-deference-and-yudkowsky-s-ai-risk-estimates",
      "resourceId": "e1fe34e189cc4c55",
      "resourceTitle": "EA Forum surveys"
    },
    {
      "text": "AI-FOOM Debate",
      "url": "https://intelligence.org/ai-foom-debate/",
      "resourceId": "bfb6662776fe5f08",
      "resourceTitle": "The Hanson-Yudkowsky AI-Foom Debate"
    },
    {
      "text": "EA Forum",
      "url": "https://forum.effectivealtruism.org/posts/NBgpPaz5vYe3tH4ga/on-deference-and-yudkowsky-s-ai-risk-estimates",
      "resourceId": "e1fe34e189cc4c55",
      "resourceTitle": "EA Forum surveys"
    },
    {
      "text": "LessWrong",
      "url": "https://www.lesswrong.com/posts/j9Q8bRmwCgXRYAgcJ/miri-announces-new-death-with-dignity-strategy",
      "resourceId": "79b5b7f6113c8a6c",
      "resourceTitle": "Some experts like Eliezer Yudkowsky"
    },
    {
      "text": "MIRI: No Fire Alarm",
      "url": "https://intelligence.org/2017/10/13/fire-alarm/",
      "resourceId": "599472695a5fba70",
      "resourceTitle": "MIRI position"
    },
    {
      "text": "TIME",
      "url": "https://time.com/6266923/ai-eliezer-yudkowsky-open-letter-not-enough/",
      "resourceId": "d0c81bbfe41efe44",
      "resourceTitle": "Pausing AI Development Isn't Enough. We Need to Shut it All Down"
    },
    {
      "text": "Alignment Forum",
      "url": "https://www.alignmentforum.org/",
      "resourceId": "2e0c662574087c2a",
      "resourceTitle": "AI Alignment Forum"
    },
    {
      "text": "EA Forum: On Deference and Yudkowsky's AI Risk Estimates",
      "url": "https://forum.effectivealtruism.org/posts/NBgpPaz5vYe3tH4ga/on-deference-and-yudkowsky-s-ai-risk-estimates",
      "resourceId": "e1fe34e189cc4c55",
      "resourceTitle": "EA Forum surveys"
    },
    {
      "text": "TIME: The Only Way to Deal With AI? Shut It Down",
      "url": "https://time.com/6266923/ai-eliezer-yudkowsky-open-letter-not-enough/",
      "resourceId": "d0c81bbfe41efe44",
      "resourceTitle": "Pausing AI Development Isn't Enough. We Need to Shut it All Down"
    },
    {
      "text": "MIRI: Death with Dignity",
      "url": "https://www.lesswrong.com/posts/j9Q8bRmwCgXRYAgcJ/miri-announces-new-death-with-dignity-strategy",
      "resourceId": "79b5b7f6113c8a6c",
      "resourceTitle": "Some experts like Eliezer Yudkowsky"
    },
    {
      "text": "MIRI: Hanson-Yudkowsky AI-FOOM Debate",
      "url": "https://intelligence.org/ai-foom-debate/",
      "resourceId": "bfb6662776fe5f08",
      "resourceTitle": "The Hanson-Yudkowsky AI-Foom Debate"
    },
    {
      "text": "MIRI: There's No Fire Alarm for AGI",
      "url": "https://intelligence.org/2017/10/13/fire-alarm/",
      "resourceId": "599472695a5fba70",
      "resourceTitle": "MIRI position"
    }
  ],
  "unconvertedLinkCount": 15,
  "convertedLinkCount": 0,
  "backlinkCount": 1,
  "hallucinationRisk": {
    "level": "medium",
    "score": 55,
    "factors": [
      "no-citations"
    ]
  },
  "redundancy": {
    "maxSimilarity": 15,
    "similarPages": [
      {
        "id": "eliezer-yudkowsky",
        "title": "Eliezer Yudkowsky",
        "path": "/knowledge-base/people/eliezer-yudkowsky/",
        "similarity": 15
      },
      {
        "id": "case-against-xrisk",
        "title": "The Case AGAINST AI Existential Risk",
        "path": "/knowledge-base/debates/case-against-xrisk/",
        "similarity": 13
      },
      {
        "id": "case-for-xrisk",
        "title": "The Case FOR AI Existential Risk",
        "path": "/knowledge-base/debates/case-for-xrisk/",
        "similarity": 13
      },
      {
        "id": "ai-timelines",
        "title": "AI Timelines",
        "path": "/knowledge-base/models/ai-timelines/",
        "similarity": 13
      },
      {
        "id": "yann-lecun",
        "title": "Yann LeCun",
        "path": "/knowledge-base/people/yann-lecun/",
        "similarity": 13
      }
    ]
  },
  "coverage": {
    "passing": 4,
    "total": 13,
    "targets": {
      "tables": 17,
      "diagrams": 2,
      "internalLinks": 33,
      "externalLinks": 21,
      "footnotes": 12,
      "references": 12
    },
    "actuals": {
      "tables": 24,
      "diagrams": 0,
      "internalLinks": 19,
      "externalLinks": 45,
      "footnotes": 0,
      "references": 6,
      "quotesWithQuotes": 0,
      "quotesTotal": 0,
      "accuracyChecked": 0,
      "accuracyTotal": 0
    },
    "items": {
      "llmSummary": "green",
      "schedule": "green",
      "entity": "red",
      "editHistory": "red",
      "overview": "red",
      "tables": "green",
      "diagrams": "red",
      "internalLinks": "amber",
      "externalLinks": "green",
      "footnotes": "red",
      "references": "amber",
      "quotes": "red",
      "accuracy": "red"
    },
    "ratingsString": "N:2.5 R:6.5 A:1 C:8"
  },
  "readerRank": 480,
  "researchRank": 322,
  "recommendedScore": 156.83
}
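The record above is described as being merged from MDX frontmatter, Entity YAML, and computed metrics at build time. Below is a minimal sketch of what that merge step might look like in a Node/TypeScript build script; the function and variable names are illustrative assumptions, not the site's actual code.

```typescript
// Hypothetical sketch only: merge the three sources described above
// (MDX frontmatter + Entity YAML + computed metrics) into one page record.
// Names below are assumptions, not the site's actual build API.
import * as fs from "node:fs";

type PageRecord = Record<string, unknown>;

function buildPageRecord(
  frontmatter: PageRecord,                    // parsed from the page's .mdx frontmatter
  entityYaml: PageRecord,                     // parsed from the entity's YAML (e.g. E643)
  metrics: Record<string, number | boolean>   // wordCount, tableCount, internalLinks, ...
): PageRecord {
  // Frontmatter overrides entity-level defaults; computed metrics are kept
  // under their own "metrics" key so they never clobber authored fields.
  return { ...entityYaml, ...frontmatter, metrics };
}

// Usage sketch: merge every page and write the combined database.json.
function writeDatabase(records: PageRecord[], outPath = "database.json"): void {
  fs.writeFileSync(outPath, JSON.stringify(records, null, 2));
}
```

In this sketch, frontmatter takes precedence over entity defaults, and the computed metrics sit under a nested "metrics" key, mirroring the structure of the record shown above.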
External Links
No external links
Backlinks (1)
| id | title | type | relationship |
|---|---|---|---|
| track-records-overview | Track Records (Overview) | concept | — |