Longterm Wiki

Long-Term Future Fund (LTFF)

ltff · organization · Path: /knowledge-base/organizations/ltff/
E543 — Entity ID (EID)
← Back to page · 6 backlinks · Quality: 56 · Updated: 2026-03-12
Page Record · database.json — merged from MDX frontmatter + Entity YAML + computed metrics at build time
{
  "id": "ltff",
  "numericId": null,
  "path": "/knowledge-base/organizations/ltff/",
  "filePath": "knowledge-base/organizations/ltff.mdx",
  "title": "Long-Term Future Fund (LTFF)",
  "quality": 56,
  "readerImportance": 30.5,
  "researchImportance": 42,
  "tacticalValue": null,
  "contentFormat": "article",
  "tractability": null,
  "neglectedness": null,
  "uncertainty": null,
  "causalLevel": null,
  "lastUpdated": "2026-03-12",
  "dateCreated": "2026-02-15",
  "llmSummary": "LTFF is a regranting program that has distributed $20M since 2017 (approximately $10M to AI safety) with median grants of $25K, filling a critical niche between personal savings and institutional funders like Coefficient Giving (median $257K). In 2023, LTFF granted $6.67M with a 19.3% acceptance rate, targeting 21-day decision turnarounds, and serves as an important pipeline for researchers before joining major labs or receiving larger grants.",
  "description": "LTFF is a regranting program under EA Funds that has distributed over $20 million since 2017, with approximately $10 million going to AI safety work. The fund provides fast, flexible funding primarily to individual researchers through grants with a median size of $25K, compared to Coefficient Giving's median of $257K. In 2023, LTFF granted $6.67M total with a 19.3% acceptance rate. The fund has been an early funder of notable projects including Manifold Markets ($200K in 2022), David Krueger's AI safety lab at Cambridge ($200K), and numerous MATS scholars, serving as a crucial stepping stone for researchers before receiving larger institutional grants.",
  "ratings": {
    "novelty": 2.5,
    "rigor": 5,
    "actionability": 6.5,
    "completeness": 7
  },
  "category": "organizations",
  "subcategory": "funders",
  "clusters": [
    "community",
    "ai-safety"
  ],
  "metrics": {
    "wordCount": 4765,
    "tableCount": 35,
    "diagramCount": 3,
    "internalLinks": 10,
    "externalLinks": 50,
    "footnoteCount": 0,
    "bulletRatio": 0.11,
    "sectionCount": 59,
    "hasOverview": true,
    "structuralScore": 15
  },
  "suggestedQuality": 100,
  "updateFrequency": 45,
  "evergreen": true,
  "wordCount": 4765,
  "unconvertedLinks": [
    {
      "text": "funds.effectivealtruism.org/funds/far-future",
      "url": "https://funds.effectivealtruism.org/funds/far-future",
      "resourceId": "9baa7f54db71864d",
      "resourceTitle": "Long-Term Future Fund"
    },
    {
      "text": "estimated at approximately $53K",
      "url": "https://forum.effectivealtruism.org/posts/XdhwXppfqrpPL2YDX/an-overview-of-the-ai-safety-funding-situation",
      "resourceId": "80125fcaf04609b8",
      "resourceTitle": "Overview of AI Safety Funding"
    },
    {
      "text": "cofounder of Lighthaven venue",
      "url": "https://www.complexsystemspodcast.com/episodes/bits-and-bricks-oliver-habryka/",
      "resourceId": "245d3be19688db54",
      "resourceTitle": "Bits and Bricks: Oliver Habryka - Complex Systems Podcast"
    },
    {
      "text": "Overview of the AI Safety Funding Situation",
      "url": "https://forum.effectivealtruism.org/posts/XdhwXppfqrpPL2YDX/an-overview-of-the-ai-safety-funding-situation",
      "resourceId": "80125fcaf04609b8",
      "resourceTitle": "Overview of AI Safety Funding"
    },
    {
      "text": "Coefficient staff noted",
      "url": "https://forum.effectivealtruism.org/posts/XdhwXppfqrpPL2YDX/an-overview-of-the-ai-safety-funding-situation",
      "resourceId": "80125fcaf04609b8",
      "resourceTitle": "Overview of AI Safety Funding"
    },
    {
      "text": "EA Funds website",
      "url": "https://funds.effectivealtruism.org/funds/far-future",
      "resourceId": "9baa7f54db71864d",
      "resourceTitle": "Long-Term Future Fund"
    },
    {
      "text": "funds.effectivealtruism.org",
      "url": "https://funds.effectivealtruism.org/funds/far-future",
      "resourceId": "9baa7f54db71864d",
      "resourceTitle": "Long-Term Future Fund"
    },
    {
      "text": "Long-Term Future Fund Official Page",
      "url": "https://funds.effectivealtruism.org/funds/far-future",
      "resourceId": "9baa7f54db71864d",
      "resourceTitle": "Long-Term Future Fund"
    },
    {
      "text": "Overview of the AI Safety Funding Situation",
      "url": "https://forum.effectivealtruism.org/posts/XdhwXppfqrpPL2YDX/an-overview-of-the-ai-safety-funding-situation",
      "resourceId": "80125fcaf04609b8",
      "resourceTitle": "Overview of AI Safety Funding"
    },
    {
      "text": "Survival and Flourishing Fund",
      "url": "https://survivalandflourishing.fund/",
      "resourceId": "a01514f7c492ce4c",
      "resourceTitle": "Survival and Flourishing Fund"
    },
    {
      "text": "MATS Program",
      "url": "https://www.matsprogram.org/",
      "resourceId": "ba3a8bd9c8404d7b",
      "resourceTitle": "MATS Research Program"
    },
    {
      "text": "LTFF Fund Page",
      "url": "https://funds.effectivealtruism.org/funds/far-future",
      "resourceId": "9baa7f54db71864d",
      "resourceTitle": "Long-Term Future Fund"
    }
  ],
  "unconvertedLinkCount": 12,
  "convertedLinkCount": 0,
  "backlinkCount": 6,
  "hallucinationRisk": {
    "level": "high",
    "score": 75,
    "factors": [
      "biographical-claims",
      "no-citations"
    ]
  },
  "entityType": "organization",
  "redundancy": {
    "maxSimilarity": 19,
    "similarPages": [
      {
        "id": "sff",
        "title": "Survival and Flourishing Fund (SFF)",
        "path": "/knowledge-base/organizations/sff/",
        "similarity": 19
      },
      {
        "id": "coefficient-giving",
        "title": "Coefficient Giving",
        "path": "/knowledge-base/organizations/coefficient-giving/",
        "similarity": 17
      },
      {
        "id": "manifund",
        "title": "Manifund",
        "path": "/knowledge-base/organizations/manifund/",
        "similarity": 16
      },
      {
        "id": "rethink-priorities",
        "title": "Rethink Priorities",
        "path": "/knowledge-base/organizations/rethink-priorities/",
        "similarity": 15
      },
      {
        "id": "vipul-naik",
        "title": "Vipul Naik",
        "path": "/knowledge-base/people/vipul-naik/",
        "similarity": 15
      }
    ]
  },
  "coverage": {
    "passing": 7,
    "total": 13,
    "targets": {
      "tables": 19,
      "diagrams": 2,
      "internalLinks": 38,
      "externalLinks": 24,
      "footnotes": 14,
      "references": 14
    },
    "actuals": {
      "tables": 35,
      "diagrams": 3,
      "internalLinks": 10,
      "externalLinks": 50,
      "footnotes": 0,
      "references": 5,
      "quotesWithQuotes": 0,
      "quotesTotal": 0,
      "accuracyChecked": 0,
      "accuracyTotal": 0
    },
    "items": {
      "llmSummary": "green",
      "schedule": "green",
      "entity": "green",
      "editHistory": "red",
      "overview": "green",
      "tables": "green",
      "diagrams": "green",
      "internalLinks": "amber",
      "externalLinks": "green",
      "footnotes": "red",
      "references": "amber",
      "quotes": "red",
      "accuracy": "red"
    },
    "ratingsString": "N:2.5 R:5 A:6.5 C:7"
  },
  "readerRank": 444,
  "researchRank": 328,
  "recommendedScore": 148.95
}
External Links

No external links

Backlinks (6)
id · title · type/relationship
longtermist-value-comparisons · Relative Longtermist Value Comparisons · analysis
ea-funding-absorption-capacity · EA Funding Absorption Capacity · concept
funders-overview · Longtermist Funders (Overview) · concept
quri · QURI (Quantified Uncertainty Research Institute) · organization
ai-watch · AI Watch · project
research-agendas · AI Alignment Research Agenda Comparison · crux
Longterm Wiki