Longterm Wiki

Survival and Flourishing Fund (SFF)

sff · organization · Path: /knowledge-base/organizations/sff/
E567 · Entity ID (EID)
← Back to page · 21 backlinks · Quality: 59 · Updated: 2026-03-12
Page Record · database.json — merged from MDX frontmatter + Entity YAML + computed metrics at build time
{
  "id": "sff",
  "numericId": null,
  "path": "/knowledge-base/organizations/sff/",
  "filePath": "knowledge-base/organizations/sff.mdx",
  "title": "Survival and Flourishing Fund (SFF)",
  "quality": 59,
  "readerImportance": 29,
  "researchImportance": 42,
  "tacticalValue": null,
  "contentFormat": "article",
  "tractability": null,
  "neglectedness": null,
  "uncertainty": null,
  "causalLevel": null,
  "lastUpdated": "2026-03-12",
  "dateCreated": "2026-02-15",
  "llmSummary": "SFF distributed \\$141M since 2019 (primarily from Jaan Tallinn's ~\\$900M fortune), with the 2025 round totaling \\$34.33M (86% to AI safety). Uses unique S-process mechanism where 6-12 recommenders express utility functions and an algorithm allocates grants favoring projects with enthusiastic champions rather than consensus picks; median grant ~\\$100K.",
  "description": "SFF is a donor-advised fund financed primarily by Jaan Tallinn (Skype co-founder, ~\\$900M net worth) that uses a unique S-process simulation mechanism to allocate grants. Since 2019, SFF has distributed over \\$100 million with the 2025 round totaling \\$34.33M (86% to AI safety). The S-process distinguishes SFF from traditional foundations by using multiple recommenders who express preferences as mathematical utility functions, with an algorithm computing allocations that favor projects with at least one enthusiastic champion rather than consensus picks. Key grantees include MIRI, METR (formerly ARC Evals), Center for AI Safety, and various university AI safety programs.",
  "ratings": {
    "novelty": 3.5,
    "rigor": 6,
    "actionability": 5.5,
    "completeness": 7
  },
  "category": "organizations",
  "subcategory": "funders",
  "clusters": [
    "community",
    "ai-safety",
    "governance"
  ],
  "metrics": {
    "wordCount": 4838,
    "tableCount": 22,
    "diagramCount": 2,
    "internalLinks": 17,
    "externalLinks": 79,
    "footnoteCount": 0,
    "bulletRatio": 0.13,
    "sectionCount": 52,
    "hasOverview": true,
    "structuralScore": 15
  },
  "suggestedQuality": 100,
  "updateFrequency": 45,
  "evergreen": true,
  "wordCount": 4838,
  "unconvertedLinks": [
    {
      "text": "Jaan Tallinn",
      "url": "https://en.wikipedia.org/wiki/Jaan_Tallinn",
      "resourceId": "kb-6c02b94b4c2222a9"
    },
    {
      "text": "Jaan Tallinn",
      "url": "https://en.wikipedia.org/wiki/Jaan_Tallinn",
      "resourceId": "kb-6c02b94b4c2222a9"
    },
    {
      "text": "survivalandflourishing.fund",
      "url": "https://survivalandflourishing.fund/",
      "resourceId": "a01514f7c492ce4c",
      "resourceTitle": "Survival and Flourishing Fund"
    },
    {
      "text": "Survival and Flourishing Fund",
      "url": "https://survivalandflourishing.fund/",
      "resourceId": "a01514f7c492ce4c",
      "resourceTitle": "Survival and Flourishing Fund"
    },
    {
      "text": "Jaan Tallinn",
      "url": "https://en.wikipedia.org/wiki/Jaan_Tallinn",
      "resourceId": "kb-6c02b94b4c2222a9"
    },
    {
      "text": "MIRI",
      "url": "https://intelligence.org/",
      "resourceId": "86df45a5f8a9bf6d",
      "resourceTitle": "miri.org"
    },
    {
      "text": "METR",
      "url": "https://metr.org/",
      "resourceId": "45370a5153534152",
      "resourceTitle": "metr.org"
    },
    {
      "text": "Center for AI Safety",
      "url": "https://www.safe.ai/",
      "resourceId": "a306e0b63bdedbd5",
      "resourceTitle": "CAIS Surveys"
    },
    {
      "text": "Apollo Research",
      "url": "https://apolloresearch.ai/",
      "resourceId": "329d8c2e2532be3d",
      "resourceTitle": "Apollo Research"
    },
    {
      "text": "GovAI",
      "url": "https://www.governance.ai/",
      "resourceId": "f35c467b353f990f",
      "resourceTitle": "GovAI"
    },
    {
      "text": "FAR AI",
      "url": "https://far.ai/",
      "resourceId": "9199f43edaf3a03b",
      "resourceTitle": "FAR AI"
    },
    {
      "text": "SecureBio",
      "url": "https://www.securebio.org/",
      "resourceId": "81e8568b008e4245",
      "resourceTitle": "SecureBio organization"
    },
    {
      "text": "SFF website",
      "url": "https://survivalandflourishing.fund/recommendations",
      "resourceId": "aebe92781f2a19fb",
      "resourceTitle": "Survival and Flourishing Fund Recommendations"
    },
    {
      "text": "Jaan Tallinn",
      "url": "https://en.wikipedia.org/wiki/Jaan_Tallinn",
      "resourceId": "kb-6c02b94b4c2222a9"
    },
    {
      "text": "CSER",
      "url": "https://www.cser.ac.uk/",
      "resourceId": "7a14358f49765738",
      "resourceTitle": "CSER Overview - Cambridge"
    },
    {
      "text": "FLI",
      "url": "https://futureoflife.org/",
      "resourceId": "786a68a91a7d5712",
      "resourceTitle": "Future of Life Institute"
    },
    {
      "text": "CAIS",
      "url": "https://www.safe.ai/",
      "resourceId": "a306e0b63bdedbd5",
      "resourceTitle": "CAIS Surveys"
    },
    {
      "text": "UN AI Advisory Body",
      "url": "https://www.un.org/ai-advisory-body",
      "resourceId": "b34af47efb6b7918",
      "resourceTitle": "UN AI Advisory Body"
    },
    {
      "text": "Future of Life Institute's 2023 open letter",
      "url": "https://futureoflife.org/open-letter/pause-giant-ai-experiments/",
      "resourceId": "531f55cee64f6509",
      "resourceTitle": "FLI open letter"
    },
    {
      "text": "Center for AI Safety's 2023 statement",
      "url": "https://www.safe.ai/statement-on-ai-risk",
      "resourceId": "470ac236ca26008c",
      "resourceTitle": "AI Risk Statement"
    },
    {
      "text": "Anthropic",
      "url": "https://www.anthropic.com/",
      "resourceId": "afe2508ac4caf5ee",
      "resourceTitle": "Anthropic"
    },
    {
      "text": "DeepMind",
      "url": "https://deepmind.google/",
      "resourceId": "0ef9b0fe0f3c92b4",
      "resourceTitle": "Google DeepMind"
    },
    {
      "text": "According to Tallinn's 2024 philanthropy overview",
      "url": "https://www.lesswrong.com/posts/8ojWtREJjKmyvWdDb/jaan-tallinn-s-2024-philanthropy-overview",
      "resourceId": "kb-1a997a24c0ac3ec5"
    },
    {
      "text": "Analysis of the AI safety funding landscape",
      "url": "https://forum.effectivealtruism.org/posts/XdhwXppfqrpPL2YDX/an-overview-of-the-ai-safety-funding-situation",
      "resourceId": "80125fcaf04609b8",
      "resourceTitle": "Overview of AI Safety Funding"
    },
    {
      "text": "MIRI",
      "url": "https://intelligence.org/",
      "resourceId": "86df45a5f8a9bf6d",
      "resourceTitle": "miri.org"
    },
    {
      "text": "Center for AI Safety",
      "url": "https://www.safe.ai/",
      "resourceId": "a306e0b63bdedbd5",
      "resourceTitle": "CAIS Surveys"
    },
    {
      "text": "METR",
      "url": "https://metr.org/",
      "resourceId": "45370a5153534152",
      "resourceTitle": "metr.org"
    },
    {
      "text": "80,000 Hours",
      "url": "https://80000hours.org/",
      "resourceId": "ec456e4a78161d43",
      "resourceTitle": "80,000 Hours methodology"
    },
    {
      "text": "GovAI",
      "url": "https://www.governance.ai/",
      "resourceId": "f35c467b353f990f",
      "resourceTitle": "GovAI"
    },
    {
      "text": "Redwood Research",
      "url": "https://www.redwoodresearch.org/",
      "resourceId": "42e7247cbc33fc4c",
      "resourceTitle": "Redwood Research: AI Control"
    },
    {
      "text": "FAR AI",
      "url": "https://far.ai/",
      "resourceId": "9199f43edaf3a03b",
      "resourceTitle": "FAR AI"
    },
    {
      "text": "Conjecture",
      "url": "https://conjecture.dev/",
      "resourceId": "b7aa1f2c839b5ee8",
      "resourceTitle": "Conjecture Blog"
    },
    {
      "text": "Speculation Grants program",
      "url": "https://survivalandflourishing.fund/speculation-grants",
      "resourceId": "kb-ccc48f8fcab1714b"
    },
    {
      "text": "Coefficient Giving",
      "url": "https://coefficientgiving.org/",
      "resourceId": "kb-360d9d206d186a79"
    },
    {
      "text": "LTFF",
      "url": "https://funds.effectivealtruism.org/funds/far-future",
      "resourceId": "9baa7f54db71864d",
      "resourceTitle": "Long-Term Future Fund"
    },
    {
      "text": "EA Forum analysis of AI safety funding",
      "url": "https://forum.effectivealtruism.org/posts/XdhwXppfqrpPL2YDX/an-overview-of-the-ai-safety-funding-situation",
      "resourceId": "80125fcaf04609b8",
      "resourceTitle": "Overview of AI Safety Funding"
    },
    {
      "text": "SFF Official Website",
      "url": "https://survivalandflourishing.fund/",
      "resourceId": "a01514f7c492ce4c",
      "resourceTitle": "Survival and Flourishing Fund"
    },
    {
      "text": "2025 Grant Recommendations",
      "url": "https://survivalandflourishing.fund/2025/recommendations",
      "resourceId": "kb-c255d8c9c9668fd1"
    },
    {
      "text": "2024 Grant Recommendations",
      "url": "https://survivalandflourishing.fund/2024/recommendations",
      "resourceId": "kb-244ccf9e43e90961"
    },
    {
      "text": "Speculation Grants Program",
      "url": "https://survivalandflourishing.fund/speculation-grants",
      "resourceId": "kb-ccc48f8fcab1714b"
    },
    {
      "text": "An Overview of the AI Safety Funding Situation",
      "url": "https://forum.effectivealtruism.org/posts/XdhwXppfqrpPL2YDX/an-overview-of-the-ai-safety-funding-situation",
      "resourceId": "80125fcaf04609b8",
      "resourceTitle": "Overview of AI Safety Funding"
    },
    {
      "text": "SFF 2025 Funding by Cause Area",
      "url": "https://forum.effectivealtruism.org/posts/vhw6R5P52qJr6opou/sff-2025-funding-by-cause-area-usd34-million-to-ai-86-bio-7",
      "resourceId": "kb-c5f8f5c5670c539f"
    },
    {
      "text": "Jaan Tallinn - Wikipedia",
      "url": "https://en.wikipedia.org/wiki/Jaan_Tallinn",
      "resourceId": "kb-6c02b94b4c2222a9"
    },
    {
      "text": "Jaan Tallinn's 2024 Philanthropy Overview",
      "url": "https://www.lesswrong.com/posts/8ojWtREJjKmyvWdDb/jaan-tallinn-s-2024-philanthropy-overview",
      "resourceId": "kb-1a997a24c0ac3ec5"
    },
    {
      "text": "SFF Website",
      "url": "https://survivalandflourishing.fund/",
      "resourceId": "a01514f7c492ce4c",
      "resourceTitle": "Survival and Flourishing Fund"
    },
    {
      "text": "Grant Recommendations History",
      "url": "https://survivalandflourishing.fund/recommendations",
      "resourceId": "aebe92781f2a19fb",
      "resourceTitle": "Survival and Flourishing Fund Recommendations"
    },
    {
      "text": "Speculation Grants",
      "url": "https://survivalandflourishing.fund/speculation-grants",
      "resourceId": "kb-ccc48f8fcab1714b"
    }
  ],
  "unconvertedLinkCount": 47,
  "convertedLinkCount": 0,
  "backlinkCount": 21,
  "hallucinationRisk": {
    "level": "high",
    "score": 75,
    "factors": [
      "biographical-claims",
      "no-citations"
    ]
  },
  "entityType": "organization",
  "redundancy": {
    "maxSimilarity": 19,
    "similarPages": [
      {
        "id": "ltff",
        "title": "Long-Term Future Fund (LTFF)",
        "path": "/knowledge-base/organizations/ltff/",
        "similarity": 19
      },
      {
        "id": "coefficient-giving",
        "title": "Coefficient Giving",
        "path": "/knowledge-base/organizations/coefficient-giving/",
        "similarity": 17
      },
      {
        "id": "anthropic-investors",
        "title": "Anthropic (Funder)",
        "path": "/knowledge-base/organizations/anthropic-investors/",
        "similarity": 16
      },
      {
        "id": "donations-list-website",
        "title": "Donations List Website",
        "path": "/knowledge-base/responses/donations-list-website/",
        "similarity": 16
      },
      {
        "id": "anthropic-ipo",
        "title": "Anthropic IPO",
        "path": "/knowledge-base/organizations/anthropic-ipo/",
        "similarity": 15
      }
    ]
  },
  "coverage": {
    "passing": 8,
    "total": 13,
    "targets": {
      "tables": 19,
      "diagrams": 2,
      "internalLinks": 39,
      "externalLinks": 24,
      "footnotes": 15,
      "references": 15
    },
    "actuals": {
      "tables": 22,
      "diagrams": 2,
      "internalLinks": 17,
      "externalLinks": 79,
      "footnotes": 0,
      "references": 28,
      "quotesWithQuotes": 0,
      "quotesTotal": 0,
      "accuracyChecked": 0,
      "accuracyTotal": 0
    },
    "items": {
      "llmSummary": "green",
      "schedule": "green",
      "entity": "green",
      "editHistory": "red",
      "overview": "green",
      "tables": "green",
      "diagrams": "green",
      "internalLinks": "amber",
      "externalLinks": "green",
      "footnotes": "red",
      "references": "green",
      "quotes": "red",
      "accuracy": "red"
    },
    "ratingsString": "N:3.5 R:6 A:5.5 C:7"
  },
  "readerRank": 455,
  "researchRank": 330,
  "recommendedScore": 154.2
}
External Links

No external links

Backlinks (21)
id | title | type | relationship
palisade-research | Palisade Research | organization |
centre-for-long-term-resilience | Centre for Long-Term Resilience | organization |
dustin-moskovitz | Dustin Moskovitz (AI Safety Funder) | person |
ai-futures-project | AI Futures Project | organization |
anthropic-ipo | Anthropic IPO | analysis |
center-for-applied-rationality | Center for Applied Rationality | organization |
cser | CSER (Centre for the Study of Existential Risk) | organization |
ea-funding-absorption-capacity | EA Funding Absorption Capacity | concept |
ea-shareholder-diversification-anthropic | EA Shareholder Diversification from Anthropic | concept |
far-ai | FAR AI | organization |
fli | Future of Life Institute (FLI) | organization |
funders-overview | Longtermist Funders (Overview) | concept |
lesswrong | LessWrong | organization |
ltff | Long-Term Future Fund (LTFF) | organization |
mats | MATS ML Alignment Theory Scholars program | organization |
openai-foundation | OpenAI Foundation | organization |
quri | QURI (Quantified Uncertainty Research Institute) | organization |
redwood-research | Redwood Research | organization |
seldon-lab | Seldon Lab | organization |
jaan-tallinn | Jaan Tallinn | person |
longtermwiki-value-proposition | LongtermWiki Value Proposition | concept |
Longterm Wiki