Longterm Wiki

Autonomous Weapons

autonomous-weapons · risk · Path: /knowledge-base/risks/autonomous-weapons/
E35 — Entity ID (EID)
← Back to page · 19 backlinks · Quality: 56 · Updated: 2026-03-13
Page Record — database.json — merged from MDX frontmatter + Entity YAML + computed metrics at build time
{
  "id": "autonomous-weapons",
  "numericId": null,
  "path": "/knowledge-base/risks/autonomous-weapons/",
  "filePath": "knowledge-base/risks/autonomous-weapons.mdx",
  "title": "Autonomous Weapons",
  "quality": 56,
  "readerImportance": 16.5,
  "researchImportance": 26,
  "tacticalValue": null,
  "contentFormat": "article",
  "tractability": null,
  "neglectedness": null,
  "uncertainty": null,
  "causalLevel": "outcome",
  "lastUpdated": "2026-03-13",
  "dateCreated": "2026-02-15",
  "llmSummary": "Comprehensive overview of lethal autonomous weapons systems documenting their battlefield deployment (Libya 2020, Ukraine 2022-present) with AI-enabled drones achieving 70-80% hit rates versus 10-20% manual, in a \\$41.6B market growing 5.9% annually. Documents UN governance efforts (166 votes for 2024 resolution) but identifies critical accountability gaps and escalation risks from machine-speed warfare.",
  "description": "Lethal autonomous weapons systems (LAWS) represent one of the most immediate and concerning applications of AI in military contexts. The global market reached \\$41.6 billion in 2024, with the December 2024 UN resolution receiving 166 votes in favor of new regulations. Ukraine's war has become a testing ground, with AI-enhanced drones achieving 70-80% hit rates versus 10-20% for manual systems.",
  "ratings": {
    "novelty": 3.5,
    "rigor": 6.5,
    "actionability": 4,
    "completeness": 7
  },
  "category": "risks",
  "subcategory": "misuse",
  "clusters": [
    "ai-safety",
    "governance",
    "cyber"
  ],
  "metrics": {
    "wordCount": 2933,
    "tableCount": 9,
    "diagramCount": 2,
    "internalLinks": 34,
    "externalLinks": 0,
    "footnoteCount": 0,
    "bulletRatio": 0,
    "sectionCount": 18,
    "hasOverview": true,
    "structuralScore": 12
  },
  "suggestedQuality": 80,
  "updateFrequency": 21,
  "evergreen": true,
  "wordCount": 2933,
  "unconvertedLinks": [],
  "unconvertedLinkCount": 0,
  "convertedLinkCount": 33,
  "backlinkCount": 19,
  "hallucinationRisk": {
    "level": "medium",
    "score": 60,
    "factors": [
      "no-citations",
      "few-external-sources"
    ]
  },
  "entityType": "risk",
  "redundancy": {
    "maxSimilarity": 17,
    "similarPages": [
      {
        "id": "flash-dynamics",
        "title": "AI Flash Dynamics",
        "path": "/knowledge-base/risks/flash-dynamics/",
        "similarity": 17
      },
      {
        "id": "structural-risks",
        "title": "AI Structural Risk Cruxes",
        "path": "/knowledge-base/cruxes/structural-risks/",
        "similarity": 16
      },
      {
        "id": "autonomous-weapons-proliferation",
        "title": "LAWS Proliferation Model",
        "path": "/knowledge-base/models/autonomous-weapons-proliferation/",
        "similarity": 16
      },
      {
        "id": "irreversibility",
        "title": "AI-Induced Irreversibility",
        "path": "/knowledge-base/risks/irreversibility/",
        "similarity": 16
      },
      {
        "id": "agentic-ai",
        "title": "Agentic AI",
        "path": "/knowledge-base/capabilities/agentic-ai/",
        "similarity": 15
      }
    ]
  },
  "coverage": {
    "passing": 7,
    "total": 13,
    "targets": {
      "tables": 12,
      "diagrams": 1,
      "internalLinks": 23,
      "externalLinks": 15,
      "footnotes": 9,
      "references": 9
    },
    "actuals": {
      "tables": 9,
      "diagrams": 2,
      "internalLinks": 34,
      "externalLinks": 0,
      "footnotes": 0,
      "references": 18,
      "quotesWithQuotes": 0,
      "quotesTotal": 0,
      "accuracyChecked": 0,
      "accuracyTotal": 0
    },
    "items": {
      "llmSummary": "green",
      "schedule": "green",
      "entity": "green",
      "editHistory": "red",
      "overview": "green",
      "tables": "amber",
      "diagrams": "green",
      "internalLinks": "green",
      "externalLinks": "red",
      "footnotes": "red",
      "references": "green",
      "quotes": "red",
      "accuracy": "red"
    },
    "ratingsString": "N:3.5 R:6.5 A:4 C:7"
  },
  "readerRank": 543,
  "researchRank": 448,
  "recommendedScore": 142.07
}
External Links
{
  "wikipedia": "https://en.wikipedia.org/wiki/Lethal_autonomous_weapon",
  "wikidata": "https://www.wikidata.org/wiki/Q25378861",
  "grokipedia": "https://grokipedia.com/page/Lethal_autonomous_weapon"
}
Backlinks (19)
id | title | type | relationship
misuse-risks | AI Misuse Risk Cruxes | crux |
autonomous-weapons-escalation | Autonomous Weapons Escalation Model | analysis | related
autonomous-weapons-proliferation | LAWS Proliferation Model | analysis | related
cyberweapons | Cyberweapons Risk | risk |
__index__/knowledge-base/cruxes | Key Cruxes | concept |
bioweapons-attack-chain | Bioweapons Attack Chain Model | analysis |
cyberweapons-attack-automation | Autonomous Cyber Attack Timeline | analysis |
fli | Future of Life Institute (FLI) | organization |
gpai | Global Partnership on Artificial Intelligence (GPAI) | organization |
pause-ai | Pause AI | organization |
geoffrey-hinton | Geoffrey Hinton | person |
stuart-russell | Stuart Russell | person |
yoshua-bengio | Yoshua Bengio | person |
evaluation | AI Evaluation | approach |
mit-ai-risk-repository | MIT AI Risk Repository | project |
fraud | AI-Powered Fraud | risk |
__index__/knowledge-base/risks | AI Risks | concept |
misuse-overview | Misuse Risks (Overview) | concept |
near-term-risks | Key Near-Term AI Risks | risk |
Longterm Wiki