Transaction Details

Transaction Hash: 0x0535b40da66323d1ad66b097efba5445d2b179ed8a80b44491010209968f80f0
Block: 10156
Timestamp: Feb 19, 2026, 06:49:46 AM (UTC)
Nonce: 18
Operation Type: SET
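
The created_at and updated_at fields in the operation payload below are Unix epoch timestamps in milliseconds, and they resolve to the same instant as the block timestamp shown above. A minimal TypeScript check; the constant is copied from the payload and nothing else is assumed:

// created_at / updated_at value from the SET_VALUE payload below
const createdAt = 1771483786286;
console.log(new Date(createdAt).toISOString());
// -> "2026-02-19T06:49:46.286Z", i.e. Feb 19, 2026, 06:49:46 AM UTC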

Operation

{
  "type": "SET",
  "op_list": [
    {
      "type": "SET_VALUE",
      "ref": "/apps/knowledge/explorations/0x00ADEc28B6a845a085e03591bE7550dd68673C1C/ai|transformers|decoder-only/-OloeLkiQrn-mWd8fqeY",
      "value": {
        "topic_path": "ai/transformers/decoder-only",
        "title": "Transformer-XL: Attentive Language Models Beyond a Fixed-Length Context",
        "content": "# Transformer-XL: Attentive Language Models Beyond a Fixed-Length Context (2019)\n\n## Authors\nDai, Yang, Yang, Carbonell, Le, Salakhutdinov\n\n## Paper\nhttps://arxiv.org/abs/1901.02860\n\n## Code\nhttps://github.com/kimiyoung/transformer-xl\n\n## Key Concepts\n- Segment-level recurrence mechanism\n- Relative positional encodings\n- Longer-term dependency modeling\n\n## Builds On\n- Attention Is All You Need\n\n## Influenced\n- XLNet: Generalized Autoregressive Pretraining for Language Understanding\n\n## Summary\nExtended the Transformer with a segment-level recurrence mechanism and relative positional encodings, enabling learning dependencies beyond a fixed-length context without disrupting temporal coherence.",
        "summary": "Extended the Transformer with a segment-level recurrence mechanism and relative positional encodings, enabling learning dependencies beyond a fixed-length context without disrupting temporal coherence.",
        "depth": 2,
        "tags": "decoder-only,autoregressive,segment-recurrence,relative-positional-encoding,builds-on:transformer",
        "price": null,
        "gateway_url": null,
        "content_hash": null,
        "created_at": 1771483786286,
        "updated_at": 1771483786286
      }
    },
    {
      "type": "SET_VALUE",
      "ref": "/apps/knowledge/index/by_topic/ai|transformers|decoder-only/explorers/0x00ADEc28B6a845a085e03591bE7550dd68673C1C",
      "value": 2
    },
    {
      "type": "SET_VALUE",
      "ref": "/apps/knowledge/graph/nodes/0x00ADEc28B6a845a085e03591bE7550dd68673C1C_ai|transformers|decoder-only_-OloeLkiQrn-mWd8fqeY",
      "value": {
        "address": "0x00ADEc28B6a845a085e03591bE7550dd68673C1C",
        "topic_path": "ai/transformers/decoder-only",
        "entry_id": "-OloeLkiQrn-mWd8fqeY",
        "title": "Transformer-XL: Attentive Language Models Beyond a Fixed-Length Context",
        "depth": 2,
        "created_at": 1771483786286
      }
    }
  ]
}
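
The op_list batches three mutually consistent writes: the exploration entry itself, a by_topic index entry, and a graph node. All three refs derive from the same (address, topic_path, entry_id) triple, with "/" in the topic path replaced by "|" wherever the path serves as a single database key; the index value 2 matches the entry's depth, which suggests the index records each explorer's depth for the topic, though that is an inference from this one transaction. Below is a minimal TypeScript sketch of how a client might assemble such a payload. The type names and the buildKnowledgeSetOp helper are illustrative assumptions, not part of any official SDK, and actually signing and submitting the operation is out of scope here.

interface SetValueOp {
  type: "SET_VALUE";
  ref: string;
  value: unknown;
}

interface SetOperation {
  type: "SET";
  op_list: SetValueOp[];
}

// Field shape copied from the SET_VALUE payload above.
interface KnowledgeEntry {
  topic_path: string;
  title: string;
  content: string;
  summary: string;
  depth: number;
  tags: string;
  price: number | null;
  gateway_url: string | null;
  content_hash: string | null;
  created_at: number; // Unix epoch, milliseconds
  updated_at: number; // Unix epoch, milliseconds
}

// Hypothetical helper mirroring the ref scheme visible in this transaction:
// "/" in the topic path becomes "|" when the path is used as one database key.
function buildKnowledgeSetOp(
  address: string,
  entryId: string,
  entry: KnowledgeEntry
): SetOperation {
  const topicKey = entry.topic_path.replace(/\//g, "|");
  return {
    type: "SET",
    op_list: [
      {
        // The exploration entry itself, keyed by explorer address and topic.
        type: "SET_VALUE",
        ref: `/apps/knowledge/explorations/${address}/${topicKey}/${entryId}`,
        value: entry,
      },
      {
        // Topic index: the value 2 above equals the entry's depth, so this
        // is assumed to record the explorer's depth under the topic.
        type: "SET_VALUE",
        ref: `/apps/knowledge/index/by_topic/${topicKey}/explorers/${address}`,
        value: entry.depth,
      },
      {
        // Graph node carrying just the fields needed for cross-entry linking.
        type: "SET_VALUE",
        ref: `/apps/knowledge/graph/nodes/${address}_${topicKey}_${entryId}`,
        value: {
          address,
          topic_path: entry.topic_path,
          entry_id: entryId,
          title: entry.title,
          depth: entry.depth,
          created_at: entry.created_at,
        },
      },
    ],
  };
}

Batching the three writes in one SET keeps the entry, its index pointer, and its graph node together in a single transaction, so readers never observe one without the others, assuming the chain applies an op_list atomically, as the single transaction hash above suggests.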