Transaction Details

Transaction Hash: 0x845b571773b5aadf22d31cac7cebaa3fff16bc6b7e1ea6c592044a40a7ede5cb
Block: 10226
Timestamp: Feb 19, 2026, 06:50:56 AM
Nonce: 25
Operation Type: SET
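
The timestamp above and the created_at / updated_at fields in the operation payload below appear to share the same epoch value: 1771483856550 milliseconds corresponds to 2026-02-19 06:50:56 UTC. A minimal conversion sketch, assuming the stored values are Unix epoch milliseconds:

// Sketch: convert the recorded epoch-millisecond value to a readable date.
// Assumes created_at / updated_at are Unix epoch milliseconds (UTC).
const createdAtMs = 1771483856550;
const asDate = new Date(createdAtMs);
console.log(asDate.toISOString()); // "2026-02-19T06:50:56.550Z"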

Operation

{
  "type": "SET",
  "op_list": [
    {
      "type": "SET_VALUE",
      "ref": "/apps/knowledge/explorations/0x00ADEc28B6a845a085e03591bE7550dd68673C1C/ai|transformers|encoder-only/-OloebuaA784KbfWks6W",
      "value": {
        "topic_path": "ai/transformers/encoder-only",
        "title": "RoBERTa: A Robustly Optimized BERT Pretraining Approach",
        "content": "# RoBERTa: A Robustly Optimized BERT Pretraining Approach (2019)\n\n## Authors\nLiu, Ott, Goyal, Du, Joshi, Chen, Levy, Lewis, Zettlemoyer, Stoyanov\n\n## Paper\nhttps://arxiv.org/abs/1907.11692\n\n## Code\nhttps://github.com/facebookresearch/fairseq\n\n## Key Concepts\n- Dynamic masking\n- Removal of next sentence prediction\n- Larger batch sizes and more data\n\n## Builds On\n- BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding\n\n## Summary\nDemonstrated that BERT was significantly undertrained and that careful tuning of hyperparameters, training data size, and training duration can match or exceed all post-BERT methods.",
        "summary": "Demonstrated that BERT was significantly undertrained and that careful tuning of hyperparameters, training data size, and training duration can match or exceed all post-BERT methods.",
        "depth": 2,
        "tags": "encoder-only,masked-lm,bidirectional,training-optimization,builds-on:bert",
        "price": null,
        "gateway_url": null,
        "content_hash": null,
        "created_at": 1771483856550,
        "updated_at": 1771483856550
      }
    },
    {
      "type": "SET_VALUE",
      "ref": "/apps/knowledge/index/by_topic/ai|transformers|encoder-only/explorers/0x00ADEc28B6a845a085e03591bE7550dd68673C1C",
      "value": 3
    },
    {
      "type": "SET_VALUE",
      "ref": "/apps/knowledge/graph/nodes/0x00ADEc28B6a845a085e03591bE7550dd68673C1C_ai|transformers|encoder-only_-OloebuaA784KbfWks6W",
      "value": {
        "address": "0x00ADEc28B6a845a085e03591bE7550dd68673C1C",
        "topic_path": "ai/transformers/encoder-only",
        "entry_id": "-OloebuaA784KbfWks6W",
        "title": "RoBERTa: A Robustly Optimized BERT Pretraining Approach",
        "depth": 2,
        "created_at": 1771483856550
      }
    }
  ]
}
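
For reference, a minimal sketch of how a client might assemble a multi-operation SET body with the same shape as the decoded operation above (a top-level type plus an op_list of SET_VALUE entries). The path layout is taken directly from the refs shown; the signing and submission step is deliberately left abstract, since the concrete SDK or RPC call is not part of this record and is only assumed here.

// Sketch only: builds an op_list-style SET payload mirroring the decoded
// operation above. Signing/submission is intentionally abstract; any concrete
// endpoint or SDK call would be an assumption, not part of this record.

interface SetValueOp {
  type: "SET_VALUE";
  ref: string;     // database path the value is written to
  value: unknown;  // JSON value stored at that path
}

interface SetOperation {
  type: "SET";
  op_list: SetValueOp[];
}

const explorer = "0x00ADEc28B6a845a085e03591bE7550dd68673C1C";
// Topic path with "/" replaced by "|" inside path segments, as seen in the refs above.
const topicKey = "ai|transformers|encoder-only";
const entryId = "-OloebuaA784KbfWks6W";
const now = Date.now();

const operation: SetOperation = {
  type: "SET",
  op_list: [
    {
      // Main knowledge entry under the explorer's address and topic.
      type: "SET_VALUE",
      ref: `/apps/knowledge/explorations/${explorer}/${topicKey}/${entryId}`,
      value: {
        topic_path: "ai/transformers/encoder-only",
        title: "RoBERTa: A Robustly Optimized BERT Pretraining Approach",
        depth: 2,
        created_at: now,
        updated_at: now,
        // ...content, summary, tags, etc. as in the decoded payload above.
      },
    },
    {
      // Per-topic index entry keyed by the explorer's address
      // (value is 3 in the decoded operation, presumably an entry count).
      type: "SET_VALUE",
      ref: `/apps/knowledge/index/by_topic/${topicKey}/explorers/${explorer}`,
      value: 3,
    },
    {
      // Graph node keyed by address, topic key, and entry id.
      type: "SET_VALUE",
      ref: `/apps/knowledge/graph/nodes/${explorer}_${topicKey}_${entryId}`,
      value: {
        address: explorer,
        topic_path: "ai/transformers/encoder-only",
        entry_id: entryId,
        title: "RoBERTa: A Robustly Optimized BERT Pretraining Approach",
        depth: 2,
        created_at: now,
      },
    },
  ],
};

// A signed transaction would wrap this operation together with the sender's
// nonce and timestamp before being submitted to the network.
console.log(JSON.stringify(operation, null, 2));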