From 205a13027c9fcd7d0c4a1874d6bb0ae45922deee Mon Sep 17 00:00:00 2001
From: Yagil Burowski <yagil@elementlabs.ai>
Date: Sat, 6 Jan 2024 10:39:53 -0500
Subject: [PATCH] phi-2.json: update description for MIT relicensing

Phi-2 was relicensed under the MIT license in early January 2024, so the
descriptor's description should no longer state that the model is intended
solely for research use.

---
 models/phi-2.json | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/models/phi-2.json b/models/phi-2.json
index 4ca5aa7..8ebb80a 100644
--- a/models/phi-2.json
+++ b/models/phi-2.json
@@ -2,7 +2,7 @@
     "_descriptorVersion": "0.0.1",  
     "datePublished": "2023-12-13T21:22:37",
     "name": "Phi 2",
-    "description": "Phi-2 is a 2.7 billion parameter Transformer model, an extension of Phi-1.5, with additional training data including synthetic NLP texts and curated web content. It demonstrates near state-of-the-art performance in benchmarks for common sense, language understanding, and logical reasoning within its parameter class. Phi-2 has not undergone reinforcement learning fine-tuning and is open-source, aimed at enabling safety research like toxicity reduction and bias understanding. It is designed for QA, chat, and code formats and has a context length of 2048 tokens. The model was trained on 250 billion tokens from a dataset combining AOAI GPT-3.5 synthetic data and filtered web data, using 1.4 trillion training tokens. It utilized 96xA100-80G GPUs over a span of 14 days. Phi-2 is intended solely for research use.",
+    "description": "Phi-2 is a 2.7 billion parameter Transformer model, an extension of Phi-1.5, with additional training data including synthetic NLP texts and curated web content. It demonstrates near state-of-the-art performance in benchmarks for common sense, language understanding, and logical reasoning within its parameter class. Phi-2 has not undergone reinforcement learning fine-tuning and is open-source, aimed at enabling safety research like toxicity reduction and bias understanding. It is designed for QA, chat, and code formats and has a context length of 2048 tokens. The model was trained on 250 billion tokens from a dataset combining AOAI GPT-3.5 synthetic data and filtered web data, using 1.4 trillion training tokens. It utilized 96xA100-80G GPUs over a span of 14 days. Phi-2 is released under the MIT license.",
     "author": {
       "name": "Microsoft Research",
       "url": "https://www.microsoft.com/en-us/research/",
@@ -56,4 +56,4 @@
         }
       ]
     }
-  }
\ No newline at end of file
+  }
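
As a quick local check (not part of the patch itself), the updated descriptor can be parsed to confirm it is still valid JSON and that the description now reflects the MIT license. The sketch below is a minimal example, assuming the file is read from models/phi-2.json relative to the repository root and using only fields visible in this diff.

import json

# Sketch only: load the descriptor and confirm the fields touched by this patch.
with open("models/phi-2.json", encoding="utf-8") as f:
    descriptor = json.load(f)

# "name" and "description" both appear in the diff context above.
assert descriptor["name"] == "Phi 2"
assert "released under the MIT license" in descriptor["description"]
assert "solely for research use" not in descriptor["description"]
print("phi-2.json is valid JSON and its description mentions the MIT license.")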