Joetib committed
Commit 9cab3c7
1 Parent(s): fcbc71d

Upload GPTNeoXForCausalLM

Files changed (2)
  1. config.json +2 -2
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "EleutherAI/pythia-410M",
+  "_name_or_path": "pythia-finetuned-with-context-5-steps",
   "architectures": [
     "GPTNeoXForCausalLM"
   ],
@@ -21,7 +21,7 @@
   "rotary_emb_base": 10000,
   "rotary_pct": 0.25,
   "tie_word_embeddings": false,
-  "torch_dtype": "float32",
+  "torch_dtype": "float16",
   "transformers_version": "4.35.0",
   "use_cache": true,
   "use_parallel_residual": true,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6839172e586593eb40794a99ad0a703d1c68f9364c14668f8e7760ef07291596
-size 1621370224
+oid sha256:2fb5c026ab921e6ae1452c546ab88a032f5a5369ef77efd06529e309315fae97
+size 810701896
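
The LFS pointer update is consistent with the dtype change: the file shrinks from 1,621,370,224 bytes to 810,701,896 bytes, roughly half, as expected when ~405M float32 parameters (4 bytes each) are re-saved as float16 (2 bytes each). A quick sketch to verify the stored dtype after downloading follows; the local filename model.safetensors is assumed.

import torch
from safetensors import safe_open

# Inspect the downloaded weights without loading the full model.
with safe_open("model.safetensors", framework="pt") as f:
    dtypes = {f.get_tensor(name).dtype for name in f.keys()}

print(dtypes)  # expected: {torch.float16}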