ZahrizhalAli committed on
Commit
b61f4b5
1 Parent(s): c11a5e0

Delete config.json

Files changed (1)
  1. config.json +0 -36
config.json DELETED
@@ -1,36 +0,0 @@
-// 20230920102720
-// https://huggingface.co/tiiuae/falcon-7b/raw/main/config.json
-
-{
-  "_name_or_path": "tiiuae/falcon-7b",
-  "alibi": false,
-  "apply_residual_connection_post_layernorm": false,
-  "architectures": [
-    "RWForCausalLM"
-  ],
-  "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "configuration_RW.RWConfig",
-    "AutoModel": "modelling_RW.RWModel",
-    "AutoModelForSequenceClassification": "modelling_RW.RWForSequenceClassification",
-    "AutoModelForTokenClassification": "modelling_RW.RWForTokenClassification",
-    "AutoModelForQuestionAnswering": "modelling_RW.RWForQuestionAnswering",
-    "AutoModelForCausalLM": "modelling_RW.RWForCausalLM"
-  },
-  "bias": false,
-  "bos_token_id": 11,
-  "eos_token_id": 11,
-  "hidden_dropout": 0.0,
-  "hidden_size": 4544,
-  "initializer_range": 0.02,
-  "layer_norm_epsilon": 0.00001,
-  "model_type": "RefinedWebModel",
-  "multi_query": true,
-  "n_head": 71,
-  "n_layer": 32,
-  "parallel_attn": true,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.27.4",
-  "use_cache": true,
-  "vocab_size": 65024
-}
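For context, a config like the one deleted above registers custom model classes through auto_map (configuration_RW.RWConfig, modelling_RW.RWForCausalLM), so loading it through transformers generally requires trust_remote_code=True. A minimal sketch, assuming the transformers library is installed and the upstream repo tiiuae/falcon-7b (where this file was copied from) is the intended source:

# Minimal sketch: loading a config whose auto_map points at custom code in the repo.
# "tiiuae/falcon-7b" is the upstream repository the deleted file referenced,
# not necessarily this repository.
from transformers import AutoConfig, AutoModelForCausalLM

# auto_map classes live inside the model repo, so remote code must be trusted explicitly.
config = AutoConfig.from_pretrained("tiiuae/falcon-7b", trust_remote_code=True)
print(config.model_type, config.hidden_size, config.n_layer)  # RefinedWebModel 4544 32

# The model itself loads the same way (downloads the full weights):
# model = AutoModelForCausalLM.from_pretrained("tiiuae/falcon-7b", trust_remote_code=True)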