ClaudiaIoana550 committed on
Commit
c7e73af
1 Parent(s): ed0f798

Upload FalconForCausalLM

Files changed (2)
  1. config.json +10 -8
  2. generation_config.json +3 -0
config.json CHANGED
@@ -1,4 +1,5 @@
 {
+  "_name_or_path": "tiiuae/falcon-7b",
   "alibi": false,
   "apply_residual_connection_post_layernorm": false,
   "architectures": [
@@ -6,12 +7,12 @@
   ],
   "attention_dropout": 0.0,
   "auto_map": {
-    "AutoConfig": "configuration_falcon.FalconConfig",
-    "AutoModel": "modeling_falcon.FalconModel",
-    "AutoModelForSequenceClassification": "modeling_falcon.FalconForSequenceClassification",
-    "AutoModelForTokenClassification": "modeling_falcon.FalconForTokenClassification",
-    "AutoModelForQuestionAnswering": "modeling_falcon.FalconForQuestionAnswering",
-    "AutoModelForCausalLM": "modeling_falcon.FalconForCausalLM"
+    "AutoConfig": "tiiuae/falcon-7b--configuration_falcon.FalconConfig",
+    "AutoModel": "tiiuae/falcon-7b--modeling_falcon.FalconModel",
+    "AutoModelForCausalLM": "tiiuae/falcon-7b--modeling_falcon.FalconForCausalLM",
+    "AutoModelForQuestionAnswering": "tiiuae/falcon-7b--modeling_falcon.FalconForQuestionAnswering",
+    "AutoModelForSequenceClassification": "tiiuae/falcon-7b--modeling_falcon.FalconForSequenceClassification",
+    "AutoModelForTokenClassification": "tiiuae/falcon-7b--modeling_falcon.FalconForTokenClassification"
   },
   "bias": false,
   "bos_token_id": 11,
@@ -25,9 +26,10 @@
   "new_decoder_architecture": false,
   "num_attention_heads": 71,
   "num_hidden_layers": 32,
+  "num_kv_heads": 71,
   "parallel_attn": true,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.27.4",
+  "torch_dtype": "float16",
+  "transformers_version": "4.30.0",
   "use_cache": true,
   "vocab_size": 65024
 }
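With the new auto_map values, the class pointers use the "repo--module.Class" remote-code syntax, so transformers resolves the custom Falcon classes from the tiiuae/falcon-7b repository rather than from files in this repo. A minimal loading sketch under that assumption; the repo id below is a hypothetical placeholder, since the diff does not name this repository:

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "ClaudiaIoana550/falcon-upload"  # hypothetical placeholder for this repo's id

# trust_remote_code=True is needed because auto_map points at custom classes;
# the "tiiuae/falcon-7b--" prefix tells transformers to download that code
# from tiiuae/falcon-7b instead of looking for modeling_falcon.py here.
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.float16,  # matches the updated "torch_dtype" in config.json
    trust_remote_code=True,
)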
generation_config.json CHANGED
@@ -2,5 +2,8 @@
   "_from_model_config": true,
   "bos_token_id": 11,
   "eos_token_id": 11,
+  "max_new_tokens": 1700,
+  "pad_token_id": 11,
+  "temperature": 0,
   "transformers_version": "4.30.0"
 }
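The three added keys become default decoding parameters for any generate() call that does not override them. A sketch of the effect, reusing the model and tokenizer from the loading example above; note that temperature is only consulted when do_sample=True, which this config leaves unset:

inputs = tokenizer("Falcon is", return_tensors="pt")

# With no explicit arguments, generate() picks up max_new_tokens=1700,
# pad_token_id=11, and temperature=0 from generation_config.json. do_sample
# is not set, so decoding stays greedy and the temperature value is ignored.
output_ids = model.generate(**inputs)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))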