Upload folder using huggingface_hub (#1)
- 0aa1f61794e17ca7db621f1ba7c655831095f45a3b55925bda1502e15b60018a (694af08185a40480005921d97f1432a20facf1e6)
- 1ef769e0b44030f477cb5fbb94b350386d1f7a274da0b32dc31d001a7645b2fe (f3178ce982e2b1a5964df92b07b6964426245be8)
- 9acc0cf36c92a6428ef7929a1ddcf5eb9c40113224c168698b4afecbbd802f6c (3910f28e13002749eed1efcc6065a2cfe88b9e15)
- e4ed91ff183518dfb14f974785571a662d7451fc1fc1e5dc16291e5addda3f49 (99e1545b2033ff90f9dde33d172f28f120cfe417)
- 6d392797d3072ac873e70c92fb31f32c6542be8fdf4ddd1e5ff216d61811f9b8 (bfac4340d3171bc10ff8ebb910d7016662389017)
- 278ec9dd57409cffee796d8ac394ea4417025a10d1e5acf820a1f333d7935bdd (efa56316bfd4c9bc1032a3d75fe1ca9e5a0d793b)
- ab0655e3701f394a74d8af80c7cf28d9c2c05040093ae54870a443a10e0c3736 (467d4abd83f87c97750f797031fa25c6582351be)
- ddeab67628dafc592422ced550b7e3036ca7d6886767a7175dd8e903d3b2eeec (e4f0685e3fc3091b9ed7359f9bb259506758beae)
- ad4ee3985be34ca84f36ee43cfe4cae968dd869968136ece1bbd020a6f12b023 (ce5b5448ed36767c980c87fb161101bd82d6b70f)
- cf5d32303f02921b0f03d7f2e15448921b22fe8e35ca97bb9aa30fdbea2f7852 (e961b67f67a7a5346011726f2d6b6fbb24f65261)
- ff3c22635cd083172db059f3578c888b36d810ddf79b8be8560051ae8798afd3 (3d3cf9886009f48afdad466e90466450179e33c1)
- 2297707eeaaa88f8ff6cb77b71293c77ee308bd92593928fd6dbcb9ba2685fcb (89475aa7f7a4d78b45b019214ec133af207b1c01)
- 0fd31733e8773803b25f217957fe955d0ea00aa2a13cd337169d4e0b11a4f241 (1e055224b983caf2bb353d2e7f089903ca0e1fcf)
- 9c99a2a98de6ee66cb0192746a2218947fc28aa334945749b0d35826f644b2ee (d9138923c16d4ab35978f06ee474a3bf739b37c9)
- 73d8eff6a7ca460f8c8bc296da8bcda371e818ebd1f2abde28a030ddba3876cb (18f191a8bbfc13766f9830316a35b391a91c17e3)
- a1d9931bf29043ec9dd22125fe7abac827ec9b53a5d1c21e9880f823d3b4ae13 (634c695d66ca24d190cecbd4faec5e665be0cd9d)
- README.md +35 -0
- added_tokens.json +5 -0
- config.json +30 -0
- merges.txt +0 -0
- model-00001-of-00015.safetensors +3 -0
- model-00002-of-00015.safetensors +3 -0
- model-00003-of-00015.safetensors +3 -0
- model-00004-of-00015.safetensors +3 -0
- model-00005-of-00015.safetensors +3 -0
- model-00006-of-00015.safetensors +3 -0
- model-00007-of-00015.safetensors +3 -0
- model-00008-of-00015.safetensors +3 -0
- model-00009-of-00015.safetensors +3 -0
- model-00010-of-00015.safetensors +3 -0
- model-00011-of-00015.safetensors +3 -0
- model-00012-of-00015.safetensors +3 -0
- model-00013-of-00015.safetensors +3 -0
- model-00014-of-00015.safetensors +3 -0
- model-00015-of-00015.safetensors +3 -0
- model.safetensors.index.json +0 -0
- special_tokens_map.json +20 -0
- tokenizer.json +0 -0
- tokenizer_config.json +43 -0
- vocab.json +0 -0
README.md
@@ -0,0 +1,35 @@
+---
+license: other
+tags:
+- generated_from_trainer
+- axolotl
+- mlx
+base_model: Qwen/Qwen2-72B
+datasets:
+- cognitivecomputations/Dolphin-2.9
+- teknium/OpenHermes-2.5
+- m-a-p/CodeFeedback-Filtered-Instruction
+- cognitivecomputations/dolphin-coder
+- cognitivecomputations/samantha-data
+- microsoft/orca-math-word-problems-200k
+- Locutusque/function-calling-chatml
+- internlm/Agent-FLAN
+license_name: tongyi-qianwen
+license_link: https://huggingface.co/Qwen/Qwen1.5-110B/blob/main/LICENSE
+---
+
+# mlx-community/dolphin-2.9.2-qwen2-72b-8bit
+This model was converted to MLX format from [`cognitivecomputations/dolphin-2.9.2-qwen2-72b`](https://huggingface.co/cognitivecomputations/dolphin-2.9.2-qwen2-72b) using mlx-lm version **0.12.1**.
+Refer to the [original model card](https://huggingface.co/cognitivecomputations/dolphin-2.9.2-qwen2-72b) for more details on the model.
+## Use with mlx
+
+```bash
+pip install mlx-lm
+```
+
+```python
+from mlx_lm import load, generate
+
+model, tokenizer = load("mlx-community/dolphin-2.9.2-qwen2-72b-8bit")
+response = generate(model, tokenizer, prompt="hello", verbose=True)
+```
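As a side note on the usage snippet above: the model expects ChatML-formatted prompts (see the chat_template in tokenizer_config.json further down in this diff), which can be applied before calling generate. A minimal sketch, assuming the tokenizer returned by mlx_lm.load exposes the underlying Hugging Face apply_chat_template method; the message content is illustrative:

```python
from mlx_lm import load, generate

model, tokenizer = load("mlx-community/dolphin-2.9.2-qwen2-72b-8bit")

# Build a ChatML-formatted prompt from a message list (assumes the wrapped
# tokenizer delegates apply_chat_template to the Hugging Face tokenizer).
messages = [{"role": "user", "content": "Explain what an 8-bit quantized model is."}]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)

response = generate(model, tokenizer, prompt=prompt, verbose=True)
```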
added_tokens.json
@@ -0,0 +1,5 @@
+{
+  "<|endoftext|>": 151643,
+  "<|im_end|>": 151645,
+  "<|im_start|>": 151644
+}
config.json
@@ -0,0 +1,30 @@
+{
+  "architectures": [
+    "Qwen2ForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 8192,
+  "initializer_range": 0.02,
+  "intermediate_size": 29568,
+  "max_position_embeddings": 131072,
+  "max_window_layers": 28,
+  "model_type": "qwen2",
+  "num_attention_heads": 64,
+  "num_hidden_layers": 80,
+  "num_key_value_heads": 8,
+  "quantization": {
+    "group_size": 64,
+    "bits": "8"
+  },
+  "rms_norm_eps": 1e-05,
+  "rope_theta": 1000000.0,
+  "sliding_window": 4096,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.40.2",
+  "use_cache": false,
+  "use_sliding_window": false,
+  "vocab_size": 152064
+}
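The config above pins down the model geometry: 80 layers, hidden size 8192 split over 64 attention heads (head dimension 128) with 8 key/value heads (grouped-query attention), quantized to 8 bits with group size 64. A small sketch, assuming config.json has been downloaded locally, that derives these quantities:

```python
import json

# Derive basic geometry from the config.json shown above.
with open("config.json") as f:
    cfg = json.load(f)

head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]            # 8192 // 64 = 128
kv_groups = cfg["num_attention_heads"] // cfg["num_key_value_heads"]   # 64 // 8 = 8

print(f"layers={cfg['num_hidden_layers']}, head_dim={head_dim}, "
      f"query heads per KV head={kv_groups}")
print(f"quantization: {cfg['quantization']['bits']}-bit, "
      f"group size {cfg['quantization']['group_size']}")
```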
merges.txt
The diff for this file is too large to render.
model-00001-of-00015.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:542de67b0e0f42cafe2f8fb035bcebcaf8b810b9091c7536cd4b36f06c58d4a5
+size 5214291418

model-00002-of-00015.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:91444a9cde8a28becbc5409be3f766c440978f33139332453779e733798a38e3
+size 5177560124

model-00003-of-00015.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3e19cb145f5c61b6c4ea27d7cbae9a1e417dda8554ffd87995a995aca3126ab6
+size 5338047575

model-00004-of-00015.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dbb3fc7f8e246eb676da108ee1537e52bcc7b943e251ba97e9111e8c25acc0f9
+size 5338047581

model-00005-of-00015.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5c6a64600a747482c29575bea6242e59ca35946edba4effba01c33d798bbab91
+size 5177560270

model-00006-of-00015.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fe083395124b131b15626cf1fd4e58af78ca7a813d2539341cdcc284159b1305
+size 5338047562

model-00007-of-00015.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:531d8db3c9b91d06b7bc0c61c41a90eebd8e4e4b2adccb9be1509e865d364481
+size 5338047557

model-00008-of-00015.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b14f9de3590f03dff19828a35d5b892f1a29382f880257b86f228730c281a9a0
+size 5177560278

model-00009-of-00015.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e1143823c5dcdfa3417c6f746fba6f9067c34e9386e09096fe7f6642a1a7f5ea
+size 5338047594

model-00010-of-00015.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5b863f5037027ad89b5ac4795912c0aaeca7fec2c2be9f01c01bb129ab6b94f2
+size 5338047549

model-00011-of-00015.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ea650a9a1117ec874cf05a2d8ef6674a363dcb2a8e0802963e905ce9cfeb6b4d
+size 5177560280

model-00012-of-00015.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f392f31dffe7416664d68730ba0af17ef7ea6e5083b8f3e2cadb3925944be954
+size 5338047560

model-00013-of-00015.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a36f0deeba0b0451cc5fe8aa5431354fb78bbb4434f7540a62bf801edacd24c7
+size 5338047535

model-00014-of-00015.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d84e6f015be9c125d88a2758a1337c7764bb7e9fb70df4c2c251755c5cf0246f
+size 5177560290

model-00015-of-00015.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9bfb652ffccdfd68607f3c73579a32eb9e217ac2ebf72a00497ffec04e445af3
+size 3446110912
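Each shard entry above is a Git LFS pointer, so the diff records only a sha256 and a byte count; the fifteen sizes listed sum to roughly 77 GB of weights. A small sketch that totals them from a checkout where the pointers are still plain text (i.e. before `git lfs pull`):

```python
from pathlib import Path

# Sum the "size" field of every safetensors LFS pointer file in the checkout.
total = 0
for pointer in sorted(Path(".").glob("model-*-of-00015.safetensors")):
    for line in pointer.read_text().splitlines():
        if line.startswith("size "):
            total += int(line.split()[1])

print(f"total shard size: {total / 1e9:.1f} GB")  # ~77.3 GB for the shards listed above
```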
model.safetensors.index.json
The diff for this file is too large to render.
special_tokens_map.json
@@ -0,0 +1,20 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>"
+  ],
+  "eos_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json
The diff for this file is too large to render.
tokenizer_config.json
@@ -0,0 +1,43 @@
+{
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "151643": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151644": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151645": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>"
+  ],
+  "bos_token": null,
+  "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|im_end|>",
+  "errors": "replace",
+  "model_max_length": 32768,
+  "pad_token": "<|endoftext|>",
+  "split_special_tokens": false,
+  "tokenizer_class": "Qwen2Tokenizer",
+  "unk_token": null
+}
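The chat_template above is plain ChatML: every message is wrapped as <|im_start|>{role}\n{content}<|im_end|>, and an assistant header is appended when a generation prompt is requested. A minimal sketch that renders that template string with Jinja2, assuming tokenizer_config.json is available locally; the sample conversation is illustrative:

```python
import json
from jinja2 import Template

# Render the chat_template from tokenizer_config.json with a sample conversation.
with open("tokenizer_config.json") as f:
    template = Template(json.load(f)["chat_template"])

messages = [
    {"role": "system", "content": "You are Dolphin, a helpful assistant."},
    {"role": "user", "content": "hello"},
]
print(template.render(messages=messages, add_generation_prompt=True))
# <|im_start|>system
# You are Dolphin, a helpful assistant.<|im_end|>
# <|im_start|>user
# hello<|im_end|>
# <|im_start|>assistant
```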
vocab.json
The diff for this file is too large to render.