Add MOE (mixture of experts) tag
#5
by davanstrien (HF staff) - opened
README.md CHANGED
````diff
@@ -1,5 +1,7 @@
 ---
-
+license: cc-by-nc-4.0
+tags:
+- moe
 ---
 
 # Yi based MOE 2x34B with mixtral architecture
@@ -63,5 +65,4 @@ while len(prompt) > 0:
 print(tokenizer.decode(generation_output[0]))
 prompt = input("please input prompt:")
 
-```
-
+```
````
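For reference, the `moe` tag added to the front matter above is what the Hub uses for tag-based filtering. A minimal sketch of checking that filter with the `huggingface_hub` client (the five-result limit is an arbitrary choice for illustration, not part of this PR):

```python
# Minimal sketch: list Hub models carrying the `moe` tag, i.e. models
# whose README front matter includes `tags: - moe` as added in this PR.
# Requires: pip install huggingface_hub
from huggingface_hub import HfApi

api = HfApi()

# `filter` matches against model card tags; `limit` keeps the output short.
for model in api.list_models(filter="moe", limit=5):
    print(model.id, model.tags)
```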