Fix code
Browse files
README.md
CHANGED
@@ -95,6 +95,7 @@ python3 zipnn_decompress_path.py --path .
 
 Now just run the local version of the model.
 # Use a pipeline as a high-level helper
+```python
 from transformers import pipeline
 
 messages = [
@@ -102,12 +103,15 @@ messages = [
 ]
 pipe = pipeline("text-generation", model="PATH_TO_MODEL") # "." if in directory
 pipe(messages)
+```
 
 # Load model directly
+```python
 from transformers import AutoTokenizer, AutoModelForCausalLM
 
 tokenizer = AutoTokenizer.from_pretrained("PATH_TO_MODEL") # "." if in directory
 model = AutoModelForCausalLM.from_pretrained("PATH_TO_MODEL") # "." if in directory
+```
 
 ## Prompt Template
 