---
pipeline_tag: text-generation
inference: true
widget:
- text: 'Hello!'
  example_title: Hello world
  group: Python
library_name: transformers
---

This model is randomly initialized, using the config from [state-spaces/mamba-2.8b-hf](https://huggingface.co/state-spaces/mamba-2.8b-hf/blob/main/config.json) but with much smaller dimensions.

Code:

```python
import os

import torch

import transformers
from huggingface_hub import create_repo, upload_folder

source_model_id = 'state-spaces/mamba-2.8b-hf'
tiny_random_name = 'mamba-tiny-random'
save_path = f'/tmp/yujiepan/{tiny_random_name}'
repo_id = f'yujiepan/{tiny_random_name}'

# Load the original config, then shrink every size-related field.
config = transformers.AutoConfig.from_pretrained(
    source_model_id, trust_remote_code=True)
config.hidden_size = 8
config.expand = 4
config.intermediate_size = 32  # hidden_size * expand
config.state_size = 8
config.num_hidden_layers = 2
config.n_layer = 2
config.torch_dtype = torch.bfloat16

# Build the model from the shrunken config (random weights, no download).
model = transformers.AutoModelForCausalLM.from_config(
    config, torch_dtype=torch.bfloat16,
    trust_remote_code=True,
)
model.generation_config = transformers.GenerationConfig.from_pretrained(
    source_model_id,
    trust_remote_code=True,
)

# Re-initialize all parameters with a fixed seed so the result is reproducible.
transformers.set_seed(42)
with torch.no_grad():
    for name, p in sorted(model.named_parameters()):
        print(name, p.shape)
        torch.nn.init.uniform_(p, -0.5, 0.5)

tokenizer = transformers.AutoTokenizer.from_pretrained(
    source_model_id, trust_remote_code=True)

# Smoke-test generation before uploading.
result = transformers.pipelines.pipeline(
    'text-generation',
    model=model, tokenizer=tokenizer,
    device='cuda',
    max_new_tokens=16,
)('Hello')
print(result)

model.save_pretrained(save_path)
tokenizer.save_pretrained(save_path)

os.system(f'ls -alh {save_path}')
create_repo(repo_id, exist_ok=True)
upload_folder(repo_id=repo_id, folder_path=save_path)
```
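
To try the uploaded checkpoint, here is a minimal loading sketch (the repo id comes from the script above; since the weights are random, the output will be gibberish):

```python
import transformers

# A tiny random model: outputs are meaningless, but it is handy for
# smoke-testing Mamba code paths without downloading the 2.8B weights.
pipe = transformers.pipeline(
    'text-generation',
    model='yujiepan/mamba-tiny-random',
    max_new_tokens=16,
)
print(pipe('Hello!'))
```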