aixsatoshi committed
Commit 667e093
Parent: 0f9b99f

Update README.md

Files changed (1)
  1. README.md +0 -33
README.md CHANGED
@@ -53,40 +53,7 @@ An XML-like instruction template was adopted
 
 ```
 
-Example
-```
-from transformers import AutoModelForCausalLM, AutoTokenizer
-
-# Initialize the model and tokenizer
-device = "cuda" # the device to load the model onto
-model_name = "aixsatoshi/Honyaku-Multi-Translator-Swallow-ms7b"
-model = AutoModelForCausalLM.from_pretrained(model_name).to(device)
-tokenizer = AutoTokenizer.from_pretrained(model_name)
-
-# Move model to the appropriate device and set to bf16 precision
-model.to(device).to(dtype=torch.bfloat16)
-
-# Define the English prompt
-english_prompt = "What is your favourite condiment?"
-
-# Prepare the prompt for English to Japanese translation
-english_to_japanese_prompt = f"""
-<english>: {english_prompt} </english>
 
-<japanese>:
-"""
-
-# Encode the prompt
-encoded_prompt = tokenizer(english_to_japanese_prompt, return_tensors="pt", padding=True).to(device)
-
-# Generate a response
-generated_ids = model.generate(**encoded_prompt, max_length=4096, do_sample=True)
-
-# Decode and print the response
-decoded_translation = tokenizer.decode(generated_ids[0], skip_special_tokens=True)
-print("English to Japanese Translation:")
-print(decoded_translation)
-```
 
 ### Multi Language
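For reference, the example removed by this commit was not quite self-contained: it referenced `torch.bfloat16` without importing torch, moved the model to the device twice, and passed `padding=True` for a single prompt, which can fail when no pad token is set. Below is a minimal corrected sketch of that same snippet, keeping the original model name and XML-like prompt format; loading directly in bf16 via `torch_dtype` is an assumption, not part of the original code.

```
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the model in bf16 and move it to the GPU
device = "cuda"  # the device to load the model onto
model_name = "aixsatoshi/Honyaku-Multi-Translator-Swallow-ms7b"
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.bfloat16).to(device)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# English source text to translate
english_prompt = "What is your favourite condiment?"

# XML-like translation prompt: the source goes inside <english> tags and
# the model continues the text after the <japanese> tag
english_to_japanese_prompt = f"""
<english>: {english_prompt} </english>

<japanese>:
"""

# Tokenize the prompt and move it to the same device as the model
encoded_prompt = tokenizer(english_to_japanese_prompt, return_tensors="pt").to(device)

# Generate and decode the translation
generated_ids = model.generate(**encoded_prompt, max_length=4096, do_sample=True)
decoded_translation = tokenizer.decode(generated_ids[0], skip_special_tokens=True)
print("English to Japanese Translation:")
print(decoded_translation)
```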
 
 