zetavg committed on
Commit
da8868f
1 Parent(s): 82f1bf5

add prompter from alpaca-lora

Browse files
llama_lora/utils/__init__.py ADDED
File without changes
llama_lora/utils/prompter.py ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ A dedicated helper to manage templates and prompt building.
3
+ From https://github.com/tloen/alpaca-lora/blob/main/utils/prompter.py
4
+ """
5
+
6
+ import json
7
+ import os.path as osp
8
+ from typing import Union
9
+
10
+
11
class Prompter(object):
    """Manage a JSON prompt template: build prompts and extract responses.

    The template is loaded from ``templates/<template_name>.json`` relative
    to the current working directory and is expected to provide the keys
    ``prompt_input``, ``prompt_no_input``, ``response_split`` and
    ``description``.
    """

    __slots__ = ("template", "_verbose")

    def __init__(self, template_name: str = "", verbose: bool = False):
        """Load the named template.

        Args:
            template_name: Base name of the template file (without ``.json``).
                An empty string falls back to ``"alpaca"``.
            verbose: If True, print which template was loaded and each
                generated prompt.

        Raises:
            ValueError: If the template file does not exist.
        """
        self._verbose = verbose
        if not template_name:
            # Enforce the default here, so the constructor can be called
            # with '' and will not break.
            template_name = "alpaca"
        file_name = osp.join("templates", f"{template_name}.json")
        if not osp.exists(file_name):
            raise ValueError(f"Can't read {file_name}")
        # Explicit encoding: template files are UTF-8; relying on the
        # platform's default locale encoding breaks on e.g. Windows cp1252.
        with open(file_name, encoding="utf-8") as fp:
            self.template = json.load(fp)
        if self._verbose:
            print(
                f"Using prompt template {template_name}: {self.template['description']}"
            )

    def generate_prompt(
        self,
        instruction: str,
        input: Union[None, str] = None,
        label: Union[None, str] = None,
    ) -> str:
        """Return the full prompt built from *instruction* and optional *input*.

        If a label (= response, = output) is provided, it is appended to the
        prompt. An empty-string *input* is treated the same as ``None``
        (the no-input template is used).
        """
        if input:
            res = self.template["prompt_input"].format(
                instruction=instruction, input=input
            )
        else:
            res = self.template["prompt_no_input"].format(
                instruction=instruction
            )
        if label:
            res = f"{res}{label}"
        if self._verbose:
            print(res)
        return res

    def get_response(self, output: str) -> str:
        """Return the text after the template's response marker, stripped.

        NOTE(review): raises IndexError if ``response_split`` is absent from
        *output* — callers are expected to pass model output that contains it.
        """
        return output.split(self.template["response_split"])[1].strip()