tommymarto committed
Commit 5d872c9
1 Parent(s): d4bde9f

first attempt to hf spaces

Files changed (3)
  1. README.md +1 -1
  2. gradio.py +17 -0
  3. src/demo.py +22 -0
README.md CHANGED
@@ -5,7 +5,7 @@ colorFrom: indigo
 colorTo: gray
 sdk: gradio
 sdk_version: 3.45.2
-app_file: app.py
+app_file: gradio.py
 pinned: false
 ---
 
gradio.py ADDED
@@ -0,0 +1,17 @@
+import gradio as gr
+from hydra import compose, initialize
+from omegaconf import OmegaConf
+
+from .src.demo import App
+
+def main():
+    with initialize(version_base=None, config_path="conf", job_name="test_app"):
+        cfg = compose(config_name="config", overrides=["db=mysql", "db.user=me"])
+
+    app = App(cfg)
+
+    webapp = gr.ChatInterface(fn=app.ask_chat, examples=["hello", "hola", "merhaba"], title="LLM4SciLit")
+    webapp.launch(share=True)
+
+if __name__ == "__main__":
+    main()
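
For reference, gr.ChatInterface in Gradio 3.45.x wraps a chat function that receives the latest user message plus the conversation history and returns a string reply, which is the contract app.ask_chat is passed in to satisfy here. A minimal self-contained sketch of that contract (the echo function, example, and title below are illustrative placeholders, not part of this repo):

import gradio as gr

def echo_chat(message, history):
    # history is a list of [user_message, bot_reply] pairs from earlier turns
    return f"You said: {message}"

# Same constructor shape as in gradio.py above; the names here are placeholders.
demo = gr.ChatInterface(fn=echo_chat, examples=["hello"], title="Echo sketch")

if __name__ == "__main__":
    demo.launch()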
src/demo.py CHANGED
@@ -94,6 +94,28 @@ class App:
         self.qa_model.initialize()
         print("Ready to answer your questions 🔥🔥\n")
 
+
+    ##################################################################################################
+    # App functionalities
+
+    def ask_paper(self, line):
+        """Ask a question about a paper."""
+        paper, line = shlex.split(line)
+        filter_dict = {"paper_title": paper}
+        print(f"\nLLM4SciLit: {self.qa_model.answer_question(line, filter_dict)['result']}\n")
+
+    def ask(self, line):
+        # print(f"\nLLM4SciLit: a bunch of nonsense\n")
+        print(f"\nLLM4SciLit: {self.qa_model.answer_question(line, {})['result']}\n")
+
+    def ask_chat(self, line, history):
+        # print(f"\nLLM4SciLit: a bunch of nonsense\n")
+        return self.qa_model.answer_question(line, {})['result']
+
+
+    ##################################################################################################
+    # App modes
+
     def run_interactive(self) -> None:
         self._bootstrap()
         cli = CLIApp(self, self.cfg)
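
The new ask_paper method uses shlex.split to pull a paper title and a question out of one input line, so each part has to be a single shell-style token (quoted if it contains spaces). A standalone sketch of that parsing, with a made-up title and question for illustration:

import shlex

# ask_paper expects exactly two shell-style tokens: the paper title and the question.
line = '"Attention Is All You Need" "What problem does the paper address?"'
paper, question = shlex.split(line)

filter_dict = {"paper_title": paper}  # same filter shape ask_paper passes to answer_question
print(paper)     # Attention Is All You Need
print(question)  # What problem does the paper address?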