Spaces:
Paused
using fastapi
Browse files- README.md +1 -1
- app.py +12 -12
- coreservice.py +48 -0
- js/extra.js +0 -0
- requirements.txt +2 -1
- start.py +3 -0
README.md
CHANGED
@@ -5,7 +5,7 @@ colorFrom: blue
|
|
5 |
colorTo: red
|
6 |
sdk: gradio
|
7 |
sdk_version: 3.50.2
|
8 |
-
app_file:
|
9 |
pinned: false
|
10 |
license: apache-2.0
|
11 |
---
|
|
|
5 |
colorTo: red
|
6 |
sdk: gradio
|
7 |
sdk_version: 3.50.2
|
8 |
+
app_file: start.py
|
9 |
pinned: false
|
10 |
license: apache-2.0
|
11 |
---
|
app.py
CHANGED
@@ -1,7 +1,5 @@
|
|
1 |
-
import io
|
2 |
import os
|
3 |
import re
|
4 |
-
import tarfile
|
5 |
|
6 |
from anthropic import AI_PROMPT, HUMAN_PROMPT, Anthropic
|
7 |
import gradio as gr
|
@@ -11,7 +9,12 @@ import arxiv
|
|
11 |
from arxiv_latex_extractor import get_paper_content
|
12 |
import requests
|
13 |
|
14 |
-
|
|
|
|
|
|
|
|
|
|
|
15 |
|
16 |
|
17 |
def replace_texttt(text):
|
@@ -72,10 +75,6 @@ class ContextualQA:
|
|
72 |
|
73 |
prompt = f"{HUMAN_PROMPT} {full_context} {HUMAN_PROMPT} {question} {AI_PROMPT}"
|
74 |
|
75 |
-
# save prompt on disk for examination
|
76 |
-
with open("prompt.txt", "w") as f:
|
77 |
-
f.write(prompt)
|
78 |
-
|
79 |
response = self.client.completions.create(
|
80 |
prompt=prompt,
|
81 |
stop_sequences=[HUMAN_PROMPT],
|
@@ -121,9 +120,7 @@ def load_context(paper_id):
|
|
121 |
context = f"{LEADING_PROMPT}\n{latex_source}"
|
122 |
qa_model.load_text(context)
|
123 |
|
124 |
-
# Usage
|
125 |
title, abstract = get_paper_info(paper_id)
|
126 |
-
# remove special symbols from title and abstract
|
127 |
title = replace_texttt(title)
|
128 |
abstract = replace_texttt(abstract)
|
129 |
|
@@ -193,7 +190,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
|
|
193 |
|
194 |
with gr.Column():
|
195 |
with gr.Row():
|
196 |
-
paper_id_input = gr.Textbox(label="Enter Paper ID", value="
|
197 |
btn_load = gr.Button("Load Paper")
|
198 |
qa_model = gr.State()
|
199 |
|
@@ -213,7 +210,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
|
|
213 |
gr.Markdown(
|
214 |
"## Acknowledgements\n"
|
215 |
"This project is made possible through the generous support of "
|
216 |
-
"[Anthropic](https://www.anthropic.com/), who provided free access to the `
|
217 |
)
|
218 |
|
219 |
btn_load.click(load_context, inputs=[paper_id_input], outputs=[qa_model, chatbot])
|
@@ -232,4 +229,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
|
|
232 |
|
233 |
btn_clear.click(clear_context, outputs=[chatbot])
|
234 |
|
235 |
-
demo.launch()
|
|
|
|
|
|
|
|
|
|
1 |
import os
|
2 |
import re
|
|
|
3 |
|
4 |
from anthropic import AI_PROMPT, HUMAN_PROMPT, Anthropic
|
5 |
import gradio as gr
|
|
|
9 |
from arxiv_latex_extractor import get_paper_content
|
10 |
import requests
|
11 |
|
12 |
+
from coreservice import app
|
13 |
+
from fastapi.staticfiles import StaticFiles
|
14 |
+
|
15 |
+
|
16 |
+
LEADING_PROMPT = "Read the following paper:"
|
17 |
+
|
18 |
|
19 |
|
20 |
def replace_texttt(text):
|
|
|
75 |
|
76 |
prompt = f"{HUMAN_PROMPT} {full_context} {HUMAN_PROMPT} {question} {AI_PROMPT}"
|
77 |
|
|
|
|
|
|
|
|
|
78 |
response = self.client.completions.create(
|
79 |
prompt=prompt,
|
80 |
stop_sequences=[HUMAN_PROMPT],
|
|
|
120 |
context = f"{LEADING_PROMPT}\n{latex_source}"
|
121 |
qa_model.load_text(context)
|
122 |
|
|
|
123 |
title, abstract = get_paper_info(paper_id)
|
|
|
124 |
title = replace_texttt(title)
|
125 |
abstract = replace_texttt(abstract)
|
126 |
|
|
|
190 |
|
191 |
with gr.Column():
|
192 |
with gr.Row():
|
193 |
+
paper_id_input = gr.Textbox(label="Enter Paper ID", value="2310.12103")
|
194 |
btn_load = gr.Button("Load Paper")
|
195 |
qa_model = gr.State()
|
196 |
|
|
|
210 |
gr.Markdown(
|
211 |
"## Acknowledgements\n"
|
212 |
"This project is made possible through the generous support of "
|
213 |
+
"[Anthropic](https://www.anthropic.com/), who provided free access to the `claude-2.0` API."
|
214 |
)
|
215 |
|
216 |
btn_load.click(load_context, inputs=[paper_id_input], outputs=[qa_model, chatbot])
|
|
|
229 |
|
230 |
btn_clear.click(clear_context, outputs=[chatbot])
|
231 |
|
232 |
+
# demo.launch()
|
233 |
+
|
234 |
+
app.mount("/js", StaticFiles(directory="js"), name="js")
|
235 |
+
gr.mount_gradio_app(app, demo, path="/")
|
coreservice.py
ADDED
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from fastapi import FastAPI, Request, Response

# Extra JavaScript files injected just before </body> on the root page.
filenames = ["js/extra.js"]
contents = "\n".join(
    [f"<script type='text/javascript' src='{x}'></script>" for x in filenames]
)

# Google Analytics (gtag.js) snippet, injected right after the charset tag.
ga_script = """
<!-- Google tag (gtag.js) -->
<script async src="https://www.googletagmanager.com/gtag/js?id=G-EZ77X5T529"></script>
<script>
window.dataLayer = window.dataLayer || [];
function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());

gtag('config', 'G-EZ77X5T529');
</script>
"""

app = FastAPI()


@app.middleware("http")
async def insert_js(request: Request, call_next):
    """Inject the analytics snippet and extra <script> tags into the root page.

    Only responses for path "/" are rewritten; everything else passes through
    untouched. NOTE(review): assumes "/" always serves UTF-8 HTML — confirm.
    """
    path = request.scope["path"]  # the request route
    response = await call_next(request)

    if path == "/":
        # Drain the streaming body as raw bytes and decode ONCE at the end.
        # Decoding per chunk (as a naive loop would) can split a multibyte
        # UTF-8 sequence across chunk boundaries and raise UnicodeDecodeError;
        # joining bytes first also avoids quadratic string concatenation.
        chunks = [chunk async for chunk in response.body_iterator]
        response_body = b"".join(chunks).decode()

        # Place the GA snippet immediately after the charset meta tag.
        charset_tag = '<meta charset="utf-8" />'
        if charset_tag in response_body:
            response_body = response_body.replace(charset_tag, charset_tag + ga_script)

        # Append the extra script tags just before the closing body tag.
        response_body = response_body.replace("</body>", contents + "</body>")

        # The body length changed; drop the stale header so the server
        # recomputes Content-Length for the rewritten payload.
        del response.headers["content-length"]

        return Response(
            content=response_body,
            status_code=response.status_code,
            headers=dict(response.headers),
            media_type=response.media_type,
        )

    return response
js/extra.js
ADDED
File without changes
|
requirements.txt
CHANGED
@@ -7,4 +7,5 @@ tqdm
|
|
7 |
numpy
|
8 |
arxiv
|
9 |
tiktoken
|
10 |
-
git+https://github.com/taesiri/arxiv_latex_extractor
|
|
|
|
7 |
numpy
|
8 |
arxiv
|
9 |
tiktoken
|
10 |
+
git+https://github.com/taesiri/arxiv_latex_extractor
|
11 |
+
fastapi
|
start.py
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
import subprocess

# Launch the FastAPI app (which serves the Gradio UI) on the standard
# Hugging Face Spaces port. Pass the command as an argument list with the
# default shell=False — no shell string parsing, no injection surface —
# and check=True so a failed launch raises instead of exiting silently.
subprocess.run(
    ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"],
    check=True,
)
|