File size: 2,169 Bytes
c0de4ff
ab13803
c0de4ff
c0130f2
1e333df
c0130f2
 
 
 
 
 
 
 
3ebfb41
c0130f2
 
 
3ebfb41
 
ab13803
c0de4ff
c0130f2
 
 
 
c0de4ff
 
c0130f2
 
ab13803
 
c0de4ff
 
c0130f2
3ebfb41
c0130f2
 
 
8551c65
 
c0130f2
3ebfb41
c0130f2
 
3ebfb41
6f5cf75
 
c0130f2
 
 
 
ab13803
3ebfb41
c0de4ff
c0130f2
 
 
3ebfb41
ab13803
 
c0de4ff
c0130f2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
de1fdf4
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
from openai import AsyncOpenAI
import panel as pn
import httpx
import os

# Initialize the Panel extension (required before building any widgets).
pn.extension()

# format this URL with query and number of rows
# Crossref "works" endpoint; queried by get_relevant_papers below.
API_URL = "https://api.crossref.org/works"
# NOTE(review): leftover Elsevier/Scopus config — presumably an earlier
# backend; confirm before deleting.
# API_KEY = os.environ["ELSEVIER_API_KEY"]
# URL_FMT = "https://api.elsevier.com/content/search/scopus"
# Prompt sent to the LLM; {query} and {items} are filled in by
# process_inputs via str.format, so both placeholders must be kept.
DEFAULT_PROMPT_TEMPLATE = """
Here are the papers related to {query}

Help me summarize these into bullet points, readable within 2 minutes.

{items}
"""


async def get_relevant_papers(query, rows):
    """Search the Crossref works API for papers matching *query*.

    Parameters
    ----------
    query : str
        Free-text bibliographic search string.
    rows : int
        Maximum number of results to request.

    Returns
    -------
    dict
        Parsed JSON response; the paper list lives under
        ``result["message"]["items"]`` (see process_inputs).

    Raises
    ------
    httpx.HTTPStatusError
        If Crossref responds with a 4xx/5xx status.
    """
    params = {
        "query.bibliographic": query,
        "rows": rows,
    }
    async with httpx.AsyncClient() as client:
        response = await client.get(API_URL, params=params)
    # Fail loudly on API errors instead of attempting to parse an
    # error body as the expected payload.
    response.raise_for_status()
    return response.json()


async def process_inputs(contents, user, instance):
    """Chat callback: fetch papers for the user's query, show the raw
    sources and prompt, then stream an LLM summary into the chat.

    Parameters
    ----------
    contents : str
        The user's chat message, used as the search query.
    user : str
        Name of the sender (unused).
    instance : pn.chat.ChatInterface
        The chat widget; intermediate messages are posted via ``send``.

    Yields
    ------
    str
        The accumulated summary so far, for streaming display.
    """
    output = await get_relevant_papers(contents, rows_input.value)
    instance.send(pn.pane.JSON(output), respond=False, user="Sources")

    # NOTE(review): Crossref typically returns "title" as a list of
    # strings; this mirrors the raw value into the prompt — confirm
    # whether the first element should be extracted instead.
    items = [
        f"{item.get('title')}({item.get('URL')}): {item.get('abstract', '')}"
        for item in output["message"]["items"]
    ]

    # Join papers into one line each rather than interpolating the
    # Python list repr into the prompt.
    prompt = prompt_template_input.value.format(
        query=contents, items="\n".join(items)
    )
    instance.send(f"This is the prompt I will use:\n{prompt}", respond=False)

    # Await the create() coroutine once to obtain the async stream,
    # then iterate it (the original awaited inside the async-for header).
    response = await client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt}],
        stream=True,
    )
    message = ""
    async for chunk in response:
        part = chunk.choices[0].delta.content
        if part is not None:
            message += part
            yield message


# Async OpenAI client used by process_inputs for streaming completions
# (reads OPENAI_API_KEY from the environment by default).
client = AsyncOpenAI()

# define widgets
# Editable prompt template; must retain the {query} and {items}
# placeholders that process_inputs fills in via str.format.
prompt_template_input = pn.widgets.TextAreaInput(
    value=DEFAULT_PROMPT_TEMPLATE.strip(), height=500
)
# Number of Crossref results to fetch per query.
rows_input = pn.widgets.IntInput(name="Number of rows", value=2)
# callback_exception="verbose" surfaces full tracebacks in the chat UI.
chat_interface = pn.chat.ChatInterface(callback=process_inputs, callback_exception="verbose")

# layout
sidebar = pn.Column(prompt_template_input, rows_input)
main = pn.Column(chat_interface)
# NOTE(review): title says "Elsevier" but the data source is Crossref —
# confirm which is intended.
pn.template.FastListTemplate(
    sidebar=[sidebar],
    main=[main],
    title="Elsevier Summarizer",
).servable()