from openai import AsyncOpenAI
import panel as pn
import httpx
import os

pn.extension()

# Crossref works API; the query and number of rows are passed as request params
API_URL = "https://api.crossref.org/works"
# API_KEY = os.environ["ELSEVIER_API_KEY"]
# URL_FMT = "https://api.elsevier.com/content/search/scopus"

DEFAULT_PROMPT_TEMPLATE = """
Here are the papers related to {query}

Help me summarize these into bullet points, readable within 2 minutes.

{items}
"""


async def get_relevant_papers(query, rows):
    """Fetch papers matching the query from the Crossref works API."""
    params = {
        "query.bibliographic": query,
        "rows": rows,
    }
    async with httpx.AsyncClient() as client:
        response = await client.get(API_URL, params=params)
        output = response.json()
    return output


async def process_inputs(contents, user, instance):
    """Chat callback: fetch papers, build the prompt, and stream the summary."""
    output = await get_relevant_papers(contents, rows_input.value)
    # show the raw API response in the chat feed without triggering another callback
    instance.send(pn.pane.JSON(output), respond=False, user="Sources")

    items = []
    for item in output["message"]["items"]:
        abstract = item.get("abstract", "")
        # Crossref returns the title as a list of strings
        title = item.get("title", [""])[0]
        url = item.get("URL")
        items.append(f"{title} ({url}): {abstract}")

    prompt = prompt_template_input.value.format(
        query=contents, items="\n".join(items)
    )
    instance.send(f"This is the prompt I will use:\n{prompt}", respond=False)

    # stream the completion, yielding the accumulated message so the
    # chat interface updates as tokens arrive
    response = await client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt}],
        stream=True,
    )

    message = ""
    async for chunk in response:
        part = chunk.choices[0].delta.content
        if part is not None:
            message += part
            yield message


# reads the OPENAI_API_KEY environment variable by default
client = AsyncOpenAI()

# define widgets
prompt_template_input = pn.widgets.TextAreaInput(
    value=DEFAULT_PROMPT_TEMPLATE.strip(), height=500
)
rows_input = pn.widgets.IntInput(name="Number of rows", value=2)
chat_interface = pn.chat.ChatInterface(
    callback=process_inputs, callback_exception="verbose"
)

# layout
sidebar = pn.Column(prompt_template_input, rows_input)
main = pn.Column(chat_interface)

pn.template.FastListTemplate(
    sidebar=[sidebar],
    main=[main],
    title="Elsevier Summarizer",
).servable()
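
# A minimal way to launch this app, assuming the script above is saved as
# app.py and OPENAI_API_KEY is set in the environment:
#
#     panel serve app.py
#
# Panel picks up the .servable() FastListTemplate and serves it at
# http://localhost:5006/app by default.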