heikowagner committed
Commit 6f8476a
1 Parent(s): ba5bdd4
app/VectorStore/chroma-collections.parquet CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9770f8b53664f3a358faee66aa23720c091943c176225f0bf2487bd1767d872a
-size 967
+oid sha256:d55d2a2454894e49ab1d0bd33e155ffbd38b57e9503bdae4107cdfdbd72b3fe2
+size 1041
app/app.py CHANGED
@@ -45,10 +45,14 @@ else:
     st.write('You selected:', option['name'])
 
     chain = load_model.create_chain(llm, collection=option['name'], model_name=option['model_name'], metadata= option['metadata'])
-    try:
-        query = st.text_area('Ask a question:', 'Hallo how are you today?')
-        result = chain({"query": query})
-        ut.format_result_set(result)
-    finally:
-        del chain
-        torch.cuda.empty_cache()
+    query = st.text_area('Ask a question:', 'Hallo how are you today?')
+    result = chain({"query": query + " Add a Score of the propability that your answer is correct to your answer"})
+    ut.format_result_set(result)
+
+    #from langchain.chains import ConversationChain
+    #from langchain.memory import ConversationBufferMemory
+
+    #conversation = ConversationChain(
+    #    llm=chat,
+    #    memory=ConversationBufferMemory()
+    #)
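
The commented-out block at the end of the new hunk hints at a planned switch to a conversational chain with memory. Below is a minimal sketch, not part of the commit, of how that idea could be wired into the Streamlit app: the `llm` object is assumed to be the model already loaded earlier in app.py (the commented code refers to a `chat` object that is not defined in this hunk), and keeping the chain in `st.session_state` so it survives Streamlit reruns is an added assumption.

# Hypothetical sketch only: enabling the commented-out ConversationChain so
# the app keeps chat history between questions.
import streamlit as st
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory

# Assumption: reuse the llm already created in app.py and store the chain in
# st.session_state so a Streamlit rerun does not reset the conversation memory.
if "conversation" not in st.session_state:
    st.session_state["conversation"] = ConversationChain(
        llm=llm,
        memory=ConversationBufferMemory(),
    )

query = st.text_area('Ask a question:', 'Hallo how are you today?')
answer = st.session_state["conversation"].predict(input=query)
st.write(answer)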