app.py (CHANGED)
@@ -1,9 +1,13 @@
 # Hint: this cheatsheet is magic! https://cheat-sheet.streamlit.app/
 
 import constants
+import numpy as np
 import pandas as pd
 import streamlit as st
 from transformers import BertForSequenceClassification, AutoTokenizer
+import random
+import altair as alt
+from altair import X, Y, Scale
 
 
 @st.cache_data
@@ -13,16 +17,14 @@ def convert_df(df):
 
 
 def compute_ALDi(inputs):
-    return 0
+    return random.randint(0, 100) / 100
 
 
-input_type = st.sidebar.radio(
-    "Select the input type:", [constants.CHOICE_FILE, constants.CHOICE_TEXT]
-)
-
 st.title(constants.TITLE)
 
-
+tab1, tab2 = st.tabs(["Input a Sentence", "Upload a File"])
+
+with tab1:
     sent = st.text_input("Arabic Sentence:", placeholder="Enter an Arabic sentence.")
 
     # TODO: Check if this is needed!
@@ -32,37 +34,47 @@ if input_type == constants.CHOICE_TEXT:
     ALDi_score = compute_ALDi(sent)
     st.write(ALDi_score)
 
-
+with tab2:
     file = st.file_uploader("Upload a file", type=["txt"])
     if file is not None:
        df = pd.read_csv(file, sep="\t", header=None)
        df.columns = ["Sentence"]
 
+        df = pd.concat([df, df, df])
+        df = pd.concat([df, df, df])
+        df = pd.concat([df, df, df])
+        df.reset_index(drop=True, inplace=True)
+
        # TODO: Run the model
        df["ALDi"] = df["Sentence"].apply(lambda s: compute_ALDi(s))
 
        # A horizontal rule
        st.markdown("""---""")
 
-
+        chart = (
+            alt.Chart(df.reset_index())
+            .mark_area(color="violet", opacity=0.5)
+            .encode(
+                x=X(field="index", title="Sentence Index"),
+                y=Y("ALDi", scale=Scale(domain=[0, 1]))
+            )
+        )
+        st.altair_chart(chart.interactive(), use_container_width=True)
+
+        col1, col2 = st.columns([4, 1])
 
         with col1:
+            # Display the output
+            st.table(
+                df,
+            )
+
+        with col2:
            # Add a download button
            csv = convert_df(df)
-
            st.download_button(
                label=":file_folder: Download predictions as CSV",
                data=csv,
                file_name="ALDi_scores.csv",
                mime="text/csv",
            )
-
-    # Display the output
-    st.dataframe(
-        df,
-        hide_index=True,
-    )
-
-with col2:
-    # TODO: Add the visualization
-    st.image("https://static.streamlit.io/examples/dog.jpg")
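For reference, the app leans on a small constants module: constants.TITLE is still used after this change, while constants.CHOICE_FILE and constants.CHOICE_TEXT only fed the sidebar radio selector that the tabs replace. A plausible sketch of that module; the names come from the diff, but the string values are assumptions:

# constants.py -- hypothetical sketch; string values are assumed, not from the diff
TITLE = "ALDi: Arabic Level of Dialectness"
CHOICE_TEXT = "Input a Sentence"
CHOICE_FILE = "Upload a File"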
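compute_ALDi itself is still a stub that returns a uniform random score, and the "# TODO: Run the model" comment marks where real inference belongs; the BertForSequenceClassification and AutoTokenizer imports suggest a fine-tuned BERT regressor. A minimal sketch of that step, assuming the model is published on the Hugging Face Hub; the model id and the load_model helper are illustrative assumptions, not part of this commit:

import streamlit as st
import torch
from transformers import AutoTokenizer, BertForSequenceClassification

MODEL_ID = "AMR-KELEG/Sentence-ALDi"  # assumed Hub id of the ALDi regressor

@st.cache_resource  # load tokenizer and weights once per server process
def load_model():
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
    model = BertForSequenceClassification.from_pretrained(MODEL_ID, num_labels=1)
    model.eval()
    return tokenizer, model

def compute_ALDi(inputs):
    # Tokenize one sentence and run the regression head without gradients.
    tokenizer, model = load_model()
    batch = tokenizer(inputs, return_tensors="pt", truncation=True, max_length=512)
    with torch.no_grad():
        score = model(**batch).logits.item()
    # Clamp to [0, 1] so the chart's fixed y-domain above always holds.
    return min(max(score, 0.0), 1.0)

Note the use of st.cache_resource rather than st.cache_data here: it keeps a single copy of the model in memory across reruns instead of serializing it.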
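One usage note: pd.read_csv(file, sep="\t", header=None) followed by df.columns = ["Sentence"] means the uploader expects a headerless, single-column file with one Arabic sentence per line, e.g. a hypothetical sentences.txt:

الكتاب على الطاولة
ايه الأخبار يا صاحبي
شلونك اليوم

A line containing a literal tab would parse into extra columns and make the single-name column assignment fail, so inputs should be plain one-sentence-per-line text.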