import streamlit as st

from app_models.toxicity_MODEL import text2toxicity


def run():
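    """Render the toxicity-detection page: collect user text and display a RuBERT toxicity score."""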
    st.title('Toxicity Detection')
    st.write('This tool classifies text as toxic or non-toxic using RuBERT.')

    user_input = st.text_area("Enter text to classify", placeholder="Type your text here...")

    if st.button('Classify'):
        if not user_input.strip():
            st.warning('Please enter some text to classify.')
        else:
            toxicity_score = text2toxicity(user_input)
            st.write('Toxicity score:', toxicity_score)

            # Interpret the score for the user (0.5 as the decision threshold)
            if toxicity_score > 0.5:
                st.write("This text is likely to be considered toxic.")
            else:
                st.write("This text is likely to be considered non-toxic.")