File size: 3,672 Bytes
a49b72a
5a2fd52
a49b72a
 
 
 
 
 
 
 
 
5a2fd52
a49b72a
 
 
 
 
 
 
5a2fd52
 
a49b72a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b7408c7
a49b72a
5a2fd52
 
4aac95b
5a2fd52
853beb7
a49b72a
 
 
 
 
5ed69e7
73915ba
a49b72a
 
 
73915ba
a49b72a
73915ba
a49b72a
 
 
 
 
 
 
 
 
5a2fd52
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline
import torch
import pickle
import streamlit as st
# NOTE(review): `device` is assigned here but never referenced in the visible
# code — the pipelines below rely on their defaults / device_map instead.
device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")

from translate import Translator

def init_session_state():
    """Guarantee the cumulative chat history key exists in Streamlit state.

    Creates ``st.session_state.history`` as an empty string on first run;
    leaves any existing value untouched on reruns.
    """
    st.session_state.setdefault('history', "")

# Initialize session state
# Ensure st.session_state.history exists before any widget logic runs below.
init_session_state()
# model_name = "MoritzLaurer/mDeBERTa-v3-base-mnli-xnli"
# tokenizer = AutoTokenizer.from_pretrained(model_name)
# model = AutoModelForSequenceClassification.from_pretrained(model_name)

# Zero-shot classifier used twice below to route the user's question
# (first to a top-level topic, then to a subtopic). Downloads on first run.
classifier = pipeline("zero-shot-classification", model="MoritzLaurer/mDeBERTa-v3-base-mnli-xnli")
# Small chat model that generates the final answer; device_map="auto" lets
# accelerate place it on GPU when available (the `device` global is unused).
pipe = pipeline("text-generation", model="TinyLlama/TinyLlama-1.1B-Chat-v1.0", torch_dtype=torch.bfloat16, device_map="auto")

# with open('chapter_titles.pkl', 'rb') as file:
#     titles_astiko = pickle.load(file)
# labels1 = ["κληρονομικό", "ακίνητα", "διαζύγιο"]
# # labels2 = ["αποδοχή κληρονομιάς", "αποποίηση", "διαθήκη"]
# # labels3 = ["μίσθωση", "κυριότητα", "έξωση", "απλήρωτα νοίκια"]


# titles_astiko = ["γάμος", "αλλοδαπός", "φορολογία", "κληρονομικά", "στέγη", "οικογενειακό", "εμπορικό","κλοπή","απάτη"]
# Load dictionary from the file using pickle
# NOTE(review): pickle.load executes arbitrary code if the file is untrusted —
# acceptable only because my_dict.pickle is shipped with the app; verify.
# Presumably a two-level mapping {topic: {subtopic: answer_text}}, judging by
# the dictionary[output][output2] indexing further down — TODO confirm.
with open('my_dict.pickle', 'rb') as file:
    dictionary = pickle.load(file)

def classify(text, labels):
    """Classify *text* against candidate *labels* with the zero-shot pipeline.

    Returns the raw pipeline output dict (labels ranked by score);
    multi_label=False makes the label scores mutually exclusive.
    """
    return classifier(text, labels, multi_label=False)


text = st.text_input('Enter some text:')  # Input field for new text

# Main flow: runs on every Streamlit rerun once the user has entered text.
if text:

    # First pass: classify the question against the top-level topic keys.
    labels = list(dictionary)
    
    output = classify(text,labels)

    # Highest-scoring top-level topic.
    output = output["labels"][0]

    # Second pass: classify against that topic's subtopic keys.
    labels = list(dictionary[output])

    output2 = classify(text,labels)

    output2 = output2["labels"][0]


    # Canned answer text stored under topic -> subtopic.
    answer = dictionary[output][output2]

    # Create a translator object with specified source and target languages
    # NOTE(review): the `translate` package calls an external web API per
    # request; presumably subject to length/quota limits — verify.
    translator = Translator(from_lang='el', to_lang='en')
    # translator2 (en -> el) is only used in commented-out code below.
    translator2 = Translator(from_lang='en', to_lang='el')

 

# Translate the text from Greek to English
    answer = translator.translate(answer)
    text = translator.translate(text)

# text_to_translate2 = text[499:999]
# translated_text2 = translator.translate(text_to_translate2)



    # NOTE(review): the concatenation below has no separating spaces, so the
    # stored history reads "...informationXanswer this question:Y..."; also
    # `history` is only displayed, never fed into the model (the pipe(...)
    # call that consumed it is commented out) — confirm intent.
    st.session_state.history += "Based on the following information" + answer +"answer this question:" + text + "by reasoning step by step"  # Add new text to history
    # out = pipe(st.session_state.history)  # Generate output based on history
    # System prompt constrains the chat model to the retrieved answer text.
    content = "You are a friendly chatbot who answers question based only on this info:" + answer
    messages = [
    {
        "role": "system",
        "content": content,
    },
    {"role": "user", "content": text},
    ]
    # Render the chat messages into TinyLlama's prompt format, then sample.
    prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
    outputs = pipe(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)


    # NOTE(review): generated_text includes the full prompt, not just the
    # model's reply — the raw string is shown to the user as-is.
    st.text(outputs[0]['generated_text'])


    st.text(st.session_state.history)
    
    # translated_text2 = translator2.translate(outputs)

    # st.text(translated_text2)
    # st.text("History: " + st.session_state.history)

    # st.text(output)
    # st.text(output2)

    # st.text(answer)

# We use the tokenizer's chat template to format each message - see https://huggingface.co/docs/transformers/main/en/chat_templating

# <|system|>
# You are a friendly chatbot who always responds in the style of a pirate.</s>
# <|user|>
# How many helicopters can a human eat in one sitting?</s>
# <|assistant|>
# ...