Update app.py
app.py
CHANGED
@@ -16,14 +16,10 @@ openai_api_key = os.getenv('GPT_KEY')
 gc_key = os.getenv('GC_KEY')
 token = os.getenv('GITHUB_TOKEN')
 TOGETHER_API_KEY = os.getenv('TOGETHER_API_KEY')
-MISTRAL_API_KEY = os.getenv('MISTRAL_API_KEY')
 
 # Инициализация клиента для Together
 client = Together(api_key=TOGETHER_API_KEY)
 
-# Инициализация клиента для Mistral
-client_mistral = Mistral(api_key=MISTRAL_API_KEY)
-
 # Авторизация в сервисе GigaChat
 chat_pro = GigaChat(credentials=gc_key, model='GigaChat-Pro', max_tokens=68, temperature=1, verify_ssl_certs=False)
 chat_lite = GigaChat(credentials=gc_key, model='GigaChat', max_tokens=68, temperature=1, verify_ssl_certs=False)
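For reference, the removed Mistral wiring used the mistralai client initialized above. A minimal, self-contained sketch of that initialization and of a call written in the conventional form (the removed helper further down passes max_tokens and temperature inside the message dict; they are normally keyword arguments of chat.complete). The helper name below is illustrative, not taken from app.py:

import os
from mistralai import Mistral

MISTRAL_API_KEY = os.getenv('MISTRAL_API_KEY')
client_mistral = Mistral(api_key=MISTRAL_API_KEY)

def generate_short_mistral_message(prompt):
    # Sampling limits passed as call arguments rather than inside the message dict
    response = client_mistral.chat.complete(
        model="mistral-large-latest",
        messages=[{"role": "user", "content": prompt}],
        max_tokens=74,
        temperature=0.8,
    )
    return response.choices[0].message.content.strip()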
@@ -295,50 +291,6 @@ def generate_message_meta_llama_3_1_8b(prompt):
     except Exception as e:
         return f"Ошибка при обращении к Meta-Llama-3.1-8B: {e}"
 
-def generate_message_gemma_2_27b_it(prompt):
-    try:
-        response = client.chat.completions.create(
-            model="google/gemma-2-27b-it",
-            messages=[{"role": "user", "content": prompt}],
-            max_tokens=74,
-            temperature=0.8
-        )
-        cleaned_message = clean_message(response.choices[0].message.content.strip())
-        return cleaned_message
-    except Exception as e:
-        return f"Ошибка при обращении к Gemma-2-27b-it: {e}"
-
-def generate_message_gemma_2_9b_it(prompt):
-    try:
-        response = client.chat.completions.create(
-            model="google/gemma-2-9b-it",
-            messages=[{"role": "user", "content": prompt}],
-            max_tokens=74,
-            temperature=0.8
-        )
-        cleaned_message = clean_message(response.choices[0].message.content.strip())
-        return cleaned_message
-    except Exception as e:
-        return f"Ошибка при обращении к Gemma-2-9b-it: {e}"
-
-def generate_message_mistral(prompt):
-    try:
-        chat_response = client_mistral.chat.complete(
-            model="mistral-large-latest",
-            messages=[
-                {
-                    "role": "user",
-                    "content": prompt,
-                    "max_tokens": 74,
-                    "temperature": 0.8
-                },
-            ]
-        )
-        cleaned_message = clean_message(chat_response.choices[0].message.content.strip())
-        return cleaned_message
-    except Exception as e:
-        return f"Ошибка при обращении к Mistral: {e}"
-
 def generate_message_gpt4o_with_retry(prompt):
     for _ in range(10): # Максимум 10 попыток
         message = generate_message_gpt4o(prompt)
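The two removed Gemma helpers (and the Llama helpers that remain in app.py) all follow the same Together chat-completions pattern. A minimal, self-contained sketch of that pattern; the generic function name is illustrative, and app.py additionally post-processes the text with its clean_message helper:

import os
from together import Together

client = Together(api_key=os.getenv('TOGETHER_API_KEY'))

def generate_short_message(prompt, model="google/gemma-2-27b-it"):
    # Same call shape as the removed per-model helpers: one user message,
    # a tight max_tokens budget and a fixed sampling temperature.
    try:
        response = client.chat.completions.create(
            model=model,
            messages=[{"role": "user", "content": prompt}],
            max_tokens=74,
            temperature=0.8,
        )
        return response.choices[0].message.content.strip()
    except Exception as e:
        return f"Ошибка при обращении к {model}: {e}"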
@@ -374,41 +326,6 @@ def generate_message_meta_llama_3_1_405b_with_retry(prompt):
             return message
     return message
 
-def generate_message_meta_llama_3_1_70b_with_retry(prompt):
-    for _ in range(10):
-        message = generate_message_meta_llama_3_1_70b(prompt)
-        if len(message) <= 250:
-            return message
-    return message
-
-def generate_message_meta_llama_3_1_8b_with_retry(prompt):
-    for _ in range(10):
-        message = generate_message_meta_llama_3_1_8b(prompt)
-        if len(message) <= 250:
-            return message
-    return message
-
-def generate_message_gemma_2_27b_it_with_retry(prompt):
-    for _ in range(10):
-        message = generate_message_gemma_2_27b_it(prompt)
-        if len(message) <= 250:
-            return message
-    return message
-
-def generate_message_gemma_2_9b_it_with_retry(prompt):
-    for _ in range(10):
-        message = generate_message_gemma_2_9b_it(prompt)
-        if len(message) <= 250:
-            return message
-    return message
-
-def generate_message_mistral_with_retry(prompt):
-    for _ in range(10):
-        message = generate_message_mistral(prompt)
-        if len(message) <= 250:
-            return message
-    return message
-
 
 def generate_messages(description, advantages, key_message, approach, *selected_values):
 
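Every removed *_with_retry wrapper repeats the same loop: call the model up to ten times, stop as soon as the message fits a 250-character cap, otherwise hand back the last attempt. A minimal generic sketch of that loop (the function name is illustrative, not from app.py):

def generate_with_length_retry(generate, prompt, attempts=10, max_len=250):
    """Mirror of the removed wrappers: retry until the text fits, else return the last attempt."""
    message = ""
    for _ in range(attempts):
        message = generate(prompt)
        if len(message) <= max_len:
            return message
    return message

# Usage with one of the helpers kept in app.py:
# generate_with_length_retry(generate_message_gpt4o, standard_prompt)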
@@ -422,85 +339,44 @@ def generate_messages(description, advantages, key_message, approach, *selected_values):
         "gigachat_pro": None,
         "gigachat_lite": None,
         "gigachat_plus": None,
-        "meta_llama_3_1_405b": None,
-        "meta_llama_3_1_70b": None,
-        "meta_llama_3_1_8b": None,
-        "gemma_2_27b_it": None,
-        "gemma_2_9b_it": None,
-        "mistral": None # Добавляем Mistral
+        "meta_llama_3_1_405b": None
     }
 
-    yield results["prompt"], "", "", "", "", ""
+    yield results["prompt"], "", "", "", "", ""
 
     # Generating messages using existing models (as before)
     results["gpt4o"] = generate_message_gpt4o_with_retry(standard_prompt)
     gpt4o_length = len(results["gpt4o"])
     gpt4o_display = f"{results['gpt4o']}\n\n------\nКоличество знаков: {gpt4o_length}"
-    yield results["prompt"], gpt4o_display, "", "", "", ""
+    yield results["prompt"], gpt4o_display, "", "", "", ""
 
     results["gigachat_pro"] = generate_message_gigachat_pro_with_retry(standard_prompt)
     gigachat_pro_length = len(results["gigachat_pro"])
     gigachat_pro_display = f"{results['gigachat_pro']}\n\n------\nКоличество знаков: {gigachat_pro_length}"
-    yield results["prompt"], gpt4o_display, gigachat_pro_display, "", "", ""
+    yield results["prompt"], gpt4o_display, gigachat_pro_display, "", "", ""
 
     time.sleep(2)
 
     results["gigachat_lite"] = generate_message_gigachat_lite_with_retry(standard_prompt)
     gigachat_lite_length = len(results["gigachat_lite"])
     gigachat_lite_display = f"{results['gigachat_lite']}\n\n------\nКоличество знаков: {gigachat_lite_length}"
-    yield results["prompt"], gpt4o_display, gigachat_pro_display, gigachat_lite_display, "", ""
+    yield results["prompt"], gpt4o_display, gigachat_pro_display, gigachat_lite_display, "", ""
 
     time.sleep(2)
 
     results["gigachat_plus"] = generate_message_gigachat_plus_with_retry(standard_prompt)
     gigachat_plus_length = len(results["gigachat_plus"])
     gigachat_plus_display = f"{results['gigachat_plus']}\n\n------\nКоличество знаков: {gigachat_plus_length}"
-    yield results["prompt"], gpt4o_display, gigachat_pro_display, gigachat_lite_display, gigachat_plus_display, ""
+    yield results["prompt"], gpt4o_display, gigachat_pro_display, gigachat_lite_display, gigachat_plus_display, ""
 
     time.sleep(2)
 
     results["meta_llama_3_1_405b"] = generate_message_meta_llama_3_1_405b_with_retry(standard_prompt)
     meta_llama_405b_length = len(results["meta_llama_3_1_405b"])
     meta_llama_405b_display = f"{results['meta_llama_3_1_405b']}\n\n------\nКоличество знаков: {meta_llama_405b_length}"
-    yield results["prompt"], gpt4o_display, gigachat_pro_display, gigachat_lite_display, gigachat_plus_display, meta_llama_405b_display
+    yield results["prompt"], gpt4o_display, gigachat_pro_display, gigachat_lite_display, gigachat_plus_display, meta_llama_405b_display
-
-    time.sleep(4)
-
-    results["meta_llama_3_1_70b"] = generate_message_meta_llama_3_1_70b_with_retry(standard_prompt)
-    meta_llama_70b_length = len(results["meta_llama_3_1_70b"])
-    meta_llama_70b_display = f"{results['meta_llama_3_1_70b']}\n\n------\nКоличество знаков: {meta_llama_70b_length}"
-    yield results["prompt"], gpt4o_display, gigachat_pro_display, gigachat_lite_display, gigachat_plus_display, meta_llama_405b_display, meta_llama_70b_display, "", "", "", ""
-
-    time.sleep(4)
-
-    results["meta_llama_3_1_8b"] = generate_message_meta_llama_3_1_8b_with_retry(standard_prompt)
-    meta_llama_8b_length = len(results["meta_llama_3_1_8b"])
-    meta_llama_8b_display = f"{results['meta_llama_3_1_8b']}\n\n------\nКоличество знаков: {meta_llama_8b_length}"
-    yield results["prompt"], gpt4o_display, gigachat_pro_display, gigachat_lite_display, gigachat_plus_display, meta_llama_405b_display, meta_llama_70b_display, meta_llama_8b_display, "", "", ""
 
-    time.sleep(4)
+    time.sleep(2)
-
-    results["gemma_2_27b_it"] = generate_message_gemma_2_27b_it_with_retry(standard_prompt)
-    gemma_27b_length = len(results["gemma_2_27b_it"])
-    gemma_27b_display = f"{results['gemma_2_27b_it']}\n\n------\nКоличество знаков: {gemma_27b_length}"
-    yield results["prompt"], gpt4o_display, gigachat_pro_display, gigachat_lite_display, gigachat_plus_display, meta_llama_405b_display, meta_llama_70b_display, meta_llama_8b_display, gemma_27b_display, "", ""
-
-    time.sleep(4)
-
-    results["gemma_2_9b_it"] = generate_message_gemma_2_9b_it_with_retry(standard_prompt)
-    gemma_9b_length = len(results["gemma_2_9b_it"])
-    gemma_9b_display = f"{results['gemma_2_9b_it']}\n\n------\nКоличество знаков: {gemma_9b_length}"
-    yield results["prompt"], gpt4o_display, gigachat_pro_display, gigachat_lite_display, gigachat_plus_display, meta_llama_405b_display, meta_llama_70b_display, meta_llama_8b_display, gemma_27b_display, gemma_9b_display, ""
-
-    time.sleep(4)
-
-    # Добавляем Mistral
-    results["mistral"] = generate_message_mistral_with_retry(standard_prompt)
-    mistral_length = len(results["mistral"])
-    mistral_display = f"{results['mistral']}\n\n------\nКоличество знаков: {mistral_length}"
-    yield results["prompt"], gpt4o_display, gigachat_pro_display, gigachat_lite_display, gigachat_plus_display, meta_llama_405b_display, meta_llama_70b_display, meta_llama_8b_display, gemma_27b_display, gemma_9b_display, mistral_display
-
-    time.sleep(4)
 
     return results
 
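generate_messages is a generator: each yield pushes the current state of all output boxes to the Gradio UI, so results appear one model at a time with short pauses between provider calls. A minimal, self-contained sketch of that streaming pattern with two hypothetical models (component and function names are illustrative):

import time
import gradio as gr

def stream_two_models(prompt):
    # Each yield must supply a value for every output component;
    # slots that are not ready yet stay empty.
    first = f"model A reply to: {prompt}"
    yield first, ""
    time.sleep(2)  # pacing between calls, as in generate_messages
    second = f"model B reply to: {prompt}"
    yield first, second

with gr.Blocks() as demo:
    prompt_box = gr.Textbox(label="Prompt")
    out_a = gr.Textbox(label="Model A")
    out_b = gr.Textbox(label="Model B")
    gr.Button("Generate").click(stream_two_models, inputs=prompt_box, outputs=[out_a, out_b])

demo.launch()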
@@ -590,38 +466,13 @@ def perform_personalization_meta_llama_405b(standard_message, personalization_prompt):
     full_prompt = f"{personalization_prompt}\n\nТекст для адаптации:\n{standard_message}"
     return generate_message_meta_llama_3_1_405b_with_retry(full_prompt)
 
-def perform_personalization_meta_llama_70b(standard_message, personalization_prompt):
-    full_prompt = f"{personalization_prompt}\n\nТекст для адаптации:\n{standard_message}"
-    return generate_message_meta_llama_3_1_70b_with_retry(full_prompt)
-
-def perform_personalization_meta_llama_8b(standard_message, personalization_prompt):
-    full_prompt = f"{personalization_prompt}\n\nТекст для адаптации:\n{standard_message}"
-    return generate_message_meta_llama_3_1_8b_with_retry(full_prompt)
-
-def perform_personalization_gemma_27b_it(standard_message, personalization_prompt):
-    full_prompt = f"{personalization_prompt}\n\nТекст для адаптации:\n{standard_message}"
-    return generate_message_gemma_2_27b_it_with_retry(full_prompt)
-
-def perform_personalization_gemma_9b_it(standard_message, personalization_prompt):
-    full_prompt = f"{personalization_prompt}\n\nТекст для адаптации:\n{standard_message}"
-    return generate_message_gemma_2_9b_it_with_retry(full_prompt)
-
-def perform_personalization_mistral(standard_message, personalization_prompt):
-    full_prompt = f"{personalization_prompt}\n\nТекст для адаптации:\n{standard_message}"
-    return generate_message_mistral_with_retry(full_prompt)
-
 # Updated function to include additional models in personalization
 def personalize_messages_with_yield(
     gpt4o_message,
     gigachat_pro_message,
     gigachat_lite_message,
     gigachat_plus_message,
-    meta_llama_405b_message,
-    meta_llama_70b_message,
-    meta_llama_8b_message,
-    gemma_27b_message,
-    gemma_9b_message,
-    mistral_message,
+    meta_llama_405b_message,
     key_message,
     approach,
     *selected_values
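Each removed perform_personalization_* wrapper differs only in which retry helper it delegates to; the prompt composition is identical. A one-function generic sketch of that composition (the name is illustrative, not from app.py):

def perform_personalization_with(generate_with_retry, standard_message, personalization_prompt):
    # Same composition as the removed wrappers: personalization instructions first,
    # then the text to adapt.
    full_prompt = f"{personalization_prompt}\n\nТекст для адаптации:\n{standard_message}"
    return generate_with_retry(full_prompt)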
@@ -655,31 +506,6 @@ def personalize_messages_with_yield(
     meta_llama_405b_display = f"{personalized_message_meta_llama_405b}\n\n------\nКоличество знаков: {meta_llama_405b_length}"
     yield personalization_prompt, gpt4o_display, gigachat_pro_display, gigachat_lite_display, gigachat_plus_display, meta_llama_405b_display, "", "", "", "", ""
 
-    personalized_message_meta_llama_70b = perform_personalization_meta_llama_70b(meta_llama_70b_message, personalization_prompt)
-    meta_llama_70b_length = len(personalized_message_meta_llama_70b)
-    meta_llama_70b_display = f"{personalized_message_meta_llama_70b}\n\n------\nКоличество знаков: {meta_llama_70b_length}"
-    yield personalization_prompt, gpt4o_display, gigachat_pro_display, gigachat_lite_display, gigachat_plus_display, meta_llama_405b_display, meta_llama_70b_display, "", "", "", ""
-
-    personalized_message_meta_llama_8b = perform_personalization_meta_llama_8b(meta_llama_8b_message, personalization_prompt)
-    meta_llama_8b_length = len(personalized_message_meta_llama_8b)
-    meta_llama_8b_display = f"{personalized_message_meta_llama_8b}\n\n------\nКоличество знаков: {meta_llama_8b_length}"
-    yield personalization_prompt, gpt4o_display, gigachat_pro_display, gigachat_lite_display, gigachat_plus_display, meta_llama_405b_display, meta_llama_70b_display, meta_llama_8b_display, "", "", ""
-
-    personalized_message_gemma_27b_it = perform_personalization_gemma_27b_it(gemma_27b_message, personalization_prompt)
-    gemma_27b_length = len(personalized_message_gemma_27b_it)
-    gemma_27b_display = f"{personalized_message_gemma_27b_it}\n\n------\nКоличество знаков: {gemma_27b_length}"
-    yield personalization_prompt, gpt4o_display, gigachat_pro_display, gigachat_lite_display, gigachat_plus_display, meta_llama_405b_display, meta_llama_70b_display, meta_llama_8b_display, gemma_27b_display, "", ""
-
-    personalized_message_gemma_9b_it = perform_personalization_gemma_9b_it(gemma_9b_message, personalization_prompt)
-    gemma_9b_length = len(personalized_message_gemma_9b_it)
-    gemma_9b_display = f"{personalized_message_gemma_9b_it}\n\n------\nКоличество знаков: {gemma_9b_length}"
-    yield personalization_prompt, gpt4o_display, gigachat_pro_display, gigachat_lite_display, gigachat_plus_display, meta_llama_405b_display, meta_llama_70b_display, meta_llama_8b_display, gemma_27b_display, gemma_9b_display, ""
-
-    personalized_message_mistral = perform_personalization_mistral(mistral_message, personalization_prompt)
-    mistral_length = len(personalized_message_mistral)
-    mistral_display = f"{personalized_message_mistral}\n\n------\nКоличество знаков: {mistral_length}"
-    yield personalization_prompt, gpt4o_display, gigachat_pro_display, gigachat_lite_display, gigachat_plus_display, meta_llama_405b_display, meta_llama_70b_display, meta_llama_8b_display, gemma_27b_display, gemma_9b_display, mistral_display
-
 
 # Функция для генерации промпта проверки текста
 def generate_error_check_prompt():
@@ -726,46 +552,6 @@ def generate_error_check_prompt():
     return prompt
 
 
-# Функция для выполнения проверки текста с использованием yield
-def check_errors_with_yield(*personalized_messages):
-    if len(personalized_messages) < 10: # Adjusted for the inclusion of Mistral
-        yield "", "", "", "", "", "", "", "", "", "", "Ошибка: недостаточно сообщений для проверки"
-        return
-
-    error_check_prompt = generate_error_check_prompt()
-    yield error_check_prompt, "", "", "", "", "", "", "", "", "", "Промпт для проверки текста сгенерирован"
-
-    error_message_gpt4o = perform_personalization(f"{error_check_prompt}\n\n{personalized_messages[0]}", "")
-    yield error_check_prompt, error_message_gpt4o, "", "", "", "", "", "", "", "", "Результат проверки GPT-4o сгенерирован"
-
-    error_message_gigachat_pro = perform_personalization_gigachat(f"{error_check_prompt}\n\n{personalized_messages[1]}", "", "gigachat_pro")
-    yield error_check_prompt, error_message_gpt4o, error_message_gigachat_pro, "", "", "", "", "", "", "", "Результат проверки GigaChat-Pro сгенерирован"
-
-    error_message_gigachat_lite = perform_personalization_gigachat(f"{error_check_prompt}\n\n{personalized_messages[2]}", "", "gigachat_lite")
-    yield error_check_prompt, error_message_gpt4o, error_message_gigachat_pro, error_message_gigachat_lite, "", "", "", "", "", "", "Результат проверки GigaChat-Lite сгенерирован"
-
-    error_message_gigachat_plus = perform_personalization_gigachat(f"{error_check_prompt}\n\n{personalized_messages[3]}", "", "gigachat_plus")
-    yield error_check_prompt, error_message_gpt4o, error_message_gigachat_pro, error_message_gigachat_lite, error_message_gigachat_plus, "", "", "", "", "", "Результат проверки GigaChat-Plus сгенерирован"
-
-    error_message_meta_llama_405b = perform_personalization(f"{error_check_prompt}\n\n{personalized_messages[4]}", "")
-    yield error_check_prompt, error_message_gpt4o, error_message_gigachat_pro, error_message_gigachat_lite, error_message_gigachat_plus, error_message_meta_llama_405b, "", "", "", "", "Результат проверки Meta-Llama-3.1-405B сгенерирован"
-
-    error_message_meta_llama_70b = perform_personalization_meta_llama_70b(f"{error_check_prompt}\n\n{personalized_messages[5]}", "")
-    yield error_check_prompt, error_message_gpt4o, error_message_gigachat_pro, error_message_gigachat_lite, error_message_gigachat_plus, error_message_meta_llama_405b, error_message_meta_llama_70b, "", "", "", "Результат проверки Meta-Llama-3.1-70B сгенерирован"
-
-    error_message_meta_llama_8b = perform_personalization_meta_llama_8b(f"{error_check_prompt}\n\n{personalized_messages[6]}", "")
-    yield error_check_prompt, error_message_gpt4o, error_message_gigachat_pro, error_message_gigachat_lite, error_message_gigachat_plus, error_message_meta_llama_405b, error_message_meta_llama_70b, error_message_meta_llama_8b, "", "", "Результат проверки Meta-Llama-3.1-8B сгенерирован"
-
-    error_message_gemma_27b_it = perform_personalization_gemma_27b_it(f"{error_check_prompt}\n\n{personalized_messages[7]}", "")
-    yield error_check_prompt, error_message_gpt4o, error_message_gigachat_pro, error_message_gigachat_lite, error_message_gigachat_plus, error_message_meta_llama_405b, error_message_meta_llama_70b, error_message_meta_llama_8b, error_message_gemma_27b_it, "", "Результат проверки Gemma-2-27B-IT сгенерирован"
-
-    error_message_gemma_9b_it = perform_personalization_gemma_9b_it(f"{error_check_prompt}\n\n{personalized_messages[8]}", "")
-    yield error_check_prompt, error_message_gpt4o, error_message_gigachat_pro, error_message_gigachat_lite, error_message_gigachat_plus, error_message_meta_llama_405b, error_message_meta_llama_70b, error_message_meta_llama_8b, error_message_gemma_27b_it, error_message_gemma_9b_it, "", "Результат проверки Gemma-2-9B-IT сгенерирован"
-
-    error_message_mistral = perform_personalization_mistral(f"{error_check_prompt}\n\n{personalized_messages[9]}", "")
-    yield error_check_prompt, error_message_gpt4o, error_message_gigachat_pro, error_message_gigachat_lite, error_message_gigachat_plus, error_message_meta_llama_405b, error_message_meta_llama_70b, error_message_meta_llama_8b, error_message_gemma_27b_it, error_message_gemma_9b_it, error_message_mistral, "Все результаты проверки сгенерированы"
-
-
 def save_to_github(personalized_message, model_name, comment, corrected_message, description, advantages, non_personalized_prompt, non_personalized_message, gender, generation, psychotype, business_stage, industry, legal_form, key_message, approach):
     # Собираем все данные в один словарь
     data_to_save = {
@@ -813,11 +599,6 @@ def personalize_and_save(
     gigachat_lite_message,
     gigachat_plus_message,
     meta_llama_405b_message,
-    meta_llama_70b_message,
-    meta_llama_8b_message,
-    gemma_27b_message,
-    gemma_9b_message,
-    mistral_message,
     description,
     advantages,
     key_message, # Обратите внимание, что key_message здесь не передается в selected_values
@@ -831,11 +612,6 @@ def personalize_and_save(
         gigachat_lite_message,
         gigachat_plus_message,
         meta_llama_405b_message,
-        meta_llama_70b_message,
-        meta_llama_8b_message,
-        gemma_27b_message,
-        gemma_9b_message,
-        mistral_message,
         key_message,
         approach,
         *selected_values # Только признаки для персонализации
@@ -881,10 +657,10 @@ def reset_button_text():
 
 def clear_unnecessary_fields():
     return (
-        "", "", "", "", "",
-        "", "", "", "", "",
-        "", "", "", "", "",
-        "", "", "", "", "",
+        "", "", "", "", "", # personalized outputs and prompts
+        "", "", "", "", "", # comment fields
+        "", "", "", "", "", # corrected message fields
+        "", "", "", "", "", # оставшиеся поля
         "", "" # Дополнительное пустое значение
     )
 
@@ -1001,11 +777,6 @@ with gr.Row():
         personalized_output_text_gigachat_lite,
         personalized_output_text_gigachat_plus,
         personalized_output_text_meta_llama_405b,
-        personalized_output_text_meta_llama_70b,
-        personalized_output_text_meta_llama_8b,
-        personalized_output_text_gemma_27b,
-        personalized_output_text_gemma_9b,
-        personalized_output_text_mistral,
         comment_gpt4o,
         corrected_gpt4o,
         comment_gigachat_pro,
@@ -1015,17 +786,7 @@ with gr.Row():
         comment_gigachat_plus,
         corrected_gigachat_plus,
         comment_meta_llama_405b,
-        corrected_meta_llama_405b,
-        comment_meta_llama_70b,
-        corrected_meta_llama_70b,
-        comment_meta_llama_8b,
-        corrected_meta_llama_8b,
-        comment_gemma_27b,
-        corrected_gemma_27b,
-        comment_gemma_9b,
-        corrected_gemma_9b,
-        comment_mistral,
-        corrected_mistral
+        corrected_meta_llama_405b
     ]
 )
 
@@ -1038,12 +799,7 @@ with gr.Row():
         output_text_gigachat_pro,
         output_text_gigachat_lite,
         output_text_gigachat_plus,
-        output_text_meta_llama_405b,
-        output_text_meta_llama_70b,
-        output_text_meta_llama_8b,
-        output_text_gemma_27b,
-        output_text_gemma_9b,
-        output_text_mistral
+        output_text_meta_llama_405b
     ]
 )
 
@@ -1057,11 +813,6 @@ with gr.Row():
         personalized_output_text_gigachat_lite,
         personalized_output_text_gigachat_plus,
         personalized_output_text_meta_llama_405b,
-        personalized_output_text_meta_llama_70b,
-        personalized_output_text_meta_llama_8b,
-        personalized_output_text_gemma_27b,
-        personalized_output_text_gemma_9b,
-        personalized_output_text_mistral, # Поля для персонализированных сообщений
         comment_gpt4o,
         corrected_gpt4o,
         comment_gigachat_pro,
@@ -1071,17 +822,7 @@ with gr.Row():
         comment_gigachat_plus,
         corrected_gigachat_plus,
         comment_meta_llama_405b,
-        corrected_meta_llama_405b,
-        comment_meta_llama_70b,
-        corrected_meta_llama_70b,
-        comment_meta_llama_8b,
-        corrected_meta_llama_8b,
-        comment_gemma_27b,
-        corrected_gemma_27b,
-        comment_gemma_9b,
-        corrected_gemma_9b,
-        comment_mistral,
-        corrected_mistral # Поля для комментариев и откорректированных сообщений
+        corrected_meta_llama_405b
     ]
 )
 
@@ -1092,12 +833,7 @@ with gr.Row():
         output_text_gigachat_pro,
         output_text_gigachat_lite,
         output_text_gigachat_plus,
-        output_text_meta_llama_405b,
-        output_text_meta_llama_70b,
-        output_text_meta_llama_8b,
-        output_text_gemma_27b,
-        output_text_gemma_9b,
-        output_text_mistral, # Входные данные для персонализации
+        output_text_meta_llama_405b,
         description_input,
         advantages_input,
         key_message_input, # Add key_message_input
@@ -1109,12 +845,7 @@ with gr.Row():
         personalized_output_text_gigachat_pro,
         personalized_output_text_gigachat_lite,
         personalized_output_text_gigachat_plus,
-        personalized_output_text_meta_llama_405b,
-        personalized_output_text_meta_llama_70b,
-        personalized_output_text_meta_llama_8b,
-        personalized_output_text_gemma_27b,
-        personalized_output_text_gemma_9b,
-        personalized_output_text_mistral # Выходные данные для персонализации
+        personalized_output_text_meta_llama_405b
     ]
 )
 
@@ -1140,21 +871,11 @@ with gr.Row():
         output_text_gigachat_lite,
         output_text_gigachat_plus,
         output_text_meta_llama_405b,
-        output_text_meta_llama_70b,
-        output_text_meta_llama_8b,
-        output_text_gemma_27b,
-        output_text_gemma_9b,
-        output_text_mistral,
         personalized_output_text_gpt4o,
         personalized_output_text_gigachat_pro,
         personalized_output_text_gigachat_lite,
         personalized_output_text_gigachat_plus,
         personalized_output_text_meta_llama_405b,
-        personalized_output_text_meta_llama_70b,
-        personalized_output_text_meta_llama_8b,
-        personalized_output_text_gemma_27b,
-        personalized_output_text_gemma_9b,
-        personalized_output_text_mistral,
         comment_gpt4o,
         corrected_gpt4o,
         comment_gigachat_pro,
@@ -1164,17 +885,7 @@ with gr.Row():
         comment_gigachat_plus,
         corrected_gigachat_plus,
         comment_meta_llama_405b,
-        corrected_meta_llama_405b,
-        comment_meta_llama_70b,
-        corrected_meta_llama_70b,
-        comment_meta_llama_8b,
-        corrected_meta_llama_8b,
-        comment_gemma_27b,
-        corrected_gemma_27b,
-        comment_gemma_9b,
-        corrected_gemma_9b,
-        comment_mistral,
-        corrected_mistral
+        corrected_meta_llama_405b
     ]
 )
 
@@ -1349,170 +1060,5 @@ with gr.Row():
         outputs=[save_meta_llama_405b_btn]
     )
 
-    save_meta_llama_70b_btn.click(
-        fn=prepare_button_text,
-        inputs=[],
-        outputs=[save_meta_llama_70b_btn]
-    ).then(
-        fn=lambda personalized_message, comment, corrected_message, description, advantages, non_personalized_prompt, non_personalized_message, gender, generation, psychotype, business_stage, industry, legal_form, key_message, approach:
-            save_to_github(personalized_message, "Meta-Llama-3.1-70B", comment, corrected_message, description, advantages, non_personalized_prompt, non_personalized_message, gender, generation, psychotype, business_stage, industry, legal_form, key_message, approach),
-        inputs=[
-            personalized_output_text_meta_llama_70b,
-            comment_meta_llama_70b,
-            corrected_meta_llama_70b,
-            description_input,
-            advantages_input,
-            prompt_display,
-            output_text_meta_llama_70b,
-            selections[0], # Пол
-            selections[1], # Поколение
-            selections[2], # Психотип
-            selections[3], # Стадия бизнеса
-            selections[4], # Отрасль
-            selections[5], # ОПФ
-            key_message_input, # Ключевое сообщение
-            approach_input # Подход
-        ],
-        outputs=None
-    ).then(
-        fn=update_button_text,
-        outputs=[save_meta_llama_70b_btn]
-    ).then(
-        fn=reset_button_text,
-        outputs=[save_meta_llama_70b_btn]
-    )
-
-    save_meta_llama_8b_btn.click(
-        fn=prepare_button_text,
-        inputs=[],
-        outputs=[save_meta_llama_8b_btn]
-    ).then(
-        fn=lambda personalized_message, comment, corrected_message, description, advantages, non_personalized_prompt, non_personalized_message, gender, generation, psychotype, business_stage, industry, legal_form, key_message, approach:
-            save_to_github(personalized_message, "Meta-Llama-3.1-8B", comment, corrected_message, description, advantages, non_personalized_prompt, non_personalized_message, gender, generation, psychotype, business_stage, industry, legal_form, key_message, approach),
-        inputs=[
-            personalized_output_text_meta_llama_8b,
-            comment_meta_llama_8b,
-            corrected_meta_llama_8b,
-            description_input,
-            advantages_input,
-            prompt_display,
-            output_text_meta_llama_8b,
-            selections[0], # Пол
-            selections[1], # Поколение
-            selections[2], # Психотип
-            selections[3], # Стадия бизнеса
-            selections[4], # Отрасль
-            selections[5], # ОПФ
-            key_message_input, # Ключевое сообщение
-            approach_input # Подход
-        ],
-        outputs=None
-    ).then(
-        fn=update_button_text,
-        outputs=[save_meta_llama_8b_btn]
-    ).then(
-        fn=reset_button_text,
-        outputs=[save_meta_llama_8b_btn]
-    )
-
-    save_gemma_27b_btn.click(
-        fn=prepare_button_text,
-        inputs=[],
-        outputs=[save_gemma_27b_btn]
-    ).then(
-        fn=lambda personalized_message, comment, corrected_message, description, advantages, non_personalized_prompt, non_personalized_message, gender, generation, psychotype, business_stage, industry, legal_form, key_message, approach:
-            save_to_github(personalized_message, "Gemma-2-27B-IT", comment, corrected_message, description, advantages, non_personalized_prompt, non_personalized_message, gender, generation, psychotype, business_stage, industry, legal_form, key_message, approach),
-        inputs=[
-            personalized_output_text_gemma_27b,
-            comment_gemma_27b,
-            corrected_gemma_27b,
-            description_input,
-            advantages_input,
-            prompt_display,
-            output_text_gemma_27b,
-            selections[0], # Пол
-            selections[1], # Поколение
-            selections[2], # Психотип
-            selections[3], # Стадия бизнеса
-            selections[4], # Отрасль
-            selections[5], # ОПФ
-            key_message_input, # Ключевое сообщение
-            approach_input # Подход
-        ],
-        outputs=None
-    ).then(
-        fn=update_button_text,
-        outputs=[save_gemma_27b_btn]
-    ).then(
-        fn=reset_button_text,
-        outputs=[save_gemma_27b_btn]
-    )
-
-    save_gemma_9b_btn.click(
-        fn=prepare_button_text,
-        inputs=[],
-        outputs=[save_gemma_9b_btn]
-    ).then(
-        fn=lambda personalized_message, comment, corrected_message, description, advantages, non_personalized_prompt, non_personalized_message, gender, generation, psychotype, business_stage, industry, legal_form, key_message, approach:
-            save_to_github(personalized_message, "Gemma-2-9B-IT", comment, corrected_message, description, advantages, non_personalized_prompt, non_personalized_message, gender, generation, psychotype, business_stage, industry, legal_form, key_message, approach),
-        inputs=[
-            personalized_output_text_gemma_9b,
-            comment_gemma_9b,
-            corrected_gemma_9b,
-            description_input,
-            advantages_input,
-            prompt_display,
-            output_text_gemma_9b,
-            selections[0], # Пол
-            selections[1], # Поколение
-            selections[2], # Психотип
-            selections[3], # Стадия бизнеса
-            selections[4], # Отрасль
-            selections[5], # ОПФ
-            key_message_input, # Ключевое сообщение
-            approach_input # Подход
-        ],
-        outputs=None
-    ).then(
-        fn=update_button_text,
-        outputs=[save_gemma_9b_btn]
-    ).then(
-        fn=reset_button_text,
-        outputs=[save_gemma_9b_btn]
-    )
-
-    save_mistral_btn.click(
-        fn=prepare_button_text,
-        inputs=[],
-        outputs=[save_mistral_btn]
-    ).then(
-        fn=lambda personalized_message, comment, corrected_message, description, advantages, non_personalized_prompt, non_personalized_message, gender, generation, psychotype, business_stage, industry, legal_form, key_message, approach:
-            save_to_github(personalized_message, "Mistral", comment, corrected_message, description, advantages, non_personalized_prompt, non_personalized_message, gender, generation, psychotype, business_stage, industry, legal_form, key_message, approach),
-        inputs=[
-            personalized_output_text_mistral,
-            comment_mistral,
-            corrected_mistral,
-            description_input,
-            advantages_input,
-            prompt_display,
-            output_text_mistral,
-            selections[0], # Пол
-            selections[1], # Поколение
-            selections[2], # Психотип
-            selections[3], # Стадия бизнеса
-            selections[4], # Отрасль
-            selections[5], # ОПФ
-            key_message_input, # Ключевое сообщение
-            approach_input # Подход
-        ],
-        outputs=None
-    ).then(
-        fn=update_button_text,
-        outputs=[save_mistral_btn]
-    ).then(
-        fn=reset_button_text,
-        outputs=[save_mistral_btn]
-    )
-
 
 demo.launch()
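The five removed save handlers (like the ones that remain, e.g. for Meta-Llama-3.1-405B) share one event-chaining pattern: swap the button label, push the record to GitHub, confirm, then restore the label. A minimal, self-contained sketch of that click/.then() chain with stand-in functions (the real app passes the full set of fields into save_to_github):

import gradio as gr

def prepare_button_text():
    return "Сохраняю..."   # stand-in: show that saving started

def save_record(message, comment):
    print("saving:", message, comment)  # stand-in for save_to_github(...)

def update_button_text():
    return "Сохранено"

def reset_button_text():
    return "Сохранить"

with gr.Blocks() as demo:
    message_box = gr.Textbox(label="Сообщение")
    comment_box = gr.Textbox(label="Комментарий")
    save_btn = gr.Button("Сохранить")

    save_btn.click(
        fn=prepare_button_text, inputs=[], outputs=[save_btn]
    ).then(
        fn=save_record, inputs=[message_box, comment_box], outputs=None
    ).then(
        fn=update_button_text, outputs=[save_btn]
    ).then(
        fn=reset_button_text, outputs=[save_btn]
    )

demo.launch()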