Spaces:
Running
Running
File size: 1,633 Bytes
7db0ae4 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 |
import sys
import os
import io
# Make the repo root importable so the local litellm checkout is used
# instead of any installed copy (this file lives two levels deep).
sys.path.insert(0, os.path.abspath("../.."))
from litellm import completion
import litellm
# Route every successful completion through the Langsmith logging callback.
litellm.success_callback = ["langsmith"]
# Emit verbose debug output so callback activity is visible when run manually.
litellm.set_verbose = True
import time
# NOTE(review): `io` and `time` are not used anywhere in this chunk —
# presumably leftovers; confirm against the rest of the file before removing.
def test_langsmith_logging():
    """Smoke-test that a plain (non-streaming) completion is logged to Langsmith.

    Errors are printed instead of raised so the script keeps running when
    credentials or the network are unavailable.
    """
    request = {
        "model": "claude-instant-1.2",
        "messages": [{"role": "user", "content": "what llm are u"}],
        "max_tokens": 10,
        "temperature": 0.2,
    }
    try:
        print(completion(**request))
    except Exception as err:
        print(err)
# test_langsmith_logging()
def test_langsmith_logging_with_metadata():
    """Smoke-test Langsmith logging with custom run/project metadata attached.

    Errors are printed instead of raised so the script keeps running when
    credentials or the network are unavailable.
    """
    request = {
        "model": "gpt-3.5-turbo",
        "messages": [{"role": "user", "content": "what llm are u"}],
        "max_tokens": 10,
        "temperature": 0.2,
        # Forwarded to Langsmith as the run name and target project.
        "metadata": {
            "run_name": "litellmRUN",
            "project_name": "litellm-completion",
        },
    }
    try:
        print(completion(**request))
    except Exception as err:
        print(err)
# test_langsmith_logging_with_metadata()
def test_langsmith_logging_with_streaming_and_metadata():
    """Smoke-test Langsmith logging for a streaming completion with metadata.

    The stream is drained without inspecting chunks — the point is only to
    trigger the success callback once the stream completes. Errors are
    printed instead of raised so the script keeps running.
    """
    request = {
        "model": "gpt-3.5-turbo",
        "messages": [{"role": "user", "content": "what llm are u"}],
        "max_tokens": 10,
        "temperature": 0.2,
        "metadata": {
            "run_name": "litellmRUN",
            "project_name": "litellm-completion",
        },
        "stream": True,
    }
    try:
        stream = completion(**request)
        for _chunk in stream:
            pass
    except Exception as err:
        print(err)
test_langsmith_logging_with_streaming_and_metadata()
|