# llamaSMS / main.py
from flask import Flask, request
from twilio.twiml.messaging_response import MessagingResponse
from langchain.chains import LLMChain
from langchain.llms import Baseten
from langchain.memory import ConversationBufferWindowMemory
from langchain.prompts import PromptTemplate

# Placeholder prompt: the original snippet references `prompt` without defining it.
# It must expose the memory's {history} slot and the {sms_input} variable used below.
prompt = PromptTemplate(
    input_variables=["history", "sms_input"],
    template="{history}\nHuman: {sms_input}\nAssistant:",
)

# LLM chain backed by a Baseten-hosted model, remembering the last two exchanges.
sms_chain = LLMChain(
    llm=Baseten(model="YOUR-MODEL-VERSION-ID"),
    prompt=prompt,
    memory=ConversationBufferWindowMemory(k=2),
    llm_kwargs={"max_length": 4096},
)

app = Flask(__name__)

@app.route("/sms", methods=['GET', 'POST'])
def sms():
    """Twilio webhook: read the inbound SMS body, run the chain, and reply via TwiML."""
    resp = MessagingResponse()
    inb_msg = request.form['Body'].lower().strip()
    output = sms_chain.predict(sms_input=inb_msg)
    print(output)
    resp.message(output)
    return str(resp)
if __name__ == "__main__":
    app.run(debug=True)
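
# A minimal local smoke test, assuming the Flask dev server is running on its default
# port 5000 (the Baseten model ID above is still a placeholder):
#
#   curl -X POST http://localhost:5000/sms -d "Body=hello"
#
# In production, expose this endpoint publicly (e.g. via ngrok) and set that URL as the
# Twilio phone number's incoming-message webhook so inbound SMS hit /sms.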