Create notebooks/demo Notebook
notebooks/demo Notebook
ADDED  +67 -0
@@ -0,0 +1,67 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Shasha Demo Notebook\n",
+    "This notebook shows how to programmatically invoke our AI inference pipeline, run sentiment analysis, and generate code examples."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# 1. Set up the inference client\n",
+    "from hf_client import get_inference_client\n",
+    "# Initialize a client for Qwen3-32B (falls back to Groq if unavailable)\n",
+    "client = get_inference_client('Qwen/Qwen3-32B', provider='auto')\n",
+    "# Example chat completion request\n",
+    "resp = client.chat.completions.create(\n",
+    "    model='Qwen/Qwen3-32B',\n",
+    "    messages=[{'role': 'user', 'content': 'Write a Python function to reverse a string.'}]\n",
+    ")\n",
+    "print(resp.choices[0].message.content)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# 2. Sentiment analysis with Hugging Face Transformers\n",
+    "from transformers import pipeline\n",
+    "# Load a standard sentiment-analysis model\n",
+    "sentiment = pipeline('sentiment-analysis', model='distilbert-base-uncased-finetuned-sst-2-english')\n",
+    "print(sentiment('I love building AI-powered tools!'))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "---\n",
+    "## Next steps\n",
+    "- Try different models (e.g. Gemini Pro or Fireworks AI) by changing the model= parameter.\n",
+    "- Explore custom plugins via plugins.py to integrate with Slack or GitHub.\n",
+    "- Use auth.py to load private files from Google Drive."
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "name": "python",
+   "version": "3.x"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
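Following the "Next steps" cell, a minimal sketch of swapping the `model=` parameter might look like the snippet below. It assumes the `get_inference_client` helper from the project's `hf_client` module behaves as shown in the notebook above; the model id used here is a placeholder, not a model this project is known to serve.

```python
# Sketch only: reuse the notebook's client helper with a different model.
# Assumes hf_client.get_inference_client works as demonstrated in the notebook;
# 'some-org/some-model' is a placeholder to be replaced with a model your
# configured provider actually serves.
from hf_client import get_inference_client

model_id = 'some-org/some-model'  # placeholder model id

client = get_inference_client(model_id, provider='auto')
resp = client.chat.completions.create(
    model=model_id,
    messages=[{'role': 'user', 'content': 'Write a haiku about notebooks.'}],
)
print(resp.choices[0].message.content)
```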