Trudy committed · Commit 7f2a14a · 0 parents

Initial commit: Gemini Realtime Console

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. .dockerignore +11 -0
  2. .gcloudignore +5 -0
  3. .gitattributes +13 -0
  4. .gitignore +34 -0
  5. CONTRIBUTING.md +33 -0
  6. Dockerfile +33 -0
  7. LICENSE +202 -0
  8. README.md +135 -0
  9. app.yaml +29 -0
  10. docker-compose.yml +14 -0
  11. package-lock.json +0 -0
  12. package.json +73 -0
  13. public/favicon.ico +3 -0
  14. public/index.html +49 -0
  15. public/robots.txt +3 -0
  16. readme/thumbnail.png +3 -0
  17. server/index.js +117 -0
  18. src/App.scss +171 -0
  19. src/App.test.tsx +25 -0
  20. src/App.tsx +80 -0
  21. src/components/altair/Altair.tsx +107 -0
  22. src/components/audio-pulse/AudioPulse.tsx +64 -0
  23. src/components/audio-pulse/audio-pulse.scss +51 -0
  24. src/components/control-tray/ControlTray.tsx +265 -0
  25. src/components/control-tray/control-tray.scss +211 -0
  26. src/components/ios-modal/IOSModal.scss +57 -0
  27. src/components/ios-modal/IOSModal.tsx +51 -0
  28. src/components/logger/Logger.tsx +273 -0
  29. src/components/logger/logger.scss +116 -0
  30. src/components/logger/mock-logs.ts +151 -0
  31. src/components/side-panel/SidePanel.tsx +185 -0
  32. src/components/side-panel/side-panel.scss +309 -0
  33. src/contexts/LiveAPIContext.tsx +46 -0
  34. src/hooks/use-live-api.ts +116 -0
  35. src/hooks/use-media-stream-mux.ts +23 -0
  36. src/hooks/use-screen-capture.ts +77 -0
  37. src/hooks/use-webcam.ts +123 -0
  38. src/index.css +13 -0
  39. src/index.tsx +35 -0
  40. src/lib/audio-recorder.ts +417 -0
  41. src/lib/audio-streamer.ts +270 -0
  42. src/lib/audioworklet-registry.ts +43 -0
  43. src/lib/multimodal-live-client.ts +313 -0
  44. src/lib/platform.ts +6 -0
  45. src/lib/store-logger.ts +65 -0
  46. src/lib/utils.ts +86 -0
  47. src/lib/worklets/audio-processing.ts +73 -0
  48. src/lib/worklets/safari-audio-processing.ts +99 -0
  49. src/lib/worklets/vol-meter.ts +65 -0
  50. src/multimodal-live-types.ts +242 -0
.dockerignore ADDED
@@ -0,0 +1,11 @@
+ node_modules
+ npm-debug.log
+ build
+ .git
+ .gitignore
+ README.md
+ .env
+ .env.local
+ .env.development.local
+ .env.test.local
+ .env.production.local
.gcloudignore ADDED
@@ -0,0 +1,5 @@
+ # Ignore everything except app.yaml and the build directory
+ *
+ !app.yaml
+ !build
+ !build/**
.gitattributes ADDED
@@ -0,0 +1,13 @@
+ *.png filter=lfs diff=lfs merge=lfs -text
+ *.jpg filter=lfs diff=lfs merge=lfs -text
+ *.jpeg filter=lfs diff=lfs merge=lfs -text
+ *.gif filter=lfs diff=lfs merge=lfs -text
+ *.ico filter=lfs diff=lfs merge=lfs -text
+ *.mov filter=lfs diff=lfs merge=lfs -text
+ *.mp4 filter=lfs diff=lfs merge=lfs -text
+ *.mp3 filter=lfs diff=lfs merge=lfs -text
+ *.wav filter=lfs diff=lfs merge=lfs -text
+ *.webm filter=lfs diff=lfs merge=lfs -text
+ *.woff2 filter=lfs diff=lfs merge=lfs -text
+ *.ttf filter=lfs diff=lfs merge=lfs -text
+ build/** filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1,34 @@
+ # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
+
+ # dependencies
+ /node_modules
+ /.pnp
+ .pnp.js
+
+ # testing
+ /coverage
+
+ # production
+ /build
+
+ # misc
+ .DS_Store
+ .env
+ .env.local
+ .env.development.local
+ .env.test.local
+ .env.production.local
+
+ npm-debug.log*
+ yarn-debug.log*
+ yarn-error.log*
+
+ # Environment variables
+ .env.*
+
+ # Server
+ /server/dist
+
+ # IDE
+ .idea/
+ .vscode/
CONTRIBUTING.md ADDED
@@ -0,0 +1,33 @@
+ # How to contribute
+
+ We'd love to accept your patches and contributions to this project.
+
+ ## Before you begin
+
+ ### Sign our Contributor License Agreement
+
+ Contributions to this project must be accompanied by a
+ [Contributor License Agreement](https://cla.developers.google.com/about) (CLA).
+ You (or your employer) retain the copyright to your contribution; this simply
+ gives us permission to use and redistribute your contributions as part of the
+ project.
+
+ If you or your current employer have already signed the Google CLA (even if it
+ was for a different project), you probably don't need to do it again.
+
+ Visit <https://cla.developers.google.com/> to see your current agreements or to
+ sign a new one.
+
+ ### Review our community guidelines
+
+ This project follows
+ [Google's Open Source Community Guidelines](https://opensource.google/conduct/).
+
+ ## Contribution process
+
+ ### Code reviews
+
+ All submissions, including submissions by project members, require review. We
+ use GitHub pull requests for this purpose. Consult
+ [GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
+ information on using pull requests.
Dockerfile ADDED
@@ -0,0 +1,33 @@
+ # Use the official Node.js image
+ FROM node:20
+
+ # Create and change to the app directory
+ WORKDIR /usr/src/app
+
+ # Copy application dependency manifests to the container image
+ COPY package*.json ./
+
+ # Install dependencies
+ RUN npm install
+
+ # Copy application code and environment variables
+ COPY . .
+
+ # Build the React app
+ RUN npm run build
+
+ # Install serve to properly serve the static files
+ RUN npm install -g serve
+
+ # Expose the ports the app runs on
+ EXPOSE 3000
+ EXPOSE 3001
+
+ # Create a script to run both services
+ RUN echo '#!/bin/bash\n\
+ npm run start-server & \
+ serve -s build -l 3000\n\
+ wait' > start-services.sh && chmod +x start-services.sh
+
+ # Run both services
+ CMD ["./start-services.sh"]
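For a quick local test of this image, it can be built and run directly; the ports match the `EXPOSE` directives above and the key is read by `server/index.js` at startup (a sketch — the `gemini-console` tag is just an illustrative name):

```bash
docker build -t gemini-console .
docker run --rm -p 3000:3000 -p 3001:3001 -e GEMINI_API_KEY="$GEMINI_API_KEY" gemini-console
```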
LICENSE ADDED
@@ -0,0 +1,202 @@
1
+
2
+ Apache License
3
+ Version 2.0, January 2004
4
+ http://www.apache.org/licenses/
5
+
6
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7
+
8
+ 1. Definitions.
9
+
10
+ "License" shall mean the terms and conditions for use, reproduction,
11
+ and distribution as defined by Sections 1 through 9 of this document.
12
+
13
+ "Licensor" shall mean the copyright owner or entity authorized by
14
+ the copyright owner that is granting the License.
15
+
16
+ "Legal Entity" shall mean the union of the acting entity and all
17
+ other entities that control, are controlled by, or are under common
18
+ control with that entity. For the purposes of this definition,
19
+ "control" means (i) the power, direct or indirect, to cause the
20
+ direction or management of such entity, whether by contract or
21
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
22
+ outstanding shares, or (iii) beneficial ownership of such entity.
23
+
24
+ "You" (or "Your") shall mean an individual or Legal Entity
25
+ exercising permissions granted by this License.
26
+
27
+ "Source" form shall mean the preferred form for making modifications,
28
+ including but not limited to software source code, documentation
29
+ source, and configuration files.
30
+
31
+ "Object" form shall mean any form resulting from mechanical
32
+ transformation or translation of a Source form, including but
33
+ not limited to compiled object code, generated documentation,
34
+ and conversions to other media types.
35
+
36
+ "Work" shall mean the work of authorship, whether in Source or
37
+ Object form, made available under the License, as indicated by a
38
+ copyright notice that is included in or attached to the work
39
+ (an example is provided in the Appendix below).
40
+
41
+ "Derivative Works" shall mean any work, whether in Source or Object
42
+ form, that is based on (or derived from) the Work and for which the
43
+ editorial revisions, annotations, elaborations, or other modifications
44
+ represent, as a whole, an original work of authorship. For the purposes
45
+ of this License, Derivative Works shall not include works that remain
46
+ separable from, or merely link (or bind by name) to the interfaces of,
47
+ the Work and Derivative Works thereof.
48
+
49
+ "Contribution" shall mean any work of authorship, including
50
+ the original version of the Work and any modifications or additions
51
+ to that Work or Derivative Works thereof, that is intentionally
52
+ submitted to Licensor for inclusion in the Work by the copyright owner
53
+ or by an individual or Legal Entity authorized to submit on behalf of
54
+ the copyright owner. For the purposes of this definition, "submitted"
55
+ means any form of electronic, verbal, or written communication sent
56
+ to the Licensor or its representatives, including but not limited to
57
+ communication on electronic mailing lists, source code control systems,
58
+ and issue tracking systems that are managed by, or on behalf of, the
59
+ Licensor for the purpose of discussing and improving the Work, but
60
+ excluding communication that is conspicuously marked or otherwise
61
+ designated in writing by the copyright owner as "Not a Contribution."
62
+
63
+ "Contributor" shall mean Licensor and any individual or Legal Entity
64
+ on behalf of whom a Contribution has been received by Licensor and
65
+ subsequently incorporated within the Work.
66
+
67
+ 2. Grant of Copyright License. Subject to the terms and conditions of
68
+ this License, each Contributor hereby grants to You a perpetual,
69
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70
+ copyright license to reproduce, prepare Derivative Works of,
71
+ publicly display, publicly perform, sublicense, and distribute the
72
+ Work and such Derivative Works in Source or Object form.
73
+
74
+ 3. Grant of Patent License. Subject to the terms and conditions of
75
+ this License, each Contributor hereby grants to You a perpetual,
76
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77
+ (except as stated in this section) patent license to make, have made,
78
+ use, offer to sell, sell, import, and otherwise transfer the Work,
79
+ where such license applies only to those patent claims licensable
80
+ by such Contributor that are necessarily infringed by their
81
+ Contribution(s) alone or by combination of their Contribution(s)
82
+ with the Work to which such Contribution(s) was submitted. If You
83
+ institute patent litigation against any entity (including a
84
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
85
+ or a Contribution incorporated within the Work constitutes direct
86
+ or contributory patent infringement, then any patent licenses
87
+ granted to You under this License for that Work shall terminate
88
+ as of the date such litigation is filed.
89
+
90
+ 4. Redistribution. You may reproduce and distribute copies of the
91
+ Work or Derivative Works thereof in any medium, with or without
92
+ modifications, and in Source or Object form, provided that You
93
+ meet the following conditions:
94
+
95
+ (a) You must give any other recipients of the Work or
96
+ Derivative Works a copy of this License; and
97
+
98
+ (b) You must cause any modified files to carry prominent notices
99
+ stating that You changed the files; and
100
+
101
+ (c) You must retain, in the Source form of any Derivative Works
102
+ that You distribute, all copyright, patent, trademark, and
103
+ attribution notices from the Source form of the Work,
104
+ excluding those notices that do not pertain to any part of
105
+ the Derivative Works; and
106
+
107
+ (d) If the Work includes a "NOTICE" text file as part of its
108
+ distribution, then any Derivative Works that You distribute must
109
+ include a readable copy of the attribution notices contained
110
+ within such NOTICE file, excluding those notices that do not
111
+ pertain to any part of the Derivative Works, in at least one
112
+ of the following places: within a NOTICE text file distributed
113
+ as part of the Derivative Works; within the Source form or
114
+ documentation, if provided along with the Derivative Works; or,
115
+ within a display generated by the Derivative Works, if and
116
+ wherever such third-party notices normally appear. The contents
117
+ of the NOTICE file are for informational purposes only and
118
+ do not modify the License. You may add Your own attribution
119
+ notices within Derivative Works that You distribute, alongside
120
+ or as an addendum to the NOTICE text from the Work, provided
121
+ that such additional attribution notices cannot be construed
122
+ as modifying the License.
123
+
124
+ You may add Your own copyright statement to Your modifications and
125
+ may provide additional or different license terms and conditions
126
+ for use, reproduction, or distribution of Your modifications, or
127
+ for any such Derivative Works as a whole, provided Your use,
128
+ reproduction, and distribution of the Work otherwise complies with
129
+ the conditions stated in this License.
130
+
131
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
132
+ any Contribution intentionally submitted for inclusion in the Work
133
+ by You to the Licensor shall be under the terms and conditions of
134
+ this License, without any additional terms or conditions.
135
+ Notwithstanding the above, nothing herein shall supersede or modify
136
+ the terms of any separate license agreement you may have executed
137
+ with Licensor regarding such Contributions.
138
+
139
+ 6. Trademarks. This License does not grant permission to use the trade
140
+ names, trademarks, service marks, or product names of the Licensor,
141
+ except as required for reasonable and customary use in describing the
142
+ origin of the Work and reproducing the content of the NOTICE file.
143
+
144
+ 7. Disclaimer of Warranty. Unless required by applicable law or
145
+ agreed to in writing, Licensor provides the Work (and each
146
+ Contributor provides its Contributions) on an "AS IS" BASIS,
147
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148
+ implied, including, without limitation, any warranties or conditions
149
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150
+ PARTICULAR PURPOSE. You are solely responsible for determining the
151
+ appropriateness of using or redistributing the Work and assume any
152
+ risks associated with Your exercise of permissions under this License.
153
+
154
+ 8. Limitation of Liability. In no event and under no legal theory,
155
+ whether in tort (including negligence), contract, or otherwise,
156
+ unless required by applicable law (such as deliberate and grossly
157
+ negligent acts) or agreed to in writing, shall any Contributor be
158
+ liable to You for damages, including any direct, indirect, special,
159
+ incidental, or consequential damages of any character arising as a
160
+ result of this License or out of the use or inability to use the
161
+ Work (including but not limited to damages for loss of goodwill,
162
+ work stoppage, computer failure or malfunction, or any and all
163
+ other commercial damages or losses), even if such Contributor
164
+ has been advised of the possibility of such damages.
165
+
166
+ 9. Accepting Warranty or Additional Liability. While redistributing
167
+ the Work or Derivative Works thereof, You may choose to offer,
168
+ and charge a fee for, acceptance of support, warranty, indemnity,
169
+ or other liability obligations and/or rights consistent with this
170
+ License. However, in accepting such obligations, You may act only
171
+ on Your own behalf and on Your sole responsibility, not on behalf
172
+ of any other Contributor, and only if You agree to indemnify,
173
+ defend, and hold each Contributor harmless for any liability
174
+ incurred by, or claims asserted against, such Contributor by reason
175
+ of your accepting any such warranty or additional liability.
176
+
177
+ END OF TERMS AND CONDITIONS
178
+
179
+ APPENDIX: How to apply the Apache License to your work.
180
+
181
+ To apply the Apache License to your work, attach the following
182
+ boilerplate notice, with the fields enclosed by brackets "[]"
183
+ replaced with your own identifying information. (Don't include
184
+ the brackets!) The text should be enclosed in the appropriate
185
+ comment syntax for the file format. We also recommend that a
186
+ file or class name and description of purpose be included on the
187
+ same "printed page" as the copyright notice for easier
188
+ identification within third-party archives.
189
+
190
+ Copyright [yyyy] [name of copyright owner]
191
+
192
+ Licensed under the Apache License, Version 2.0 (the "License");
193
+ you may not use this file except in compliance with the License.
194
+ You may obtain a copy of the License at
195
+
196
+ http://www.apache.org/licenses/LICENSE-2.0
197
+
198
+ Unless required by applicable law or agreed to in writing, software
199
+ distributed under the License is distributed on an "AS IS" BASIS,
200
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201
+ See the License for the specific language governing permissions and
202
+ limitations under the License.
README.md ADDED
@@ -0,0 +1,135 @@
+ ---
+ title: Gemini Realtime Console
+ emoji: 🎯
+ colorFrom: blue
+ colorTo: purple
+ sdk: docker
+ pinned: false
+ ---
+
+ # Multimodal Live API - Web console
+
+ This repository contains a React-based starter app for using the [Multimodal Live API](https://ai.google.dev/api/multimodal-live) over a websocket. It provides modules for streaming audio playback and recording user media such as the microphone, webcam, or screen capture, as well as a unified log view to aid in the development of your application.
+
+ [![Multimodal Live API Demo](readme/thumbnail.png)](https://www.youtube.com/watch?v=J_q7JY1XxFE)
+
+ Watch the demo of the Multimodal Live API [here](https://www.youtube.com/watch?v=J_q7JY1XxFE).
+
+ ## Usage
+
+ To get started, [create a free Gemini API key](https://aistudio.google.com/apikey) and add it to the `.env` file. Then:
+
+ ```
+ $ npm install && npm start
+ ```
+
+ We have provided several example applications on other branches of this repository:
+
+ - [demos/GenExplainer](https://github.com/google-gemini/multimodal-live-api-web-console/tree/demos/genexplainer)
+ - [demos/GenWeather](https://github.com/google-gemini/multimodal-live-api-web-console/tree/demos/genweather)
+ - [demos/GenList](https://github.com/google-gemini/multimodal-live-api-web-console/tree/demos/genlist)
+
+ ## Example
+
+ Below is an example of an entire application that will use Google Search grounding and then render graphs using [vega-embed](https://github.com/vega/vega-embed):
+
+ ```typescript
+ import { type FunctionDeclaration, SchemaType } from "@google/generative-ai";
+ import { useEffect, useRef, useState, memo } from "react";
+ import vegaEmbed from "vega-embed";
+ import { useLiveAPIContext } from "../../contexts/LiveAPIContext";
+
+ export const declaration: FunctionDeclaration = {
+   name: "render_altair",
+   description: "Displays an altair graph in json format.",
+   parameters: {
+     type: SchemaType.OBJECT,
+     properties: {
+       json_graph: {
+         type: SchemaType.STRING,
+         description:
+           "JSON STRING representation of the graph to render. Must be a string, not a json object",
+       },
+     },
+     required: ["json_graph"],
+   },
+ };
+
+ export function Altair() {
+   const [jsonString, setJSONString] = useState<string>("");
+   const { client, setConfig } = useLiveAPIContext();
+
+   useEffect(() => {
+     setConfig({
+       model: "models/gemini-2.0-flash-exp",
+       systemInstruction: {
+         parts: [
+           {
+             text: 'You are my helpful assistant. Any time I ask you for a graph call the "render_altair" function I have provided you. Dont ask for additional information just make your best judgement.',
+           },
+         ],
+       },
+       tools: [{ googleSearch: {} }, { functionDeclarations: [declaration] }],
+     });
+   }, [setConfig]);
+
+   useEffect(() => {
+     const onToolCall = (toolCall: ToolCall) => {
+       console.log(`got toolcall`, toolCall);
+       const fc = toolCall.functionCalls.find(
+         (fc) => fc.name === declaration.name
+       );
+       if (fc) {
+         const str = (fc.args as any).json_graph;
+         setJSONString(str);
+       }
+     };
+     client.on("toolcall", onToolCall);
+     return () => {
+       client.off("toolcall", onToolCall);
+     };
+   }, [client]);
+
+   const embedRef = useRef<HTMLDivElement>(null);
+
+   useEffect(() => {
+     if (embedRef.current && jsonString) {
+       vegaEmbed(embedRef.current, JSON.parse(jsonString));
+     }
+   }, [embedRef, jsonString]);
+   return <div className="vega-embed" ref={embedRef} />;
+ }
+ ```
+
+ ## Development
+
+ This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
+ The project consists of:
+
+ - an event-emitting websocket client to ease communication between the websocket and the front end
+ - a communication layer for processing audio in and out
+ - a boilerplate view for starting to build your apps and view logs
+
+ ## Available Scripts
+
+ In the project directory, you can run:
+
+ ### `npm start`
+
+ Runs the app in development mode.\
+ Open [http://localhost:3000](http://localhost:3000) to view it in the browser.
+
+ The page will reload if you make edits.\
+ You will also see any lint errors in the console.
+
+ ### `npm run build`
+
+ Builds the app for production to the `build` folder.\
+ It correctly bundles React in production mode and optimizes the build for the best performance.
+
+ The build is minified and the filenames include the hashes.\
+ Your app is ready to be deployed!
+
+ See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information.
+
+ _This is an experiment showcasing the Multimodal Live API, not an official Google product. We'll do our best to support and maintain this experiment but your mileage may vary. We encourage open sourcing projects as a way of learning from each other. Please respect our and other creators' rights, including copyright and trademark rights when present, when sharing these works and creating derivative work. If you want more info on Google's policy, you can find that [here](https://developers.google.com/terms/site-policies)._
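The Usage section above expects a `.env` file at the project root. A minimal sketch, assuming the `GEMINI_API_KEY` variable name that `server/index.js` and `docker-compose.yml` read:

```
GEMINI_API_KEY=your-api-key-here
```

When the app is run behind the Express relay, the key stays on the server; the browser talks to the WebSocket endpoint in `server/index.js` rather than to the Gemini endpoint directly.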
app.yaml ADDED
@@ -0,0 +1,29 @@
+ # Copyright 2024 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ runtime: nodejs20
+ env: standard
+
+ handlers:
+   # serve static files
+   - url: /(.*\..+)$
+     static_files: build/\1
+     upload: build/(.*\..+)$
+
+   # Catch all handler to index.html
+   - url: /.*
+     static_files: build/index.html
+     secure: always
+     redirect_http_response_code: 301
+     upload: build/index.html
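Because `.gcloudignore` uploads only `app.yaml` and the `build` directory, deploying this configuration is typically a production build followed by `gcloud app deploy` (a sketch; it assumes a Google Cloud project with App Engine already set up):

```bash
npm run build
gcloud app deploy app.yaml
```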
docker-compose.yml ADDED
@@ -0,0 +1,14 @@
+ version: '3.8'
+
+ services:
+   web:
+     build: .
+     ports:
+       - "3000:3000" # React frontend
+       - "3001:3001" # Backend server
+     environment:
+       - NODE_ENV=production
+       - GEMINI_API_KEY=${GEMINI_API_KEY}
+     volumes:
+       - .:/usr/src/app
+       - /usr/src/app/node_modules
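With this compose file, the stack can be started with the key supplied from the shell (a sketch):

```bash
GEMINI_API_KEY=your-api-key-here docker compose up --build
```

Note that the `.:/usr/src/app` bind mount overlays the image's working directory with your local checkout (except `node_modules`), so the `build` directory produced during the image build is only visible if it also exists locally.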
package-lock.json ADDED
The diff for this file is too large to render.
 
package.json ADDED
@@ -0,0 +1,73 @@
+ {
+   "name": "multimodal-live-api-web-console",
+   "version": "0.1.0",
+   "dependencies": {
+     "classnames": "^2.5.1",
+     "dotenv-flow": "^4.1.0",
+     "eventemitter3": "^5.0.1",
+     "lodash": "^4.17.21",
+     "react": "^18.3.1",
+     "react-dom": "^18.3.1",
+     "react-icons": "^5.3.0",
+     "react-scripts": "5.0.1",
+     "react-select": "^5.8.3",
+     "react-syntax-highlighter": "^15.6.1",
+     "sass": "^1.80.6",
+     "vega": "^5.30.0",
+     "vega-embed": "^6.29.0",
+     "vega-lite": "^5.22.0",
+     "web-vitals": "^2.1.4",
+     "zustand": "^5.0.1",
+     "express": "^4.18.2",
+     "ws": "^8.16.0",
+     "dotenv": "^16.4.1",
+     "websocket": "^1.0.34"
+   },
+   "scripts": {
+     "start-https": "HTTPS=true react-scripts start",
+     "start": "react-scripts start",
+     "build": "react-scripts build",
+     "test": "react-scripts test",
+     "eject": "react-scripts eject",
+     "startone": "PORT=3001 react-scripts start",
+     "start-server": "node server/index.js",
+     "start-network": "./scripts/start-network.sh"
+   },
+   "eslintConfig": {
+     "extends": [
+       "react-app",
+       "react-app/jest"
+     ]
+   },
+   "browserslist": {
+     "production": [
+       ">0.2%",
+       "not dead",
+       "not op_mini all"
+     ],
+     "development": [
+       "last 1 chrome version",
+       "last 1 firefox version",
+       "last 1 safari version"
+     ]
+   },
+   "devDependencies": {
+     "@google/generative-ai": "^0.21.0",
+     "@testing-library/jest-dom": "^5.17.0",
+     "@testing-library/react": "^13.4.0",
+     "@testing-library/user-event": "^13.5.0",
+     "@types/jest": "^27.5.2",
+     "@types/lodash": "^4.17.13",
+     "@types/node": "^16.18.119",
+     "@types/react": "^18.3.12",
+     "@types/react-dom": "^18.3.1",
+     "@types/react-syntax-highlighter": "^15.5.13",
+     "ts-node": "^10.9.2",
+     "typescript": "^5.6.3",
+     "@types/express": "^4.17.21",
+     "@types/ws": "^8.5.10"
+   },
+   "overrides": {
+     "typescript": "^5.6.3"
+   }
+ }
public/favicon.ico ADDED

Git LFS Details

  • SHA256: 158f684cdd35473689da5675ac28cd5ec11f4f58166490377cfc7440c0653025
  • Pointer size: 130 Bytes
  • Size of remote file: 15.1 kB
public/index.html ADDED
@@ -0,0 +1,49 @@
+ <!doctype html>
+ <html lang="en">
+   <head>
+     <meta charset="utf-8" />
+     <link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
+     <meta name="viewport" content="width=device-width, initial-scale=1" />
+     <meta name="theme-color" content="#000000" />
+     <meta
+       name="description"
+       content="Web site created using create-react-app"
+     />
+     <link rel="preconnect" href="https://fonts.googleapis.com" />
+     <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
+     <link
+       href="https://fonts.googleapis.com/css2?family=Space+Mono:ital,wght@0,400;0,700;1,400;1,700&display=swap"
+       rel="stylesheet"
+     />
+     <link
+       href="https://fonts.googleapis.com/css2?family=Material+Symbols+Outlined:opsz,wght,FILL,GRAD@20..48,100..700,0..1,-50..200&display=block"
+       rel="stylesheet"
+     />
+     <link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
+     <!--
+       Notice the use of %PUBLIC_URL% in the tags above.
+       It will be replaced with the URL of the `public` folder during the build.
+       Only files inside the `public` folder can be referenced from the HTML.
+
+       Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
+       work correctly both with client-side routing and a non-root public URL.
+       Learn how to configure a non-root public URL by running `npm run build`.
+     -->
+     <title>Multimodal Live - Console</title>
+   </head>
+
+   <body>
+     <noscript>You need to enable JavaScript to run this app.</noscript>
+     <div id="root"></div>
+     <!--
+       This HTML file is a template.
+       If you open it directly in the browser, you will see an empty page.
+
+       You can add webfonts, meta tags, or analytics to this file.
+       The build step will place the bundled scripts into the <body> tag.
+
+       To begin the development, run `npm start` or `yarn start`.
+       To create a production bundle, use `npm run build` or `yarn build`.
+     -->
+   </body>
+ </html>
public/robots.txt ADDED
@@ -0,0 +1,3 @@
+ # https://www.robotstxt.org/robotstxt.html
+ User-agent: *
+ Disallow:
readme/thumbnail.png ADDED

Git LFS Details

  • SHA256: c212e082f1aef2706e732425a05cf24f4ff276fab6366a216b823ec78e584cf9
  • Pointer size: 132 Bytes
  • Size of remote file: 1.43 MB
server/index.js ADDED
@@ -0,0 +1,117 @@
+ const express = require('express');
+ const path = require('node:path');
+ const { WebSocketServer, WebSocket } = require('ws');
+ const http = require('node:http');
+ require('dotenv').config();
+
+ const app = express();
+ const server = http.createServer(app);
+ const wss = new WebSocketServer({ server });
+
+ // Serve static files from the React app build directory
+ app.use(express.static(path.join(__dirname, '../build')));
+
+ // Make API key available to the WebSocket server but not to the client
+ const GEMINI_API_KEY = process.env.GEMINI_API_KEY;
+
+ if (!GEMINI_API_KEY) {
+   console.error('GEMINI_API_KEY environment variable is not set!');
+   process.exit(1);
+ }
+
+ // Create a WebSocket connection to Gemini for each client
+ const createGeminiWebSocket = (clientWs) => {
+   const geminiWs = new WebSocket(
+     `wss://generativelanguage.googleapis.com/ws/google.ai.generativelanguage.v1alpha.GenerativeService.BidiGenerateContent?key=${GEMINI_API_KEY}`
+   );
+
+   // Set up event handlers before connecting
+   geminiWs.on('open', () => {
+     console.log('Connected to Gemini API');
+     // If there's a pending setup message, send it now
+     if (geminiWs.pendingSetup) {
+       console.log('Sending pending setup:', geminiWs.pendingSetup);
+       geminiWs.send(JSON.stringify(geminiWs.pendingSetup));
+       geminiWs.pendingSetup = null;
+     }
+   });
+
+   geminiWs.on('message', (data) => {
+     try {
+       // Convert the message to a Blob before sending to client
+       const message = data.toString();
+       console.log('Received from Gemini:', message);
+
+       // Create a Blob from the message
+       const blob = Buffer.from(message);
+       clientWs.send(blob, { binary: true });
+     } catch (error) {
+       console.error('Error handling Gemini message:', error);
+     }
+   });
+
+   geminiWs.on('error', (error) => {
+     console.error('Gemini WebSocket error:', error);
+   });
+
+   geminiWs.on('close', (code, reason) => {
+     console.log('Gemini WebSocket closed:', code, reason.toString());
+   });
+
+   return geminiWs;
+ };
+
+ wss.on('connection', (ws) => {
+   console.log('Client connected');
+   let geminiWs = null;
+
+   ws.on('message', async (message) => {
+     try {
+       const data = JSON.parse(message);
+       console.log('Received from client:', data);
+
+       // Initialize Gemini connection when receiving setup message
+       if (data.setup) {
+         console.log('Initializing Gemini connection with config:', data.setup);
+         geminiWs = createGeminiWebSocket(ws);
+
+         // Store setup message to send once connection is established
+         if (geminiWs.readyState !== WebSocket.OPEN) {
+           geminiWs.pendingSetup = data;
+         } else {
+           geminiWs.send(JSON.stringify(data));
+         }
+         return;
+       }
+
+       // Forward message to Gemini if connection exists
+       if (geminiWs && geminiWs.readyState === WebSocket.OPEN) {
+         console.log('Forwarding to Gemini:', data);
+         geminiWs.send(JSON.stringify(data));
+       } else if (geminiWs) {
+         console.log('Waiting for Gemini connection to be ready...');
+       } else {
+         console.error('No Gemini connection established');
+       }
+     } catch (error) {
+       console.error('Error processing message:', error);
+     }
+   });
+
+   ws.on('close', () => {
+     console.log('Client disconnected');
+     if (geminiWs) {
+       geminiWs.close();
+     }
+   });
+ });
+
+ // Handle any remaining requests by returning the React app
+ app.get('*', (req, res) => {
+   res.sendFile(path.join(__dirname, '../build', 'index.html'));
+ });
+
+ const PORT = process.env.PORT || 3001;
+ server.listen(PORT, () => {
+   console.log(`Server is running on port ${PORT}`);
+ });
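The relay protocol implemented above is small: a client opens a socket to this server, sends a JSON message whose first payload carries `setup`, then streams further JSON messages, and responses come back as binary frames. A minimal browser-side sketch under those assumptions (the model name mirrors the one used in `Altair.tsx`; the port is the server default):

```typescript
// Sketch of a client for the relay in server/index.js (assumes it runs on port 3001).
const ws = new WebSocket("ws://localhost:3001");
ws.binaryType = "blob";

ws.addEventListener("open", () => {
  // The server lazily opens the upstream Gemini socket when it sees `setup`.
  ws.send(JSON.stringify({ setup: { model: "models/gemini-2.0-flash-exp" } }));
});

ws.addEventListener("message", async (event) => {
  // Gemini responses are forwarded as binary frames (Blobs in the browser).
  const text = await (event.data as Blob).text();
  console.log("Gemini response:", JSON.parse(text));
});
```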
src/App.scss ADDED
@@ -0,0 +1,171 @@
1
+ :root {
2
+ --text: white;
3
+ --gray-200: #b4b8bb;
4
+ --gray-300: #80868b;
5
+ --gray-500: #5f6368;
6
+ --gray-600: #444444;
7
+ --gray-700: #202020;
8
+ --gray-800: #171717;
9
+ --gray-900: #111111;
10
+ --gray-1000: #0a0a0a;
11
+ --border-stroke: #444444;
12
+ --accent-blue: rgb(161, 228, 242);
13
+ --accent-blue-active-bg: #001233;
14
+ --accent-blue-active: #98beff;
15
+ --accent-blue-headers: #448dff;
16
+ --accent-green: rgb(168, 218, 181);
17
+
18
+ --midnight-blue: rgb(0, 18, 51);
19
+ --blue-30: #99beff;
20
+
21
+ --accent-red: #ff4600;
22
+
23
+ --background: var(--gray-900);
24
+ --color: var(--text);
25
+
26
+ scrollbar-color: var(--gray-600) var(--gray-900);
27
+ scrollbar-width: thin;
28
+
29
+ --font-family: "Space Mono", monospace;
30
+
31
+ /* */
32
+ --Neutral-00: #000;
33
+ --Neutral-5: #181a1b;
34
+ --Neutral-10: #1c1f21;
35
+ --Neutral-15: #232729;
36
+ --Neutral-20: #2a2f31;
37
+ --Neutral-30: #404547;
38
+ --Neutral-50: #707577;
39
+ --Neutral-60: #888d8f;
40
+ --Neutral-80: #c3c6c7;
41
+ --Neutral-90: #e1e2e3;
42
+
43
+ --Green-500: #0d9c53;
44
+ --Green-700: #025022;
45
+
46
+ --Blue-500: #1f94ff;
47
+ --Blue-800: #0f3557;
48
+
49
+ --Red-400: #ff9c7a;
50
+ --Red-500: #ff4600;
51
+ --Red-600: #e03c00;
52
+ --Red-700: #bd3000;
53
+ }
54
+
55
+ body {
56
+ font-family: "Space Mono", monospace;
57
+ background: var(--Neutral-30);
58
+ }
59
+
60
+ .material-symbols-outlined {
61
+ &.filled {
62
+ font-variation-settings:
63
+ "FILL" 1,
64
+ "wght" 400,
65
+ "GRAD" 0,
66
+ "opsz" 24;
67
+ }
68
+ }
69
+
70
+ .space-mono-regular {
71
+ font-family: "Space Mono", monospace;
72
+ font-weight: 400;
73
+ font-style: normal;
74
+ }
75
+
76
+ .space-mono-bold {
77
+ font-family: "Space Mono", monospace;
78
+ font-weight: 700;
79
+ font-style: normal;
80
+ }
81
+
82
+ .space-mono-regular-italic {
83
+ font-family: "Space Mono", monospace;
84
+ font-weight: 400;
85
+ font-style: italic;
86
+ }
87
+
88
+ .space-mono-bold-italic {
89
+ font-family: "Space Mono", monospace;
90
+ font-weight: 700;
91
+ font-style: italic;
92
+ }
93
+
94
+ .hidden {
95
+ display: none;
96
+ }
97
+
98
+ .flex {
99
+ display: flex;
100
+ }
101
+
102
+ .h-screen-full {
103
+ height: 100vh;
104
+ }
105
+
106
+ .w-screen-full {
107
+ width: 100vw;
108
+ }
109
+
110
+ .flex-col {
111
+ flex-direction: column;
112
+ }
113
+
114
+ @media (prefers-reduced-motion: no-preference) {}
115
+
116
+ .streaming-console {
117
+ background: var(--Neutral-5);
118
+ color: var(--gray-300);
119
+ display: flex;
120
+ height: 100vh;
121
+ width: 100vw;
122
+
123
+ a,
124
+ a:visited,
125
+ a:active {
126
+ color: var(--gray-300);
127
+ }
128
+
129
+ .disabled {
130
+ pointer-events: none;
131
+
132
+ >* {
133
+ pointer-events: none;
134
+ }
135
+ }
136
+
137
+ main {
138
+ position: relative;
139
+ display: flex;
140
+ flex-direction: column;
141
+ align-items: center;
142
+ justify-content: center;
143
+ flex-grow: 1;
144
+ gap: 1rem;
145
+ max-width: 100%;
146
+ overflow: hidden;
147
+ }
148
+
149
+ .main-app-area {
150
+ display: flex;
151
+ flex: 1;
152
+ align-items: center;
153
+ justify-content: center;
154
+ }
155
+
156
+ .function-call {
157
+ position: absolute;
158
+ top: 0;
159
+ width: 100%;
160
+ height: 50%;
161
+ overflow-y: auto;
162
+ }
163
+ }
164
+
165
+ /* video player */
166
+ .stream {
167
+ flex-grow: 1;
168
+ max-width: 90%;
169
+ border-radius: 32px;
170
+ max-height: fit-content;
171
+ }
src/App.test.tsx ADDED
@@ -0,0 +1,25 @@
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import React from 'react';
18
+ import { render, screen } from '@testing-library/react';
19
+ import App from './App';
20
+
21
+ test('renders learn react link', () => {
22
+ render(<App />);
23
+ const linkElement = screen.getByText(/learn react/i);
24
+ expect(linkElement).toBeInTheDocument();
25
+ });
src/App.tsx ADDED
@@ -0,0 +1,80 @@
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import { useEffect, useRef, useState } from "react";
18
+ import "./App.scss";
19
+ import { LiveAPIProvider } from "./contexts/LiveAPIContext";
20
+ import SidePanel from "./components/side-panel/SidePanel";
21
+ import { Altair } from "./components/altair/Altair";
22
+ import ControlTray from "./components/control-tray/ControlTray";
23
+ import { IOSModal } from "./components/ios-modal/IOSModal";
24
+ import { isIOS } from "./lib/platform";
25
+ import cn from "classnames";
26
+
27
+ function App() {
28
+ // this video reference is used for displaying the active stream, whether that is the webcam or screen capture
29
+ // feel free to style as you see fit
30
+ const videoRef = useRef<HTMLVideoElement>(null);
31
+ // either the screen capture, the video or null, if null we hide it
32
+ const [videoStream, setVideoStream] = useState<MediaStream | null>(null);
33
+ const [showIOSModal, setShowIOSModal] = useState(false);
34
+
35
+ useEffect(() => {
36
+ // Only show the modal on iOS devices
37
+ if (isIOS()) {
38
+ setShowIOSModal(true);
39
+ }
40
+ }, []);
41
+
42
+ return (
43
+ <div className="App">
44
+ <LiveAPIProvider>
45
+ <div className="streaming-console">
46
+ <SidePanel />
47
+ <main>
48
+ <div className="main-app-area">
49
+ {/* APP goes here */}
50
+ <Altair />
51
+ <video
52
+ className={cn("stream", {
53
+ hidden: !videoRef.current || !videoStream,
54
+ })}
55
+ ref={videoRef}
56
+ autoPlay
57
+ playsInline
58
+ />
59
+ </div>
60
+
61
+ <ControlTray
62
+ videoRef={videoRef}
63
+ supportsVideo={true}
64
+ onVideoStreamChange={setVideoStream}
65
+ >
66
+ {/* put your own buttons here */}
67
+ </ControlTray>
68
+ </main>
69
+ </div>
70
+ </LiveAPIProvider>
71
+
72
+ <IOSModal
73
+ isOpen={showIOSModal}
74
+ onClose={() => setShowIOSModal(false)}
75
+ />
76
+ </div>
77
+ );
78
+ }
79
+
80
+ export default App;
src/components/altair/Altair.tsx ADDED
@@ -0,0 +1,107 @@
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+ import { type FunctionDeclaration, SchemaType } from "@google/generative-ai";
17
+ import { useEffect, useRef, useState, memo } from "react";
18
+ import vegaEmbed from "vega-embed";
19
+ import { useLiveAPIContext } from "../../contexts/LiveAPIContext";
20
+ import { ToolCall } from "../../multimodal-live-types";
21
+
22
+ const declaration: FunctionDeclaration = {
23
+ name: "render_altair",
24
+ description: "Displays an altair graph in json format.",
25
+ parameters: {
26
+ type: SchemaType.OBJECT,
27
+ properties: {
28
+ json_graph: {
29
+ type: SchemaType.STRING,
30
+ description:
31
+ "JSON STRING representation of the graph to render. Must be a string, not a json object",
32
+ },
33
+ },
34
+ required: ["json_graph"],
35
+ },
36
+ };
37
+
38
+ function AltairComponent() {
39
+ const [jsonString, setJSONString] = useState<string>("");
40
+ const { client, setConfig } = useLiveAPIContext();
41
+
42
+ useEffect(() => {
43
+ setConfig({
44
+ model: "models/gemini-2.0-flash-exp",
45
+ generationConfig: {
46
+ responseModalities: "audio",
47
+ speechConfig: {
48
+ voiceConfig: { prebuiltVoiceConfig: { voiceName: "Aoede" } },
49
+ },
50
+ },
51
+ systemInstruction: {
52
+ parts: [
53
+ {
54
+ text: 'You are my helpful assistant. Any time I ask you for a graph call the "render_altair" function I have provided you. Dont ask for additional information just make your best judgement.',
55
+ },
56
+ ],
57
+ },
58
+ tools: [
59
+ // there is a free-tier quota for search
60
+ { googleSearch: {} },
61
+ { functionDeclarations: [declaration] },
62
+ ],
63
+ });
64
+ }, [setConfig]);
65
+
66
+ useEffect(() => {
67
+ const onToolCall = (toolCall: ToolCall) => {
68
+ console.log(`got toolcall`, toolCall);
69
+ const fc = toolCall.functionCalls.find(
70
+ (fc) => fc.name === declaration.name,
71
+ );
72
+ if (fc) {
73
+ const str = (fc.args as any).json_graph;
74
+ setJSONString(str);
75
+ }
76
+ // send data for the response of your tool call
77
+ // in this case Im just saying it was successful
78
+ if (toolCall.functionCalls.length) {
79
+ setTimeout(
80
+ () =>
81
+ client.sendToolResponse({
82
+ functionResponses: toolCall.functionCalls.map((fc) => ({
83
+ response: { output: { success: true } },
84
+ id: fc.id,
85
+ })),
86
+ }),
87
+ 200,
88
+ );
89
+ }
90
+ };
91
+ client.on("toolcall", onToolCall);
92
+ return () => {
93
+ client.off("toolcall", onToolCall);
94
+ };
95
+ }, [client]);
96
+
97
+ const embedRef = useRef<HTMLDivElement>(null);
98
+
99
+ useEffect(() => {
100
+ if (embedRef.current && jsonString) {
101
+ vegaEmbed(embedRef.current, JSON.parse(jsonString));
102
+ }
103
+ }, [embedRef, jsonString]);
104
+ return <div className="vega-embed" ref={embedRef} />;
105
+ }
106
+
107
+ export const Altair = memo(AltairComponent);
src/components/audio-pulse/AudioPulse.tsx ADDED
@@ -0,0 +1,64 @@
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import "./audio-pulse.scss";
18
+ import React from "react";
19
+ import { useEffect, useRef } from "react";
20
+ import c from "classnames";
21
+
22
+ const lineCount = 3;
23
+
24
+ export type AudioPulseProps = {
25
+ active: boolean;
26
+ volume: number;
27
+ hover?: boolean;
28
+ };
29
+
30
+ export default function AudioPulse({ active, volume, hover }: AudioPulseProps) {
31
+ const lines = useRef<HTMLDivElement[]>([]);
32
+
33
+ useEffect(() => {
34
+ let timeout: number | null = null;
35
+ const update = () => {
36
+ lines.current.forEach(
37
+ (line, i) =>
38
+ (line.style.height = `${Math.min(
39
+ 24,
40
+ 4 + volume * (i === 1 ? 400 : 60),
41
+ )}px`),
42
+ );
43
+ timeout = window.setTimeout(update, 100);
44
+ };
45
+
46
+ update();
47
+
48
+ return () => clearTimeout((timeout as number)!);
49
+ }, [volume]);
50
+
51
+ return (
52
+ <div className={c("audioPulse", { active, hover })}>
53
+ {Array(lineCount)
54
+ .fill(null)
55
+ .map((_, i) => (
56
+ <div
57
+ key={i}
58
+ ref={(el) => (lines.current[i] = el!)}
59
+ style={{ animationDelay: `${i * 133}ms` }}
60
+ />
61
+ ))}
62
+ </div>
63
+ );
64
+ }
src/components/audio-pulse/audio-pulse.scss ADDED
@@ -0,0 +1,51 @@
1
+ .audioPulse {
2
+ display: flex;
3
+ width: 24px;
4
+ justify-content: space-evenly;
5
+ align-items: center;
6
+ transition: all 0.5s;
7
+
8
+ & > div {
9
+ background-color: var(--Neutral-30);
10
+ border-radius: 1000px;
11
+ width: 4px;
12
+ min-height: 4px;
13
+ border-radius: 1000px;
14
+ transition: height 0.1s;
15
+ }
16
+
17
+ &.hover > div {
18
+ animation: hover 1.4s infinite alternate ease-in-out;
19
+ }
20
+
21
+ height: 4px;
22
+ transition: opacity 0.333s;
23
+
24
+ &.active {
25
+ opacity: 1;
26
+
27
+ & > div {
28
+ background-color: var(--Neutral-80);
29
+ }
30
+ }
31
+ }
32
+
33
+ @keyframes hover {
34
+ from {
35
+ transform: translateY(0);
36
+ }
37
+
38
+ to {
39
+ transform: translateY(-3.5px);
40
+ }
41
+ }
42
+
43
+ @keyframes pulse {
44
+ from {
45
+ scale: 1 1;
46
+ }
47
+
48
+ to {
49
+ scale: 1.2 1.2;
50
+ }
51
+ }
src/components/control-tray/ControlTray.tsx ADDED
@@ -0,0 +1,265 @@
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import cn from "classnames";
18
+
19
+ import { memo, ReactNode, RefObject, useEffect, useRef, useState } from "react";
20
+ import { useLiveAPIContext } from "../../contexts/LiveAPIContext";
21
+ import { UseMediaStreamResult } from "../../hooks/use-media-stream-mux";
22
+ import { useScreenCapture } from "../../hooks/use-screen-capture";
23
+ import { useWebcam } from "../../hooks/use-webcam";
24
+ import { AudioRecorder } from "../../lib/audio-recorder";
25
+ import { audioContext } from "../../lib/utils";
26
+ import { isIOS } from "../../lib/platform";
27
+ import AudioPulse from "../audio-pulse/AudioPulse";
28
+ import "./control-tray.scss";
29
+
30
+ export type ControlTrayProps = {
31
+ videoRef: RefObject<HTMLVideoElement>;
32
+ children?: ReactNode;
33
+ supportsVideo: boolean;
34
+ onVideoStreamChange?: (stream: MediaStream | null) => void;
35
+ };
36
+
37
+ type MediaStreamButtonProps = {
38
+ isStreaming: boolean;
39
+ onIcon: string;
40
+ offIcon: string;
41
+ start: () => Promise<any>;
42
+ stop: () => any;
43
+ };
44
+
45
+ /**
46
+ * button used for triggering webcam or screen-capture
47
+ */
48
+ const MediaStreamButton = memo(
49
+ ({ isStreaming, onIcon, offIcon, start, stop }: MediaStreamButtonProps) =>
50
+ isStreaming ? (
51
+ <button className="action-button" onClick={stop}>
52
+ <span className="material-symbols-outlined">{onIcon}</span>
53
+ </button>
54
+ ) : (
55
+ <button className="action-button" onClick={start}>
56
+ <span className="material-symbols-outlined">{offIcon}</span>
57
+ </button>
58
+ ),
59
+ );
60
+
61
+ function ControlTray({
62
+ videoRef,
63
+ children,
64
+ onVideoStreamChange = () => {},
65
+ supportsVideo,
66
+ }: ControlTrayProps) {
67
+ const videoStreams = [useWebcam(), useScreenCapture()];
68
+ const [activeVideoStream, setActiveVideoStream] =
69
+ useState<MediaStream | null>(null);
70
+ const [webcam, screenCapture] = videoStreams;
71
+ const [inVolume, setInVolume] = useState(0);
72
+ const [audioRecorder] = useState(() => new AudioRecorder());
73
+ const [muted, setMuted] = useState(false);
74
+ const renderCanvasRef = useRef<HTMLCanvasElement>(null);
75
+ const connectButtonRef = useRef<HTMLButtonElement>(null);
76
+ const [simulatedVolume, setSimulatedVolume] = useState(0);
77
+ const isIOSDevice = isIOS();
78
+
79
+ const { client, connected, connect, disconnect, volume } =
80
+ useLiveAPIContext();
81
+
82
+ // Add iOS detection
83
+ const isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
84
+
85
+ useEffect(() => {
86
+ if (!connected && connectButtonRef.current) {
87
+ connectButtonRef.current.focus();
88
+ }
89
+ }, [connected]);
90
+
91
+ // Add iOS volume simulation effect
92
+ useEffect(() => {
93
+ if (isIOSDevice && connected && !muted) {
94
+ const interval = setInterval(() => {
95
+ // Create a smooth pulsing effect
96
+ const pulse = (Math.sin(Date.now() / 500) + 1) / 2; // Values between 0 and 1
97
+ setSimulatedVolume(0.02 + pulse * 0.03); // Small range for subtle effect
98
+ }, 50);
99
+
100
+ return () => clearInterval(interval);
101
+ }
102
+ }, [connected, muted, isIOSDevice]);
103
+
104
+ useEffect(() => {
105
+ document.documentElement.style.setProperty(
106
+ "--volume",
107
+ `${Math.max(5, Math.min((isIOSDevice ? simulatedVolume : inVolume) * 200, 8))}px`,
108
+ );
109
+ }, [inVolume, simulatedVolume, isIOSDevice]);
110
+
111
+ useEffect(() => {
112
+ const onData = (base64: string) => {
113
+ client.sendRealtimeInput([
114
+ {
115
+ mimeType: "audio/pcm;rate=16000",
116
+ data: base64,
117
+ },
118
+ ]);
119
+ };
120
+
121
+ if (connected && !muted && audioRecorder) {
122
+ audioRecorder.on("data", onData).on("volume", setInVolume).start();
123
+ } else {
124
+ audioRecorder.stop();
125
+ }
126
+
127
+ return () => {
128
+ audioRecorder.off("data", onData).off("volume", setInVolume);
129
+ };
130
+ }, [connected, client, muted, audioRecorder]);
131
+
132
+ useEffect(() => {
133
+ if (videoRef.current) {
134
+ videoRef.current.srcObject = activeVideoStream;
135
+ }
136
+
137
+ let timeoutId = -1;
138
+
139
+ function sendVideoFrame() {
140
+ const video = videoRef.current;
141
+ const canvas = renderCanvasRef.current;
142
+
143
+ if (!video || !canvas) {
144
+ return;
145
+ }
146
+
147
+ const ctx = canvas.getContext("2d")!;
148
+ canvas.width = video.videoWidth * 0.25;
149
+ canvas.height = video.videoHeight * 0.25;
150
+ if (canvas.width + canvas.height > 0) {
151
+ ctx.drawImage(videoRef.current, 0, 0, canvas.width, canvas.height);
152
+ const base64 = canvas.toDataURL("image/jpeg", 1.0);
153
+ const data = base64.slice(base64.indexOf(",") + 1, Infinity);
154
+ client.sendRealtimeInput([{ mimeType: "image/jpeg", data }]);
155
+ }
156
+ if (connected) {
157
+ timeoutId = window.setTimeout(sendVideoFrame, 1000 / 0.5);
158
+ }
159
+ }
160
+ if (connected && activeVideoStream !== null) {
161
+ requestAnimationFrame(sendVideoFrame);
162
+ }
163
+ return () => {
164
+ clearTimeout(timeoutId);
165
+ };
166
+ }, [connected, activeVideoStream, client, videoRef]);
167
+
168
+ //handler for swapping from one video-stream to the next
169
+ const changeStreams = (next?: UseMediaStreamResult) => async () => {
170
+ if (next) {
171
+ const mediaStream = await next.start();
172
+ if (mediaStream) {
173
+ setActiveVideoStream(mediaStream);
174
+ onVideoStreamChange(mediaStream);
175
+ } else {
176
+ setActiveVideoStream(null);
177
+ onVideoStreamChange(null);
178
+ }
179
+ } else {
180
+ setActiveVideoStream(null);
181
+ onVideoStreamChange(null);
182
+ }
183
+
184
+ videoStreams.filter((msr) => msr !== next).forEach((msr) => msr.stop());
185
+ };
186
+
187
+ return (
188
+ <section className="control-tray">
189
+ <canvas style={{ display: "none" }} ref={renderCanvasRef} />
190
+ <nav className={cn("actions-nav", { disabled: !connected })}>
191
+ <button
192
+ className={cn("action-button mic-button")}
193
+ onClick={() => setMuted(!muted)}
194
+ >
195
+ {!muted ? (
196
+ <span className="material-symbols-outlined filled">mic</span>
197
+ ) : (
198
+ <span className="material-symbols-outlined filled">mic_off</span>
199
+ )}
200
+ </button>
201
+
202
+ <div className="action-button no-action outlined">
203
+ <AudioPulse volume={volume} active={connected} hover={false} />
204
+ </div>
205
+
206
+ {supportsVideo && (
207
+ <>
208
+ <MediaStreamButton
209
+ isStreaming={screenCapture.isStreaming}
210
+ start={changeStreams(screenCapture)}
211
+ stop={changeStreams()}
212
+ onIcon="cancel_presentation"
213
+ offIcon="present_to_all"
214
+ />
215
+ <MediaStreamButton
216
+ isStreaming={webcam.isStreaming}
217
+ start={changeStreams(webcam)}
218
+ stop={changeStreams()}
219
+ onIcon="videocam_off"
220
+ offIcon="videocam"
221
+ />
222
+ </>
223
+ )}
224
+ {children}
225
+ </nav>
226
+
227
+ <div className={cn("connection-container", { connected })}>
228
+ <div className="connection-button-container">
229
+ <button
230
+ ref={connectButtonRef}
231
+ className={cn("action-button connect-toggle", { connected })}
232
+ onClick={async () => {
233
+ console.log('🔄 Connection button clicked');
234
+ try {
235
+ if (connected) {
236
+ console.log('📴 Disconnecting...');
237
+ await disconnect();
238
+ console.log('✅ Disconnected successfully');
239
+ } else {
240
+ console.log('🔌 Starting connection...');
241
+ console.log('📱 Device info:', { isIOSDevice, isSafari });
242
+
243
+ // We already have mic permissions from the modal, just connect
244
+ console.log('📞 Calling connect()...');
245
+ await connect();
246
+ console.log('✅ Connected successfully');
247
+ }
248
+ } catch (err) {
249
+ console.error('❌ Failed to toggle connection:', err);
250
+ // Here you could add UI feedback about the error
251
+ }
252
+ }}
253
+ >
254
+ <span className="material-symbols-outlined filled">
255
+ {connected ? "pause" : "play_arrow"}
256
+ </span>
257
+ </button>
258
+ </div>
259
+ <span className="text-indicator">Streaming</span>
260
+ </div>
261
+ </section>
262
+ );
263
+ }
264
+
265
+ export default memo(ControlTray);
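For orientation, a minimal sketch of how a parent might mount ControlTray, based only on the props referenced above (videoRef, supportsVideo, onVideoStreamChange); the import path and surrounding markup are assumptions, not part of this commit, and the component must render under the LiveAPIProvider shown further down.

```tsx
// Sketch (assumed wiring): give ControlTray a ref to the <video> element it
// samples frames from, and track the active stream it reports back.
import { useRef, useState } from "react";
import ControlTray from "./components/control-tray/ControlTray";

export default function ConsoleShell() {
  const videoRef = useRef<HTMLVideoElement>(null);
  const [videoStream, setVideoStream] = useState<MediaStream | null>(null);

  return (
    <main className={videoStream ? "has-video" : ""}>
      {/* ControlTray assigns the active stream to this element's srcObject. */}
      <video ref={videoRef} autoPlay playsInline muted />
      <ControlTray
        videoRef={videoRef}
        supportsVideo
        onVideoStreamChange={setVideoStream}
      />
    </main>
  );
}
```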
src/components/control-tray/control-tray.scss ADDED
@@ -0,0 +1,211 @@
 
 
1
+ .action-button {
2
+ display: flex;
3
+ align-items: center;
4
+ justify-content: center;
5
+ background: var(--Neutral-20);
6
+ color: var(--Neutral-60);
7
+ font-size: 1.25rem;
8
+ line-height: 1.75rem;
9
+ text-transform: lowercase;
10
+ cursor: pointer;
11
+ animation: opacity-pulse 3s ease-in infinite;
12
+ transition: all 0.2s ease-in-out;
13
+ width: 48px;
14
+ height: 48px;
15
+ border-radius: 18px;
16
+ border: 1px solid rgba(0, 0, 0, 0);
17
+ user-select: none;
18
+ cursor: pointer;
19
+
20
+ &:focus {
21
+ border: 2px solid var(--Neutral-20);
22
+ outline: 2px solid var(--Neutral-80);
23
+ }
24
+
25
+ &.outlined {
26
+ background: var(--Neutral-2);
27
+ border: 1px solid var(--Neutral-20);
28
+ }
29
+
30
+ &.no-action {
31
+ pointer-events: none;
32
+ }
33
+
34
+ &:hover {
35
+ background: rgba(0, 0, 0, 0);
36
+ border: 1px solid var(--Neutral-20);
37
+ }
38
+
39
+ &.connected {
40
+ background: var(--Blue-800);
41
+ color: var(--Blue-500);
42
+
43
+ &:hover {
44
+ border: 1px solid var(--Blue-500);
45
+ }
46
+ }
47
+ }
48
+
49
+ @property --volume {
50
+ syntax: "<length>";
51
+ inherits: false;
52
+ initial-value: 0px;
53
+ }
54
+
55
+ .disabled .mic-button,
56
+ .mic-button.disabled {
57
+ &:before {
58
+ background: rgba(0, 0, 0, 0);
59
+ }
60
+ }
61
+
62
+ .mic-button {
63
+ position: relative;
64
+ background-color: var(--accent-red);
65
+ z-index: 1;
66
+ color: black;
67
+ transition: all 0.2s ease-in;
68
+
69
+ &:focus {
70
+ border: 2px solid var(--Neutral-20);
71
+ outline: 2px solid var(--Red-500);
72
+ }
73
+
74
+ &:hover {
75
+ background-color: var(--Red-400);
76
+ }
77
+
78
+ &:before {
79
+ position: absolute;
80
+ z-index: -1;
81
+ top: calc(var(--volume) * -1);
82
+ left: calc(var(--volume) * -1);
83
+ display: block;
84
+ content: "";
85
+ opacity: 0.35;
86
+ background-color: var(--Red-500);
87
+ width: calc(100% + var(--volume) * 2);
88
+ height: calc(100% + var(--volume) * 2);
89
+ border-radius: 24px;
90
+ transition: all 0.02s ease-in-out;
91
+ }
92
+ }
93
+
94
+ .connect-toggle {
95
+ &:focus {
96
+ border: 2px solid var(--Neutral-20);
97
+ outline: 2px solid var(--Neutral-80);
98
+ }
99
+
100
+ &:not(.connected) {
101
+ background-color: var(--Blue-500);
102
+ color: var(--Neutral-5);
103
+ }
104
+ }
105
+
106
+ .control-tray {
107
+ position: absolute;
108
+ bottom: 0;
109
+ left: 50%;
110
+ transform: translate(-50%, 0);
111
+ display: inline-flex;
112
+ justify-content: center;
113
+ align-items: flex-start;
114
+ gap: 8px;
115
+ padding-bottom: 18px;
116
+
117
+ .ios-error-message {
118
+ position: absolute;
119
+ bottom: 100%;
120
+ left: 50%;
121
+ transform: translateX(-50%);
122
+ margin-bottom: 10px;
123
+ padding: 8px 16px;
124
+ background-color: rgba(255, 0, 0, 0.1);
125
+ border: 1px solid var(--Red-500);
126
+ border-radius: 8px;
127
+ color: var(--Red-500);
128
+ font-size: 14px;
129
+ white-space: nowrap;
130
+ animation: fadeIn 0.3s ease-in-out;
131
+ }
132
+
133
+ .disabled .action-button,
134
+ .action-button.disabled {
135
+ background: rgba(0, 0, 0, 0);
136
+ border: 1px solid var(--Neutral-30, #404547);
137
+ color: var(--Neutral-30);
138
+ }
139
+
140
+ .connection-container {
141
+ display: flex;
142
+ flex-direction: column;
143
+ justify-content: center;
144
+ align-items: center;
145
+ gap: 4px;
146
+
147
+ .connection-button-container {
148
+ border-radius: 27px;
149
+ border: 1px solid var(--Neutral-30);
150
+ background: var(--Neutral-5);
151
+ padding: 10px;
152
+ }
153
+
154
+ .text-indicator {
155
+ font-size: 11px;
156
+ color: var(--Blue-500);
157
+ user-select: none;
158
+ }
159
+
160
+ &:not(.connected) {
161
+ .text-indicator {
162
+ opacity: 0;
163
+ }
164
+ }
165
+ }
166
+ }
167
+
168
+ .actions-nav {
169
+ background: var(--Neutral-5);
170
+ border: 1px solid var(--Neutral-30);
171
+ border-radius: 27px;
172
+ display: inline-flex;
173
+ gap: 12px;
174
+ align-items: center;
175
+ overflow: clip;
176
+ padding: 10px;
177
+
178
+ transition: all 0.6s ease-in;
179
+
180
+ &>* {
181
+ display: flex;
182
+ align-items: center;
183
+ flex-direction: column;
184
+ gap: 1rem;
185
+ }
186
+ }
187
+
188
+ @keyframes opacity-pulse {
189
+ 0% {
190
+ opacity: 0.9;
191
+ }
192
+
193
+ 50% {
194
+ opacity: 1;
195
+ }
196
+
197
+ 100% {
198
+ opacity: 0.9;
199
+ }
200
+ }
201
+
202
+ @keyframes fadeIn {
203
+ from {
204
+ opacity: 0;
205
+ transform: translate(-50%, 10px);
206
+ }
207
+ to {
208
+ opacity: 1;
209
+ transform: translate(-50%, 0);
210
+ }
211
+ }
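The .mic-button::before halo above is sized entirely by the registered --volume custom property. A sketch of how the component side presumably feeds it; the clamping constants and target element are assumptions (in ControlTray this is driven by the recorder's "volume" events):

```ts
// Sketch (assumption): translate the recorder's 0..1 volume into a pixel
// radius and write it into --volume so the halo grows with input level.
function applyVolume(button: HTMLElement, volume: number) {
  const halo = Math.max(5, Math.min(volume * 200, 24)); // clamp to 5-24px
  button.style.setProperty("--volume", `${halo}px`);
}
```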
src/components/ios-modal/IOSModal.scss ADDED
@@ -0,0 +1,57 @@
 
 
1
+ .ios-modal-overlay {
2
+ position: fixed;
3
+ top: 0;
4
+ left: 0;
5
+ right: 0;
6
+ bottom: 0;
7
+ background-color: rgba(0, 0, 0, 0.5);
8
+ display: flex;
9
+ justify-content: center;
10
+ align-items: center;
11
+ z-index: 1000;
12
+ }
13
+
14
+ .ios-modal {
15
+ background: white;
16
+ padding: 2rem;
17
+ border-radius: 8px;
18
+ max-width: 90%;
19
+ width: 400px;
20
+ text-align: center;
21
+ box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);
22
+
23
+ h2 {
24
+ margin-top: 0;
25
+ color: #333;
26
+ }
27
+
28
+ p {
29
+ margin: 1rem 0;
30
+ color: #666;
31
+ line-height: 1.5;
32
+
33
+ &.error-message {
34
+ color: var(--Red-500);
35
+ background-color: rgba(255, 0, 0, 0.1);
36
+ padding: 8px;
37
+ border-radius: 4px;
38
+ margin: 1rem -8px;
39
+ font-weight: 500;
40
+ }
41
+ }
42
+ }
43
+
44
+ .ios-modal-button {
45
+ background-color: #007AFF;
46
+ color: white;
47
+ border: none;
48
+ padding: 0.8rem 2rem;
49
+ border-radius: 6px;
50
+ font-size: 1rem;
51
+ cursor: pointer;
52
+ margin-top: 1rem;
53
+
54
+ &:hover {
55
+ background-color: #0056b3;
56
+ }
57
+ }
src/components/ios-modal/IOSModal.tsx ADDED
@@ -0,0 +1,51 @@
 
 
1
+ import React, { useState } from 'react';
2
+ import './IOSModal.scss';
3
+
4
+ interface IOSModalProps {
5
+ isOpen: boolean;
6
+ onClose: () => void;
7
+ }
8
+
9
+ export const IOSModal: React.FC<IOSModalProps> = ({ isOpen, onClose }) => {
10
+ const [error, setError] = useState<string | null>(null);
11
+
12
+ const handleMicPermission = async () => {
13
+ try {
14
+ console.log('🎤 Requesting iOS microphone permission from modal...');
15
+ const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
16
+ console.log('✅ iOS microphone permission granted from modal!');
17
+ // Stop the stream since we don't need it yet - we'll request it again when recording starts
18
+ stream.getTracks().forEach(track => track.stop());
19
+ setError(null);
20
+ onClose();
21
+ } catch (err) {
22
+ console.error('❌ iOS microphone permission denied from modal:', err);
23
+ setError(err instanceof Error ? err.message : 'Failed to access microphone');
24
+ }
25
+ };
26
+
27
+ if (!isOpen) return null;
28
+
29
+ return (
30
+ <div className="ios-modal-overlay">
31
+ <div className="ios-modal">
32
+ <h2>Microphone Access Required</h2>
33
+ <p>
34
+ To use this app on iOS, we need permission to access your microphone.
35
+ Please tap "Allow" when prompted.
36
+ </p>
37
+ <p>
38
+ If you've denied permission, you'll need to enable it in your device settings.
39
+ </p>
40
+ {error && (
41
+ <p className="error-message">
42
+ Error: {error}
43
+ </p>
44
+ )}
45
+ <button onClick={handleMicPermission} className="ios-modal-button">
46
+ {error ? 'Try Again' : 'Got it'}
47
+ </button>
48
+ </div>
49
+ </div>
50
+ );
51
+ };
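A sketch of how IOSModal is meant to be used: render it once on iOS so the getUserMedia prompt happens inside a user gesture before the connect button is pressed. The inline user-agent check stands in for the helper in src/lib/platform.ts, whose export name is not shown in this section.

```tsx
import { ReactNode, useState } from "react";
import { IOSModal } from "./components/ios-modal/IOSModal";

// Sketch: gate the rest of the UI behind the one-time iOS mic-permission modal.
export function MicPermissionGate({ children }: { children: ReactNode }) {
  // Assumption: mirrors the detection done in src/lib/platform.ts.
  const isIOSDevice = /iPad|iPhone|iPod/.test(navigator.userAgent);
  const [open, setOpen] = useState(isIOSDevice);

  return (
    <>
      <IOSModal isOpen={open} onClose={() => setOpen(false)} />
      {children}
    </>
  );
}
```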
src/components/logger/Logger.tsx ADDED
@@ -0,0 +1,273 @@
 
 
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import "./logger.scss";
18
+
19
+ import { Part } from "@google/generative-ai";
20
+ import cn from "classnames";
21
+ import { ReactNode } from "react";
22
+ import { useLoggerStore } from "../../lib/store-logger";
23
+ import SyntaxHighlighter from "react-syntax-highlighter";
24
+ import { vs2015 as dark } from "react-syntax-highlighter/dist/esm/styles/hljs";
25
+ import {
26
+ ClientContentMessage,
27
+ isClientContentMessage,
28
+ isInterrupted,
29
+ isModelTurn,
30
+ isServerContentMessage,
31
+ isToolCallCancellationMessage,
32
+ isToolCallMessage,
33
+ isToolResponseMessage,
34
+ isTurnComplete,
35
+ ModelTurn,
36
+ ServerContentMessage,
37
+ StreamingLog,
38
+ ToolCallCancellationMessage,
39
+ ToolCallMessage,
40
+ ToolResponseMessage,
41
+ } from "../../multimodal-live-types";
42
+
43
+ const formatTime = (d: Date) => d.toLocaleTimeString().slice(0, -3);
44
+
45
+ const LogEntry = ({
46
+ log,
47
+ MessageComponent,
48
+ }: {
49
+ log: StreamingLog;
50
+ MessageComponent: ({
51
+ message,
52
+ }: {
53
+ message: StreamingLog["message"];
54
+ }) => ReactNode;
55
+ }): JSX.Element => (
56
+ <li
57
+ className={cn(
58
+ `plain-log`,
59
+ `source-${log.type.slice(0, log.type.indexOf("."))}`,
60
+ {
61
+ receive: log.type.includes("receive"),
62
+ send: log.type.includes("send"),
63
+ },
64
+ )}
65
+ >
66
+ <span className="timestamp">{formatTime(log.date)}</span>
67
+ <span className="source">{log.type}</span>
68
+ <span className="message">
69
+ <MessageComponent message={log.message} />
70
+ </span>
71
+ {log.count && <span className="count">{log.count}</span>}
72
+ </li>
73
+ );
74
+
75
+ const PlainTextMessage = ({
76
+ message,
77
+ }: {
78
+ message: StreamingLog["message"];
79
+ }) => <span>{message as string}</span>;
80
+
81
+ type Message = { message: StreamingLog["message"] };
82
+
83
+ const AnyMessage = ({ message }: Message) => (
84
+ <pre>{JSON.stringify(message, null, " ")}</pre>
85
+ );
86
+
87
+ function tryParseCodeExecutionResult(output: string) {
88
+ try {
89
+ const json = JSON.parse(output);
90
+ return JSON.stringify(json, null, " ");
91
+ } catch (e) {
92
+ return output;
93
+ }
94
+ }
95
+
96
+ const RenderPart = ({ part }: { part: Part }) =>
97
+ part.text && part.text.length ? (
98
+ <p className="part part-text">{part.text}</p>
99
+ ) : part.executableCode ? (
100
+ <div className="part part-executableCode">
101
+ <h5>executableCode: {part.executableCode.language}</h5>
102
+ <SyntaxHighlighter
103
+ language={part.executableCode.language.toLowerCase()}
104
+ style={dark}
105
+ >
106
+ {part.executableCode.code}
107
+ </SyntaxHighlighter>
108
+ </div>
109
+ ) : part.codeExecutionResult ? (
110
+ <div className="part part-codeExecutionResult">
111
+ <h5>codeExecutionResult: {part.codeExecutionResult.outcome}</h5>
112
+ <SyntaxHighlighter language="json" style={dark}>
113
+ {tryParseCodeExecutionResult(part.codeExecutionResult.output)}
114
+ </SyntaxHighlighter>
115
+ </div>
116
+ ) : (
117
+ <div className="part part-inlinedata">
118
+ <h5>Inline Data: {part.inlineData?.mimeType}</h5>
119
+ </div>
120
+ );
121
+
122
+ const ClientContentLog = ({ message }: Message) => {
123
+ const { turns, turnComplete } = (message as ClientContentMessage)
124
+ .clientContent;
125
+ return (
126
+ <div className="rich-log client-content user">
127
+ <h4 className="role-user">User</h4>
128
+ {turns.map((turn, i) => (
129
+ <div key={`message-turn-${i}`}>
130
+ {turn.parts
131
+ .filter((part) => !(part.text && part.text === "\n"))
132
+ .map((part, j) => (
133
+ <RenderPart part={part} key={`message-turn-${i}-part-${j}`} />
134
+ ))}
135
+ </div>
136
+ ))}
137
+ {!turnComplete ? <span>turnComplete: false</span> : ""}
138
+ </div>
139
+ );
140
+ };
141
+
142
+ const ToolCallLog = ({ message }: Message) => {
143
+ const { toolCall } = message as ToolCallMessage;
144
+ return (
145
+ <div className={cn("rich-log tool-call")}>
146
+ {toolCall.functionCalls.map((fc, i) => (
147
+ <div key={fc.id} className="part part-functioncall">
148
+ <h5>Function call: {fc.name}</h5>
149
+ <SyntaxHighlighter language="json" style={dark}>
150
+ {JSON.stringify(fc, null, " ")}
151
+ </SyntaxHighlighter>
152
+ </div>
153
+ ))}
154
+ </div>
155
+ );
156
+ };
157
+
158
+ const ToolCallCancellationLog = ({ message }: Message): JSX.Element => (
159
+ <div className={cn("rich-log tool-call-cancellation")}>
160
+ <span>
161
+ {" "}
162
+ ids:{" "}
163
+ {(message as ToolCallCancellationMessage).toolCallCancellation.ids.map(
164
+ (id) => (
165
+ <span className="inline-code" key={`cancel-${id}`}>
166
+ "{id}"
167
+ </span>
168
+ ),
169
+ )}
170
+ </span>
171
+ </div>
172
+ );
173
+
174
+ const ToolResponseLog = ({ message }: Message): JSX.Element => (
175
+ <div className={cn("rich-log tool-response")}>
176
+ {(message as ToolResponseMessage).toolResponse.functionResponses.map(
177
+ (fc) => (
178
+ <div key={`tool-response-${fc.id}`} className="part">
179
+ <h5>Function Response: {fc.id}</h5>
180
+ <SyntaxHighlighter language="json" style={dark}>
181
+ {JSON.stringify(fc.response, null, " ")}
182
+ </SyntaxHighlighter>
183
+ </div>
184
+ ),
185
+ )}
186
+ </div>
187
+ );
188
+
189
+ const ModelTurnLog = ({ message }: Message): JSX.Element => {
190
+ const serverContent = (message as ServerContentMessage).serverContent;
191
+ const { modelTurn } = serverContent as ModelTurn;
192
+ const { parts } = modelTurn;
193
+
194
+ return (
195
+ <div className="rich-log model-turn model">
196
+ <h4 className="role-model">Model</h4>
197
+ {parts
198
+ .filter((part) => !(part.text && part.text === "\n"))
199
+ .map((part, j) => (
200
+ <RenderPart part={part} key={`model-turn-part-${j}`} />
201
+ ))}
202
+ </div>
203
+ );
204
+ };
205
+
206
+ const CustomPlainTextLog = (msg: string) => () => (
207
+ <PlainTextMessage message={msg} />
208
+ );
209
+
210
+ export type LoggerFilterType = "conversations" | "tools" | "none";
211
+
212
+ export type LoggerProps = {
213
+ filter: LoggerFilterType;
214
+ };
215
+
216
+ const filters: Record<LoggerFilterType, (log: StreamingLog) => boolean> = {
217
+ tools: (log: StreamingLog) =>
218
+ isToolCallMessage(log.message) ||
219
+ isToolResponseMessage(log.message) ||
220
+ isToolCallCancellationMessage(log.message),
221
+ conversations: (log: StreamingLog) =>
222
+ isClientContentMessage(log.message) || isServerContentMessage(log.message),
223
+ none: () => true,
224
+ };
225
+
226
+ const component = (log: StreamingLog) => {
227
+ if (typeof log.message === "string") {
228
+ return PlainTextMessage;
229
+ }
230
+ if (isClientContentMessage(log.message)) {
231
+ return ClientContentLog;
232
+ }
233
+ if (isToolCallMessage(log.message)) {
234
+ return ToolCallLog;
235
+ }
236
+ if (isToolCallCancellationMessage(log.message)) {
237
+ return ToolCallCancellationLog;
238
+ }
239
+ if (isToolResponseMessage(log.message)) {
240
+ return ToolResponseLog;
241
+ }
242
+ if (isServerContentMessage(log.message)) {
243
+ const { serverContent } = log.message;
244
+ if (isInterrupted(serverContent)) {
245
+ return CustomPlainTextLog("interrupted");
246
+ }
247
+ if (isTurnComplete(serverContent)) {
248
+ return CustomPlainTextLog("turnComplete");
249
+ }
250
+ if (isModelTurn(serverContent)) {
251
+ return ModelTurnLog;
252
+ }
253
+ }
254
+ return AnyMessage;
255
+ };
256
+
257
+ export default function Logger({ filter = "none" }: LoggerProps) {
258
+ const { logs } = useLoggerStore();
259
+
260
+ const filterFn = filters[filter];
261
+
262
+ return (
263
+ <div className="logger">
264
+ <ul className="logger-list">
265
+ {logs.filter(filterFn).map((log, key) => {
266
+ return (
267
+ <LogEntry MessageComponent={component(log)} log={log} key={key} />
268
+ );
269
+ })}
270
+ </ul>
271
+ </div>
272
+ );
273
+ }
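Logger reads everything from the shared logger store, so usage is just a matter of choosing a filter; a minimal sketch (import path assumed):

```tsx
import Logger from "./components/logger/Logger";

// "tools" keeps only toolCall / toolResponse / toolCallCancellation entries;
// "conversations" keeps clientContent / serverContent; "none" shows everything.
export function ToolCallPanel() {
  return <Logger filter="tools" />;
}
```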
src/components/logger/logger.scss ADDED
@@ -0,0 +1,116 @@
 
 
1
+ .logger {
2
+ color: var(--gray-300);
3
+ width: 100%;
4
+ max-width: 100%;
5
+ display: block;
6
+
7
+ .logger-list {
8
+ padding: 0 0px 0 25px;
9
+ overflow-x: hidden;
10
+ width: calc(100% - 45px);
11
+ }
12
+
13
+ .user h4 {
14
+ color: var(--Green-500);
15
+ }
16
+
17
+ .model h4 {
18
+ color: var(--Blue-500);
19
+ }
20
+
21
+ .rich-log {
22
+ display: flex;
23
+ justify-content: center;
24
+ gap: 4px;
25
+
26
+ pre {
27
+ overflow-x: auto;
28
+ }
29
+
30
+ display: block;
31
+
32
+ h4 {
33
+ font-size: 14px;
34
+ text-transform: uppercase;
35
+ padding: 8px 0;
36
+ margin: 0;
37
+ }
38
+
39
+ h5 {
40
+ margin: 0;
41
+ padding-bottom: 8px;
42
+ border-bottom: 1px solid var(--Neutral-20);
43
+ }
44
+
45
+ .part {
46
+ background: var(--Neutral-5);
47
+ padding: 14px;
48
+ margin-bottom: 4px;
49
+ color: var(--Neutral-90);
50
+ border-radius: 8px;
51
+ }
52
+ }
53
+
54
+ .plain-log {
55
+ &>* {
56
+ padding-right: 4px;
57
+ }
58
+ }
59
+
60
+ .inline-code:not(:last-child) {
61
+ font-style: italic;
62
+
63
+ &::after {
64
+ content: ", ";
65
+ }
66
+ }
67
+ }
68
+
69
+ .logger li {
70
+ display: block;
71
+ padding: 8px 0;
72
+ color: var(--Neutral-50, #707577);
73
+ font-family: "Space Mono";
74
+ font-size: 14px;
75
+ font-style: normal;
76
+ font-weight: 400;
77
+ line-height: normal;
78
+ }
79
+
80
+ .logger li .timestamp {
81
+ width: 70px;
82
+ flex-grow: 0;
83
+ flex-shrink: 0;
84
+ color: var(--Neutral-50);
85
+ }
86
+
87
+ .logger li .source {
88
+ flex-shrink: 0;
89
+ font-weight: bold;
90
+ }
91
+
92
+ .logger li.source-server,
93
+ .logger li.receive {
94
+ color: var(--Blue-500);
95
+ }
96
+
97
+ .logger li.source-client,
98
+ .logger li.send:not(.source-server) {
99
+ color: var(--Green-500);
100
+ }
101
+
102
+ .logger li .count {
103
+ background-color: var(--Neutral-5);
104
+ font-size: x-small;
105
+ padding: 0em 0.6em;
106
+ padding: 0.3em 0.5em;
107
+ line-height: 1em;
108
+ vertical-align: middle;
109
+ border-radius: 8px;
110
+ color: var(--Blue-500);
111
+ }
112
+
113
+ .logger li .message {
114
+ flex-grow: 1;
115
+ color: var(--Neutral-50);
116
+ }
src/components/logger/mock-logs.ts ADDED
@@ -0,0 +1,151 @@
 
 
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ /**
18
+ * this module is just mock data, intended to make it easier to develop and style the logger
19
+ */
20
+ import type { StreamingLog } from "../../multimodal-live-types";
21
+
22
+ const soundLogs = (n: number): StreamingLog[] =>
23
+ new Array(n).fill(0).map(
24
+ (): StreamingLog => ({
25
+ date: new Date(),
26
+ type: "server.audio",
27
+ message: "buffer (11250)",
28
+ }),
29
+ );
30
+ //
31
+ const realtimeLogs = (n: number): StreamingLog[] =>
32
+ new Array(n).fill(0).map(
33
+ (): StreamingLog => ({
34
+ date: new Date(),
35
+ type: "client.realtimeInput",
36
+ message: "audio",
37
+ }),
38
+ );
39
+
40
+ export const mockLogs: StreamingLog[] = [
41
+ {
42
+ date: new Date(),
43
+ type: "client.open",
44
+ message: "connected to socket",
45
+ },
46
+ ...realtimeLogs(10),
47
+ ...soundLogs(10),
48
+ {
49
+ date: new Date(),
50
+ type: "receive.content",
51
+ message: {
52
+ serverContent: {
53
+ interrupted: true,
54
+ },
55
+ },
56
+ },
57
+ {
58
+ date: new Date(),
59
+ type: "receive.content",
60
+ message: {
61
+ serverContent: {
62
+ turnComplete: true,
63
+ },
64
+ },
65
+ },
66
+ //this one is just a string
67
+ // {
68
+ // date: new Date(),
69
+ // type: "server.send",
70
+ // message: {
71
+ // serverContent: {
72
+ // turnComplete: true,
73
+ // },
74
+ // },
75
+ // },
76
+ ...realtimeLogs(10),
77
+ ...soundLogs(20),
78
+ {
79
+ date: new Date(),
80
+ type: "receive.content",
81
+ message: {
82
+ serverContent: {
83
+ modelTurn: {
84
+ parts: [{ text: "Hey its text" }, { text: "more" }],
85
+ },
86
+ },
87
+ },
88
+ },
89
+ {
90
+ date: new Date(),
91
+ type: "client.send",
92
+ message: {
93
+ clientContent: {
94
+ turns: [
95
+ {
96
+ role: "User",
97
+ parts: [
98
+ {
99
+ text: "How much wood could a woodchuck chuck if a woodchuck could chuck wood",
100
+ },
101
+ ],
102
+ },
103
+ ],
104
+ turnComplete: true,
105
+ },
106
+ },
107
+ },
108
+ {
109
+ date: new Date(),
110
+ type: "server.toolCall",
111
+ message: {
112
+ toolCall: {
113
+ functionCalls: [
114
+ {
115
+ id: "akadjlasdfla-askls",
116
+ name: "take_photo",
117
+ args: {},
118
+ },
119
+ {
120
+ id: "akldjsjskldsj-102",
121
+ name: "move_camera",
122
+ args: { x: 20, y: 4 },
123
+ },
124
+ ],
125
+ },
126
+ },
127
+ },
128
+ {
129
+ date: new Date(),
130
+ type: "server.toolCallCancellation",
131
+ message: {
132
+ toolCallCancellation: {
133
+ ids: ["akladfjadslfk", "adkafsdljfsdk"],
134
+ },
135
+ },
136
+ },
137
+ {
138
+ date: new Date(),
139
+ type: "client.toolResponse",
140
+ message: {
141
+ toolResponse: {
142
+ functionResponses: [
143
+ {
144
+ response: { success: true },
145
+ id: "akslaj-10102",
146
+ },
147
+ ],
148
+ },
149
+ },
150
+ },
151
+ ];
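A sketch of how these mock entries can be replayed into the logger store while styling the console offline. It assumes useLoggerStore exposes the same log(entry) action that SidePanel wires to the client's "log" events; that shape is inferred, not shown in this commit.

```tsx
import { useEffect } from "react";
import { mockLogs } from "./components/logger/mock-logs";
import { useLoggerStore } from "./lib/store-logger";

// Sketch: mount this once during development to populate the Logger UI.
export function MockLogSeeder() {
  const { log } = useLoggerStore();

  useEffect(() => {
    mockLogs.forEach((entry) => log(entry)); // replay canned entries on mount
  }, [log]);

  return null;
}
```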
src/components/side-panel/SidePanel.tsx ADDED
@@ -0,0 +1,185 @@
 
 
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import cn from "classnames";
18
+ import { useEffect, useRef, useState } from "react";
19
+ import { RiSidebarFoldLine, RiSidebarUnfoldLine } from "react-icons/ri";
20
+ import Select from "react-select";
21
+ import { useLiveAPIContext } from "../../contexts/LiveAPIContext";
22
+ import { useLoggerStore } from "../../lib/store-logger";
23
+ import Logger from "../logger/Logger";
24
+ import type { LoggerFilterType } from "../logger/Logger";
25
+ import "./side-panel.scss";
26
+
27
+ const filterOptions = [
28
+ { value: "conversations", label: "Conversations" },
29
+ { value: "tools", label: "Tool Use" },
30
+ { value: "none", label: "All" },
31
+ ];
32
+
33
+ export default function SidePanel() {
34
+ const { connected, client } = useLiveAPIContext();
35
+ const [open, setOpen] = useState(window.innerWidth >= 768);
36
+ const [isMobile, setIsMobile] = useState(window.innerWidth < 768);
37
+ const loggerRef = useRef<HTMLDivElement>(null);
38
+ const loggerLastHeightRef = useRef<number>(-1);
39
+ const { log, logs } = useLoggerStore();
40
+
41
+ // Add effect to handle responsive behavior
42
+ useEffect(() => {
43
+ const handleResize = () => {
44
+ const mobileScreen = window.innerWidth < 768;
45
+ setIsMobile(mobileScreen);
46
+ setOpen(!mobileScreen);
47
+ };
48
+
49
+ // Initial check
50
+ handleResize();
51
+
52
+ // Add event listener for window resize
53
+ window.addEventListener('resize', handleResize);
54
+
55
+ // Cleanup
56
+ return () => window.removeEventListener('resize', handleResize);
57
+ }, []);
58
+
59
+ const [textInput, setTextInput] = useState("");
60
+ const [selectedOption, setSelectedOption] = useState<{
61
+ value: string;
62
+ label: string;
63
+ } | null>(null);
64
+ const inputRef = useRef<HTMLTextAreaElement>(null);
65
+
66
+ //scroll the log to the bottom when new logs come in
67
+ useEffect(() => {
68
+ const el = loggerRef.current;
69
+ if (el) {
70
+ const scrollHeight = el.scrollHeight;
71
+ if (scrollHeight !== loggerLastHeightRef.current) {
72
+ el.scrollTop = scrollHeight;
73
+ loggerLastHeightRef.current = scrollHeight;
74
+ }
75
+ }
76
+ }, [logs]);
77
+
78
+ // listen for log events and store them
79
+ useEffect(() => {
80
+ client.on("log", log);
81
+ return () => {
82
+ client.off("log", log);
83
+ };
84
+ }, [client, log]);
85
+
86
+ const handleSubmit = () => {
87
+ client.send([{ text: textInput }]);
88
+
89
+ setTextInput("");
90
+ if (inputRef.current) {
91
+ inputRef.current.innerText = "";
92
+ }
93
+ };
94
+
95
+ return (
96
+ <div className={cn("side-panel", {
97
+ open,
98
+ mobile: isMobile
99
+ })}>
100
+ <header className="top">
101
+ <h2>Console</h2>
102
+ {open ? (
103
+ <button type="button" className="opener" onClick={() => setOpen(false)}>
104
+ <RiSidebarFoldLine color="#b4b8bb" />
105
+ </button>
106
+ ) : (
107
+ <button type="button" className="opener" onClick={() => setOpen(true)}>
108
+ <RiSidebarUnfoldLine color="#b4b8bb" />
109
+ </button>
110
+ )}
111
+ </header>
112
+ <section className="indicators">
113
+ <Select
114
+ className="react-select"
115
+ classNamePrefix="react-select"
116
+ styles={{
117
+ control: (baseStyles) => ({
118
+ ...baseStyles,
119
+ background: "var(--Neutral-15)",
120
+ color: "var(--Neutral-90)",
121
+ minHeight: "33px",
122
+ maxHeight: "33px",
123
+ border: 0,
124
+ }),
125
+ option: (styles, { isFocused, isSelected }) => ({
126
+ ...styles,
127
+ backgroundColor: isFocused
128
+ ? "var(--Neutral-30)"
129
+ : isSelected
130
+ ? "var(--Neutral-20)"
131
+ : undefined,
132
+ }),
133
+ }}
134
+ defaultValue={selectedOption}
135
+ options={filterOptions}
136
+ onChange={(e) => {
137
+ setSelectedOption(e);
138
+ }}
139
+ />
140
+ <div className={cn("streaming-indicator", { connected })}>
141
+ {connected
142
+ ? `🔵${open ? " Streaming" : ""}`
143
+ : `⏸️${open ? " Paused" : ""}`}
144
+ </div>
145
+ </section>
146
+ <div className="side-panel-container" ref={loggerRef}>
147
+ <Logger
148
+ filter={(selectedOption?.value as LoggerFilterType) || "none"}
149
+ />
150
+ </div>
151
+ <div className={cn("input-container", { disabled: !connected })}>
152
+ <div className="input-content">
153
+ <textarea
154
+ className="input-area"
155
+ ref={inputRef}
156
+ onKeyDown={(e) => {
157
+ if (e.key === "Enter" && !e.shiftKey) {
158
+ e.preventDefault();
159
+ e.stopPropagation();
160
+ handleSubmit();
161
+ }
162
+ }}
163
+ onChange={(e) => setTextInput(e.target.value)}
164
+ value={textInput}
165
+ />
166
+ <span
167
+ className={cn("input-content-placeholder", {
168
+ hidden: textInput.length,
169
+ })}
170
+ >
171
+ Type&nbsp;something...
172
+ </span>
173
+
174
+ <button
175
+ type="button"
176
+ className="send-button material-symbols-outlined filled"
177
+ onClick={handleSubmit}
178
+ >
179
+ send
180
+ </button>
181
+ </div>
182
+ </div>
183
+ </div>
184
+ );
185
+ }
src/components/side-panel/side-panel.scss ADDED
@@ -0,0 +1,309 @@
 
 
1
+ .side-panel {
2
+ background: var(--Neutral-00);
3
+ width: 40px; /* when closed */
4
+ display: flex;
5
+ flex-direction: column;
6
+ height: 100vh;
7
+ transition: all 0.2s ease-in;
8
+ font-family: Arial, sans-serif;
9
+ border-right: 1px solid var(--gray-600);
10
+ color: var(--Neutral-90, #e1e2e3);
11
+ font-family: var(--font-family);
12
+ font-size: 13px;
13
+ font-style: normal;
14
+ font-weight: 400;
15
+ line-height: 160%;
16
+
17
+ .react-select {
18
+ background: var(--Neutral-20);
19
+ color: var(--Neutral-90);
20
+ width: 193px;
21
+ height: 30px;
22
+
23
+ .react-select__single-value {
24
+ color: var(--Neutral-90);
25
+ }
26
+
27
+ .react-select__menu {
28
+ background: var(--Neutral-20);
29
+ color: var(--Neutral-90);
30
+ }
31
+
32
+ .react-select__option:hover,
33
+ .react-select__option:focus,
34
+ .react-select__option:focus-within {
35
+ background: var(--Neutral-30);
36
+ }
37
+ }
38
+
39
+ .hidden {
40
+ display: none !important;
41
+ }
42
+
43
+ &.open {
44
+ width: 400px;
45
+ height: 100vh;
46
+
47
+ .top h2 {
48
+ left: 0%;
49
+ display: block;
50
+ opacity: 1;
51
+ }
52
+ }
53
+
54
+ .top {
55
+ display: flex;
56
+ width: calc(100% - 45px);
57
+ justify-content: space-between;
58
+ align-items: center;
59
+ padding: 12px 20px 12px 25px;
60
+ border-bottom: 1px solid var(--Neutral-20);
61
+
62
+ h2 {
63
+ position: relative;
64
+ color: var(--Neutral-90, #e1e2e3);
65
+ font-family: "Google Sans";
66
+ font-size: 21px;
67
+ font-style: normal;
68
+ font-weight: 500;
69
+ line-height: 16px; /* 100% */
70
+
71
+ opacity: 0;
72
+ display: none;
73
+ left: -100%;
74
+ transition:
75
+ opacity 0.2s ease-in,
76
+ left 0.2s ease-in,
77
+ display 0.2s ease-in;
78
+ transition-behavior: allow-discrete;
79
+
80
+ @starting-style {
81
+ left: 0%;
82
+ opacity: 1;
83
+ }
84
+ }
85
+ }
86
+
87
+ .opener {
88
+ height: 30px;
89
+ transition: transform 0.2s ease-in;
90
+ }
91
+
92
+ &:not(.open) {
93
+ .side-panel-container {
94
+ opacity: 0;
95
+ display: none;
96
+ transition: all 0.2s ease-in allow-discrete;
97
+ transition-delay: 0.1s;
98
+ }
99
+
100
+ .indicators .streaming-indicator {
101
+ width: 30px;
102
+ opacity: 0;
103
+ }
104
+
105
+ .opener {
106
+ transform: translate(-50%, 0);
107
+ }
108
+
109
+ .input-container {
110
+ opacity: 0;
111
+ display: none;
112
+ transition: all 0.2s ease-in allow-discrete;
113
+ }
114
+ }
115
+
116
+ .indicators {
117
+ display: flex;
118
+ padding: 24px 25px;
119
+ justify-content: flex-end;
120
+ gap: 21px;
121
+ .streaming-indicator {
122
+ user-select: none;
123
+ border-radius: 4px;
124
+ border: 1px solid var(--Neutral-20, #2a2f31);
125
+ background: var(--Neutral-10, #1c1f21);
126
+ display: flex;
127
+ width: 136px;
128
+ height: 30px;
129
+ padding-left: 4px;
130
+ justify-content: center;
131
+ align-items: center;
132
+ gap: 6px;
133
+ flex-shrink: 0;
134
+ text-align: center;
135
+ font-family: "Space Mono";
136
+ font-size: 14px;
137
+ font-style: normal;
138
+ font-weight: 400;
139
+ line-height: normal;
140
+ transition: width 0.2s ease-in;
141
+
142
+ &.connected {
143
+ color: var(--Blue-500, #0d9c53);
144
+ }
145
+ }
146
+ }
147
+
148
+ .side-panel-container {
149
+ align-self: flex-end;
150
+ width: 400px;
151
+ flex-grow: 1;
152
+ overflow-x: hidden;
153
+ overflow-y: auto;
154
+ /*scrollbar-gutter: stable both-edges;*/
155
+ }
156
+
157
+ .input-container {
158
+ height: 50px;
159
+ flex-grow: 0;
160
+ flex-shrink: 0;
161
+ border-top: 1px solid var(--Neutral-20);
162
+ padding: 14px 25px;
163
+ overflow: hidden;
164
+
165
+ .input-content {
166
+ position: relative;
167
+ background: var(--Neutral-10);
168
+ border: 1px solid var(--Neutral-15);
169
+ height: 22px;
170
+ border-radius: 10px;
171
+ padding: 11px 18px;
172
+
173
+ .send-button {
174
+ position: absolute;
175
+ top: 50%;
176
+ right: 0;
177
+ transform: translate(0, -50%);
178
+ background: none;
179
+ border: 0;
180
+ color: var(--Neutral-20);
181
+ cursor: pointer;
182
+ transition: color 0.1s ease-in;
183
+ z-index: 2;
184
+
185
+ &:hover {
186
+ color: var(--Neutral-60);
187
+ }
188
+ }
189
+
190
+ .input-area {
191
+ background: none;
192
+ color: var(--Neutral-90);
193
+ field-sizing: content;
194
+ position: absolute;
195
+ top: 0;
196
+ left: 0;
197
+ z-index: 2;
198
+ display: inline-block;
199
+ width: calc(100% - 72px);
200
+ max-height: 20px;
201
+ outline: none;
202
+ --webkit-box-flex: 1;
203
+ flex: 1;
204
+ word-break: break-word;
205
+ overflow: auto;
206
+ padding: 14px 18px;
207
+ border: 0;
208
+ resize: none;
209
+ }
210
+
211
+ .input-content-placeholder {
212
+ position: absolute;
213
+ left: 0;
214
+ top: 0;
215
+ display: flex;
216
+ align-items: center;
217
+ z-index: 1;
218
+ height: 100%;
219
+ width: 100%;
220
+ pointer-events: none;
221
+ user-select: none;
222
+ padding: 0px 18px;
223
+ white-space: pre-wrap;
224
+ }
225
+ }
226
+ }
227
+
228
+ @media screen and (max-width: 768px) {
229
+ position: absolute;
230
+ z-index: 1000;
231
+
232
+ &:not(.open) {
233
+ width: 100%;
234
+ height: 40px;
235
+ border-right: none;
236
+ border-bottom: 1px solid var(--gray-600);
237
+
238
+ .top {
239
+ width: 100%;
240
+ padding: 12px;
241
+ justify-content: flex-end;
242
+ border-bottom: none;
243
+
244
+ .opener {
245
+ transform: rotate(90deg);
246
+ position: absolute;
247
+ right: 12px;
248
+ top: 50%;
249
+ transform: translateY(-50%) rotate(90deg);
250
+ }
251
+ }
252
+
253
+ .indicators {
254
+ display: none;
255
+ }
256
+ }
257
+
258
+ &.open {
259
+ width: 100%;
260
+ max-width: 100%;
261
+ }
262
+
263
+ .side-panel-container {
264
+ width: 100%;
265
+ }
266
+
267
+ .react-select {
268
+ width: 140px;
269
+ }
270
+
271
+ .indicators {
272
+ padding: 24px 15px;
273
+
274
+ .streaming-indicator {
275
+ width: 110px;
276
+ }
277
+ }
278
+ }
279
+ }
280
+
281
+ .side-panel-responses,
282
+ .side-panel-requests {
283
+ flex-grow: 1;
284
+ flex-shrink: 1;
285
+ overflow-x: hidden;
286
+ overflow-y: auto;
287
+ width: 100%;
288
+ display: block;
289
+ margin-left: 8px;
290
+ }
291
+
292
+ .top {
293
+ width: 100%;
294
+ flex-grow: 0;
295
+ flex-shrink: 0;
296
+ height: 30px;
297
+ display: flex;
298
+ align-self: flex-end;
299
+ align-items: center;
300
+ transition: all 0.2s ease-in;
301
+ }
302
+ .top button {
303
+ background: transparent;
304
+ border: 0;
305
+ cursor: pointer;
306
+ font-size: 1.25rem;
307
+ line-height: 1.75rem;
308
+ padding: 4px;
309
+ }
src/contexts/LiveAPIContext.tsx ADDED
@@ -0,0 +1,46 @@
 
 
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import { createContext, FC, type ReactNode, useContext } from "react";
18
+ import { useLiveAPI, type UseLiveAPIResults } from "../hooks/use-live-api";
19
+
20
+ const LiveAPIContext = createContext<UseLiveAPIResults | undefined>(undefined);
21
+
22
+ export type LiveAPIProviderProps = {
23
+ children: ReactNode;
24
+ url?: string;
25
+ };
26
+
27
+ export const LiveAPIProvider: FC<LiveAPIProviderProps> = ({
28
+ url = `${window.location.protocol === 'https:' ? 'wss:' : 'ws:'}//${window.location.host}/ws`,
29
+ children,
30
+ }) => {
31
+ const liveAPI = useLiveAPI({ url });
32
+
33
+ return (
34
+ <LiveAPIContext.Provider value={liveAPI}>
35
+ {children}
36
+ </LiveAPIContext.Provider>
37
+ );
38
+ };
39
+
40
+ export const useLiveAPIContext = () => {
41
+ const context = useContext(LiveAPIContext);
42
+ if (!context) {
43
+ throw new Error("useLiveAPIContext must be used within a LiveAPIProvider");
44
+ }
45
+ return context;
46
+ };
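A minimal sketch of the provider in use: wrap the tree once (the default url falls back to the same-origin /ws proxy above) and read the shared connection anywhere below it.

```tsx
import { LiveAPIProvider, useLiveAPIContext } from "./contexts/LiveAPIContext";

// Any descendant can toggle the shared connection.
function ConnectionButton() {
  const { connected, connect, disconnect } = useLiveAPIContext();
  return (
    <button onClick={() => (connected ? disconnect() : connect())}>
      {connected ? "Disconnect" : "Connect"}
    </button>
  );
}

export default function Root() {
  return (
    <LiveAPIProvider>
      <ConnectionButton />
    </LiveAPIProvider>
  );
}
```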
src/hooks/use-live-api.ts ADDED
@@ -0,0 +1,116 @@
 
 
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import { useCallback, useEffect, useMemo, useRef, useState } from "react";
18
+ import {
19
+ MultimodalLiveAPIClientConnection,
20
+ MultimodalLiveClient,
21
+ } from "../lib/multimodal-live-client";
22
+ import { LiveConfig } from "../multimodal-live-types";
23
+ import { AudioStreamer } from "../lib/audio-streamer";
24
+ import { audioContext } from "../lib/utils";
25
+ import VolMeterWorket from "../lib/worklets/vol-meter";
26
+
27
+ export type UseLiveAPIResults = {
28
+ client: MultimodalLiveClient;
29
+ setConfig: (config: LiveConfig) => void;
30
+ config: LiveConfig;
31
+ connected: boolean;
32
+ connect: () => Promise<void>;
33
+ disconnect: () => Promise<void>;
34
+ volume: number;
35
+ };
36
+
37
+ export function useLiveAPI({
38
+ url,
39
+ apiKey,
40
+ }: MultimodalLiveAPIClientConnection): UseLiveAPIResults {
41
+ const client = useMemo(
42
+ () => new MultimodalLiveClient({ url, apiKey }),
43
+ [url, apiKey],
44
+ );
45
+ const audioStreamerRef = useRef<AudioStreamer | null>(null);
46
+
47
+ const [connected, setConnected] = useState(false);
48
+ const [config, setConfig] = useState<LiveConfig>({
49
+ model: "models/gemini-2.0-flash-exp",
50
+ });
51
+ const [volume, setVolume] = useState(0);
52
+
53
+ // register audio for streaming server -> speakers
54
+ useEffect(() => {
55
+ if (!audioStreamerRef.current) {
56
+ audioContext({ id: "audio-out" }).then((audioCtx: AudioContext) => {
57
+ audioStreamerRef.current = new AudioStreamer(audioCtx);
58
+ audioStreamerRef.current
59
+ .addWorklet<any>("vumeter-out", VolMeterWorket, (ev: any) => {
60
+ setVolume(ev.data.volume);
61
+ })
62
+ .then(() => {
63
+ // Successfully added worklet
64
+ });
65
+ });
66
+ }
67
+ }, [audioStreamerRef]);
68
+
69
+ useEffect(() => {
70
+ const onClose = () => {
71
+ setConnected(false);
72
+ };
73
+
74
+ const stopAudioStreamer = () => audioStreamerRef.current?.stop();
75
+
76
+ const onAudio = (data: ArrayBuffer) =>
77
+ audioStreamerRef.current?.addPCM16(new Uint8Array(data));
78
+
79
+ client
80
+ .on("close", onClose)
81
+ .on("interrupted", stopAudioStreamer)
82
+ .on("audio", onAudio);
83
+
84
+ return () => {
85
+ client
86
+ .off("close", onClose)
87
+ .off("interrupted", stopAudioStreamer)
88
+ .off("audio", onAudio);
89
+ };
90
+ }, [client]);
91
+
92
+ const connect = useCallback(async () => {
93
+ console.log(config);
94
+ if (!config) {
95
+ throw new Error("config has not been set");
96
+ }
97
+ client.disconnect();
98
+ await client.connect(config);
99
+ setConnected(true);
100
+ }, [client, setConnected, config]);
101
+
102
+ const disconnect = useCallback(async () => {
103
+ client.disconnect();
104
+ setConnected(false);
105
+ }, [setConnected, client]);
106
+
107
+ return {
108
+ client,
109
+ config,
110
+ setConfig,
111
+ connected,
112
+ connect,
113
+ disconnect,
114
+ volume,
115
+ };
116
+ }
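Consumers normally adjust the session config before connecting; a sketch of that flow through the context. Only the model field is shown, since the remaining LiveConfig fields live in src/multimodal-live-types.ts and are not visible in this section.

```tsx
import { useEffect } from "react";
import { useLiveAPIContext } from "./contexts/LiveAPIContext";

// Sketch: set the session config on mount, then let the user open the socket.
export function SessionStarter() {
  const { setConfig, connect, connected } = useLiveAPIContext();

  useEffect(() => {
    setConfig({ model: "models/gemini-2.0-flash-exp" });
  }, [setConfig]);

  return (
    <button disabled={connected} onClick={() => connect()}>
      Start session
    </button>
  );
}
```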
src/hooks/use-media-stream-mux.ts ADDED
@@ -0,0 +1,23 @@
 
 
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ export type UseMediaStreamResult = {
18
+ type: "webcam" | "screen";
19
+ start: () => Promise<MediaStream | null>;
20
+ stop: () => void;
21
+ isStreaming: boolean;
22
+ stream: MediaStream | null;
23
+ };
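Because both capture hooks return this same shape, callers can treat webcam and screen capture interchangeably; a small sketch mirroring the changeStreams handler in ControlTray:

```ts
import type { UseMediaStreamResult } from "./hooks/use-media-stream-mux";

// Sketch: start `next` (if any) and stop every other source.
export async function switchStreams(
  next: UseMediaStreamResult | undefined,
  sources: UseMediaStreamResult[],
): Promise<MediaStream | null> {
  const stream = next ? await next.start() : null;
  sources.filter((s) => s !== next).forEach((s) => s.stop());
  return stream;
}
```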
src/hooks/use-screen-capture.ts ADDED
@@ -0,0 +1,77 @@
 
 
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import { useState, useEffect } from "react";
18
+ import { UseMediaStreamResult } from "./use-media-stream-mux";
19
+
20
+ export function useScreenCapture(): UseMediaStreamResult {
21
+ const [stream, setStream] = useState<MediaStream | null>(null);
22
+ const [isStreaming, setIsStreaming] = useState(false);
23
+
24
+ useEffect(() => {
25
+ const handleStreamEnded = () => {
26
+ setIsStreaming(false);
27
+ setStream(null);
28
+ };
29
+ if (stream) {
30
+ stream
31
+ .getTracks()
32
+ .forEach((track) => track.addEventListener("ended", handleStreamEnded));
33
+ return () => {
34
+ stream
35
+ .getTracks()
36
+ .forEach((track) =>
37
+ track.removeEventListener("ended", handleStreamEnded),
38
+ );
39
+ };
40
+ }
41
+ }, [stream]);
42
+
43
+ const start = async () => {
44
+ try {
45
+ // const controller = new CaptureController();
46
+ // controller.setFocusBehavior("no-focus-change");
47
+ const mediaStream = await navigator.mediaDevices.getDisplayMedia({
48
+ video: true,
49
+ // controller
50
+ });
51
+ setStream(mediaStream);
52
+ setIsStreaming(true);
53
+ return mediaStream;
54
+ } catch (err) {
55
+ console.error('Failed to start screen capture:', err);
56
+ return null;
57
+ }
58
+ };
59
+
60
+ const stop = () => {
61
+ if (stream) {
62
+ stream.getTracks().forEach((track) => track.stop());
63
+ setStream(null);
64
+ setIsStreaming(false);
65
+ }
66
+ };
67
+
68
+ const result: UseMediaStreamResult = {
69
+ type: "screen",
70
+ start,
71
+ stop,
72
+ isStreaming,
73
+ stream,
74
+ };
75
+
76
+ return result;
77
+ }
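A sketch of the screen-capture hook in isolation; note that the "ended" listener above already resets state when the user stops sharing from the browser's own UI.

```tsx
import { useScreenCapture } from "./hooks/use-screen-capture";

// Sketch: simple share / stop toggle.
export function ScreenShareToggle() {
  const screen = useScreenCapture();
  return (
    <button
      onClick={() => (screen.isStreaming ? screen.stop() : screen.start())}
    >
      {screen.isStreaming ? "Stop sharing" : "Share screen"}
    </button>
  );
}
```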
src/hooks/use-webcam.ts ADDED
@@ -0,0 +1,123 @@
 
 
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import { useState, useEffect } from "react";
18
+ import { UseMediaStreamResult } from "./use-media-stream-mux";
19
+
20
+ export function useWebcam(): UseMediaStreamResult {
21
+ const [stream, setStream] = useState<MediaStream | null>(null);
22
+ const [isStreaming, setIsStreaming] = useState(false);
23
+ const [availableCameras, setAvailableCameras] = useState<MediaDeviceInfo[]>([]);
24
+ const [currentCameraIndex, setCurrentCameraIndex] = useState(-1);
25
+
26
+ // Get list of available cameras on mount
27
+ useEffect(() => {
28
+ async function getCameras() {
29
+ try {
30
+ // First request permission to ensure we can enumerate video devices
31
+ await navigator.mediaDevices.getUserMedia({ video: true })
32
+ .then(stream => {
33
+ // Stop the stream immediately, we just needed permission
34
+ stream.getTracks().forEach(track => track.stop());
35
+ });
36
+
37
+ const devices = await navigator.mediaDevices.enumerateDevices();
38
+ const videoDevices = devices.filter(device => device.kind === 'videoinput');
39
+ setAvailableCameras(videoDevices);
40
+ console.log('Available cameras:', videoDevices);
41
+ } catch (err) {
42
+ console.error('Error getting cameras:', err);
43
+ }
44
+ }
45
+ getCameras();
46
+ }, []);
47
+
48
+ useEffect(() => {
49
+ const handleStreamEnded = () => {
50
+ setIsStreaming(false);
51
+ setStream(null);
52
+ };
53
+ if (stream) {
54
+ stream
55
+ .getTracks()
56
+ .forEach((track) => track.addEventListener("ended", handleStreamEnded));
57
+ return () => {
58
+ stream
59
+ .getTracks()
60
+ .forEach((track) =>
61
+ track.removeEventListener("ended", handleStreamEnded),
62
+ );
63
+ };
64
+ }
65
+ }, [stream]);
66
+
67
+ const start = async () => {
68
+ // If we're already streaming, cycle to next camera
69
+ if (isStreaming) {
70
+ const nextIndex = (currentCameraIndex + 1) % (availableCameras.length);
71
+ setCurrentCameraIndex(nextIndex);
72
+
73
+ // Stop current stream
74
+ if (stream) {
75
+ stream.getTracks().forEach((track) => track.stop());
76
+ }
77
+
78
+ // If we've cycled through all cameras, stop streaming
79
+ if (nextIndex === 0) {
80
+ setStream(null);
81
+ setIsStreaming(false);
82
+ return null;
83
+ }
84
+
85
+ const deviceId = availableCameras[nextIndex].deviceId;
86
+ const mediaStream = await navigator.mediaDevices.getUserMedia({
87
+ video: { deviceId: { exact: deviceId } }
88
+ });
89
+ setStream(mediaStream);
90
+ setIsStreaming(true);
91
+ return mediaStream;
92
+ } else {
93
+ // Start with first camera
94
+ setCurrentCameraIndex(0);
95
+ const deviceId = availableCameras[0]?.deviceId;
96
+ const mediaStream = await navigator.mediaDevices.getUserMedia({
97
+ video: deviceId ? { deviceId: { exact: deviceId } } : true
98
+ });
99
+ setStream(mediaStream);
100
+ setIsStreaming(true);
101
+ return mediaStream;
102
+ }
103
+ };
104
+
105
+ const stop = () => {
106
+ if (stream) {
107
+ stream.getTracks().forEach((track) => track.stop());
108
+ setStream(null);
109
+ setIsStreaming(false);
110
+ setCurrentCameraIndex(-1);
111
+ }
112
+ };
113
+
114
+ const result: UseMediaStreamResult = {
115
+ type: "webcam",
116
+ start,
117
+ stop,
118
+ isStreaming,
119
+ stream,
120
+ };
121
+
122
+ return result;
123
+ }
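A sketch of the webcam hook on its own: calling start() while already streaming advances to the next enumerated camera, and cycling past the last one stops the stream, as implemented above.

```tsx
import { useEffect, useRef } from "react";
import { useWebcam } from "./hooks/use-webcam";

// Sketch: preview the active camera and expose the cycle/stop behaviour.
export function WebcamPreview() {
  const webcam = useWebcam();
  const videoRef = useRef<HTMLVideoElement>(null);

  useEffect(() => {
    if (videoRef.current) {
      videoRef.current.srcObject = webcam.stream;
    }
  }, [webcam.stream]);

  return (
    <div>
      <video ref={videoRef} autoPlay playsInline muted />
      <button onClick={() => webcam.start()}>
        {webcam.isStreaming ? "Next camera" : "Start webcam"}
      </button>
      <button onClick={() => webcam.stop()} disabled={!webcam.isStreaming}>
        Stop
      </button>
    </div>
  );
}
```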
src/index.css ADDED
@@ -0,0 +1,13 @@
 
 
1
+ body {
2
+ margin: 0;
3
+ font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen',
4
+ 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue',
5
+ sans-serif;
6
+ -webkit-font-smoothing: antialiased;
7
+ -moz-osx-font-smoothing: grayscale;
8
+ }
9
+
10
+ code {
11
+ font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New',
12
+ monospace;
13
+ }
src/index.tsx ADDED
@@ -0,0 +1,35 @@
 
 
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import React from 'react';
18
+ import ReactDOM from 'react-dom/client';
19
+ import './index.css';
20
+ import App from './App';
21
+ import reportWebVitals from './reportWebVitals';
22
+
23
+ const root = ReactDOM.createRoot(
24
+ document.getElementById('root') as HTMLElement
25
+ );
26
+ root.render(
27
+ <React.StrictMode>
28
+ <App />
29
+ </React.StrictMode>
30
+ );
31
+
32
+ // If you want to start measuring performance in your app, pass a function
33
+ // to log results (for example: reportWebVitals(console.log))
34
+ // or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals
35
+ reportWebVitals();
src/lib/audio-recorder.ts ADDED
@@ -0,0 +1,417 @@
 
 
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import { audioContext } from "./utils";
18
+ import AudioRecordingWorklet from "./worklets/audio-processing";
19
+ import SafariAudioRecordingWorklet from "./worklets/safari-audio-processing";
20
+ import VolMeterWorket from "./worklets/vol-meter";
21
+
22
+ import { createWorketFromSrc } from "./audioworklet-registry";
23
+ import EventEmitter from "eventemitter3";
24
+
25
+ function arrayBufferToBase64(buffer: ArrayBuffer) {
26
+ var binary = "";
27
+ var bytes = new Uint8Array(buffer);
28
+ var len = bytes.byteLength;
29
+ for (var i = 0; i < len; i++) {
30
+ binary += String.fromCharCode(bytes[i]);
31
+ }
32
+ return window.btoa(binary);
33
+ }
34
+
35
+ // Add Safari-specific audio context creation
36
+ async function createSafariAudioContext(sampleRate: number): Promise<AudioContext> {
37
+ console.log('Creating Safari audio context with options:', { sampleRate });
38
+
39
+ // Safari requires webkit prefix
40
+ const AudioContextClass = (window as any).webkitAudioContext || window.AudioContext;
41
+ console.log('Using AudioContext class:', AudioContextClass.name);
42
+
43
+ const ctx = new AudioContextClass({
44
+ sampleRate,
45
+ latencyHint: 'interactive'
46
+ });
47
+
48
+ console.log('Safari AudioContext initial state:', {
49
+ state: ctx.state,
50
+ sampleRate: ctx.sampleRate,
51
+ baseLatency: ctx.baseLatency,
52
+ destination: ctx.destination,
53
+ });
54
+
55
+ // Safari requires user interaction to start audio context
56
+ if (ctx.state === 'suspended') {
57
+ console.log('Attempting to resume suspended Safari audio context...');
58
+ try {
59
+ await ctx.resume();
60
+ console.log('Successfully resumed Safari audio context:', ctx.state);
61
+ } catch (err) {
62
+ console.error('Failed to resume Safari audio context:', err);
63
+ throw err;
64
+ }
65
+ }
66
+
67
+ return ctx;
68
+ }
69
+
70
+ export class AudioRecorder extends EventEmitter {
71
+ stream: MediaStream | undefined;
72
+ audioContext: AudioContext | undefined;
73
+ source: MediaStreamAudioSourceNode | undefined;
74
+ recording: boolean = false;
75
+ recordingWorklet: AudioWorkletNode | undefined;
76
+ vuWorklet: AudioWorkletNode | undefined;
77
+
78
+ private starting: Promise<void> | null = null;
79
+
80
+ // Add browser detection
81
+ isSafari: boolean;
82
+ isIOS: boolean;
83
+
84
+ constructor(public sampleRate = 16000) {
85
+ super();
86
+ this.isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
87
+ this.isIOS = /iPad|iPhone|iPod/.test(navigator.userAgent) && !(window as any).MSStream;
88
+ console.log('AudioRecorder initialized:', {
89
+ isSafari: this.isSafari,
90
+ isIOS: this.isIOS,
91
+ sampleRate: this.sampleRate,
92
+ userAgent: navigator.userAgent,
93
+ webAudioSupport: !!(window.AudioContext || (window as any).webkitAudioContext),
94
+ mediaDevicesSupport: !!navigator.mediaDevices
95
+ });
96
+ }
97
+
98
+ async start() {
99
+ if (!navigator.mediaDevices?.getUserMedia) {
100
+ console.error('MediaDevices API not available:', {
101
+ mediaDevices: !!navigator.mediaDevices,
102
+ getUserMedia: !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia)
103
+ });
104
+ throw new Error("Could not request user media");
105
+ }
106
+
107
+ console.log('Starting AudioRecorder with full environment info:', {
108
+ userAgent: navigator.userAgent,
109
+ platform: navigator.platform,
110
+ vendor: navigator.vendor,
111
+ audioWorkletSupport: !!(window.AudioWorklet),
112
+ sampleRate: this.sampleRate,
113
+ existingAudioContext: !!this.audioContext,
114
+ existingStream: !!this.stream,
115
+ isSafari: this.isSafari
116
+ });
117
+
118
+ this.starting = new Promise(async (resolve, reject) => {
119
+ try {
120
+ if (this.isSafari) {
121
+ // Safari implementation
122
+ console.log('Safari detected - using Safari-specific audio initialization');
123
+
124
+ // 1. First get audio permissions
125
+ console.log('Requesting audio permissions first for Safari...');
126
+ const constraints = {
127
+ audio: {
128
+ echoCancellation: false,
129
+ noiseSuppression: false,
130
+ autoGainControl: false,
131
+ sampleRate: this.sampleRate,
132
+ channelCount: 1
133
+ }
134
+ };
135
+ console.log('Safari audio constraints:', constraints);
136
+
137
+ try {
138
+ this.stream = await navigator.mediaDevices.getUserMedia(constraints);
139
+ const track = this.stream.getAudioTracks()[0];
140
+ console.log('Safari audio permissions granted:', {
141
+ track: track.label,
142
+ settings: track.getSettings(),
143
+ constraints: track.getConstraints(),
144
+ enabled: track.enabled,
145
+ muted: track.muted,
146
+ readyState: track.readyState
147
+ });
148
+ } catch (err) {
149
+ console.error('Failed to get Safari audio permissions:', err);
150
+ throw err;
151
+ }
152
+
153
+ // 2. Create and initialize audio context
154
+ try {
155
+ this.audioContext = await createSafariAudioContext(this.sampleRate);
156
+ console.log('Safari audio context ready:', {
157
+ state: this.audioContext.state,
158
+ currentTime: this.audioContext.currentTime
159
+ });
160
+ } catch (err) {
161
+ console.error('Failed to initialize Safari audio context:', err);
162
+ throw err;
163
+ }
164
+
165
+ // 3. Create and connect audio source
166
+ try {
167
+ console.log('Creating Safari audio source...');
168
+ this.source = this.audioContext.createMediaStreamSource(this.stream);
169
+ console.log('Safari audio source created successfully:', {
170
+ numberOfInputs: this.source.numberOfInputs,
171
+ numberOfOutputs: this.source.numberOfOutputs,
172
+ channelCount: this.source.channelCount
173
+ });
174
+ } catch (err) {
175
+ console.error('Failed to create Safari audio source:', err);
176
+ throw err;
177
+ }
178
+
179
+ // 4. Load and create worklet
180
+ try {
181
+ const workletName = "audio-recorder-worklet";
182
+ console.log('Loading Safari audio worklet...');
183
+ const src = createWorketFromSrc(workletName, SafariAudioRecordingWorklet);
184
+ await this.audioContext.audioWorklet.addModule(src);
185
+ console.log('Safari audio worklet module loaded');
186
+
187
+ this.recordingWorklet = new AudioWorkletNode(
188
+ this.audioContext,
189
+ workletName,
190
+ {
191
+ numberOfInputs: 1,
192
+ numberOfOutputs: 1,
193
+ channelCount: 1,
194
+ processorOptions: {
195
+ sampleRate: this.sampleRate
196
+ }
197
+ }
198
+ );
199
+
200
+ // Add detailed error handlers
201
+ this.recordingWorklet.onprocessorerror = (event) => {
202
+ console.error('Safari AudioWorklet processor error:', event);
203
+ };
204
+
205
+ this.recordingWorklet.port.onmessageerror = (event) => {
206
+ console.error('Safari AudioWorklet message error:', event);
207
+ };
208
+
209
+ // Add data handler with detailed logging
210
+ this.recordingWorklet.port.onmessage = (ev: MessageEvent) => {
211
+ const data = ev.data.data;
212
+ console.log('Safari AudioWorklet message received:', {
213
+ eventType: ev.data.event,
214
+ hasData: !!data,
215
+ dataType: data ? typeof data : null,
216
+ timestamp: Date.now()
217
+ });
218
+
219
+ if (data?.int16arrayBuffer) {
220
+ console.log('Processing Safari audio chunk:', {
221
+ byteLength: data.int16arrayBuffer.byteLength,
222
+ timestamp: Date.now()
223
+ });
224
+ const arrayBufferString = arrayBufferToBase64(data.int16arrayBuffer);
225
+ this.emit("data", arrayBufferString);
226
+ } else {
227
+ console.warn('Invalid Safari audio chunk received:', ev.data);
228
+ }
229
+ };
230
+
231
+ console.log('Safari AudioWorkletNode created successfully');
232
+ } catch (err) {
233
+ console.error('Failed to setup Safari audio worklet:', err);
234
+ throw err;
235
+ }
236
+
237
+ // 5. Connect nodes
238
+ try {
239
+ console.log('Connecting Safari audio nodes...');
240
+ this.source.connect(this.recordingWorklet);
241
+ console.log('Safari audio nodes connected successfully');
242
+ } catch (err) {
243
+ console.error('Failed to connect Safari audio nodes:', err);
244
+ throw err;
245
+ }
246
+
247
+ } else {
248
+ // Chrome/other browsers implementation
249
+ console.log('Non-Safari browser detected - using standard audio initialization');
250
+
251
+ // Get media stream first for Chrome
252
+ const constraints = {
253
+ audio: {
254
+ echoCancellation: true,
255
+ noiseSuppression: true,
256
+ autoGainControl: true,
257
+ sampleRate: this.sampleRate
258
+ }
259
+ };
260
+ console.log('Chrome audio constraints:', constraints);
261
+
262
+ try {
263
+ this.stream = await navigator.mediaDevices.getUserMedia(constraints);
264
+ const track = this.stream.getAudioTracks()[0];
265
+ console.log('Chrome audio permissions granted:', {
266
+ track: track.label,
267
+ settings: track.getSettings()
268
+ });
269
+ } catch (err) {
270
+ console.error('Failed to get Chrome audio permissions:', err);
271
+ throw err;
272
+ }
273
+
274
+ // Create audio context after getting stream for Chrome
275
+ try {
276
+ console.log('Creating Chrome audio context...');
277
+ this.audioContext = await audioContext({ sampleRate: this.sampleRate });
278
+ console.log('Chrome audio context created:', {
279
+ state: this.audioContext.state,
280
+ sampleRate: this.audioContext.sampleRate
281
+ });
282
+ } catch (err) {
283
+ console.error('Failed to create Chrome audio context:', err);
284
+ throw err;
285
+ }
286
+
287
+ // Create media stream source
288
+ try {
289
+ console.log('Creating Chrome audio source...');
290
+ this.source = this.audioContext.createMediaStreamSource(this.stream);
291
+ console.log('Chrome audio source created');
292
+ } catch (err) {
293
+ console.error('Failed to create Chrome audio source:', err);
294
+ throw err;
295
+ }
296
+
297
+ // Load and create standard worklet
298
+ try {
299
+ const workletName = "audio-recorder-worklet";
300
+ console.log('Loading Chrome audio worklet...');
301
+ const src = createWorketFromSrc(workletName, AudioRecordingWorklet);
302
+ await this.audioContext.audioWorklet.addModule(src);
303
+ console.log('Chrome audio worklet loaded');
304
+
305
+ this.recordingWorklet = new AudioWorkletNode(
306
+ this.audioContext,
307
+ workletName,
308
+ {
309
+ numberOfInputs: 1,
310
+ numberOfOutputs: 1,
311
+ channelCount: 1,
312
+ processorOptions: {
313
+ sampleRate: this.sampleRate
314
+ }
315
+ }
316
+ );
317
+
318
+ // Add error handlers
319
+ this.recordingWorklet.onprocessorerror = (event) => {
320
+ console.error('Chrome AudioWorklet processor error:', event);
321
+ };
322
+
323
+ this.recordingWorklet.port.onmessageerror = (event) => {
324
+ console.error('Chrome AudioWorklet message error:', event);
325
+ };
326
+
327
+ // Add data handler
328
+ this.recordingWorklet.port.onmessage = async (ev: MessageEvent) => {
329
+ const arrayBuffer = ev.data.data?.int16arrayBuffer;
330
+ if (arrayBuffer) {
331
+ const arrayBufferString = arrayBufferToBase64(arrayBuffer);
332
+ this.emit("data", arrayBufferString);
333
+ } else {
334
+ console.warn('Invalid Chrome audio chunk received:', ev.data);
335
+ }
336
+ };
337
+
338
+ console.log('Chrome AudioWorkletNode created');
339
+ } catch (err) {
340
+ console.error('Failed to setup Chrome audio worklet:', err);
341
+ throw err;
342
+ }
343
+
344
+ // Connect nodes
345
+ try {
346
+ console.log('Connecting Chrome audio nodes...');
347
+ this.source.connect(this.recordingWorklet);
348
+ console.log('Chrome audio nodes connected');
349
+
350
+ // Set up VU meter
351
+ const vuWorkletName = "vu-meter";
352
+ await this.audioContext.audioWorklet.addModule(
353
+ createWorketFromSrc(vuWorkletName, VolMeterWorket),
354
+ );
355
+ this.vuWorklet = new AudioWorkletNode(this.audioContext, vuWorkletName);
356
+ this.vuWorklet.port.onmessage = (ev: MessageEvent) => {
357
+ this.emit("volume", ev.data.volume);
358
+ };
359
+ this.source.connect(this.vuWorklet);
360
+ console.log('Chrome VU meter connected');
361
+ } catch (err) {
362
+ console.error('Failed to connect Chrome audio nodes:', err);
363
+ throw err;
364
+ }
365
+ }
366
+
367
+ this.recording = true;
368
+ console.log('Recording started successfully');
369
+ resolve();
370
+ this.starting = null;
371
+ } catch (error) {
372
+ console.error('Failed to start recording:', error);
373
+ this.stop();
374
+ reject(error);
375
+ this.starting = null;
376
+ }
377
+ });
378
+ return this.starting;
379
+ }
380
+
381
+ stop() {
382
+ console.log('Stopping audio recorder...');
383
+ // it's plausible that stop would be called before start completes,
384
+ // such as if the websocket immediately hangs up
385
+ const handleStop = () => {
386
+ try {
387
+ if (this.source) {
388
+ console.log('Disconnecting audio source...');
389
+ this.source.disconnect();
390
+ }
391
+ if (this.stream) {
392
+ console.log('Stopping media stream tracks...');
393
+ this.stream.getTracks().forEach(track => {
394
+ track.stop();
395
+ console.log('Stopped track:', track.label);
396
+ });
397
+ }
398
+ if (this.audioContext && this.isSafari) {
399
+ console.log('Closing Safari audio context...');
400
+ this.audioContext.close();
401
+ }
402
+ this.recording = false;
+ this.stream = undefined;
403
+ this.recordingWorklet = undefined;
404
+ this.vuWorklet = undefined;
405
+ console.log('Audio recorder stopped successfully');
406
+ } catch (err) {
407
+ console.error('Error while stopping audio recorder:', err);
408
+ }
409
+ };
410
+ if (this.starting) {
411
+ console.log('Stop called while starting - waiting for start to complete...');
412
+ this.starting.then(handleStop);
413
+ return;
414
+ }
415
+ handleStop();
416
+ }
417
+ }
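A minimal usage sketch for the AudioRecorder above (illustrative only, not part of the committed files; import paths assume a module under src/, and the mime type in the comment is an assumption rather than a value taken from this file):

    import { AudioRecorder } from "./lib/audio-recorder";

    // 16 kHz matches the constructor default above.
    const recorder = new AudioRecorder(16000);

    // "data" carries base64-encoded 16-bit PCM chunks; "volume" is only emitted on
    // the non-Safari branch, which is the one that wires up the VU-meter worklet.
    recorder.on("data", (base64: string) => {
      // forward the chunk to the realtime client here, e.g.
      // client.sendRealtimeInput([{ mimeType: "audio/pcm;rate=16000", data: base64 }]);
    });
    recorder.on("volume", (volume: number) => console.log("mic level", volume));

    await recorder.start(); // requests microphone access and begins emitting chunks
    // ...later
    recorder.stop();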
src/lib/audio-streamer.ts ADDED
@@ -0,0 +1,270 @@
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import {
18
+ createWorketFromSrc,
19
+ registeredWorklets,
20
+ } from "./audioworklet-registry";
21
+
22
+ export class AudioStreamer {
23
+ public audioQueue: Float32Array[] = [];
24
+ private isPlaying: boolean = false;
25
+ private sampleRate: number = 24000;
26
+ private bufferSize: number = 7680;
27
+ private processingBuffer: Float32Array = new Float32Array(0);
28
+ private scheduledTime: number = 0;
29
+ public gainNode: GainNode;
30
+ public source: AudioBufferSourceNode;
31
+ private isStreamComplete: boolean = false;
32
+ private checkInterval: number | null = null;
33
+ private initialBufferTime: number = 0.1; // 100ms initial buffer
34
+ private endOfQueueAudioSource: AudioBufferSourceNode | null = null;
35
+
36
+ public onComplete = () => {};
37
+
38
+ constructor(public context: AudioContext) {
39
+ this.gainNode = this.context.createGain();
40
+ this.source = this.context.createBufferSource();
41
+ this.gainNode.connect(this.context.destination);
42
+ this.addPCM16 = this.addPCM16.bind(this);
43
+ }
44
+
45
+ async addWorklet<T extends (d: any) => void>(
46
+ workletName: string,
47
+ workletSrc: string,
48
+ handler: T,
49
+ ): Promise<this> {
50
+ let workletsRecord = registeredWorklets.get(this.context);
51
+ if (workletsRecord && workletsRecord[workletName]) {
52
+ // the worklet already exists on this context
53
+ // add the new handler to it
54
+ workletsRecord[workletName].handlers.push(handler);
55
+ return Promise.resolve(this);
56
+ //throw new Error(`Worklet ${workletName} already exists on context`);
57
+ }
58
+
59
+ if (!workletsRecord) {
60
+ registeredWorklets.set(this.context, {});
61
+ workletsRecord = registeredWorklets.get(this.context)!;
62
+ }
63
+
64
+ // create new record to fill in as becomes available
65
+ workletsRecord[workletName] = { handlers: [handler] };
66
+
67
+ const src = createWorketFromSrc(workletName, workletSrc);
68
+ await this.context.audioWorklet.addModule(src);
69
+ const worklet = new AudioWorkletNode(this.context, workletName);
70
+
71
+ //add the node into the map
72
+ workletsRecord[workletName].node = worklet;
73
+
74
+ return this;
75
+ }
76
+
77
+ addPCM16(chunk: Uint8Array) {
78
+ const float32Array = new Float32Array(chunk.length / 2);
79
+ const dataView = new DataView(chunk.buffer);
80
+
81
+ for (let i = 0; i < chunk.length / 2; i++) {
82
+ try {
83
+ const int16 = dataView.getInt16(i * 2, true);
84
+ float32Array[i] = int16 / 32768;
85
+ } catch (e) {
86
+ console.error(e);
87
+ // console.log(
88
+ // `dataView.length: ${dataView.byteLength}, i * 2: ${i * 2}`,
89
+ // );
90
+ }
91
+ }
92
+
93
+ const newBuffer = new Float32Array(
94
+ this.processingBuffer.length + float32Array.length,
95
+ );
96
+ newBuffer.set(this.processingBuffer);
97
+ newBuffer.set(float32Array, this.processingBuffer.length);
98
+ this.processingBuffer = newBuffer;
99
+
100
+ while (this.processingBuffer.length >= this.bufferSize) {
101
+ const buffer = this.processingBuffer.slice(0, this.bufferSize);
102
+ this.audioQueue.push(buffer);
103
+ this.processingBuffer = this.processingBuffer.slice(this.bufferSize);
104
+ }
105
+
106
+ if (!this.isPlaying) {
107
+ this.isPlaying = true;
108
+ // Initialize scheduledTime only when we start playing
109
+ this.scheduledTime = this.context.currentTime + this.initialBufferTime;
110
+ this.scheduleNextBuffer();
111
+ }
112
+ }
113
+
114
+ private createAudioBuffer(audioData: Float32Array): AudioBuffer {
115
+ const audioBuffer = this.context.createBuffer(
116
+ 1,
117
+ audioData.length,
118
+ this.sampleRate,
119
+ );
120
+ audioBuffer.getChannelData(0).set(audioData);
121
+ return audioBuffer;
122
+ }
123
+
124
+ private scheduleNextBuffer() {
125
+ const SCHEDULE_AHEAD_TIME = 0.2;
126
+
127
+ while (
128
+ this.audioQueue.length > 0 &&
129
+ this.scheduledTime < this.context.currentTime + SCHEDULE_AHEAD_TIME
130
+ ) {
131
+ const audioData = this.audioQueue.shift()!;
132
+ const audioBuffer = this.createAudioBuffer(audioData);
133
+ const source = this.context.createBufferSource();
134
+
135
+ if (this.audioQueue.length === 0) {
136
+ if (this.endOfQueueAudioSource) {
137
+ this.endOfQueueAudioSource.onended = null;
138
+ }
139
+ this.endOfQueueAudioSource = source;
140
+ source.onended = () => {
141
+ if (
142
+ !this.audioQueue.length &&
143
+ this.endOfQueueAudioSource === source
144
+ ) {
145
+ this.endOfQueueAudioSource = null;
146
+ this.onComplete();
147
+ }
148
+ };
149
+ }
150
+
151
+ source.buffer = audioBuffer;
152
+ source.connect(this.gainNode);
153
+
154
+ const worklets = registeredWorklets.get(this.context);
155
+
156
+ if (worklets) {
157
+ Object.entries(worklets).forEach(([workletName, graph]) => {
158
+ const { node, handlers } = graph;
159
+ if (node) {
160
+ source.connect(node);
161
+ node.port.onmessage = function (ev: MessageEvent) {
162
+ handlers.forEach((handler) => {
163
+ handler.call(node.port, ev);
164
+ });
165
+ };
166
+ node.connect(this.context.destination);
167
+ }
168
+ });
169
+ }
170
+
171
+ // i added this trying to fix clicks
172
+ // this.gainNode.gain.setValueAtTime(0, 0);
173
+ // this.gainNode.gain.linearRampToValueAtTime(1, 1);
174
+
175
+ // Ensure we never schedule in the past
176
+ const startTime = Math.max(this.scheduledTime, this.context.currentTime);
177
+ source.start(startTime);
178
+
179
+ this.scheduledTime = startTime + audioBuffer.duration;
180
+ }
181
+
182
+ if (this.audioQueue.length === 0 && this.processingBuffer.length === 0) {
183
+ if (this.isStreamComplete) {
184
+ this.isPlaying = false;
185
+ if (this.checkInterval) {
186
+ clearInterval(this.checkInterval);
187
+ this.checkInterval = null;
188
+ }
189
+ } else {
190
+ if (!this.checkInterval) {
191
+ this.checkInterval = window.setInterval(() => {
192
+ if (
193
+ this.audioQueue.length > 0 ||
194
+ this.processingBuffer.length >= this.bufferSize
195
+ ) {
196
+ this.scheduleNextBuffer();
197
+ }
198
+ }, 100) as unknown as number;
199
+ }
200
+ }
201
+ } else {
202
+ const nextCheckTime =
203
+ (this.scheduledTime - this.context.currentTime) * 1000;
204
+ setTimeout(
205
+ () => this.scheduleNextBuffer(),
206
+ Math.max(0, nextCheckTime - 50),
207
+ );
208
+ }
209
+ }
210
+
211
+ stop() {
212
+ this.isPlaying = false;
213
+ this.isStreamComplete = true;
214
+ this.audioQueue = [];
215
+ this.processingBuffer = new Float32Array(0);
216
+ this.scheduledTime = this.context.currentTime;
217
+
218
+ if (this.checkInterval) {
219
+ clearInterval(this.checkInterval);
220
+ this.checkInterval = null;
221
+ }
222
+
223
+ this.gainNode.gain.linearRampToValueAtTime(
224
+ 0,
225
+ this.context.currentTime + 0.1,
226
+ );
227
+
228
+ setTimeout(() => {
229
+ this.gainNode.disconnect();
230
+ this.gainNode = this.context.createGain();
231
+ this.gainNode.connect(this.context.destination);
232
+ }, 200);
233
+ }
234
+
235
+ async resume() {
236
+ if (this.context.state === "suspended") {
237
+ await this.context.resume();
238
+ }
239
+ this.isStreamComplete = false;
240
+ this.scheduledTime = this.context.currentTime + this.initialBufferTime;
241
+ this.gainNode.gain.setValueAtTime(1, this.context.currentTime);
242
+ }
243
+
244
+ complete() {
245
+ this.isStreamComplete = true;
246
+ if (this.processingBuffer.length > 0) {
247
+ this.audioQueue.push(this.processingBuffer);
248
+ this.processingBuffer = new Float32Array(0);
249
+ if (this.isPlaying) {
250
+ this.scheduleNextBuffer();
251
+ }
252
+ } else {
253
+ this.onComplete();
254
+ }
255
+ }
256
+ }
257
+
258
+ // // Usage example:
259
+ // const audioStreamer = new AudioStreamer(new AudioContext({ sampleRate: 24000 }));
260
+ //
261
+ // // In your streaming code:
262
+ // function handleChunk(chunk: Uint8Array) {
263
+ // audioStreamer.addPCM16(chunk);
264
+ // }
265
+ //
266
+ // // To start playing (call this in response to a user interaction)
267
+ // await audioStreamer.resume();
268
+ //
269
+ // // To stop playing
270
+ // // audioStreamer.stop();
src/lib/audioworklet-registry.ts ADDED
@@ -0,0 +1,43 @@
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ /**
18
+ * A registry to map attached worklets by their audio-context
19
+ * any module using `audioContext.audioWorklet.addModule(` should register the worklet here
20
+ */
21
+ export type WorkletGraph = {
22
+ node?: AudioWorkletNode;
23
+ handlers: Array<(this: MessagePort, ev: MessageEvent) => any>;
24
+ };
25
+
26
+ export const registeredWorklets: Map<
27
+ AudioContext,
28
+ Record<string, WorkletGraph>
29
+ > = new Map();
30
+
31
+ export const createWorketFromSrc = (
32
+ workletName: string,
33
+ workletSrc: string,
34
+ ) => {
35
+ const script = new Blob(
36
+ [`registerProcessor("${workletName}", ${workletSrc})`],
37
+ {
38
+ type: "application/javascript",
39
+ },
40
+ );
41
+
42
+ return URL.createObjectURL(script);
43
+ };
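A short sketch of how the registry above is intended to be used (illustrative; the worklet name is arbitrary and the handler is a stand-in). createWorketFromSrc wraps the stringified processor class in a registerProcessor call and returns a Blob URL that audioWorklet.addModule can load:

    import { createWorketFromSrc, registeredWorklets } from "./lib/audioworklet-registry";
    import VolMeterWorket from "./lib/worklets/vol-meter";

    const ctx = new AudioContext();
    const workletName = "vu-meter-example"; // arbitrary example name

    // Record the handler before loading the module, mirroring AudioStreamer.addWorklet.
    registeredWorklets.set(ctx, {
      [workletName]: { handlers: [(ev: MessageEvent) => console.log(ev.data.volume)] },
    });

    await ctx.audioWorklet.addModule(createWorketFromSrc(workletName, VolMeterWorket));
    const node = new AudioWorkletNode(ctx, workletName);
    registeredWorklets.get(ctx)![workletName].node = node;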
src/lib/multimodal-live-client.ts ADDED
@@ -0,0 +1,313 @@
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import { Content, GenerativeContentBlob, Part } from "@google/generative-ai";
18
+ import { EventEmitter } from "eventemitter3";
19
+ import { difference } from "lodash";
20
+ import {
21
+ ClientContentMessage,
22
+ isInterrupted,
23
+ isModelTurn,
24
+ isServerContentMessage,
25
+ isSetupCompleteMessage,
26
+ isToolCallCancellationMessage,
27
+ isToolCallMessage,
28
+ isTurnComplete,
29
+ LiveIncomingMessage,
30
+ ModelTurn,
31
+ RealtimeInputMessage,
32
+ ServerContent,
33
+ SetupMessage,
34
+ StreamingLog,
35
+ ToolCall,
36
+ ToolCallCancellation,
37
+ ToolResponseMessage,
38
+ type LiveConfig,
39
+ } from "../multimodal-live-types";
40
+ import { blobToJSON, base64ToArrayBuffer } from "./utils";
41
+
42
+ /**
43
+ * the events that this client will emit
44
+ */
45
+ interface MultimodalLiveClientEventTypes {
46
+ open: () => void;
47
+ log: (log: StreamingLog) => void;
48
+ close: (event: CloseEvent) => void;
49
+ audio: (data: ArrayBuffer) => void;
50
+ content: (data: ServerContent) => void;
51
+ interrupted: () => void;
52
+ setupcomplete: () => void;
53
+ turncomplete: () => void;
54
+ toolcall: (toolCall: ToolCall) => void;
55
+ toolcallcancellation: (toolcallCancellation: ToolCallCancellation) => void;
56
+ }
57
+
58
+ export type MultimodalLiveAPIClientConnection = {
59
+ url?: string;
60
+ apiKey?: string;
61
+ };
62
+
63
+ /**
64
+ * An event-emitting class that manages the connection to the websocket and emits
65
+ * events to the rest of the application.
66
+ * If you don't want to use React, you can still use this.
67
+ */
68
+ export class MultimodalLiveClient extends EventEmitter<MultimodalLiveClientEventTypes> {
69
+ public ws: WebSocket | null = null;
70
+ protected config: LiveConfig | null = null;
71
+ public url: string;
72
+
73
+ constructor({ url, apiKey }: MultimodalLiveAPIClientConnection = {}) {
74
+ super();
75
+ this.url = url || `${window.location.protocol === 'https:' ? 'wss:' : 'ws:'}//${window.location.host}/ws`;
76
+ this.send = this.send.bind(this);
77
+ }
78
+
79
+ log(type: string, message: StreamingLog["message"]) {
80
+ const log: StreamingLog = {
81
+ date: new Date(),
82
+ type,
83
+ message,
84
+ };
85
+ this.emit("log", log);
86
+ }
87
+
88
+ connect(config: LiveConfig): Promise<boolean> {
89
+ this.config = config;
90
+
91
+ const ws = new WebSocket(this.url);
92
+
93
+ ws.addEventListener("message", async (evt: MessageEvent) => {
94
+ if (evt.data instanceof Blob) {
95
+ this.receive(evt.data);
96
+ } else {
97
+ console.log("non blob message", evt);
98
+ }
99
+ });
100
+ return new Promise((resolve, reject) => {
101
+ const onError = (ev: Event) => {
102
+ this.disconnect(ws);
103
+ const message = `Could not connect to "${this.url}"`;
104
+ this.log(`server.${ev.type}`, message);
105
+ reject(new Error(message));
106
+ };
107
+ ws.addEventListener("error", onError);
108
+ ws.addEventListener("open", (ev: Event) => {
109
+ if (!this.config) {
110
+ reject("Invalid config sent to `connect(config)`");
111
+ return;
112
+ }
113
+ this.log(`client.${ev.type}`, `connected to socket`);
114
+ this.emit("open");
115
+
116
+ this.ws = ws;
117
+
118
+ const setupMessage: SetupMessage = {
119
+ setup: this.config,
120
+ };
121
+ this._sendDirect(setupMessage);
122
+ this.log("client.send", "setup");
123
+
124
+ ws.removeEventListener("error", onError);
125
+ ws.addEventListener("close", (ev: CloseEvent) => {
126
+ console.log(ev);
127
+ this.disconnect(ws);
128
+ let reason = ev.reason || "";
129
+ if (reason.toLowerCase().includes("error")) {
130
+ const prelude = "ERROR]";
131
+ const preludeIndex = reason.indexOf(prelude);
132
+ if (preludeIndex > 0) {
133
+ reason = reason.slice(
134
+ preludeIndex + prelude.length + 1,
135
+ Infinity,
136
+ );
137
+ }
138
+ }
139
+ this.log(
140
+ `server.${ev.type}`,
141
+ `disconnected ${reason ? `with reason: ${reason}` : ``}`,
142
+ );
143
+ this.emit("close", ev);
144
+ });
145
+ resolve(true);
146
+ });
147
+ });
148
+ }
149
+
150
+ disconnect(ws?: WebSocket) {
151
+ // it could be that this is an old websocket and there's already a new instance;
152
+ // only close it if it's still the correct reference
153
+ if ((!ws || this.ws === ws) && this.ws) {
154
+ this.ws.close();
155
+ this.ws = null;
156
+ this.log("client.close", `Disconnected`);
157
+ return true;
158
+ }
159
+ return false;
160
+ }
161
+
162
+ protected async receive(blob: Blob) {
163
+ const response: LiveIncomingMessage = (await blobToJSON(
164
+ blob,
165
+ )) as LiveIncomingMessage;
166
+ if (isToolCallMessage(response)) {
167
+ this.log("server.toolCall", response);
168
+ this.emit("toolcall", response.toolCall);
169
+ return;
170
+ }
171
+ if (isToolCallCancellationMessage(response)) {
172
+ this.log("receive.toolCallCancellation", response);
173
+ this.emit("toolcallcancellation", response.toolCallCancellation);
174
+ return;
175
+ }
176
+
177
+ if (isSetupCompleteMessage(response)) {
178
+ this.log("server.send", "setupComplete");
179
+ this.emit("setupcomplete");
180
+ return;
181
+ }
182
+
183
+ // this json also might be `contentUpdate { interrupted: true }`
184
+ // or contentUpdate { end_of_turn: true }
185
+ if (isServerContentMessage(response)) {
186
+ const { serverContent } = response;
187
+ if (isInterrupted(serverContent)) {
188
+ this.log("receive.serverContent", "interrupted");
189
+ this.emit("interrupted");
190
+ return;
191
+ }
192
+ if (isTurnComplete(serverContent)) {
193
+ this.log("server.send", "turnComplete");
194
+ this.emit("turncomplete");
195
+ // plausibly there's more to the message, continue
196
+ }
197
+
198
+ if (isModelTurn(serverContent)) {
199
+ let parts: Part[] = serverContent.modelTurn.parts;
200
+
201
+ // when it's audio that is returned for modelTurn
202
+ const audioParts = parts.filter(
203
+ (p) => p.inlineData && p.inlineData.mimeType.startsWith("audio/pcm"),
204
+ );
205
+ const base64s = audioParts.map((p) => p.inlineData?.data);
206
+
207
+ // strip the audio parts out of the modelTurn
208
+ const otherParts = difference(parts, audioParts);
209
+ // console.log("otherParts", otherParts);
210
+
211
+ base64s.forEach((b64) => {
212
+ if (b64) {
213
+ const data = base64ToArrayBuffer(b64);
214
+ this.emit("audio", data);
215
+ this.log(`server.audio`, `buffer (${data.byteLength})`);
216
+ }
217
+ });
218
+ if (!otherParts.length) {
219
+ return;
220
+ }
221
+
222
+ parts = otherParts;
223
+
224
+ const content: ModelTurn = { modelTurn: { parts } };
225
+ this.emit("content", content);
226
+ this.log(`server.content`, response);
227
+ }
228
+ } else {
229
+ console.log("received unmatched message", response);
230
+ }
231
+ }
232
+
233
+ /**
234
+ * send realtimeInput; these are base64-encoded chunks of "audio/pcm" and/or "image/jpg"
235
+ */
236
+ sendRealtimeInput(chunks: GenerativeContentBlob[]) {
237
+ let hasAudio = false;
238
+ let hasVideo = false;
239
+ for (let i = 0; i < chunks.length; i++) {
240
+ const ch = chunks[i];
241
+ if (ch.mimeType.includes("audio")) {
242
+ hasAudio = true;
243
+ }
244
+ if (ch.mimeType.includes("image")) {
245
+ hasVideo = true;
246
+ }
247
+ if (hasAudio && hasVideo) {
248
+ break;
249
+ }
250
+ }
251
+ const message =
252
+ hasAudio && hasVideo
253
+ ? "audio + video"
254
+ : hasAudio
255
+ ? "audio"
256
+ : hasVideo
257
+ ? "video"
258
+ : "unknown";
259
+
260
+ const data: RealtimeInputMessage = {
261
+ realtimeInput: {
262
+ mediaChunks: chunks,
263
+ },
264
+ };
265
+ this._sendDirect(data);
266
+ this.log(`client.realtimeInput`, message);
267
+ }
268
+
269
+ /**
270
+ * send a response to a function call and provide the id of the functions you are responding to
271
+ */
272
+ sendToolResponse(toolResponse: ToolResponseMessage["toolResponse"]) {
273
+ const message: ToolResponseMessage = {
274
+ toolResponse,
275
+ };
276
+
277
+ this._sendDirect(message);
278
+ this.log(`client.toolResponse`, message);
279
+ }
280
+
281
+ /**
282
+ * send normal content parts such as { text }
283
+ */
284
+ send(parts: Part | Part[], turnComplete: boolean = true) {
285
+ parts = Array.isArray(parts) ? parts : [parts];
286
+ const content: Content = {
287
+ role: "user",
288
+ parts,
289
+ };
290
+
291
+ const clientContentRequest: ClientContentMessage = {
292
+ clientContent: {
293
+ turns: [content],
294
+ turnComplete,
295
+ },
296
+ };
297
+
298
+ this._sendDirect(clientContentRequest);
299
+ this.log(`client.send`, clientContentRequest);
300
+ }
301
+
302
+ /**
303
+ * used internally to send all messages
304
+ * don't use directly unless trying to send an unsupported message type
305
+ */
306
+ _sendDirect(request: object) {
307
+ if (!this.ws) {
308
+ throw new Error("WebSocket is not connected");
309
+ }
310
+ const str = JSON.stringify(request);
311
+ this.ws.send(str);
312
+ }
313
+ }
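An end-to-end sketch wiring this client to the AudioStreamer defined earlier (illustrative; the model id is a placeholder and is not taken from this commit):

    import { MultimodalLiveClient } from "./lib/multimodal-live-client";
    import { AudioStreamer } from "./lib/audio-streamer";

    const client = new MultimodalLiveClient(); // defaults to ws(s)://<host>/ws
    const streamer = new AudioStreamer(new AudioContext({ sampleRate: 24000 }));

    client.on("audio", (buf: ArrayBuffer) => streamer.addPCM16(new Uint8Array(buf)));
    client.on("content", (content) => console.log("model content", content));
    client.on("turncomplete", () => streamer.complete());
    client.on("interrupted", () => streamer.stop());

    await streamer.resume();                              // requires a prior user gesture
    await client.connect({ model: "models/<model-id>" }); // placeholder model id
    client.send({ text: "Hello!" });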
src/lib/platform.ts ADDED
@@ -0,0 +1,6 @@
1
+ export const isIOS = (): boolean => {
2
+ const userAgent = window.navigator.userAgent.toLowerCase();
3
+ return /iphone|ipad|ipod/.test(userAgent) ||
4
+ // Detect iPad on iOS 13+ (which reports as Mac)
5
+ (userAgent.includes('mac') && 'ontouchend' in document);
6
+ };
src/lib/store-logger.ts ADDED
@@ -0,0 +1,65 @@
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import { create } from "zustand";
18
+ import { StreamingLog } from "../multimodal-live-types";
19
+ import { mockLogs } from "../components/logger/mock-logs";
20
+
21
+ interface StoreLoggerState {
22
+ maxLogs: number;
23
+ logs: StreamingLog[];
24
+ log: (streamingLog: StreamingLog) => void;
25
+ clearLogs: () => void;
+ setMaxLogs: (n: number) => void;
26
+ }
27
+
28
+ export const useLoggerStore = create<StoreLoggerState>((set, get) => ({
29
+ maxLogs: 500,
30
+ logs: [], //mockLogs,
31
+ log: ({ date, type, message }: StreamingLog) => {
32
+ set((state) => {
33
+ const prevLog = state.logs.at(-1);
34
+ if (prevLog && prevLog.type === type && prevLog.message === message) {
35
+ return {
36
+ logs: [
37
+ ...state.logs.slice(0, -1),
38
+ {
39
+ date,
40
+ type,
41
+ message,
42
+ count: prevLog.count ? prevLog.count + 1 : 1,
43
+ } as StreamingLog,
44
+ ],
45
+ };
46
+ }
47
+ return {
48
+ logs: [
49
+ ...state.logs.slice(-(get().maxLogs - 1)),
50
+ {
51
+ date,
52
+ type,
53
+ message,
54
+ } as StreamingLog,
55
+ ],
56
+ };
57
+ });
58
+ },
59
+
60
+ clearLogs: () => {
61
+ console.log("clear log");
62
+ set({ logs: [] });
63
+ },
64
+ setMaxLogs: (n: number) => set({ maxLogs: n }),
65
+ }));
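Because this is a plain zustand store, it can be driven from outside React as well as through the hook; a small sketch (illustrative):

    import { useLoggerStore } from "./lib/store-logger";

    // Outside React: push a log entry directly into the store.
    useLoggerStore.getState().log({ date: new Date(), type: "client.send", message: "setup" });

    // Inside a component: select only what is needed.
    // const logs = useLoggerStore((state) => state.logs);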
src/lib/utils.ts ADDED
@@ -0,0 +1,86 @@
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ export type GetAudioContextOptions = AudioContextOptions & {
18
+ id?: string;
19
+ };
20
+
21
+ const map: Map<string, AudioContext> = new Map();
22
+
23
+ export const audioContext: (
24
+ options?: GetAudioContextOptions,
25
+ ) => Promise<AudioContext> = (() => {
26
+ const didInteract = new Promise((res) => {
27
+ window.addEventListener("pointerdown", res, { once: true });
28
+ window.addEventListener("keydown", res, { once: true });
29
+ });
30
+
31
+ return async (options?: GetAudioContextOptions) => {
32
+ try {
33
+ const a = new Audio();
34
+ a.src =
35
+ "data:audio/wav;base64,UklGRigAAABXQVZFZm10IBIAAAABAAEARKwAAIhYAQACABAAAABkYXRhAgAAAAEA";
36
+ await a.play();
37
+ if (options?.id && map.has(options.id)) {
38
+ const ctx = map.get(options.id);
39
+ if (ctx) {
40
+ return ctx;
41
+ }
42
+ }
43
+ const ctx = new AudioContext(options);
44
+ if (options?.id) {
45
+ map.set(options.id, ctx);
46
+ }
47
+ return ctx;
48
+ } catch (e) {
49
+ await didInteract;
50
+ if (options?.id && map.has(options.id)) {
51
+ const ctx = map.get(options.id);
52
+ if (ctx) {
53
+ return ctx;
54
+ }
55
+ }
56
+ const ctx = new AudioContext(options);
57
+ if (options?.id) {
58
+ map.set(options.id, ctx);
59
+ }
60
+ return ctx;
61
+ }
62
+ };
63
+ })();
64
+
65
+ export const blobToJSON = (blob: Blob) =>
66
+ new Promise((resolve, reject) => {
67
+ const reader = new FileReader();
68
+ reader.onload = () => {
69
+ if (reader.result) {
70
+ const json = JSON.parse(reader.result as string);
71
+ resolve(json);
72
+ } else {
73
+ reject(new Error("Empty result from FileReader"));
74
+ }
75
+ };
76
+ reader.readAsText(blob);
77
+ });
78
+
79
+ export function base64ToArrayBuffer(base64: string) {
80
+ var binaryString = atob(base64);
81
+ var bytes = new Uint8Array(binaryString.length);
82
+ for (let i = 0; i < binaryString.length; i++) {
83
+ bytes[i] = binaryString.charCodeAt(i);
84
+ }
85
+ return bytes.buffer;
86
+ }
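The audioContext helper above defers AudioContext creation until the page has seen a user gesture (the silent audio element either plays immediately or the helper waits for pointerdown/keydown) and caches contexts by id. A small sketch of the caching behaviour (illustrative):

    import { audioContext } from "./lib/utils";

    const a = await audioContext({ id: "audio-out", sampleRate: 24000 });
    const b = await audioContext({ id: "audio-out" });
    console.log(a === b); // true: the same context is reused for a given id

    const c = await audioContext(); // no id: a fresh, uncached AudioContext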
src/lib/worklets/audio-processing.ts ADDED
@@ -0,0 +1,73 @@
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ const AudioRecordingWorklet = `
18
+ class AudioProcessingWorklet extends AudioWorkletProcessor {
19
+
20
+ // send and clear buffer every 2048 samples,
21
+ // which at 16khz is about 8 times a second
22
+ buffer = new Int16Array(2048);
23
+
24
+ // current write index
25
+ bufferWriteIndex = 0;
26
+
27
+ constructor() {
28
+ super();
29
+ this.hasAudio = false;
30
+ }
31
+
32
+ /**
33
+ * @param inputs Float32Array[][] [input#][channel#][sample#] so to access first inputs 1st channel inputs[0][0]
34
+ * @param outputs Float32Array[][]
35
+ */
36
+ process(inputs) {
37
+ if (inputs[0].length) {
38
+ const channel0 = inputs[0][0];
39
+ this.processChunk(channel0);
40
+ }
41
+ return true;
42
+ }
43
+
44
+ sendAndClearBuffer(){
45
+ this.port.postMessage({
46
+ event: "chunk",
47
+ data: {
48
+ int16arrayBuffer: this.buffer.slice(0, this.bufferWriteIndex).buffer,
49
+ },
50
+ });
51
+ this.bufferWriteIndex = 0;
52
+ }
53
+
54
+ processChunk(float32Array) {
55
+ const l = float32Array.length;
56
+
57
+ for (let i = 0; i < l; i++) {
58
+ // convert float32 -1 to 1 to int16 -32768 to 32767
59
+ const int16Value = float32Array[i] * 32768;
60
+ this.buffer[this.bufferWriteIndex++] = int16Value;
61
+ if(this.bufferWriteIndex >= this.buffer.length) {
62
+ this.sendAndClearBuffer();
63
+ }
64
+ }
65
+
66
+ if(this.bufferWriteIndex >= this.buffer.length) {
67
+ this.sendAndClearBuffer();
68
+ }
69
+ }
70
+ }
71
+ `;
72
+
73
+ export default AudioRecordingWorklet;
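A worked example of the float32 to int16 conversion performed in processChunk above. Note that this worklet multiplies without clamping, so a full-scale sample of exactly +1.0 becomes 32768 and wraps to -32768 when stored in the Int16Array; the Safari worklet below clamps to avoid that. The snippet is illustrative only:

    // 0.5  ->  16384
    // -1.0 -> -32768
    // +1.0 ->  32768 before clamping, which would wrap to -32768 in an Int16Array
    const toInt16 = (sample: number) =>
      Math.max(-32768, Math.min(32767, Math.round(sample * 32768)));

    console.log(toInt16(0.5), toInt16(-1.0), toInt16(1.0)); // 16384 -32768 32767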
src/lib/worklets/safari-audio-processing.ts ADDED
@@ -0,0 +1,99 @@
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ const SafariAudioRecordingWorklet = `
18
+ class AudioProcessingWorklet extends AudioWorkletProcessor {
19
+ // Safari seems to work better with smaller buffer sizes
20
+ // and more frequent updates
21
+ buffer = new Int16Array(1024);
22
+ bufferWriteIndex = 0;
23
+ lastProcessTime = 0;
24
+ sampleRate = 0;
25
+
26
+ constructor(options) {
27
+ super();
28
+ console.log('Safari AudioProcessingWorklet constructed with options:', options);
29
+ this.sampleRate = options.processorOptions?.sampleRate || sampleRate;
30
+ console.log('Using sample rate:', this.sampleRate);
31
+ }
32
+
33
+ process(inputs) {
34
+ // Log processing details periodically
35
+ const now = currentTime;
36
+ if (now - this.lastProcessTime > 1) {
37
+ console.log('Safari AudioProcessingWorklet processing:', {
38
+ inputChannels: inputs[0]?.length,
39
+ inputSamples: inputs[0]?.[0]?.length,
40
+ bufferWriteIndex: this.bufferWriteIndex,
41
+ time: now
42
+ });
43
+ this.lastProcessTime = now;
44
+ }
45
+
46
+ if (!inputs[0]?.length) {
47
+ console.warn('No input channels available');
48
+ return true;
49
+ }
50
+
51
+ const channel0 = inputs[0][0];
52
+ if (!channel0?.length) {
53
+ console.warn('Empty input channel');
54
+ return true;
55
+ }
56
+
57
+ this.processChunk(channel0);
58
+ return true;
59
+ }
60
+
61
+ sendAndClearBuffer() {
62
+ if (this.bufferWriteIndex > 0) {
63
+ this.port.postMessage({
64
+ event: "chunk",
65
+ data: {
66
+ int16arrayBuffer: this.buffer.slice(0, this.bufferWriteIndex).buffer,
67
+ },
68
+ });
69
+ this.bufferWriteIndex = 0;
70
+ }
71
+ }
72
+
73
+ processChunk(float32Array) {
74
+ // Safari can sometimes send empty arrays or undefined
75
+ if (!float32Array?.length) {
76
+ return;
77
+ }
78
+
79
+ const l = float32Array.length;
80
+ for (let i = 0; i < l; i++) {
81
+ // Convert float32 -1 to 1 to int16 -32768 to 32767
82
+ // Add some additional gain for Safari which tends to be quieter
83
+ const int16Value = Math.max(-32768, Math.min(32767, float32Array[i] * 32768 * 1.5));
84
+ this.buffer[this.bufferWriteIndex++] = int16Value;
85
+
86
+ if (this.bufferWriteIndex >= this.buffer.length) {
87
+ this.sendAndClearBuffer();
88
+ }
89
+ }
90
+
91
+ // Make sure to send any remaining data
92
+ if (this.bufferWriteIndex > 0) {
93
+ this.sendAndClearBuffer();
94
+ }
95
+ }
96
+ }
97
+ `;
98
+
99
+ export default SafariAudioRecordingWorklet;
src/lib/worklets/vol-meter.ts ADDED
@@ -0,0 +1,65 @@
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ const VolMeterWorket = `
18
+ class VolMeter extends AudioWorkletProcessor {
19
+ volume
20
+ updateIntervalInMS
21
+ nextUpdateFrame
22
+
23
+ constructor() {
24
+ super()
25
+ this.volume = 0
26
+ this.updateIntervalInMS = 25
27
+ this.nextUpdateFrame = this.updateIntervalInMS
28
+ this.port.onmessage = event => {
29
+ if (event.data.updateIntervalInMS) {
30
+ this.updateIntervalInMS = event.data.updateIntervalInMS
31
+ }
32
+ }
33
+ }
34
+
35
+ get intervalInFrames() {
36
+ return (this.updateIntervalInMS / 1000) * sampleRate
37
+ }
38
+
39
+ process(inputs) {
40
+ const input = inputs[0]
41
+
42
+ if (input.length > 0) {
43
+ const samples = input[0]
44
+ let sum = 0
45
+ let rms = 0
46
+
47
+ for (let i = 0; i < samples.length; ++i) {
48
+ sum += samples[i] * samples[i]
49
+ }
50
+
51
+ rms = Math.sqrt(sum / samples.length)
52
+ this.volume = Math.max(rms, this.volume * 0.7)
53
+
54
+ this.nextUpdateFrame -= samples.length
55
+ if (this.nextUpdateFrame < 0) {
56
+ this.nextUpdateFrame += this.intervalInFrames
57
+ this.port.postMessage({volume: this.volume})
58
+ }
59
+ }
60
+
61
+ return true
62
+ }
63
+ }`;
64
+
65
+ export default VolMeterWorket;
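This processor smooths the per-block RMS with Math.max(rms, volume * 0.7), so the reported level falls back by roughly 30% per processing block when the input drops. A hedged sketch of attaching it to the playback path via AudioStreamer.addWorklet (the worklet name is arbitrary, and `streamer` refers to the AudioStreamer instance from the earlier sketch):

    import VolMeterWorket from "./lib/worklets/vol-meter";

    await streamer.addWorklet("vumeter-out", VolMeterWorket, (ev: MessageEvent) => {
      console.log("output level", ev.data.volume);
    });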
src/multimodal-live-types.ts ADDED
@@ -0,0 +1,242 @@
1
+ /**
2
+ * Copyright 2024 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import type {
18
+ Content,
19
+ FunctionCall,
20
+ GenerationConfig,
21
+ GenerativeContentBlob,
22
+ Part,
23
+ Tool,
24
+ } from "@google/generative-ai";
25
+
26
+ /**
27
+ * this module contains type-definitions and Type-Guards
28
+ */
29
+
30
+ // Type-definitions
31
+
32
+ /* outgoing types */
33
+
34
+ /**
35
+ * the config to initiate the session
36
+ */
37
+ export type LiveConfig = {
38
+ model: string;
39
+ systemInstruction?: { parts: Part[] };
40
+ generationConfig?: Partial<LiveGenerationConfig>;
41
+ tools?: Array<Tool | { googleSearch: {} } | { codeExecution: {} }>;
42
+ };
43
+
44
+ export type LiveGenerationConfig = GenerationConfig & {
45
+ responseModalities: "text" | "audio" | "image";
46
+ speechConfig?: {
47
+ voiceConfig?: {
48
+ prebuiltVoiceConfig?: {
49
+ voiceName: "Puck" | "Charon" | "Kore" | "Fenrir" | "Aoede" | string;
50
+ };
51
+ };
52
+ };
53
+ };
54
+
55
+ export type LiveOutgoingMessage =
56
+ | SetupMessage
57
+ | ClientContentMessage
58
+ | RealtimeInputMessage
59
+ | ToolResponseMessage;
60
+
61
+ export type SetupMessage = {
62
+ setup: LiveConfig;
63
+ };
64
+
65
+ export type ClientContentMessage = {
66
+ clientContent: {
67
+ turns: Content[];
68
+ turnComplete: boolean;
69
+ };
70
+ };
71
+
72
+ export type RealtimeInputMessage = {
73
+ realtimeInput: {
74
+ mediaChunks: GenerativeContentBlob[];
75
+ };
76
+ };
77
+
78
+ export type ToolResponseMessage = {
79
+ toolResponse: {
80
+ functionResponses: LiveFunctionResponse[];
81
+ };
82
+ };
83
+
84
+ export type ToolResponse = ToolResponseMessage["toolResponse"];
85
+
86
+ export type LiveFunctionResponse = {
87
+ response: object;
88
+ id: string;
89
+ };
90
+
91
+ /** Incoming types */
92
+
93
+ export type LiveIncomingMessage =
94
+ | ToolCallCancellationMessage
95
+ | ToolCallMessage
96
+ | ServerContentMessage
97
+ | SetupCompleteMessage;
98
+
99
+ export type SetupCompleteMessage = { setupComplete: {} };
100
+
101
+ export type ServerContentMessage = {
102
+ serverContent: ServerContent;
103
+ };
104
+
105
+ export type ServerContent = ModelTurn | TurnComplete | Interrupted;
106
+
107
+ export type ModelTurn = {
108
+ modelTurn: {
109
+ parts: Part[];
110
+ };
111
+ };
112
+
113
+ export type TurnComplete = { turnComplete: boolean };
114
+
115
+ export type Interrupted = { interrupted: true };
116
+
117
+ export type ToolCallCancellationMessage = {
118
+ toolCallCancellation: {
119
+ ids: string[];
120
+ };
121
+ };
122
+
123
+ export type ToolCallCancellation =
124
+ ToolCallCancellationMessage["toolCallCancellation"];
125
+
126
+ export type ToolCallMessage = {
127
+ toolCall: ToolCall;
128
+ };
129
+
130
+ export type LiveFunctionCall = FunctionCall & {
131
+ id: string;
132
+ };
133
+
134
+ /**
135
+ * A `toolCall` message
136
+ */
137
+ export type ToolCall = {
138
+ functionCalls: LiveFunctionCall[];
139
+ };
140
+
141
+ /** log types */
142
+ export type StreamingLog = {
143
+ date: Date;
144
+ type: string;
145
+ count?: number;
146
+ message: string | LiveOutgoingMessage | LiveIncomingMessage;
147
+ };
148
+
149
+ // Type-Guards
150
+
151
+ const prop = (a: any, prop: string, kind: string = "object") =>
152
+ typeof a === "object" && a !== null && typeof a[prop] === kind;
153
+
154
+ // outgoing messages
155
+ export const isSetupMessage = (a: unknown): a is SetupMessage =>
156
+ prop(a, "setup");
157
+
158
+ export const isClientContentMessage = (a: unknown): a is ClientContentMessage =>
159
+ prop(a, "clientContent");
160
+
161
+ export const isRealtimeInputMessage = (a: unknown): a is RealtimeInputMessage =>
162
+ prop(a, "realtimeInput");
163
+
164
+ export const isToolResponseMessage = (a: unknown): a is ToolResponseMessage =>
165
+ prop(a, "toolResponse");
166
+
167
+ // incoming messages
168
+ export const isSetupCompleteMessage = (a: unknown): a is SetupCompleteMessage =>
169
+ prop(a, "setupComplete");
170
+
171
+ export const isServerContentMessage = (a: any): a is ServerContentMessage =>
172
+ prop(a, "serverContent");
173
+
174
+ export const isToolCallMessage = (a: any): a is ToolCallMessage =>
175
+ prop(a, "toolCall");
176
+
177
+ export const isToolCallCancellationMessage = (
178
+ a: unknown,
179
+ ): a is ToolCallCancellationMessage =>
180
+ prop(a, "toolCallCancellation") &&
181
+ isToolCallCancellation((a as any).toolCallCancellation);
182
+
183
+ export const isModelTurn = (a: any): a is ModelTurn =>
184
+ typeof (a as ModelTurn).modelTurn === "object";
185
+
186
+ export const isTurnComplete = (a: any): a is TurnComplete =>
187
+ typeof (a as TurnComplete).turnComplete === "boolean";
188
+
189
+ export const isInterrupted = (a: any): a is Interrupted =>
190
+ (a as Interrupted).interrupted;
191
+
192
+ export function isToolCall(value: unknown): value is ToolCall {
193
+ if (!value || typeof value !== "object") return false;
194
+
195
+ const candidate = value as Record<string, unknown>;
196
+
197
+ return (
198
+ Array.isArray(candidate.functionCalls) &&
199
+ candidate.functionCalls.every((call) => isLiveFunctionCall(call))
200
+ );
201
+ }
202
+
203
+ export function isToolResponse(value: unknown): value is ToolResponse {
204
+ if (!value || typeof value !== "object") return false;
205
+
206
+ const candidate = value as Record<string, unknown>;
207
+
208
+ return (
209
+ Array.isArray(candidate.functionResponses) &&
210
+ candidate.functionResponses.every((resp) => isLiveFunctionResponse(resp))
211
+ );
212
+ }
213
+
214
+ export function isLiveFunctionCall(value: unknown): value is LiveFunctionCall {
215
+ if (!value || typeof value !== "object") return false;
216
+
217
+ const candidate = value as Record<string, unknown>;
218
+
219
+ return (
220
+ typeof candidate.name === "string" &&
221
+ typeof candidate.id === "string" &&
222
+ typeof candidate.args === "object" &&
223
+ candidate.args !== null
224
+ );
225
+ }
226
+
227
+ export function isLiveFunctionResponse(
228
+ value: unknown,
229
+ ): value is LiveFunctionResponse {
230
+ if (!value || typeof value !== "object") return false;
231
+
232
+ const candidate = value as Record<string, unknown>;
233
+
234
+ return (
235
+ typeof candidate.response === "object" && typeof candidate.id === "string"
236
+ );
237
+ }
238
+
239
+ export const isToolCallCancellation = (
240
+ a: unknown,
241
+ ): a is ToolCallCancellationMessage["toolCallCancellation"] =>
242
+ typeof a === "object" && Array.isArray((a as any).ids);
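A brief sketch of how these guards narrow an incoming message before dispatch (illustrative; `message` stands for JSON already parsed from the websocket):

    import {
      isInterrupted,
      isModelTurn,
      isServerContentMessage,
      isToolCallMessage,
      type LiveIncomingMessage,
    } from "./multimodal-live-types";

    function route(message: LiveIncomingMessage) {
      if (isToolCallMessage(message)) {
        return message.toolCall.functionCalls; // typed as LiveFunctionCall[]
      }
      if (isServerContentMessage(message)) {
        const { serverContent } = message;
        if (isInterrupted(serverContent)) return "interrupted";
        if (isModelTurn(serverContent)) return serverContent.modelTurn.parts;
      }
      return undefined;
    }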