Remove extra file

This commit is contained in:
parent d31151756b
commit 0e7350086d

1 changed file with 0 additions and 308 deletions
@@ -1,308 +0,0 @@
const Applet = imports.ui.applet;
const Gio = imports.gi.Gio;
const GLib = imports.gi.GLib;
const Lang = imports.lang;
const Mainloop = imports.mainloop;
const Soup = imports.gi.Soup;
const St = imports.gi.St;

const STREAM_SOCKET_URL = 'wss://hana.neonaialpha.com/node/v1/stream?token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJjbGllbnRfaWQiOiIwNjQ1N2FlMC1lMjQ0LTQxNjMtOWQ1NS0xNWFhMGNiYTQ5NDYiLCJ1c2VybmFtZSI6Im5lb24iLCJwYXNzd29yZCI6Im5lb24iLCJwZXJtaXNzaW9ucyI6eyJhc3Npc3QiOnRydWUsImJhY2tlbmQiOnRydWUsIm5vZGUiOnRydWV9LCJleHBpcmUiOjE3MjgxMDEzOTUuMjE3MTMyM30.BO3ymPLDg2v8epVxdnaf0iLh9DJnVSTZT_hM1M--V84';
const NODE_SOCKET_URL = 'wss://hana.neonaialpha.com/node/v1?token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJjbGllbnRfaWQiOiIwNjQ1N2FlMC1lMjQ0LTQxNjMtOWQ1NS0xNWFhMGNiYTQ5NDYiLCJ1c2VybmFtZSI6Im5lb24iLCJwYXNzd29yZCI6Im5lb24iLCJwZXJtaXNzaW9ucyI6eyJhc3Npc3QiOnRydWUsImJhY2tlbmQiOnRydWUsIm5vZGUiOnRydWV9LCJleHBpcmUiOjE3MjgxMDEzOTUuMjE3MTMyM30.BO3ymPLDg2v8epVxdnaf0iLh9DJnVSTZT_hM1M--V84';

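// Legacy Cinnamon applet pattern: a constructor function whose prototype extends
// Applet.IconApplet. Cinnamon instantiates it through main() at the bottom of this file.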
function MyApplet(metadata, orientation, panel_height, instance_id) {
    this._init(metadata, orientation, panel_height, instance_id);
}

MyApplet.prototype = {
    __proto__: Applet.IconApplet.prototype,

    _init: function(metadata, orientation, panel_height, instance_id) {
        Applet.IconApplet.prototype._init.call(this, orientation, panel_height, instance_id);

        this.metadata = metadata;
        // Set the custom SVG icon
        this._setCustomIcon(metadata.path + "/icon.svg");
        this.set_applet_tooltip(_("Voice Assistant"));

        this._streamSocket = null;
        this._nodeSocket = null;
        this._isRecording = false;
        this._recorder = null;
        this._player = null;
        this._audioBuffer = new Uint8Array(0);
        this._playbackBuffer = []; // Initialize the playback buffer

        global.log("[Voice Assistant] Applet initialized");
        this._initSockets();
    },

    _setCustomIcon: function(iconPath) {
        try {
            let file = Gio.File.new_for_path(iconPath);
            let gicon = new Gio.FileIcon({ file: file });
            // Calling a set_applet_icon_* method first ensures the applet's icon actor
            // exists; its gicon is then overridden with the custom FileIcon.
            this.set_applet_icon_symbolic_name(iconPath);
            this._applet_icon.gicon = gicon;
        } catch (e) {
            global.logError("[Voice Assistant] Error setting custom icon: " + e.message);
            // Fall back to the default icon if there's an error
            this.set_applet_icon_name("microphone-sensitivity-medium");
        }
    },

    _initSockets: function() {
        global.log("[Voice Assistant] Initializing WebSockets");
        let maxPayloadSize = 10 * 1024 * 1024; // 10 MB in bytes
        // Initialize Node WebSocket
        try {
            let session = new Soup.Session();
            let message = new Soup.Message({
                method: 'GET',
                uri: GLib.Uri.parse(NODE_SOCKET_URL, GLib.UriFlags.NONE)
            });

            session.websocket_connect_async(message, null, null, null, null, (session, result) => {
                try {
                    this._nodeSocket = session.websocket_connect_finish(result);
                    // Set the maximum incoming payload size
                    this._nodeSocket.set_max_incoming_payload_size(maxPayloadSize);
                    this._nodeSocket.connect('message', Lang.bind(this, this._onNodeMessage));
                    this._nodeSocket.connect('error', Lang.bind(this, this._onSocketError));
                    global.log("[Voice Assistant] Node WebSocket initialized");
                } catch (e) {
                    global.logError("[Voice Assistant] Error finalizing Node WebSocket: " + e.message);
                }
            });
            // Initialize streaming WebSocket
            try {
                let session = new Soup.Session();
                let message = new Soup.Message({
                    method: 'GET',
                    uri: GLib.Uri.parse(STREAM_SOCKET_URL, GLib.UriFlags.NONE)
                });

                session.websocket_connect_async(message, null, null, null, null, (session, result) => {
                    try {
                        this._streamSocket = session.websocket_connect_finish(result);
                        this._streamSocket.connect('message', Lang.bind(this, this._onStreamMessage));
                        this._streamSocket.connect('error', Lang.bind(this, this._onSocketError));
                        global.log("[Voice Assistant] Stream WebSocket initialized");
                    } catch (e) {
                        global.logError("[Voice Assistant] Error finalizing stream WebSocket: " + e.message);
                    }
                });
            } catch (e) {
                global.logError("[Voice Assistant] Error initializing stream WebSocket: " + e.message);
            }
        } catch (e) {
            global.logError("[Voice Assistant] Error initializing Node WebSocket: " + e.message);
        }
    },

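    // The node socket above carries JSON control messages, while the stream socket
    // carries raw audio in both directions: microphone chunks go out via send_binary()
    // and synthesized speech comes back as binary frames handled by _onStreamMessage().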
    _onSocketError: function(socket, error) {
        global.logError("[Voice Assistant] WebSocket error: " + error.message);
        try {
            if (this._streamSocket)
                this._streamSocket.close(Soup.WebsocketCloseCode.NORMAL, null);
            if (this._nodeSocket)
                this._nodeSocket.close(Soup.WebsocketCloseCode.NORMAL, null);
        } finally {
            // Attempt to reconnect after closing whatever is still open
            this._initSockets();
        }
    },

    _onNodeMessage: function(connection, type, message) {
        try {
            if (type === Soup.WebsocketDataType.TEXT) {
                // message is a GLib.Bytes; decode it to a string before parsing the JSON
                let data = imports.byteArray.toString(message.get_data());
                let jsonData = JSON.parse(data);
                global.log("[Voice Assistant] Parsed node message: " + jsonData.type);
                // Handle text messages if needed
            } else {
                global.log("[Voice Assistant] Received unknown data type from node: " + type);
            }
        } catch (e) {
            global.logError("[Voice Assistant] Error handling node message: " + e.message);
        }
    },

    _onStreamMessage: function(connection, type, message) {
        try {
            if (type === Soup.WebsocketDataType.BINARY) {
                global.log("[Voice Assistant] Received binary audio data of length: " + message.get_data().length);
                this._playbackBuffer.push(message.get_data());
                this._playAudio();
            } else {
                global.log("[Voice Assistant] Received unknown data type from stream: " + type);
            }
        } catch (e) {
            global.logError("[Voice Assistant] Error handling stream message: " + e.message);
        }
    },

    _playAudio: function() {
        if (this._player) {
            // If a player is already running, just add the new data to the buffer
            return;
        }

        if (this._playbackBuffer.length === 0) {
            return;
        }

        let audioData = this._playbackBuffer.shift();

        // Create a temporary file to store the audio data
        let [file, stream] = Gio.File.new_tmp("voice-assistant-XXXXXX");
        stream.output_stream.write_all(audioData, null);
        stream.close(null);

        // Play the audio using GStreamer
        this._player = new Gio.Subprocess({
            argv: [
                'gst-launch-1.0',
                'filesrc', 'location=' + file.get_path(),
                '!', 'decodebin',
                '!', 'audioconvert',
                '!', 'audioresample',
                '!', 'autoaudiosink'
            ],
            flags: Gio.SubprocessFlags.NONE
        });

        this._player.init(null);

        // Clean up when playback is finished
        this._player.wait_async(null, (source, result) => {
            try {
                source.wait_finish(result);
            } catch (e) {
                global.logError("[Voice Assistant] Error during audio playback: " + e.message);
            } finally {
                this._player = null;
                file.delete(null);
                // Play the next audio chunk if available
                this._playAudio();
            }
        });
    },

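    // The playback pipeline spawned in _playAudio() is equivalent to running, for example:
    //   gst-launch-1.0 filesrc location=<tmpfile> ! decodebin ! audioconvert ! audioresample ! autoaudiosink
    // where <tmpfile> stands for the temporary file created from the received audio chunk.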
    _startRecording: function() {
        if (this._isRecording) return;

        this._isRecording = true;
        this._setCustomIcon(this.metadata.path + "/icon-active.svg");
        global.log("[Voice Assistant] Starting recording");

        try {
            // Initialize GStreamer pipeline for recording
            this._recorder = new Gio.Subprocess({
                argv: [
                    'gst-launch-1.0',
                    'pulsesrc',
                    '!',
                    'audioconvert',
                    '!',
                    'audio/x-raw,format=S16LE,channels=1,rate=16000',
                    '!',
                    'fdsink'
                ],
                flags: Gio.SubprocessFlags.STDOUT_PIPE
            });

            this._recorder.init(null);
            global.log("[Voice Assistant] Recording subprocess initialized");

            // Read audio data and send it over WebSocket
            let stdout = this._recorder.get_stdout_pipe();
            this._readAudioData(stdout);
        } catch (e) {
            global.logError("[Voice Assistant] Error starting recording: " + e.message);
        }
    },

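    // The recording pipeline spawned in _startRecording() is equivalent to, for example:
    //   gst-launch-1.0 pulsesrc ! audioconvert ! audio/x-raw,format=S16LE,channels=1,rate=16000 ! fdsink
    // fdsink writes the raw 16 kHz mono S16LE samples to stdout, which _readAudioData()
    // forwards to the stream WebSocket in fixed-size chunks.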
    _readAudioData: function(stdout) {
        stdout.read_bytes_async(4096, GLib.PRIORITY_DEFAULT, null, (source, result) => {
            try {
                let bytes = source.read_bytes_finish(result);
                if (bytes && bytes.get_size() > 0) {
                    // Append new data to the existing buffer
                    let newData = new Uint8Array(bytes.get_data());
                    let combinedBuffer = new Uint8Array(this._audioBuffer.length + newData.length);
                    combinedBuffer.set(this._audioBuffer);
                    combinedBuffer.set(newData, this._audioBuffer.length);
                    this._audioBuffer = combinedBuffer;

                    // If we have accumulated 4096 or more bytes, send them
                    while (this._audioBuffer.length >= 4096) {
                        let chunkToSend = this._audioBuffer.slice(0, 4096);
                        this._streamSocket.send_binary(chunkToSend);
//                        global.log("[Voice Assistant] Sent 4096 bytes of audio data");

                        // Keep the remaining data in the buffer
                        this._audioBuffer = this._audioBuffer.slice(4096);
                    }

                    // Continue reading
                    this._readAudioData(stdout);
                } else {
                    global.log("[Voice Assistant] End of audio stream reached");
//                    // Send any remaining data in the buffer
//                    if (this._audioBuffer.length > 0) {
//                        this._streamSocket.send_binary(this._audioBuffer);
//                        global.log("[Voice Assistant] Sent final " + this._audioBuffer.length + " bytes of audio data");
//                    }
                    this._stopRecording();
                }
            } catch (e) {
                global.logError("[Voice Assistant] Error reading audio data: " + e.message);
                this._stopRecording();
            }
        });
    },

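    // Chunk-size note for _readAudioData(): at S16LE mono 16 kHz, 4096 bytes is
    // 4096 / (2 bytes * 16000 samples/s) = 128 ms of audio per WebSocket frame.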
    _stopRecording: function() {
        if (!this._isRecording) return;

        this._isRecording = false;
        this._setCustomIcon(this.metadata.path + "/icon.svg");
        global.log("[Voice Assistant] Stopping recording");

        if (this._recorder) {
            this._recorder.force_exit();
            this._recorder = null;
            global.log("[Voice Assistant] Recording subprocess terminated");
        }
        // Clear the audio buffer
        this._audioBuffer = new Uint8Array(0);
    },

    on_applet_clicked: function() {
        if (this._isRecording) {
            global.log("[Voice Assistant] Applet clicked: stopping recording");
            this._stopRecording();
        } else {
            global.log("[Voice Assistant] Applet clicked: starting recording");
            this._startRecording();
        }
    },

    on_applet_removed_from_panel: function() {
        global.log("[Voice Assistant] Applet removed from panel");
        this._stopRecording();
        if (this._streamSocket) {
            this._streamSocket.close(Soup.WebsocketCloseCode.NORMAL, null);
            global.log("[Voice Assistant] Stream WebSocket closed");
        }
        if (this._nodeSocket) {
            this._nodeSocket.close(Soup.WebsocketCloseCode.NORMAL, null);
            global.log("[Voice Assistant] Node WebSocket closed");
        }
        if (this._player) {
            this._player.force_exit();
            global.log("[Voice Assistant] Audio player terminated");
        }
    }
};

function main(metadata, orientation, panel_height, instance_id) {
    global.log("[Voice Assistant] Main function called");
    return new MyApplet(metadata, orientation, panel_height, instance_id);
}
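// Cinnamon looks up main() in applet.js and calls it with the applet's metadata
// (from metadata.json), the panel orientation, the panel height and the instance id,
// then adds the returned applet instance to the panel.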