Initial Implementation
This commit is contained in:
commit
d31151756b
6 changed files with 675 additions and 0 deletions
338
applet.js
Normal file
338
applet.js
Normal file
|
@ -0,0 +1,338 @@
|
|||
const Applet = imports.ui.applet;
|
||||
const Gio = imports.gi.Gio;
|
||||
const GLib = imports.gi.GLib;
|
||||
const Lang = imports.lang;
|
||||
const Mainloop = imports.mainloop;
|
||||
const Soup = imports.gi.Soup;
|
||||
const St = imports.gi.St;
|
||||
const Settings = imports.ui.settings;
|
||||
|
||||
/**
 * Constructor shim for the Cinnamon applet life-cycle: the panel instantiates
 * this with `new`, and all real initialization lives in prototype._init.
 */
function MyApplet(...ctorArgs) {
    this._init(...ctorArgs);
}
|
||||
|
||||
MyApplet.prototype = {
    // Old-style (pre-ES6-class) Cinnamon applet: inherit IconApplet behavior
    // via prototype chaining.
    __proto__: Applet.IconApplet.prototype,

    /**
     * Initializes the applet instance.
     *
     * Binds the "baseUrl" and "token" applet settings onto `this`, installs
     * the custom SVG icon and tooltip, resets all socket/recording state, and
     * opens the two WebSocket connections.
     *
     * @param metadata     Applet metadata object (uuid, path, ...).
     * @param orientation  Panel orientation, forwarded to IconApplet.
     * @param panel_height Panel height, forwarded to IconApplet.
     * @param instance_id  Unique applet instance id.
     */
    _init: function(metadata, orientation, panel_height, instance_id) {
        Applet.IconApplet.prototype._init.call(this, orientation, panel_height, instance_id);

        this.metadata = metadata;
        this.settings = new Settings.AppletSettings(this, metadata.uuid, instance_id);

        // Mirror the "baseUrl" setting into this.baseUrl; reconnect on change.
        this.settings.bindProperty(
            Settings.BindingDirection.IN,
            "baseUrl",
            "baseUrl",
            // NOTE(review): callback is passed unbound — confirm AppletSettings
            // invokes it with the applet as `this` (otherwise _initSockets
            // would be called on the wrong receiver).
            this.onSettingsChanged,
            null
        );

        // Mirror the "token" setting into this.token; reconnect on change.
        this.settings.bindProperty(
            Settings.BindingDirection.IN,
            "token",
            "token",
            this.onSettingsChanged,
            null
        );

        this._setCustomIcon(metadata.path + "/icon.svg");
        this.set_applet_tooltip(_("Voice Assistant"));

        // Connection / recording / playback state.
        this._streamSocket = null;                // WebSocket carrying raw audio both ways
        this._nodeSocket = null;                  // WebSocket carrying JSON control messages
        this._isRecording = false;                // toggled by on_applet_clicked
        this._recorder = null;                    // gst-launch capture subprocess
        this._player = null;                      // gst-launch playback subprocess
        this._audioBuffer = new Uint8Array(0);    // accumulator for outgoing mic data
        this._playbackBuffer = [];                // FIFO of received audio chunks

        global.log("[Voice Assistant] Applet initialized");
        this._initSockets();
    },

    /**
     * Settings-change handler: closes any open sockets and reconnects with the
     * new baseUrl/token values.
     */
    onSettingsChanged: function() {
        global.log("[Voice Assistant] Settings changed, reinitializing sockets");
        if (this._streamSocket) {
            this._streamSocket.close(Soup.WebsocketCloseCode.NORMAL, null);
        }
        if (this._nodeSocket) {
            this._nodeSocket.close(Soup.WebsocketCloseCode.NORMAL, null);
        }
        this._initSockets();
    },

    /**
     * Sets the panel icon from an SVG file path, falling back to a stock
     * microphone icon on failure.
     *
     * @param iconPath Absolute path to the SVG icon file.
     */
    _setCustomIcon: function(iconPath) {
        try {
            let file = Gio.File.new_for_path(iconPath);
            let gicon = new Gio.FileIcon({ file: file });
            // NOTE(review): a file path is passed where a symbolic icon *name*
            // is expected; presumably this only serves to create
            // this._applet_icon before its gicon is overridden below — confirm.
            this.set_applet_icon_symbolic_name(iconPath);
            this._applet_icon.gicon = gicon;
        } catch (e) {
            global.logError("[Voice Assistant] Error setting custom icon: " + e.message);
            // Fallback to the default icon if there's an error
            this.set_applet_icon_name("microphone-sensitivity-medium");
        }
    },

    /**
     * Opens both WebSocket connections (control + audio stream) using the
     * current baseUrl/token settings. Connection setup is asynchronous; the
     * socket fields are populated from the completion callbacks.
     */
    _initSockets: function() {
        global.log("[Voice Assistant] Initializing WebSockets");
        let maxPayloadSize = 10 * 1024 * 1024; // 10 MB in bytes

        const STREAM_SOCKET_URL = `wss://${this.baseUrl}/node/v1/stream?token=${this.token}`;
        const NODE_SOCKET_URL = `wss://${this.baseUrl}/node/v1?token=${this.token}`;

        // Initialize Node (control) WebSocket
        try {
            let session = new Soup.Session();
            let message = new Soup.Message({
                method: 'GET',
                uri: GLib.Uri.parse(NODE_SOCKET_URL, GLib.UriFlags.NONE)
            });

            session.websocket_connect_async(message, null, null, null, null, (session, result) => {
                try {
                    this._nodeSocket = session.websocket_connect_finish(result);
                    // Raise the frame-size limit so large server responses fit.
                    this._nodeSocket.set_max_incoming_payload_size(maxPayloadSize);
                    this._nodeSocket.connect('message', Lang.bind(this, this._onNodeMessage));
                    this._nodeSocket.connect('error', Lang.bind(this, this._onSocketError));
                    global.log("[Voice Assistant] Node WebSocket initialized");
                } catch (e) {
                    global.logError("[Voice Assistant] Error finalizing Node WebSocket: " + e.message);
                }
            });

            // Initialize streaming (audio) WebSocket
            try {
                let streamSession = new Soup.Session();
                let streamMessage = new Soup.Message({
                    method: 'GET',
                    uri: GLib.Uri.parse(STREAM_SOCKET_URL, GLib.UriFlags.NONE)
                });

                streamSession.websocket_connect_async(streamMessage, null, null, null, null, (streamSession, streamResult) => {
                    try {
                        this._streamSocket = streamSession.websocket_connect_finish(streamResult);
                        this._streamSocket.connect('message', Lang.bind(this, this._onStreamMessage));
                        this._streamSocket.connect('error', Lang.bind(this, this._onSocketError));
                        global.log("[Voice Assistant] Stream WebSocket initialized");
                    } catch (e) {
                        global.logError("[Voice Assistant] Error finalizing stream WebSocket: " + e.message);
                    }
                });
            } catch (e) {
                global.logError("[Voice Assistant] Error initializing stream WebSocket: " + e.message);
            }
        } catch (e) {
            global.logError("[Voice Assistant] Error initializing Node WebSocket: " + e.message);
        }
    },

    /**
     * Error handler shared by both sockets: tears down both connections and
     * reconnects.
     *
     * NOTE(review): either socket may still be null here (async setup), so the
     * close() calls can throw; the bare `finally` swallows that and proceeds
     * to reconnect. Also note an error loop here retries without backoff.
     */
    _onSocketError: function(socket, error) {
        global.logError("[Voice Assistant] WebSocket error: " + error.message);
        try {
            this._streamSocket.close();
            this._nodeSocket.close();
        } finally {
            this._initSockets();
        }
    },

    /**
     * Handles incoming control messages (expected to be JSON text frames).
     * Currently only logs the parsed message type.
     */
    _onNodeMessage: function(connection, type, message) {
        try {
            if (type === Soup.WebsocketDataType.TEXT) {
                let data = message.get_data();
                let jsonData = JSON.parse(data);
                global.log("[Voice Assistant] Parsed node message: " + jsonData.type);
                // Handle text messages if needed
            } else {
                global.log("[Voice Assistant] Received unknown data type from node: " + type);
            }
        } catch (e) {
            global.logError("[Voice Assistant] Error handling node message: " + e.message);
        }
    },

    /**
     * Handles incoming audio (binary frames): queues each chunk and kicks the
     * playback pump.
     */
    _onStreamMessage: function(connection, type, message) {
        try {
            if (type === Soup.WebsocketDataType.BINARY) {
                global.log("[Voice Assistant] Received binary audio data of length: " + message.get_data().length);
                this._playbackBuffer.push(message.get_data());
                this._playAudio();
            } else {
                global.log("[Voice Assistant] Received unknown data type from stream: " + type);
            }
        } catch (e) {
            global.logError("[Voice Assistant] Error handling stream message: " + e.message);
        }
    },

    /**
     * Plays the next queued audio chunk, one subprocess at a time. Each chunk
     * is written to a temp file and piped through gst-launch; when playback
     * finishes, the temp file is deleted and the next chunk (if any) starts.
     */
    _playAudio: function() {
        if (this._player) {
            // If a player is already running, just add the new data to the buffer
            return;
        }

        if (this._playbackBuffer.length === 0) {
            return;
        }

        let audioData = this._playbackBuffer.shift();

        // Create a temporary file to store the audio data
        let [file, stream] = Gio.File.new_tmp("voice-assistant-XXXXXX");
        stream.output_stream.write_all(audioData, null);
        stream.close(null);

        // Play the audio using GStreamer
        this._player = new Gio.Subprocess({
            argv: [
                'gst-launch-1.0',
                'filesrc', 'location=' + file.get_path(),
                '!', 'decodebin',
                '!', 'audioconvert',
                '!', 'audioresample',
                '!', 'autoaudiosink'
            ],
            flags: Gio.SubprocessFlags.NONE
        });

        this._player.init(null);

        // Clean up when playback is finished
        this._player.wait_async(null, (source, result) => {
            try {
                source.wait_finish(result);
            } catch (e) {
                global.logError("[Voice Assistant] Error during audio playback: " + e.message);
            } finally {
                this._player = null;
                file.delete(null);
                // Play the next audio chunk if available
                this._playAudio();
            }
        });
    },

    /**
     * Starts microphone capture via a gst-launch subprocess producing raw
     * 16 kHz mono S16LE PCM on stdout, then begins pumping that stdout to the
     * stream socket.
     */
    _startRecording: function() {
        if (this._isRecording) return;

        this._isRecording = true;
        this._setCustomIcon(this.metadata.path + "/icon-active.svg");
        global.log("[Voice Assistant] Starting recording");

        try {
            // Initialize GStreamer pipeline for recording
            this._recorder = new Gio.Subprocess({
                argv: [
                    'gst-launch-1.0',
                    'pulsesrc',
                    '!',
                    'audioconvert',
                    '!',
                    'audio/x-raw,format=S16LE,channels=1,rate=16000',
                    '!',
                    'fdsink'
                ],
                flags: Gio.SubprocessFlags.STDOUT_PIPE
            });

            this._recorder.init(null);
            global.log("[Voice Assistant] Recording subprocess initialized");

            // Read audio data and send it over WebSocket
            let stdout = this._recorder.get_stdout_pipe();
            this._readAudioData(stdout);
        } catch (e) {
            global.logError("[Voice Assistant] Error starting recording: " + e.message);
        }
    },

    /**
     * Async pump: reads recorder stdout, accumulates into _audioBuffer, and
     * forwards exactly-4096-byte chunks over the stream socket. Re-arms itself
     * until EOF or error, then stops recording.
     *
     * @param stdout GInputStream of the recorder subprocess.
     */
    _readAudioData: function(stdout) {
        stdout.read_bytes_async(4096, GLib.PRIORITY_DEFAULT, null, (source, result) => {
            try {
                let bytes = source.read_bytes_finish(result);
                if (bytes && bytes.get_size() > 0) {
                    // Append new data to the existing buffer
                    let newData = new Uint8Array(bytes.get_data());
                    let combinedBuffer = new Uint8Array(this._audioBuffer.length + newData.length);
                    combinedBuffer.set(this._audioBuffer);
                    combinedBuffer.set(newData, this._audioBuffer.length);
                    this._audioBuffer = combinedBuffer;

                    // If we have accumulated 4096 or more bytes, send them
                    while (this._audioBuffer.length >= 4096) {
                        let chunkToSend = this._audioBuffer.slice(0, 4096);
                        this._streamSocket.send_binary(chunkToSend);

                        // Keep the remaining data in the buffer
                        this._audioBuffer = this._audioBuffer.slice(4096);
                    }

                    // Continue reading
                    this._readAudioData(stdout);
                } else {
                    global.log("[Voice Assistant] End of audio stream reached");
                    // NOTE(review): any trailing partial chunk (<4096 B) in
                    // _audioBuffer is discarded here, not flushed — confirm the
                    // server does not need the final samples.
                    this._stopRecording();
                }
            } catch (e) {
                global.logError("[Voice Assistant] Error reading audio data: " + e.message);
                this._stopRecording();
            }
        });
    },

    /**
     * Stops capture: kills the recorder subprocess, restores the idle icon,
     * and drops any unsent audio.
     */
    _stopRecording: function() {
        if (!this._isRecording) return;

        this._isRecording = false;
        this._setCustomIcon(this.metadata.path + "/icon.svg");
        global.log("[Voice Assistant] Stopping recording");

        if (this._recorder) {
            this._recorder.force_exit();
            this._recorder = null;
            global.log("[Voice Assistant] Recording subprocess terminated");
        }
        // Clear the audio buffer
        this._audioBuffer = new Uint8Array(0);
    },

    /**
     * Panel-click handler: toggles recording on/off.
     */
    on_applet_clicked: function() {
        if (this._isRecording) {
            global.log("[Voice Assistant] Applet clicked: stopping recording");
            this._stopRecording();
        } else {
            global.log("[Voice Assistant] Applet clicked: starting recording");
            this._startRecording();
        }
    },

    /**
     * Teardown hook: stops recording, closes both sockets, and kills any
     * playback subprocess when the applet is removed from the panel.
     */
    on_applet_removed_from_panel: function() {
        global.log("[Voice Assistant] Applet removed from panel");
        this._stopRecording();
        if (this._streamSocket) {
            this._streamSocket.close(Soup.WebsocketCloseCode.NORMAL, null);
            global.log("[Voice Assistant] Record WebSocket closed");
        }
        if (this._nodeSocket) {
            this._nodeSocket.close(Soup.WebsocketCloseCode.NORMAL, null);
            global.log("[Voice Assistant] Node WebSocket closed");
        }
        if (this._player) {
            this._player.force_exit();
            global.log("[Voice Assistant] Audio player terminated");
        }
    }
};
|
||||
|
||||
/**
 * Cinnamon entry point: the panel calls this to create the applet instance.
 */
function main(metadata, orientation, panel_height, instance_id) {
    global.log("[Voice Assistant] Main function called");
    const applet = new MyApplet(metadata, orientation, panel_height, instance_id);
    return applet;
}
|
308
applet.js.noconfig
Normal file
308
applet.js.noconfig
Normal file
|
@ -0,0 +1,308 @@
|
|||
const Applet = imports.ui.applet;
|
||||
const Gio = imports.gi.Gio;
|
||||
const GLib = imports.gi.GLib;
|
||||
const Lang = imports.lang;
|
||||
const Mainloop = imports.mainloop;
|
||||
const Soup = imports.gi.Soup;
|
||||
const St = imports.gi.St;
|
||||
|
||||
// SECURITY(review): a live JWT — whose claims embed a username and password —
// is hard-coded below and committed to version control. It should be revoked
// and supplied via configuration instead (the settings-enabled applet.js reads
// baseUrl/token from applet settings).
const STREAM_SOCKET_URL = 'wss://hana.neonaialpha.com/node/v1/stream?token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJjbGllbnRfaWQiOiIwNjQ1N2FlMC1lMjQ0LTQxNjMtOWQ1NS0xNWFhMGNiYTQ5NDYiLCJ1c2VybmFtZSI6Im5lb24iLCJwYXNzd29yZCI6Im5lb24iLCJwZXJtaXNzaW9ucyI6eyJhc3Npc3QiOnRydWUsImJhY2tlbmQiOnRydWUsIm5vZGUiOnRydWV9LCJleHBpcmUiOjE3MjgxMDEzOTUuMjE3MTMyM30.BO3ymPLDg2v8epVxdnaf0iLh9DJnVSTZT_hM1M--V84';
const NODE_SOCKET_URL = 'wss://hana.neonaialpha.com/node/v1?token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJjbGllbnRfaWQiOiIwNjQ1N2FlMC1lMjQ0LTQxNjMtOWQ1NS0xNWFhMGNiYTQ5NDYiLCJ1c2VybmFtZSI6Im5lb24iLCJwYXNzd29yZCI6Im5lb24iLCJwZXJtaXNzaW9ucyI6eyJhc3Npc3QiOnRydWUsImJhY2tlbmQiOnRydWUsIm5vZGUiOnRydWV9LCJleHBpcmUiOjE3MjgxMDEzOTUuMjE3MTMyM30.BO3ymPLDg2v8epVxdnaf0iLh9DJnVSTZT_hM1M--V84';
|
||||
|
||||
/**
 * Constructor shim; Cinnamon instantiates this with `new` and all real work
 * happens in the shared prototype initializer.
 */
function MyApplet(metadata, orientation, panel_height, instance_id) {
    MyApplet.prototype._init.call(this, metadata, orientation, panel_height, instance_id);
}
|
||||
|
||||
MyApplet.prototype = {
    // Old-style (pre-ES6-class) Cinnamon applet: inherit IconApplet behavior
    // via prototype chaining. This .noconfig variant is identical to applet.js
    // except that the endpoint/token come from module-level constants instead
    // of applet settings.
    __proto__: Applet.IconApplet.prototype,

    /**
     * Initializes the applet instance: installs icon and tooltip, resets
     * socket/recording state, and opens both WebSocket connections.
     */
    _init: function(metadata, orientation, panel_height, instance_id) {
        Applet.IconApplet.prototype._init.call(this, orientation, panel_height, instance_id);

        this.metadata = metadata;
        // Set the custom SVG icon
        this._setCustomIcon(metadata.path + "/icon.svg");
        this.set_applet_tooltip(_("Voice Assistant"));

        // Connection / recording / playback state.
        this._streamSocket = null;                // WebSocket carrying raw audio both ways
        this._nodeSocket = null;                  // WebSocket carrying JSON control messages
        this._isRecording = false;                // toggled by on_applet_clicked
        this._recorder = null;                    // gst-launch capture subprocess
        this._player = null;                      // gst-launch playback subprocess
        this._audioBuffer = new Uint8Array(0);    // accumulator for outgoing mic data
        this._playbackBuffer = []; // Initialize the playback buffer

        global.log("[Voice Assistant] Applet initialized");
        this._initSockets();
    },

    /**
     * Sets the panel icon from an SVG file path, falling back to a stock
     * microphone icon on failure.
     */
    _setCustomIcon: function(iconPath) {
        try {
            let file = Gio.File.new_for_path(iconPath);
            let gicon = new Gio.FileIcon({ file: file });
            // NOTE(review): a file path is passed where a symbolic icon *name*
            // is expected; presumably this only serves to create
            // this._applet_icon before its gicon is overridden below — confirm.
            this.set_applet_icon_symbolic_name(iconPath);
            this._applet_icon.gicon = gicon;
        } catch (e) {
            global.logError("[Voice Assistant] Error setting custom icon: " + e.message);
            // Fallback to the default icon if there's an error
            this.set_applet_icon_name("microphone-sensitivity-medium");
        }
    },

    /**
     * Opens both WebSocket connections (control + audio stream) using the
     * hard-coded module-level URLs. Setup is asynchronous; socket fields are
     * populated from the completion callbacks.
     */
    _initSockets: function() {
        global.log("[Voice Assistant] Initializing WebSockets");
        let maxPayloadSize = 10 * 1024 * 1024; // 10 MB in bytes
        // Initialize Node WebSocket
        try {
            let session = new Soup.Session();
            let message = new Soup.Message({
                method: 'GET',
                uri: GLib.Uri.parse(NODE_SOCKET_URL, GLib.UriFlags.NONE)
            });

            session.websocket_connect_async(message, null, null, null, null, (session, result) => {
                try {
                    this._nodeSocket = session.websocket_connect_finish(result);
                    // Set the maximum incoming payload size
                    this._nodeSocket.set_max_incoming_payload_size(maxPayloadSize);
                    this._nodeSocket.connect('message', Lang.bind(this, this._onNodeMessage));
                    this._nodeSocket.connect('error', Lang.bind(this, this._onSocketError));
                    global.log("[Voice Assistant] Node WebSocket initialized");
                } catch (e) {
                    global.logError("[Voice Assistant] Error finalizing Node WebSocket: " + e.message);
                }
            });
            // Initialize streaming WebSocket
            try {
                // Shadows the outer `session`/`message` (block-scoped `let`).
                let session = new Soup.Session();
                let message = new Soup.Message({
                    method: 'GET',
                    uri: GLib.Uri.parse(STREAM_SOCKET_URL, GLib.UriFlags.NONE)
                });

                session.websocket_connect_async(message, null, null, null, null, (session, result) => {
                    try {
                        this._streamSocket = session.websocket_connect_finish(result);
                        this._streamSocket.connect('message', Lang.bind(this, this._onStreamMessage));
                        this._streamSocket.connect('error', Lang.bind(this, this._onSocketError));
                        global.log("[Voice Assistant] Stream WebSocket initialized");
                    } catch (e) {
                        global.logError("[Voice Assistant] Error finalizing stream WebSocket: " + e.message);
                    }
                });
            } catch (e) {
                global.logError("[Voice Assistant] Error initializing stream WebSocket: " + e.message);
            }
        } catch (e) {
            global.logError("[Voice Assistant] Error initializing Node WebSocket: " + e.message);
        }
    },

    /**
     * Shared socket error handler: tears down both connections and reconnects.
     * NOTE(review): either socket may still be null (async setup), so close()
     * can throw; the bare `finally` swallows that and reconnects without
     * backoff.
     */
    _onSocketError: function(socket, error) {
        global.logError("[Voice Assistant] WebSocket error: " + error.message);
        try {
            this._streamSocket.close();
            this._nodeSocket.close();
        } finally {
            this._initSockets();
        }
    },

    /**
     * Handles incoming control messages (JSON text frames); currently only
     * logs the parsed message type.
     */
    _onNodeMessage: function(connection, type, message) {
        try {
            if (type === Soup.WebsocketDataType.TEXT) {
                let data = message.get_data();
                let jsonData = JSON.parse(data);
                global.log("[Voice Assistant] Parsed node message: " + jsonData.type);
                // Handle text messages if needed
            } else {
                global.log("[Voice Assistant] Received unknown data type from node: " + type);
            }
        } catch (e) {
            global.logError("[Voice Assistant] Error handling node message: " + e.message);
        }
    },

    /**
     * Handles incoming audio (binary frames): queues each chunk and kicks the
     * playback pump.
     */
    _onStreamMessage: function(connection, type, message) {
        try {
            if (type === Soup.WebsocketDataType.BINARY) {
                global.log("[Voice Assistant] Received binary audio data of length: " + message.get_data().length);
                this._playbackBuffer.push(message.get_data());
                this._playAudio();
            } else {
                global.log("[Voice Assistant] Received unknown data type from stream: " + type);
            }
        } catch (e) {
            global.logError("[Voice Assistant] Error handling stream message: " + e.message);
        }
    },

    /**
     * Plays the next queued audio chunk, one subprocess at a time: write the
     * chunk to a temp file, pipe it through gst-launch, delete the file when
     * done, then recurse for the next chunk.
     */
    _playAudio: function() {
        if (this._player) {
            // If a player is already running, just add the new data to the buffer
            return;
        }

        if (this._playbackBuffer.length === 0) {
            return;
        }

        let audioData = this._playbackBuffer.shift();

        // Create a temporary file to store the audio data
        let [file, stream] = Gio.File.new_tmp("voice-assistant-XXXXXX");
        stream.output_stream.write_all(audioData, null);
        stream.close(null);

        // Play the audio using GStreamer
        this._player = new Gio.Subprocess({
            argv: [
                'gst-launch-1.0',
                'filesrc', 'location=' + file.get_path(),
                '!', 'decodebin',
                '!', 'audioconvert',
                '!', 'audioresample',
                '!', 'autoaudiosink'
            ],
            flags: Gio.SubprocessFlags.NONE
        });

        this._player.init(null);

        // Clean up when playback is finished
        this._player.wait_async(null, (source, result) => {
            try {
                source.wait_finish(result);
            } catch (e) {
                global.logError("[Voice Assistant] Error during audio playback: " + e.message);
            } finally {
                this._player = null;
                file.delete(null);
                // Play the next audio chunk if available
                this._playAudio();
            }
        });
    },

    /**
     * Starts microphone capture via gst-launch producing raw 16 kHz mono S16LE
     * PCM on stdout, then pumps that stdout to the stream socket.
     */
    _startRecording: function() {
        if (this._isRecording) return;

        this._isRecording = true;
        this._setCustomIcon(this.metadata.path + "/icon-active.svg");
        global.log("[Voice Assistant] Starting recording");

        try {
            // Initialize GStreamer pipeline for recording
            this._recorder = new Gio.Subprocess({
                argv: [
                    'gst-launch-1.0',
                    'pulsesrc',
                    '!',
                    'audioconvert',
                    '!',
                    'audio/x-raw,format=S16LE,channels=1,rate=16000',
                    '!',
                    'fdsink'
                ],
                flags: Gio.SubprocessFlags.STDOUT_PIPE
            });

            this._recorder.init(null);
            global.log("[Voice Assistant] Recording subprocess initialized");

            // Read audio data and send it over WebSocket
            let stdout = this._recorder.get_stdout_pipe();
            this._readAudioData(stdout);
        } catch (e) {
            global.logError("[Voice Assistant] Error starting recording: " + e.message);
        }
    },

    /**
     * Async pump: reads recorder stdout, accumulates into _audioBuffer, and
     * forwards exactly-4096-byte chunks over the stream socket. Re-arms itself
     * until EOF or error, then stops recording.
     */
    _readAudioData: function(stdout) {
        stdout.read_bytes_async(4096, GLib.PRIORITY_DEFAULT, null, (source, result) => {
            try {
                let bytes = source.read_bytes_finish(result);
                if (bytes && bytes.get_size() > 0) {
                    // Append new data to the existing buffer
                    let newData = new Uint8Array(bytes.get_data());
                    let combinedBuffer = new Uint8Array(this._audioBuffer.length + newData.length);
                    combinedBuffer.set(this._audioBuffer);
                    combinedBuffer.set(newData, this._audioBuffer.length);
                    this._audioBuffer = combinedBuffer;

                    // If we have accumulated 4096 or more bytes, send them
                    while (this._audioBuffer.length >= 4096) {
                        let chunkToSend = this._audioBuffer.slice(0, 4096);
                        this._streamSocket.send_binary(chunkToSend);

                        // Keep the remaining data in the buffer
                        this._audioBuffer = this._audioBuffer.slice(4096);
                    }

                    // Continue reading
                    this._readAudioData(stdout);
                } else {
                    global.log("[Voice Assistant] End of audio stream reached");
                    // NOTE(review): any trailing partial chunk (<4096 B) in
                    // _audioBuffer is discarded here, not flushed — confirm the
                    // server does not need the final samples.
                    this._stopRecording();
                }
            } catch (e) {
                global.logError("[Voice Assistant] Error reading audio data: " + e.message);
                this._stopRecording();
            }
        });
    },

    /**
     * Stops capture: kills the recorder subprocess, restores the idle icon,
     * and drops any unsent audio.
     */
    _stopRecording: function() {
        if (!this._isRecording) return;

        this._isRecording = false;
        this._setCustomIcon(this.metadata.path + "/icon.svg");
        global.log("[Voice Assistant] Stopping recording");

        if (this._recorder) {
            this._recorder.force_exit();
            this._recorder = null;
            global.log("[Voice Assistant] Recording subprocess terminated");
        }
        // Clear the audio buffer
        this._audioBuffer = new Uint8Array(0);
    },

    /**
     * Panel-click handler: toggles recording on/off.
     */
    on_applet_clicked: function() {
        if (this._isRecording) {
            global.log("[Voice Assistant] Applet clicked: stopping recording");
            this._stopRecording();
        } else {
            global.log("[Voice Assistant] Applet clicked: starting recording");
            this._startRecording();
        }
    },

    /**
     * Teardown hook: stops recording, closes both sockets, and kills any
     * playback subprocess when the applet is removed from the panel.
     */
    on_applet_removed_from_panel: function() {
        global.log("[Voice Assistant] Applet removed from panel");
        this._stopRecording();
        if (this._streamSocket) {
            this._streamSocket.close(Soup.WebsocketCloseCode.NORMAL, null);
            global.log("[Voice Assistant] Record WebSocket closed");
        }
        if (this._nodeSocket) {
            this._nodeSocket.close(Soup.WebsocketCloseCode.NORMAL, null);
            global.log("[Voice Assistant] Node WebSocket closed");
        }
        if (this._player) {
            this._player.force_exit();
            global.log("[Voice Assistant] Audio player terminated");
        }
    }
};
|
||||
|
||||
/**
 * Cinnamon entry point: the panel calls this to create the applet instance.
 */
function main(...appletArgs) {
    global.log("[Voice Assistant] Main function called");
    const instance = new MyApplet(...appletArgs);
    return instance;
}
|
5
icon-active.svg
Normal file
5
icon-active.svg
Normal file
|
@ -0,0 +1,5 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 81 77" width="81.0pt" height="77.0pt">
|
||||
<path d="M 36.33 4.65 C 38.92 5.65 39.55 9.16 42.14 9.97 C 46.85 11.74 50.44 14.40 53.94 17.94 C 56.74 18.04 59.16 17.67 61.55 19.44 C 65.43 21.79 67.01 27.11 65.26 31.23 C 64.52 33.03 62.51 34.08 61.00 35.19 C 60.96 39.51 60.43 43.14 59.09 47.23 C 65.23 49.98 71.62 51.82 77.48 55.20 C 68.70 53.14 60.34 49.75 52.28 45.73 C 49.59 44.21 45.89 42.41 44.40 39.59 C 43.38 37.80 44.91 36.16 46.51 35.51 C 51.18 33.46 56.74 34.20 61.16 33.11 C 64.99 31.80 64.93 26.56 63.44 23.54 C 61.86 20.71 57.17 18.03 54.19 20.16 C 50.13 23.59 46.15 29.87 40.79 31.75 C 38.86 32.50 37.27 30.78 36.97 29.02 C 35.99 22.26 38.24 16.37 38.53 9.97 C 38.14 3.06 24.61 4.54 24.82 10.97 C 26.03 16.58 29.95 23.52 29.70 29.87 C 29.39 31.62 28.14 32.64 26.46 31.55 C 21.32 28.34 17.95 21.38 14.00 17.02 C 11.57 14.75 8.51 16.69 6.64 18.65 C 4.37 21.22 3.04 25.32 6.23 27.79 C 10.96 30.75 18.54 32.13 22.81 36.27 C 24.12 37.71 24.12 39.29 22.52 40.49 C 18.18 43.42 11.39 43.22 6.51 44.65 C 3.75 45.88 3.98 49.59 5.03 51.93 C 6.03 54.48 8.97 57.63 11.94 56.14 C 16.07 53.39 19.25 48.37 24.17 46.11 C 25.74 45.42 27.97 45.54 28.45 47.48 C 30.18 53.41 25.42 60.21 26.20 65.75 C 28.44 69.41 36.20 68.67 37.50 64.51 C 37.37 60.24 34.47 55.00 36.08 50.17 C 37.17 48.37 39.15 48.26 40.95 49.03 C 45.08 50.73 48.70 54.05 51.93 57.08 C 57.21 62.34 62.40 66.87 65.85 73.61 C 60.69 68.91 56.50 63.31 51.30 58.64 C 48.00 61.18 44.95 62.88 41.05 64.32 C 38.16 65.36 37.44 68.50 34.21 69.20 C 29.56 70.39 26.03 69.29 24.01 64.74 C 19.80 63.06 16.11 61.10 12.62 58.18 C 10.06 57.96 7.84 58.07 5.96 56.03 C 2.49 52.54 2.06 48.22 4.08 43.81 C 3.40 39.51 3.10 35.44 4.06 31.14 C 4.90 28.06 2.56 26.05 2.89 22.98 C 3.81 17.78 8.24 13.81 13.72 14.66 C 16.63 12.70 19.56 11.03 22.80 9.65 C 25.68 4.59 30.82 2.71 36.33 4.65 Z" fill="#ffffff" />
|
||||
</svg>
|
After Width: | Height: | Size: 2 KiB |
5
icon.svg
Normal file
5
icon.svg
Normal file
|
@ -0,0 +1,5 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 81 77" width="81.0pt" height="77.0pt">
|
||||
<path d="M 36.33 4.65 C 38.92 5.65 39.55 9.16 42.14 9.97 C 46.85 11.74 50.44 14.40 53.94 17.94 C 56.74 18.04 59.16 17.67 61.55 19.44 C 65.43 21.79 67.01 27.11 65.26 31.23 C 64.52 33.03 62.51 34.08 61.00 35.19 C 60.96 39.51 60.43 43.14 59.09 47.23 C 65.23 49.98 71.62 51.82 77.48 55.20 C 68.70 53.14 60.34 49.75 52.28 45.73 C 49.59 44.21 45.89 42.41 44.40 39.59 C 43.38 37.80 44.91 36.16 46.51 35.51 C 51.18 33.46 56.74 34.20 61.16 33.11 C 64.99 31.80 64.93 26.56 63.44 23.54 C 61.86 20.71 57.17 18.03 54.19 20.16 C 50.13 23.59 46.15 29.87 40.79 31.75 C 38.86 32.50 37.27 30.78 36.97 29.02 C 35.99 22.26 38.24 16.37 38.53 9.97 C 38.14 3.06 24.61 4.54 24.82 10.97 C 26.03 16.58 29.95 23.52 29.70 29.87 C 29.39 31.62 28.14 32.64 26.46 31.55 C 21.32 28.34 17.95 21.38 14.00 17.02 C 11.57 14.75 8.51 16.69 6.64 18.65 C 4.37 21.22 3.04 25.32 6.23 27.79 C 10.96 30.75 18.54 32.13 22.81 36.27 C 24.12 37.71 24.12 39.29 22.52 40.49 C 18.18 43.42 11.39 43.22 6.51 44.65 C 3.75 45.88 3.98 49.59 5.03 51.93 C 6.03 54.48 8.97 57.63 11.94 56.14 C 16.07 53.39 19.25 48.37 24.17 46.11 C 25.74 45.42 27.97 45.54 28.45 47.48 C 30.18 53.41 25.42 60.21 26.20 65.75 C 28.44 69.41 36.20 68.67 37.50 64.51 C 37.37 60.24 34.47 55.00 36.08 50.17 C 37.17 48.37 39.15 48.26 40.95 49.03 C 45.08 50.73 48.70 54.05 51.93 57.08 C 57.21 62.34 62.40 66.87 65.85 73.61 C 60.69 68.91 56.50 63.31 51.30 58.64 C 48.00 61.18 44.95 62.88 41.05 64.32 C 38.16 65.36 37.44 68.50 34.21 69.20 C 29.56 70.39 26.03 69.29 24.01 64.74 C 19.80 63.06 16.11 61.10 12.62 58.18 C 10.06 57.96 7.84 58.07 5.96 56.03 C 2.49 52.54 2.06 48.22 4.08 43.81 C 3.40 39.51 3.10 35.44 4.06 31.14 C 4.90 28.06 2.56 26.05 2.89 22.98 C 3.81 17.78 8.24 13.81 13.72 14.66 C 16.63 12.70 19.56 11.03 22.80 9.65 C 25.68 4.59 30.82 2.71 36.33 4.65 Z" fill="#808080" />
|
||||
</svg>
|
After Width: | Height: | Size: 2 KiB |
7
metadata.json
Normal file
7
metadata.json
Normal file
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"uuid": "neon-client@mcknight.tech",
|
||||
"name": "Neon AI Client",
|
||||
"description": "Interact with Neon AI via a HANA endpoint",
|
||||
"icon": "microphone-sensitivity-medium",
|
||||
"settings-schema": "settings-schema"
|
||||
}
|
12
settings-schema.json
Normal file
12
settings-schema.json
Normal file
|
@ -0,0 +1,12 @@
|
|||
{
|
||||
"baseUrl": {
|
||||
"type": "entry",
|
||||
"default": "hana.neonaialpha.com",
|
||||
"description": "Base URL for HANA Websocket"
|
||||
},
|
||||
"token": {
|
||||
"type": "entry",
|
||||
"default": "",
|
||||
"description": "Valid token for authentication"
|
||||
}
|
||||
}
|
Loading…
Reference in a new issue