Commit

send some meta data
Mahmoudz committed May 27, 2024
1 parent c013706 commit 8cd9a44
Showing 4 changed files with 38 additions and 20 deletions.
Binary file modified assets/sista-admin-dark.png
Binary file modified assets/sista-admin-light.png
src/core/AiAssistantEngine.ts: 52 changes (36 additions, 16 deletions)
@@ -121,7 +121,7 @@ class AiAssistantEngine extends EventEmitter {

if (inputUserCommand) {
try {
await this._makeAPIRequest(inputUserCommand);
await this._makeApiCall(inputUserCommand);
} catch (err) {
Logger.error('Error making API request:', err);
this.emitStateChange(EventEmitter.STATE_IDLE);
@@ -164,38 +164,57 @@ class AiAssistantEngine extends EventEmitter {
}
}

private _makeAPIRequest = async (
userInput: Blob | string,
): Promise<void> => {
private _makeApiCall = async (userInput: Blob | string): Promise<void> => {
Logger.log('F: _makeApiCall');

// --------[ Update UI ]--------

this.makingAPIRequest = true;
Logger.log('F: _makeAPIRequest');
this.emitStateChange(EventEmitter.STATE_THINKING_START);

// --------[ Prepare FormData ]--------

const formData = new FormData();

// Add the user IDs object
formData.append('endUser', JSON.stringify(this.user.getEndUserIds()));

// Add the user input
if (this.userInputMethod === UserInputMethod.AUDIO_RECORDER) {
formData.append('userInputAsAudio', userInput as Blob);
} else if (this.userInputMethod === UserInputMethod.SPEECH_RECOGNIZER) {
formData.append('userInputAsText', userInput as string);
}

formData.append('sdkVersion', this.sdkVersion);
formData.append(
'endUser',
JSON.stringify(this.user.getEndUserDetails()),
);
// Add the functions signatures
formData.append(
'functionsSignatures',
JSON.stringify(this.functionExecutor.functionSignatures),
);

// Add the page content (user screen)
if (this.scrapeContent) {
formData.append(
'pageContent',
JSON.stringify(this.scraper.getText()),
);
}

// Add some metadata
formData.append(
'meta',
JSON.stringify({
sdkVersion: this.sdkVersion,
currentUrl: window.location.href,
referrerUrl: document.referrer,
userAgent: navigator.userAgent,
language: navigator.language,
screenResolution: `${window.screen.width}x${window.screen.height}`,
}),
);

// --------[ Make the API Call ]--------

try {
const response = await fetch(`${this.apiUrl}/processor/run`, {
method: 'POST',
@@ -206,14 +225,15 @@
body: formData,
});

const data: ApiResponse = await response.json();
// --------[ Handle the API Response ]--------

this.makingAPIRequest = false;
const data: ApiResponse = await response.json();
this._handleApiResponse(data);
} catch (error) {
Logger.error('Error Calling Sista API:', error);
this.emitStateChange(EventEmitter.STATE_IDLE);
this.makingAPIRequest = false;
this.gettingUserInput = false;
}
};

@@ -237,21 +257,21 @@ class AiAssistantEngine extends EventEmitter {
return;
}

// ----[ Step 1: Display User Input Command ]----
// --------[ Step 1: Display User Input Command ]--------
// Handle user command as text first. This is useful for debugging
if (response.data.inputVoiceCommandAsText) {
this._handleInputVoiceCommandAsText(
response.data.inputVoiceCommandAsText,
);
}

// ----[ Step 2: Display AI Text Reply ]----
// --------[ Step 2: Display AI Text Reply ]--------
// Handle text response last
if (response.data.outputTextReply) {
this._handleTextResponse(response.data.outputTextReply);
}

// ----[ Step 3: Execute Functions ]----
// --------[ Step 3: Execute Functions ]--------
// Process executable functions if they are present, which have the highest priority
if (
response.data.outputExecutableFunctions &&
@@ -263,7 +283,7 @@ class AiAssistantEngine extends EventEmitter {
return; // No need to process further if functions are executed
}

// ----[ Step 4: Play AI Audio Reply ]----
// --------[ Step 4: Play AI Audio Reply ]--------
// Stop any currently playing audio
this.audioPlayer.stopCurrentSound();
// Handle audio response if available as a Stream
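
Net effect of the AiAssistantEngine.ts changes: client details that were previously sent as loose form fields (a separate sdkVersion field, and the user agent tucked inside the endUser object) now travel together in a single JSON-encoded meta field, alongside new page and browser details. The following is a minimal standalone sketch of the resulting request body, not code from the repository; the placeholder values stand in for the class fields (this.sdkVersion, the end-user IDs) and a text-mode user command is assumed:

// Sketch of the FormData assembled after this commit; "placeholder" values are illustrative.
const formData = new FormData();

// End-user identifiers, now fetched via getEndUserIds() (no longer includes the user agent)
formData.append('endUser', JSON.stringify({
    generatedEndUserId: 'generated-id-placeholder',
    providedEndUserId: null,
}));

// The user's command as text; the audio path appends a Blob under 'userInputAsAudio' instead
formData.append('userInputAsText', 'example user command');

// functionsSignatures and, optionally, the scraped pageContent are appended as before (omitted here)

// New in this commit: all client metadata grouped into one JSON-encoded 'meta' field
formData.append('meta', JSON.stringify({
    sdkVersion: '1.0.0', // placeholder for this.sdkVersion
    currentUrl: window.location.href,
    referrerUrl: document.referrer,
    userAgent: navigator.userAgent,
    language: navigator.language,
    screenResolution: `${window.screen.width}x${window.screen.height}`,
}));

// The engine then POSTs this as multipart/form-data, as shown in the diff:
//   fetch(`${this.apiUrl}/processor/run`, { method: 'POST', ..., body: formData });

Grouping these values presumably lets the backend parse one meta object rather than reading several independent form fields.
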
src/core/User.ts: 6 changes (2 additions, 4 deletions)
@@ -1,7 +1,6 @@
// src/core/User.ts

interface EndUserDetails {
endUserAgent: string;
interface EndUserIds {
generatedEndUserId: string;
providedEndUserId: string | null;
}
@@ -15,9 +14,8 @@ class User {
this.generatedUserId = this._generateEndUserId();
}

getEndUserDetails(): EndUserDetails {
getEndUserIds(): EndUserIds {
return {
endUserAgent: navigator.userAgent,
generatedEndUserId: this.generatedUserId,
providedEndUserId: this.providedUserId,
};
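
With this change, User reduces to an ID container: the EndUserDetails interface becomes EndUserIds, and the user agent it used to report now arrives via the meta field shown above. A minimal sketch of the class after the commit, not taken from the repository; the constructor signature and the _generateEndUserId() body are not visible in this diff, so placeholders are used:

// Sketch of src/core/User.ts after this commit (assumptions noted in comments).
interface EndUserIds {
    generatedEndUserId: string;
    providedEndUserId: string | null;
}

class User {
    private providedUserId: string | null;
    private generatedUserId: string;

    // Constructor parameters assumed; only the two assignments appear in the diff context.
    constructor(providedUserId: string | null = null) {
        this.providedUserId = providedUserId;
        this.generatedUserId = this._generateEndUserId();
    }

    getEndUserIds(): EndUserIds {
        // endUserAgent was removed; navigator.userAgent is now sent in the 'meta' payload instead
        return {
            generatedEndUserId: this.generatedUserId,
            providedEndUserId: this.providedUserId,
        };
    }

    private _generateEndUserId(): string {
        // Placeholder implementation; the real generator is not part of this diff
        return `end-user-${Math.random().toString(36).slice(2)}`;
    }
}
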
