Merge pull request #11 from dyte-in/feat/start-request-params
palashgo authored Dec 29, 2023
2 parents 0bca933 + 3c61190 commit cacc752
Showing 6 changed files with 106 additions and 27 deletions.
27 changes: 27 additions & 0 deletions README.md
@@ -31,6 +31,12 @@ activateTranscriptions({
symblAccessToken: 'ACCESS_TOKEN_FROM_SYMBL_AI',
connectionId: 'SOME_ARBITRARY_CONNECTION_ID', // optional,
speakerUserId: 'SOME_ARBITRARY_USER_ID_FOR_SPEAKER', // optional
symblStartRequestParams: { // optional. Subset of https://docs.symbl.ai/reference/streaming-api-reference#start_request
noConnectionTimeout: 0,
config: {
sentiment: false,
},
},
});
```

@@ -40,6 +46,27 @@ This method internally connects with Symbl using Websocket connection & automati

The `speakerUserId` field is optional. If it is not passed, the value of `meeting.self.clientSpecificId` will be used as the `speakerUserId`.

The `symblStartRequestParams` field is optional. If you want to control Symbl's settings further, you can override the defaults by passing only the fields you want to change, from https://docs.symbl.ai/reference/streaming-api-reference#start_request.

We perform a deep merge of the passed value with the defaults, so there is no need to construct a complete `start_request` message. For example, if you want to add just the `email` field to `speaker` and also change `noConnectionTimeout` to 300, you can do so with the following snippet.

```js
activateTranscriptions({
meeting: meeting, // From DyteClient.init
symblAccessToken: 'ACCESS_TOKEN_FROM_SYMBL_AI',
connectionId: 'SOME_ARBITRARY_CONNECTION_ID', // optional,
speakerUserId: 'SOME_ARBITRARY_USER_ID_FOR_SPEAKER', // optional
symblStartRequestParams: { // optional. Any subset of https://docs.symbl.ai/reference/streaming-api-reference#start_request
noConnectionTimeout: 300,
speaker: {
email: '[email protected]',
}
}
});
```
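
For reference, the defaults live in `src/symbl_transcriptions.ts` (see the diff below); with the overrides from the snippet above, the merged `start_request` message sent to Symbl would look roughly like this (runtime values such as `meeting.self.id` are shown as expressions):

```js
{
  id: meeting.self.id,
  type: 'start_request',
  meetingTitle: meeting.meta.meetingTitle,
  noConnectionTimeout: 300, // overridden via symblStartRequestParams
  config: {
    confidenceThreshold: 0.5,
    languageCode, // whatever was passed to activateTranscriptions, if anything
    speechRecognition: {
      encoding: 'LINEAR16',
      sampleRateHertz: 16000,
    },
  },
  speaker: {
    userId: speakerUserId || meeting.self.clientSpecificId || meeting.self.id,
    name: meeting.self.name,
    peerId: meeting.self.id,
    email: '[email protected]', // added via symblStartRequestParams
  },
}
```

Because the merge is recursive (lodash `merge`), nested objects such as `speaker` are combined with the defaults rather than replaced, so you only need to pass the fields you want to change.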

<b>Note:</b> If the passed fields are incorrect or misplaced, the conversation might not get created. In that case, an error is logged in the developer console for you to debug further.


4. If you want to show transcriptions to a participant or for `self`, you can do so using the following snippet.
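
   The snippet itself is collapsed in this diff view. Based on the `addTranscriptionsListener` call visible in `demo/index.ts` below, a usage sketch might look roughly like the following; the callback name and shape are assumptions for illustration, not the package's confirmed API:

   ```js
   await addTranscriptionsListener({
     meeting, // from DyteClient.init
     // Hypothetical callback: invoked whenever the transcription list updates
     onTranscriptionsUpdate: (transcriptions) => {
       console.log(transcriptions);
     },
   });
   ```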

6 changes: 6 additions & 0 deletions demo/index.ts
@@ -43,6 +43,12 @@ const init = async () => {
meeting,
languageCode: 'en-US',
symblAccessToken,
symblStartRequestParams: { // optional. Subset of https://docs.symbl.ai/reference/streaming-api-reference#start_request
noConnectionTimeout: 0,
config: {
sentiment: false,
},
},
});

await addTranscriptionsListener({
40 changes: 36 additions & 4 deletions package-lock.json

Some generated files are not rendered by default.

5 changes: 4 additions & 1 deletion package.json
@@ -43,7 +43,9 @@
"peerDepdendencies": {
"@dytesdk/web-core": ">=0.26.0"
},
"dependencies": {},
"dependencies": {
"lodash-es": "^4.17.21"
},
"devDependencies": {
"@commitlint/cli": "^13.1.0",
"@commitlint/config-conventional": "^13.1.0",
@@ -57,6 +59,7 @@
"@semantic-release/release-notes-generator": "^9.0.3",
"@types/events": "^3.0.0",
"@types/jest": "^27.0.6",
"@types/lodash-es": "^4.17.12",
"@types/long": "4.0.0",
"@types/node": "^16.0.0",
"@types/uuid": "^8.3.4",
3 changes: 3 additions & 0 deletions src/param_types.ts
@@ -7,6 +7,9 @@ export interface ActivateTranscriptionsConfig {
languageCode?: string,
connectionId?: string,
speakerUserId?: string,
symblStartRequestParams?: { // https://docs.symbl.ai/reference/streaming-api-reference#start_request
[key:string]: any,
},
}

export interface DeactivateTranscriptionsConfig {
52 changes: 30 additions & 22 deletions src/symbl_transcriptions.ts
@@ -1,3 +1,4 @@
import merge from 'lodash-es/merge';
import audioTranscriptionMiddleware from './audio_middleware';
import { ActivateTranscriptionsConfig, DeactivateTranscriptionsConfig } from './param_types';
import {
@@ -7,7 +8,6 @@ import {
setTranscriptions,
setWebSocket,
} from './transcriptions_building_blocks';

/**
*
* @param ActivateTranscriptionsConfig Required params to initialise the middleware.
@@ -21,6 +21,7 @@ async function activateTranscriptions({
languageCode,
connectionId,
speakerUserId,
symblStartRequestParams = {},
}: ActivateTranscriptionsConfig) {
// As a fail-safe, deactivateTranscriptions if activateTranscriptions function is called twice
// eslint-disable-next-line no-use-before-define
@@ -35,6 +36,10 @@ async function activateTranscriptions({
// Fired when a message is received from the WebSocket server
ws.onmessage = async (event) => {
const data = JSON.parse(event.data);
if (data.type === 'error') {
console.error('Symbl error: ', data);
return;
}
if (data.type === 'message_response') {
data.messages?.forEach((message: any) => {
// console.log('Live transcript (more accurate): ', message.payload.content, data);
@@ -93,29 +98,32 @@
console.info('Connection to Symbl websocket closed');
};

  // If start_request params are passed, they will be given top priority
const startRequestParams = merge({
id: meeting.self.id,
type: 'start_request',
meetingTitle: meeting.meta.meetingTitle,
// insightTypes: ['question', 'action_item'], // Will enable insight generation
config: {
confidenceThreshold: 0.5,
      languageCode, // Symbl has a bug: this field is not honoured
speechRecognition: {
encoding: 'LINEAR16',
sampleRateHertz: 16000,
},
},
speaker: {
// if speaker has email key, transcription gets sent at the end
      // speaker supports all arbitrary values
userId: speakerUserId || meeting.self.clientSpecificId || meeting.self.id,
name: meeting.self.name,
peerId: meeting.self.id,
},
}, symblStartRequestParams);

// Fired when the connection succeeds.
ws.onopen = () => {
ws.send(JSON.stringify({
id: meeting.self.id,
type: 'start_request',
meetingTitle: meeting.meta.meetingTitle,
// insightTypes: ['question', 'action_item'], // Will enable insight generation
config: {
confidenceThreshold: 0.5,
        languageCode, // Symbl has a bug: this field is not honoured
speechRecognition: {
encoding: 'LINEAR16',
sampleRateHertz: 16000,
},
},
speaker: {
// if speaker has email key, transcription gets sent at the end
        // speaker supports all arbitrary values
userId: speakerUserId || meeting.self.clientSpecificId || meeting.self.id,
name: meeting.self.name,
peerId: meeting.self.id,
},
}));
ws.send(JSON.stringify(startRequestParams));
};

return meeting.self.addAudioMiddleware(audioTranscriptionMiddleware);
