Skip to content

Commit

Permalink
Adding voice ai (#414)
Browse files Browse the repository at this point in the history
* adding-voice-ai

* fixing-example

* removing-extra-files

* removing-dist
  • Loading branch information
Shyam-Raghuwanshi authored Nov 4, 2024
1 parent a696493 commit 5b1005f
Show file tree
Hide file tree
Showing 18 changed files with 423 additions and 23 deletions.
20 changes: 12 additions & 8 deletions JS/edgechains/arakoodev/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -35,26 +35,22 @@
"document": "^0.4.7",
"dts-bundle-generator": "^9.3.1",
"esbuild": "^0.20.2",
"eventsource-parser": "^1.1.2",
"get-port": "^7.1.0",
"hono": "3.9",
"jest-environment-jsdom": "^29.7.0",
"jsdom": "^24.1.0",
"node-fetch": "^3.3.2",
"node-html-parser": "^6.1.13",
"openai": "^4.55.3",
"pdf-parse": "^1.1.1",
"pg": "^8.11.5",
"playwright": "^1.45.1",
"prettier": "^3.2.5",
"regenerator-runtime": "^0.14.1",
"request": "^2.88.2",
"retell-client-js-sdk": "^2.0.4",
"retell-sdk": "^4.7.0",
"retry": "^0.13.1",
"text-encoding": "^0.7.0",
"ts-node": "^10.9.2",
"typeorm": "^0.3.20",
"vitest": "^2.0.3",
"web-streams-polyfill": "^4.0.0",
"youtube-transcript": "^1.2.1",
"zod": "^3.23.8",
"zod-to-json-schema": "^3.23.0"
Expand All @@ -66,10 +62,18 @@
"@babel/preset-typescript": "^7.24.1",
"@types/cors": "^2.8.17",
"@types/jest": "^29.5.12",
"@types/node": "^20.13.0",
"@types/node": "^20.17.2",
"@types/pdf-parse": "^1.1.4",
"@types/ws": "^8.5.12",
"buffer": "^6.0.3",
"crypto-browserify": "^3.12.1",
"jest": "^29.7.0",
"process": "^0.11.10",
"stream-browserify": "^3.0.0",
"stream-http": "^3.2.0",
"ts-jest": "^29.1.2",
"typescript": "^5.4.5"
"ts-loader": "^9.5.1",
"typescript": "^5.6.3",
"util": "^0.12.5"
}
}
2 changes: 2 additions & 0 deletions JS/edgechains/arakoodev/src/ai/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
export { OpenAI } from "./lib/openai/openai.js";
export { GeminiAI } from "./lib/gemini/gemini.js";
export { LlamaAI } from "./lib/llama/llama.js";
export { RetellAI } from "./lib/retell-ai/retell.js";
export { RetellWebClient } from "./lib/retell-ai/retellWebClient.js"
66 changes: 56 additions & 10 deletions JS/edgechains/arakoodev/src/ai/src/lib/openai/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ interface OpenAIChatOptions {
temperature?: number;
prompt?: string;
messages?: messageOption[];
frequency_penalty?: number
}

interface chatWithFunctionOptions {
Expand Down Expand Up @@ -86,14 +87,15 @@ export class OpenAI {
model: chatOptions.model || "gpt-3.5-turbo",
messages: chatOptions.prompt
? [
{
role: chatOptions.role || "user",
content: chatOptions.prompt,
},
]
{
role: chatOptions.role || "user",
content: chatOptions.prompt,
},
]
: chatOptions.messages,
max_tokens: chatOptions.max_tokens || 256,
temperature: chatOptions.temperature || 0.7,
frequency_penalty: 1,
},
{
headers: {
Expand All @@ -119,6 +121,50 @@ export class OpenAI {
return response[0].message;
}

async streamedChat(chatOptions: OpenAIChatOptions): Promise<OpenAIChatReturnOptions> {
const response = await axios
.post(
openAI_url,
{
model: chatOptions.model || "gpt-3.5-turbo",
messages: chatOptions.prompt
? [
{
role: chatOptions.role || "user",
content: chatOptions.prompt,
},
]
: chatOptions.messages,
max_tokens: chatOptions.max_tokens || 256,
temperature: chatOptions.temperature || 0.7,
frequency_penalty: chatOptions.frequency_penalty || 1,
stream: true
},
{
headers: {
Authorization: "Bearer " + this.apiKey,
"content-type": "application/json",
"OpenAI-Organization": this.orgId,
},
}
)
.then((response) => {
return response.data.choices;
})
.catch((error) => {
if (error.response) {
console.log("Server responded with status code:", error.response.status);
console.log("Response data:", error.response.data);
} else if (error.request) {
console.log("No response received:", error);
} else {
console.log("Error creating request:", error.message);
}
});
return response[0].message;
}


async chatWithFunction(
chatOptions: chatWithFunctionOptions
): Promise<chatWithFunctionReturnOptions> {
Expand All @@ -129,11 +175,11 @@ export class OpenAI {
model: chatOptions.model || "gpt-3.5-turbo",
messages: chatOptions.prompt
? [
{
role: chatOptions.role || "user",
content: chatOptions.prompt,
},
]
{
role: chatOptions.role || "user",
content: chatOptions.prompt,
},
]
: chatOptions.messages,
max_tokens: chatOptions.max_tokens || 1024,
temperature: chatOptions.temperature || 0.7,
Expand Down
35 changes: 35 additions & 0 deletions JS/edgechains/arakoodev/src/ai/src/lib/retell-ai/retell.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import Retell from 'retell-sdk';
import { AgentCreateParams, AgentResponse } from 'retell-sdk/resources/agent.mjs';
import { LlmResponse, LlmCreateParams } from "retell-sdk/resources/llm.mjs";
/**
 * Thin convenience wrapper around the Retell SDK: create an LLM resource,
 * create an agent that defaults to that LLM's websocket URL, and start web calls.
 */
export class RetellAI {
    retellClient: Retell;
    // Most recently created LLM (set by createLLM); used to default the
    // agent's llm_websocket_url in createAgent.
    llm: null | LlmResponse;

    constructor(apiKey: string) {
        this.retellClient = new Retell({
            apiKey: apiKey,
        });
        this.llm = null;
    }

    /**
     * Creates an agent. Caller-supplied fields in `body` take precedence over
     * the built-in defaults (voice, agent name, websocket URL from createLLM).
     */
    async createAgent(body: AgentCreateParams, options?: Retell.RequestOptions): Promise<AgentResponse> {
        // Typed as a plain record so per-key `delete` is legal under strict mode —
        // the original `delete defaultParams[keys[i]]` fails the implicit-any index check.
        const defaultParams: Record<string, unknown> = {
            voice_id: "11labs-Adrian",
            agent_name: "Ryan",
            llm_websocket_url: this?.llm?.llm_websocket_url,
        };
        // Drop defaults the caller already provided (the later spread would
        // override them anyway; this keeps the payload free of shadowed keys).
        for (const key of Object.keys(defaultParams)) {
            if (key in body) {
                delete defaultParams[key];
            }
        }
        const agent = await this.retellClient.agent.create(
            { ...defaultParams, ...body } as AgentCreateParams,
            options
        );
        return agent;
    }

    /** Creates an LLM resource and caches it for createAgent's default websocket URL. */
    async createLLM(data?: LlmCreateParams): Promise<LlmResponse> {
        const llm = await this.retellClient.llm.create(data || {});
        this.llm = llm;
        return llm;
    }

    /** Starts a web call for `agent_id` and returns the client-side access token. */
    async initiateWebCall(agent_id: string): Promise<string> {
        const webCallResponse = await this.retellClient.call.createWebCall({ agent_id });
        return webCallResponse.access_token;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import { RetellWebClient as RetellClient, StartCallConfig } from "retell-client-js-sdk";

// Browser-side wrapper that delegates directly to the retell-client-js-sdk
// client, re-exported under the package's own name.
export class RetellWebClient {
    // Underlying SDK client; every method below forwards to it unchanged.
    client: RetellClient
    constructor() {
        this.client = new RetellClient();
    }

    // Subscribes `callback` to SDK event `event`; returns the SDK's return value.
    on(event: string, callback: (...args: any[]) => void) {
        return this.client.on(event, callback);
    }

    // Starts a web call with the given SDK configuration (e.g. accessToken).
    async startCall(startCallConfig: StartCallConfig) {
        return await this.client.startCall(startCallConfig);
    }

    // Ends the current call, if any.
    async stopCall(): Promise<void> {
        return this.client.stopCall();
    }
}


Empty file.
10 changes: 5 additions & 5 deletions JS/edgechains/arakoodev/src/sync-rpc/lib/sync-rpc.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import { execSync } from "child_process";
import path from "path";
import fs from "fs";
import crypto from "crypto";
import os from "os";
import { execSync } from 'node:child_process';
import path from 'node:path';
import fs from 'node:fs';
import crypto from 'node:crypto';
import os from 'node:os';

function createSyncRPC(filename: string) {
const absolutePath = path.resolve(filename);
Expand Down
4 changes: 4 additions & 0 deletions JS/edgechains/examples/retell-ai/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
dist
node_modules
.env
.env.local
31 changes: 31 additions & 0 deletions JS/edgechains/examples/retell-ai/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
# 🚀 Welcome to your new awesome project!

This project has been created using **webpack-cli**, you can now run

```
npm run build
```

or

```
yarn build
```

to bundle your application

## How to use

1. Install dependencies

```
npm install
```

2. Run the server

```
npm run start
```

39 changes: 39 additions & 0 deletions JS/edgechains/examples/retell-ai/client/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
import 'htmx.org';
import { RetellWebClient } from "@arakoodev/edgechains.js/ai";
import './style.css';

// Create a single instance
const retellWebClient = new RetellWebClient();

// Begins a Retell web call with the given access token, mirroring progress
// into #callStatus and failures into #error.
async function startCall(access_token) {
    const statusEl = document.getElementById('callStatus');
    const errorEl = document.getElementById('error');
    try {
        const callResponse = await retellWebClient.startCall({ accessToken: access_token });
        console.log('Call started:', callResponse);
        statusEl.textContent = 'Call in progress...';
    } catch (error) {
        console.error('Failed to start call:', error);
        errorEl.textContent = `Failed to start call: ${error.message}`;
    }
}

// Ends the current Retell web call, mirroring the outcome into #callStatus
// on success and #error on failure.
async function endCall() {
    try {
        await retellWebClient.stopCall();
        console.log('Call ended successfully');
        document.getElementById('callStatus').textContent = 'Call ended';
    } catch (err) {
        console.error('Failed to end call:', err);
        document.getElementById('error').textContent = `Failed to end call: ${err.message}`;
    }
}

// Expose functions to be used with hyperscript or other event handlers
window.startCall = startCall;
window.endCall = endCall;

console.log('Client-side code initialized');
3 changes: 3 additions & 0 deletions JS/edgechains/examples/retell-ai/client/style.css
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
@tailwind base;
@tailwind components;
@tailwind utilities;
9 changes: 9 additions & 0 deletions JS/edgechains/examples/retell-ai/jsonnet/main.jsonnet
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@


// Prompt configuration for the Retell voice-agent example.
// Fixed the grammatically broken user-facing strings ("helps people retrieves
// their questions's answers") — these are spoken/displayed to end users.
local general_prompt = "You are a friendly agent that helps people get answers to their questions.";
local begin_message = "Hi, I'm the Edgechains agent, how can I help you?";

{
  general_prompt: general_prompt,
  begin_message: begin_message,
}
5 changes: 5 additions & 0 deletions JS/edgechains/examples/retell-ai/jsonnet/secrets.jsonnet
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
// Retell API key placeholder — replace 'key_****' with a real key locally;
// never commit a real key.
local RETELL_API_KEY = 'key_****';

{
// NOTE(review): 'reteall_api_key' looks like a typo for 'retell_api_key' —
// confirm against consumers before renaming, since they look it up by this exact name.
reteall_api_key: RETELL_API_KEY,
}
67 changes: 67 additions & 0 deletions JS/edgechains/examples/retell-ai/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
{
"name": "htmx-webpack-demo",
"version": "1.0.0",
"scripts": {
"build": "webpack",
"dev": "webpack --watch",
"start": "node --experimental-wasm-modules ./server/index.js"
},
"dependencies": {
"@arakoodev/edgechains.js": "file:../../arakoodev",
"@arakoodev/jsonnet": "^0.25.0",
"@hono/node-server": "^1.13.4",
"browserify-zlib": "^0.2.0",
"buffer": "^6.0.3",
"crypto": "^1.0.1",
"crypto-browserify": "^3.12.1",
"file-uri-to-path": "^2.0.0",
"html-webpack-plugin": "^5.6.3",
"htmx.org": "^1.9.10",
"hyperscript": "^2.0.2",
"hyperscript.org": "^0.9.13",
"mini-css-extract-plugin": "^2.9.2",
"process": "^0.11.10",
"retell-client-js-sdk": "^2.0.4",
"retell-sdk": "^4.8.0",
"stream-browserify": "^3.0.0",
"stream-http": "^3.2.0",
"util": "^0.12.5",
"vm-browserify": "^1.1.2",
"vue": "^3.5.12",
"vue-loader": "^17.4.2",
"webpack-node-externals": "^3.0.0",
"workbox-webpack-plugin": "^7.3.0"
},
"devDependencies": {
"@babel/core": "^7.26.0",
"@babel/plugin-transform-runtime": "^7.25.9",
"@babel/preset-env": "^7.26.0",
"assert": "^2.1.0",
"babel-loader": "^9.2.1",
"css-loader": "^7.1.2",
"node-polyfill-webpack-plugin": "^4.0.0",
"os-browserify": "^0.3.0",
"path-browserify": "^1.0.1",
"postcss": "^8.4.47",
"postcss-loader": "^8.1.1",
"postcss-preset-env": "^10.0.9",
"style-loader": "^4.0.0",
"tailwindcss": "^3.4.14",
"url": "^0.11.4",
"webpack": "^5.96.1",
"webpack-cli": "^5.1.4",
"webpack-dev-server": "^5.1.0"
},
"browser": {
"crypto": "crypto-browserify",
"stream": "stream-browserify",
"http": "stream-http",
"util": "util",
"buffer": "buffer",
"node:crypto": "crypto-browserify",
"node:stream": "stream-browserify",
"node:http": "stream-http",
"node:buffer": "buffer",
"node:util": "util"
}
}
Loading

0 comments on commit 5b1005f

Please sign in to comment.