Support Gemini 1.5 via Google AI studio playground (#9)
* start work on AI studio proxy
* add a test
* add chat
* add gemini 1.5 doc
* lint
* add function calling support for ai studio
* update viz to support gemini 1.5 over AI studio
1 parent b34f389, commit 3cad49f
Showing 14 changed files with 525 additions and 72 deletions.
@@ -0,0 +1,30 @@
/* eslint-disable no-unused-vars */
// Gemini 1.5 Pro is *not* currently publicly available.
// This is a demo that will only work if you have access to 1.5 Pro in the Google AI Studio playground *and*
// a special user script (like an extension) that you can run to allow langxlang to use the browser as an API.

const { GoogleAIStudioCompletionService, ChatSession } = require('langxlang')

async function testCompletion () {
  // Use port 8095 to host the WebSocket server
  const service = new GoogleAIStudioCompletionService(8095)
  await service.ready
  const response = await service.requestCompletion('gemini-1.5-pro', '', 'Why is the sky blue?')
  console.log('Result', response.text)
}

// With ChatSessions
async function testChatSession () {
  const service = new GoogleAIStudioCompletionService(8095)
  await service.ready
  const session = new ChatSession(service, 'gemini-1.5-pro', '')
  const message = await session.sendMessage('Hello! Why is the sky blue?')
  console.log('Done', message.text.length, 'bytes', 'now asking a followup')
  // ask a related question about the response
  const followup = await session.sendMessage('Is this the case everywhere on Earth, what about the poles?')
  console.log('Done', followup.text.length, 'bytes')
}

// In order to run this example, you need to have the Google AI Studio user script client running,
// which will connect to the WebSocket server on the specified port (8095 in this example).
// The client code is a user script that you can run in the Google AI Studio playground.
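Neither demo function is called in the file itself (hence the eslint-disable at the top). A minimal sketch of a runner, assuming it is appended to this file and the AI Studio user script is already connected on port 8095; the exit handling here is an addition for illustration, not part of the original example:

// Hypothetical runner, not part of the original file. Assumes the Google AI Studio
// playground tab with the user script is already connected to ws://localhost:8095.
async function main () {
  await testCompletion()
  await testChatSession()
}

main()
  .then(() => process.exit(0)) // the WebSocket server would otherwise keep the process alive
  .catch(err => { console.error(err); process.exit(1) })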
@@ -0,0 +1,32 @@
// CompletionService that proxies requests through the Google AI Studio playground,
// using the WebSocket bridge implemented in ./googleAIStudio.
const studio = require('./googleAIStudio')

const supportedModels = ['gemini-1.0-pro', 'gemini-1.5-pro']

class GoogleAIStudioCompletionService {
  constructor (serverPort) {
    // Host the WebSocket server that the AI Studio user script connects to.
    // Callers should `await this.ready` before making requests.
    this.serverPort = serverPort
    this.ready = studio.runServer(serverPort)
  }

  stop () {
    studio.stopServer()
  }

  async requestCompletion (model, system, user, chunkCb) {
    if (!supportedModels.includes(model)) {
      throw new Error(`Model ${model} is not supported`)
    }
    // Concatenate the system and user prompts into a single prompt for AI Studio.
    const result = await studio.generateCompletion(model, system + '\n' + user, chunkCb)
    return { text: result.text }
  }

  async requestStreamingChat (model, { messages, maxTokens, functions }, chunkCb) {
    if (!supportedModels.includes(model)) {
      throw new Error(`Model ${model} is not supported`)
    }
    const result = await studio.requestChatCompletion(model, messages, chunkCb, { maxTokens, functions })
    return { ...result, completeMessage: result.text }
  }
}

module.exports = GoogleAIStudioCompletionService
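The new `requestStreamingChat` entry point takes an options object with `messages`, `maxTokens`, and `functions`, plus a per-chunk callback. A hedged usage sketch follows; the `{ role, content }` message shape, the chunk callback payload, and the require path are assumptions based on the surrounding code, not confirmed by this diff:

// Hypothetical usage of the new service (message/chunk shapes and the path are assumptions).
const GoogleAIStudioCompletionService = require('./GoogleAIStudioCompletionService')

async function demoStreamingChat () {
  const service = new GoogleAIStudioCompletionService(8095)
  await service.ready // resolves once studio.runServer has finished starting the server
  const result = await service.requestStreamingChat(
    'gemini-1.5-pro',
    {
      // Message shape assumed; the diff only shows that an array is passed through.
      messages: [{ role: 'user', content: 'Why is the sky blue?' }],
      maxTokens: 512
    },
    (chunk) => process.stdout.write(chunk.content ?? '') // chunk payload shape also assumed
  )
  console.log('\nComplete message:', result.completeMessage)
  service.stop()
}

demoStreamingChat()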