feat: add usage tracking and timestamps to ChatStoreMessage structure
heimoshuiyu committed Feb 6, 2025
1 parent ff46a6f commit 62bf705
Showing 4 changed files with 62 additions and 35 deletions.
1 change: 1 addition & 0 deletions src/components/ImageGenDrawer.tsx
@@ -209,6 +209,7 @@ export function ImageGenDrawer({ disableFactor }: Props) {
       logprobs: null,
       response_model_name: imageGenModel,
       reasoning_content: null,
+      usage: null,
     });

     setChatStore({ ...chatStore });

1 change: 1 addition & 0 deletions src/pages/AddToolMsg.tsx
@@ -75,6 +75,7 @@ const AddToolMsg = (props: {
       logprobs: null,
       response_model_name: null,
       reasoning_content: null,
+      usage: null,
     });
     setChatStore({ ...chatStore });
     setNewToolCallID("");

88 changes: 54 additions & 34 deletions src/pages/Chatbox.tsx
@@ -81,7 +81,7 @@ export default function ChatBOX() {
   const _completeWithStreamMode = async (
     response: Response,
     signal: AbortSignal
-  ): Promise<Usage> => {
+  ): Promise<ChatStoreMessage> => {
     let responseTokenCount = 0; // including reasoning content and normal content
     const allChunkMessage: string[] = [];
     const allReasoningContentChunk: string[] = [];
@@ -168,22 +168,7 @@
     const reasoning_content = allReasoningContentChunk.join("");

     console.log("save logprobs", logprobs);
-    const newMsg: ChatStoreMessage = {
-      role: "assistant",
-      content,
-      reasoning_content,
-      hide: false,
-      token:
-        responseTokenCount -
-        (usage?.completion_tokens_details?.reasoning_tokens ?? 0),
-      example: false,
-      audio: null,
-      logprobs,
-      response_model_name,
-    };
-    if (allChunkTool.length > 0) newMsg.tool_calls = allChunkTool;
-
-    chatStore.history.push(newMsg);
     // manually copy status from client to chatStore
     chatStore.maxTokens = client.max_tokens;
     chatStore.tokenMargin = client.tokens_margin;
@@ -212,37 +197,57 @@
       ret.completion_tokens_details = usage.completion_tokens_details ?? null;
     }

-    return ret;
+    const newMsg: ChatStoreMessage = {
+      role: "assistant",
+      content,
+      reasoning_content,
+      hide: false,
+      token:
+        responseTokenCount -
+        (usage?.completion_tokens_details?.reasoning_tokens ?? 0),
+      example: false,
+      audio: null,
+      logprobs,
+      response_model_name,
+      usage,
+    };
+    if (allChunkTool.length > 0) newMsg.tool_calls = allChunkTool;
+
+    return newMsg;
   };

-  const _completeWithFetchMode = async (response: Response): Promise<Usage> => {
+  const _completeWithFetchMode = async (
+    response: Response
+  ): Promise<ChatStoreMessage> => {
     const data = (await response.json()) as FetchResponse;
     const msg = client.processFetchResponse(data);

-    chatStore.history.push({
+    setShowGenerating(false);
+
+    const usage: Usage = {
+      prompt_tokens: data.usage.prompt_tokens ?? 0,
+      completion_tokens: data.usage.completion_tokens ?? 0,
+      total_tokens: data.usage.total_tokens ?? 0,
+      response_model_name: data.model ?? null,
+      prompt_tokens_details: data.usage.prompt_tokens_details ?? null,
+      completion_tokens_details: data.usage.completion_tokens_details ?? null,
+    };
+
+    const ret: ChatStoreMessage = {
       role: "assistant",
       content: msg.content,
       tool_calls: msg.tool_calls,
       hide: false,
       token: data.usage?.completion_tokens_details
         ? data.usage.completion_tokens -
           data.usage.completion_tokens_details.reasoning_tokens
-        : data.usage.completion_tokens ?? calculate_token_length(msg.content),
+        : (data.usage.completion_tokens ?? calculate_token_length(msg.content)),
       example: false,
       audio: null,
       logprobs: data.choices[0]?.logprobs,
       response_model_name: data.model,
       reasoning_content: data.choices[0]?.message?.reasoning_content ?? null,
-    });
-    setShowGenerating(false);
-
-    const ret: Usage = {
-      prompt_tokens: data.usage.prompt_tokens ?? 0,
-      completion_tokens: data.usage.completion_tokens ?? 0,
-      total_tokens: data.usage.total_tokens ?? 0,
-      response_model_name: data.model ?? null,
-      prompt_tokens_details: data.usage.prompt_tokens_details ?? null,
-      completion_tokens_details: data.usage.completion_tokens_details ?? null,
+      usage,
     };

     return ret;
@@ -291,6 +296,8 @@
     client.max_gen_tokens = chatStore.maxGenTokens;
     client.enable_max_gen_tokens = chatStore.maxGenTokens_enabled;

+    const created_at = new Date();
+
     try {
       setShowGenerating(true);
       abortControllerRef.current = new AbortController();
@@ -299,25 +306,37 @@
         chatStore.logprobs,
         abortControllerRef.current.signal
       );
+      const responsed_at = new Date();
       const contentType = response.headers.get("content-type");
-      let usage: Usage;
+      let cs: ChatStoreMessage;
       if (contentType?.startsWith("text/event-stream")) {
-        usage = await _completeWithStreamMode(
+        cs = await _completeWithStreamMode(
           response,
           abortControllerRef.current.signal
         );
       } else if (contentType?.startsWith("application/json")) {
-        usage = await _completeWithFetchMode(response);
+        cs = await _completeWithFetchMode(response);
       } else {
         throw `unknown response content type ${contentType}`;
       }
+      const usage = cs.usage;
+      if (!usage) {
+        throw "panic: usage is null";
+      }
+
+      const completed_at = new Date();
+      cs.created_at = created_at.toISOString();
+      cs.responsed_at = responsed_at.toISOString();
+      cs.completed_at = completed_at.toISOString();
+
+      chatStore.history.push(cs);
+      console.log("new chatStore", cs);
+
       // manually copy status from client to chatStore
       chatStore.maxTokens = client.max_tokens;
       chatStore.tokenMargin = client.tokens_margin;
       chatStore.totalTokens = client.total_tokens;

-      console.log("usage", usage);
       // estimate user's input message token
       const aboveTokens = chatStore.history
         .filter(({ hide }) => !hide)
@@ -407,6 +426,7 @@
       logprobs: null,
       response_model_name: null,
       reasoning_content: null,
+      usage: null,
     });

     // manually calculate token length

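The Chatbox.tsx changes above bracket each request with three timestamps: created_at just before the request is sent, responsed_at once the response arrives, and completed_at after the stream or JSON body has been fully processed, with all three stored as ISO strings on the resulting message. Below is a minimal sketch of that lifecycle, assuming hypothetical callApi and parseMessage stand-ins rather than the real client and _completeWith* helpers from this repository.

```typescript
// Sketch of the timestamp lifecycle introduced in this commit.
// callApi and parseMessage are hypothetical stand-ins, not project functions.
async function timedCompletion(
  callApi: () => Promise<Response>,
  parseMessage: (r: Response) => Promise<{ content: string }>
) {
  const created_at = new Date(); // right before the request is sent
  const response = await callApi();
  const responsed_at = new Date(); // response headers received
  const msg = await parseMessage(response);
  const completed_at = new Date(); // stream or body fully processed

  // Store ISO strings on the message, as the diff does with toISOString().
  return {
    ...msg,
    created_at: created_at.toISOString(),
    responsed_at: responsed_at.toISOString(),
    completed_at: completed_at.toISOString(),
  };
}
```

Storing ISO strings rather than Date objects keeps each message JSON-serializable, which is presumably why the diff converts with toISOString() before assignment.
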
7 changes: 6 additions & 1 deletion src/types/chatstore.ts
@@ -1,4 +1,4 @@
-import { Logprobs, Message, MessageDetail, ToolCall } from "@/chatgpt";
+import { Logprobs, Message, MessageDetail, ToolCall, Usage } from "@/chatgpt";

 /**
  * ChatStore is the main object of the chatgpt-api-web,
@@ -71,6 +71,11 @@ export interface ChatStoreMessage {
   audio: Blob | null;
   logprobs: Logprobs | null;
   response_model_name: string | null;
+  usage: Usage | null;
+
+  created_at?: string;
+  responsed_at?: string;
+  completed_at?: string;

   role: "system" | "user" | "assistant" | "tool";
   content: string | MessageDetail[];

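To illustrate the extended shape, here is a trimmed sketch of an assistant message carrying the new fields. The type declarations are simplified stand-ins for the real ChatStoreMessage and Usage types (the actual interfaces in src/types/chatstore.ts and @/chatgpt carry more fields, such as token, hide, logprobs, and the token-detail objects), and the literal values are made up for illustration.

```typescript
// Simplified stand-ins for the real types; the actual interfaces have more fields.
interface UsageSketch {
  prompt_tokens: number;
  completion_tokens: number;
  total_tokens: number;
  response_model_name: string | null;
}

interface ChatStoreMessageSketch {
  role: "system" | "user" | "assistant" | "tool";
  content: string;
  usage: UsageSketch | null; // null for user-authored or tool messages
  created_at?: string; // optional, so previously saved chats still type-check
  responsed_at?: string;
  completed_at?: string;
}

// Hypothetical assistant message as it might be stored after this change.
const exampleMessage: ChatStoreMessageSketch = {
  role: "assistant",
  content: "Hello!",
  usage: {
    prompt_tokens: 12,
    completion_tokens: 3,
    total_tokens: 15,
    response_model_name: "example-model",
  },
  created_at: "2025-02-06T12:00:00.000Z",
  responsed_at: "2025-02-06T12:00:01.200Z",
  completed_at: "2025-02-06T12:00:02.500Z",
};
```

Making the timestamp fields optional means older stored chats load without migration; only newly generated messages gain usage and timing data.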
