feat(sync): full code sync from release
This commit is contained in:
55
frontend/src/utils/__tests__/openaiWsMode.spec.ts
Normal file
55
frontend/src/utils/__tests__/openaiWsMode.spec.ts
Normal file
@@ -0,0 +1,55 @@
|
||||
import { describe, expect, it } from 'vitest'
|
||||
import {
|
||||
OPENAI_WS_MODE_DEDICATED,
|
||||
OPENAI_WS_MODE_OFF,
|
||||
OPENAI_WS_MODE_SHARED,
|
||||
isOpenAIWSModeEnabled,
|
||||
normalizeOpenAIWSMode,
|
||||
openAIWSModeFromEnabled,
|
||||
resolveOpenAIWSModeFromExtra
|
||||
} from '@/utils/openaiWsMode'
|
||||
|
||||
// Unit tests (vitest) for the openaiWsMode helpers.
describe('openaiWsMode utils', () => {
  // normalizeOpenAIWSMode trims and lowercases input; unknown strings yield null.
  it('normalizes mode values', () => {
    expect(normalizeOpenAIWSMode('off')).toBe(OPENAI_WS_MODE_OFF)
    expect(normalizeOpenAIWSMode(' Shared ')).toBe(OPENAI_WS_MODE_SHARED)
    expect(normalizeOpenAIWSMode('DEDICATED')).toBe(OPENAI_WS_MODE_DEDICATED)
    expect(normalizeOpenAIWSMode('invalid')).toBeNull()
  })

  // openAIWSModeFromEnabled accepts real booleans only; string 'true' is rejected.
  it('maps legacy enabled flag to mode', () => {
    expect(openAIWSModeFromEnabled(true)).toBe(OPENAI_WS_MODE_SHARED)
    expect(openAIWSModeFromEnabled(false)).toBe(OPENAI_WS_MODE_OFF)
    expect(openAIWSModeFromEnabled('true')).toBeNull()
  })

  // The explicit mode key wins even when every boolean enabled key says false.
  it('resolves by mode key first, then enabled, then fallback enabled keys', () => {
    const extra = {
      openai_oauth_responses_websockets_v2_mode: 'dedicated',
      openai_oauth_responses_websockets_v2_enabled: false,
      responses_websockets_v2_enabled: false
    }
    const mode = resolveOpenAIWSModeFromExtra(extra, {
      modeKey: 'openai_oauth_responses_websockets_v2_mode',
      enabledKey: 'openai_oauth_responses_websockets_v2_enabled',
      fallbackEnabledKeys: ['responses_websockets_v2_enabled', 'openai_ws_enabled']
    })
    expect(mode).toBe(OPENAI_WS_MODE_DEDICATED)
  })

  // When no listed key is present in extra, the configured defaultMode is returned.
  it('falls back to default when nothing is present', () => {
    const mode = resolveOpenAIWSModeFromExtra({}, {
      modeKey: 'openai_apikey_responses_websockets_v2_mode',
      enabledKey: 'openai_apikey_responses_websockets_v2_enabled',
      fallbackEnabledKeys: ['responses_websockets_v2_enabled', 'openai_ws_enabled'],
      defaultMode: OPENAI_WS_MODE_OFF
    })
    expect(mode).toBe(OPENAI_WS_MODE_OFF)
  })

  // isOpenAIWSModeEnabled: only the 'off' mode counts as disabled.
  it('treats off as disabled and shared/dedicated as enabled', () => {
    expect(isOpenAIWSModeEnabled(OPENAI_WS_MODE_OFF)).toBe(false)
    expect(isOpenAIWSModeEnabled(OPENAI_WS_MODE_SHARED)).toBe(true)
    expect(isOpenAIWSModeEnabled(OPENAI_WS_MODE_DEDICATED)).toBe(true)
  })
})
|
||||
90
frontend/src/utils/__tests__/soraTokenParser.spec.ts
Normal file
90
frontend/src/utils/__tests__/soraTokenParser.spec.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
import { describe, expect, it } from 'vitest'
|
||||
import { parseSoraRawTokens } from '@/utils/soraTokenParser'
|
||||
|
||||
// Unit tests (vitest) for the Sora raw-token parser.
describe('parseSoraRawTokens', () => {
  // Well-formed JSON payloads are parsed structurally (not via regex).
  it('parses sessionToken and accessToken from JSON payload', () => {
    const payload = JSON.stringify({
      user: { id: 'u1' },
      accessToken: 'at-json-1',
      sessionToken: 'st-json-1'
    })

    const result = parseSoraRawTokens(payload)

    expect(result.sessionTokens).toEqual(['st-json-1'])
    expect(result.accessTokens).toEqual(['at-json-1'])
  })

  // Bare lines with no structure are treated as session tokens, one per line.
  it('supports plain session tokens (one per line)', () => {
    const result = parseSoraRawTokens('st-1\nst-2')

    expect(result.sessionTokens).toEqual(['st-1', 'st-2'])
    expect(result.accessTokens).toEqual([])
  })

  // Non-JSON object snippets (unquoted keys, mixed quote styles) fall back to regex extraction.
  it('supports non-standard object snippets via regex', () => {
    const raw = "sessionToken: 'st-snippet', access_token: \"at-snippet\""
    const result = parseSoraRawTokens(raw)

    expect(result.sessionTokens).toEqual(['st-snippet'])
    expect(result.accessTokens).toEqual(['at-snippet'])
  })

  // Duplicates are collapsed; a plain line shaped like a JWT is classified as an access token.
  it('keeps unique tokens and extracts JWT-like plain line as AT too', () => {
    const jwt = 'eyJhbGciOiJIUzI1NiJ9.eyJzdWIiOiIxIn0.signature'
    const raw = `st-dup\nst-dup\n${jwt}\n${JSON.stringify({ sessionToken: 'st-json', accessToken: jwt })}`
    const result = parseSoraRawTokens(raw)

    expect(result.sessionTokens).toEqual(['st-json', 'st-dup'])
    expect(result.accessTokens).toEqual([jwt])
  })

  // Cookie attributes (Domain, Path, Expires, …) after ';' are stripped from the value.
  it('parses session token from Set-Cookie line and strips cookie attributes', () => {
    const raw =
      '__Secure-next-auth.session-token.0=st-cookie-part-0; Domain=.chatgpt.com; Path=/; Expires=Thu, 28 May 2026 11:43:36 GMT; HttpOnly; Secure; SameSite=Lax'
    const result = parseSoraRawTokens(raw)

    expect(result.sessionTokens).toEqual(['st-cookie-part-0'])
    expect(result.accessTokens).toEqual([])
  })

  // Chunked cookies (.0, .1, …) are concatenated by suffix order, not input order.
  it('merges chunked session-token cookies by numeric suffix order', () => {
    const raw = [
      'Set-Cookie: __Secure-next-auth.session-token.1=part-1; Path=/; HttpOnly',
      'Set-Cookie: __Secure-next-auth.session-token.0=part-0; Path=/; HttpOnly'
    ].join('\n')
    const result = parseSoraRawTokens(raw)

    expect(result.sessionTokens).toEqual(['part-0part-1'])
    expect(result.accessTokens).toEqual([])
  })

  // When the same chunk index appears more than once, the later occurrence wins.
  it('prefers latest duplicate chunk values when multiple cookie groups exist', () => {
    const raw = [
      'Set-Cookie: __Secure-next-auth.session-token.0=old-0; Path=/; HttpOnly',
      'Set-Cookie: __Secure-next-auth.session-token.1=old-1; Path=/; HttpOnly',
      'Set-Cookie: __Secure-next-auth.session-token.0=new-0; Path=/; HttpOnly',
      'Set-Cookie: __Secure-next-auth.session-token.1=new-1; Path=/; HttpOnly'
    ].join('\n')
    const result = parseSoraRawTokens(raw)

    expect(result.sessionTokens).toEqual(['new-0new-1'])
    expect(result.accessTokens).toEqual([])
  })

  // An incomplete trailing group (missing .1) is skipped in favour of the last complete group.
  it('uses latest complete chunk group and ignores incomplete latest group', () => {
    const raw = [
      'set-cookie',
      '__Secure-next-auth.session-token.0=ok-0; Domain=.chatgpt.com; Path=/',
      'set-cookie',
      '__Secure-next-auth.session-token.1=ok-1; Domain=.chatgpt.com; Path=/',
      'set-cookie',
      '__Secure-next-auth.session-token.0=partial-0; Domain=.chatgpt.com; Path=/'
    ].join('\n')

    const result = parseSoraRawTokens(raw)

    expect(result.sessionTokens).toEqual(['ok-0ok-1'])
    expect(result.accessTokens).toEqual([])
  })
})
|
||||
61
frontend/src/utils/openaiWsMode.ts
Normal file
61
frontend/src/utils/openaiWsMode.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
export const OPENAI_WS_MODE_OFF = 'off'
|
||||
export const OPENAI_WS_MODE_SHARED = 'shared'
|
||||
export const OPENAI_WS_MODE_DEDICATED = 'dedicated'
|
||||
|
||||
export type OpenAIWSMode =
|
||||
| typeof OPENAI_WS_MODE_OFF
|
||||
| typeof OPENAI_WS_MODE_SHARED
|
||||
| typeof OPENAI_WS_MODE_DEDICATED
|
||||
|
||||
const OPENAI_WS_MODES = new Set<OpenAIWSMode>([
|
||||
OPENAI_WS_MODE_OFF,
|
||||
OPENAI_WS_MODE_SHARED,
|
||||
OPENAI_WS_MODE_DEDICATED
|
||||
])
|
||||
|
||||
export interface ResolveOpenAIWSModeOptions {
|
||||
modeKey: string
|
||||
enabledKey: string
|
||||
fallbackEnabledKeys?: string[]
|
||||
defaultMode?: OpenAIWSMode
|
||||
}
|
||||
|
||||
export const normalizeOpenAIWSMode = (mode: unknown): OpenAIWSMode | null => {
|
||||
if (typeof mode !== 'string') return null
|
||||
const normalized = mode.trim().toLowerCase()
|
||||
if (OPENAI_WS_MODES.has(normalized as OpenAIWSMode)) {
|
||||
return normalized as OpenAIWSMode
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
export const openAIWSModeFromEnabled = (enabled: unknown): OpenAIWSMode | null => {
|
||||
if (typeof enabled !== 'boolean') return null
|
||||
return enabled ? OPENAI_WS_MODE_SHARED : OPENAI_WS_MODE_OFF
|
||||
}
|
||||
|
||||
export const isOpenAIWSModeEnabled = (mode: OpenAIWSMode): boolean => {
|
||||
return mode !== OPENAI_WS_MODE_OFF
|
||||
}
|
||||
|
||||
export const resolveOpenAIWSModeFromExtra = (
|
||||
extra: Record<string, unknown> | null | undefined,
|
||||
options: ResolveOpenAIWSModeOptions
|
||||
): OpenAIWSMode => {
|
||||
const fallback = options.defaultMode ?? OPENAI_WS_MODE_OFF
|
||||
if (!extra) return fallback
|
||||
|
||||
const mode = normalizeOpenAIWSMode(extra[options.modeKey])
|
||||
if (mode) return mode
|
||||
|
||||
const enabledMode = openAIWSModeFromEnabled(extra[options.enabledKey])
|
||||
if (enabledMode) return enabledMode
|
||||
|
||||
const fallbackKeys = options.fallbackEnabledKeys ?? []
|
||||
for (const key of fallbackKeys) {
|
||||
const modeFromFallbackKey = openAIWSModeFromEnabled(extra[key])
|
||||
if (modeFromFallbackKey) return modeFromFallbackKey
|
||||
}
|
||||
|
||||
return fallback
|
||||
}
|
||||
308
frontend/src/utils/soraTokenParser.ts
Normal file
308
frontend/src/utils/soraTokenParser.ts
Normal file
@@ -0,0 +1,308 @@
|
||||
// Result of parsing a raw paste: de-duplicated session and access tokens,
// in the order they were first discovered.
export interface ParsedSoraTokens {
  sessionTokens: string[]
  accessTokens: string[]
}

// Lowercased key aliases recognized in parsed JSON objects and key=value lines.
const sessionKeyNames = new Set(['sessiontoken', 'session_token', 'st'])
const accessKeyNames = new Set(['accesstoken', 'access_token', 'at'])

// Fallback extraction for non-JSON snippets: `sessionToken: '...'` /
// `session_token: "..."` — capture group 1 is the quoted value.
const sessionRegexes = [
  /\bsessionToken\b\s*:\s*["']([^"']+)["']/gi,
  /\bsession_token\b\s*:\s*["']([^"']+)["']/gi
]

const accessRegexes = [
  /\baccessToken\b\s*:\s*["']([^"']+)["']/gi,
  /\baccess_token\b\s*:\s*["']([^"']+)["']/gi
]

// Matches `__Secure-next-auth.session-token` / `__Secure-authjs.session-token`
// cookies, with an optional `Set-Cookie:`/`Cookie:` header prefix.
// Capture group 1: optional numeric chunk suffix (`.0`, `.1`, …);
// capture group 2: the cookie value up to the next `;` or newline.
const sessionCookieRegex =
  /(?:^|[\n\r;])\s*(?:(?:set-cookie|cookie)\s*:\s*)?__Secure-(?:next-auth|authjs)\.session-token(?:\.(\d+))?=([^;\r\n]+)/gi

// One piece of a chunked session-token cookie: its numeric suffix and value.
interface SessionCookieChunk {
  index: number
  value: string
}

// Lowercased lines skipped outright by the plain-line pass (header names and
// labels that commonly appear in pasted HTTP responses).
const ignoredPlainLines = new Set([
  'set-cookie',
  'cookie',
  'strict-transport-security',
  'vary',
  'x-content-type-options',
  'x-openai-proxy-wasm'
])
|
||||
|
||||
function sanitizeToken(raw: string): string {
|
||||
return raw.trim().replace(/^["'`]+|["'`,;]+$/g, '')
|
||||
}
|
||||
|
||||
function addUnique(list: string[], seen: Set<string>, rawValue: string): void {
|
||||
const token = sanitizeToken(rawValue)
|
||||
if (!token || seen.has(token)) {
|
||||
return
|
||||
}
|
||||
seen.add(token)
|
||||
list.push(token)
|
||||
}
|
||||
|
||||
function isLikelyJWT(token: string): boolean {
|
||||
if (!token.startsWith('eyJ')) {
|
||||
return false
|
||||
}
|
||||
return token.split('.').length === 3
|
||||
}
|
||||
|
||||
function collectFromObject(
|
||||
value: unknown,
|
||||
sessionTokens: string[],
|
||||
sessionSeen: Set<string>,
|
||||
accessTokens: string[],
|
||||
accessSeen: Set<string>
|
||||
): void {
|
||||
if (Array.isArray(value)) {
|
||||
for (const item of value) {
|
||||
collectFromObject(item, sessionTokens, sessionSeen, accessTokens, accessSeen)
|
||||
}
|
||||
return
|
||||
}
|
||||
if (!value || typeof value !== 'object') {
|
||||
return
|
||||
}
|
||||
|
||||
for (const [key, fieldValue] of Object.entries(value as Record<string, unknown>)) {
|
||||
if (typeof fieldValue === 'string') {
|
||||
const normalizedKey = key.toLowerCase()
|
||||
if (sessionKeyNames.has(normalizedKey)) {
|
||||
addUnique(sessionTokens, sessionSeen, fieldValue)
|
||||
}
|
||||
if (accessKeyNames.has(normalizedKey)) {
|
||||
addUnique(accessTokens, accessSeen, fieldValue)
|
||||
}
|
||||
continue
|
||||
}
|
||||
collectFromObject(fieldValue, sessionTokens, sessionSeen, accessTokens, accessSeen)
|
||||
}
|
||||
}
|
||||
|
||||
function collectFromJSONString(
|
||||
raw: string,
|
||||
sessionTokens: string[],
|
||||
sessionSeen: Set<string>,
|
||||
accessTokens: string[],
|
||||
accessSeen: Set<string>
|
||||
): void {
|
||||
const trimmed = raw.trim()
|
||||
if (!trimmed) {
|
||||
return
|
||||
}
|
||||
|
||||
const candidates = [trimmed]
|
||||
const firstBrace = trimmed.indexOf('{')
|
||||
const lastBrace = trimmed.lastIndexOf('}')
|
||||
if (firstBrace >= 0 && lastBrace > firstBrace) {
|
||||
candidates.push(trimmed.slice(firstBrace, lastBrace + 1))
|
||||
}
|
||||
|
||||
for (const candidate of candidates) {
|
||||
try {
|
||||
const parsed = JSON.parse(candidate)
|
||||
collectFromObject(parsed, sessionTokens, sessionSeen, accessTokens, accessSeen)
|
||||
return
|
||||
} catch {
|
||||
// ignore and keep trying other candidates
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function collectByRegex(
|
||||
raw: string,
|
||||
regexes: RegExp[],
|
||||
tokens: string[],
|
||||
seen: Set<string>
|
||||
): void {
|
||||
for (const regex of regexes) {
|
||||
regex.lastIndex = 0
|
||||
let match: RegExpExecArray | null
|
||||
match = regex.exec(raw)
|
||||
while (match) {
|
||||
if (match[1]) {
|
||||
addUnique(tokens, seen, match[1])
|
||||
}
|
||||
match = regex.exec(raw)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function collectFromSessionCookies(
|
||||
raw: string,
|
||||
sessionTokens: string[],
|
||||
sessionSeen: Set<string>
|
||||
): void {
|
||||
const chunkMatches: SessionCookieChunk[] = []
|
||||
const singleValues: string[] = []
|
||||
|
||||
sessionCookieRegex.lastIndex = 0
|
||||
let match: RegExpExecArray | null
|
||||
match = sessionCookieRegex.exec(raw)
|
||||
while (match) {
|
||||
const chunkIndex = match[1]
|
||||
const rawValue = match[2]
|
||||
const value = sanitizeToken(rawValue || '')
|
||||
if (value) {
|
||||
if (chunkIndex !== undefined && chunkIndex !== '') {
|
||||
const idx = Number.parseInt(chunkIndex, 10)
|
||||
if (Number.isInteger(idx) && idx >= 0) {
|
||||
chunkMatches.push({ index: idx, value })
|
||||
}
|
||||
} else {
|
||||
singleValues.push(value)
|
||||
}
|
||||
}
|
||||
match = sessionCookieRegex.exec(raw)
|
||||
}
|
||||
|
||||
const mergedChunkToken = mergeLatestChunkedSessionToken(chunkMatches)
|
||||
if (mergedChunkToken) {
|
||||
addUnique(sessionTokens, sessionSeen, mergedChunkToken)
|
||||
}
|
||||
|
||||
for (const value of singleValues) {
|
||||
addUnique(sessionTokens, sessionSeen, value)
|
||||
}
|
||||
}
|
||||
|
||||
function mergeChunkSegment(
|
||||
chunks: SessionCookieChunk[],
|
||||
requiredMaxIndex: number,
|
||||
requireComplete: boolean
|
||||
): string {
|
||||
if (chunks.length === 0) {
|
||||
return ''
|
||||
}
|
||||
|
||||
const byIndex = new Map<number, string>()
|
||||
for (const chunk of chunks) {
|
||||
byIndex.set(chunk.index, chunk.value)
|
||||
}
|
||||
|
||||
if (!byIndex.has(0)) {
|
||||
return ''
|
||||
}
|
||||
if (requireComplete) {
|
||||
for (let i = 0; i <= requiredMaxIndex; i++) {
|
||||
if (!byIndex.has(i)) {
|
||||
return ''
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const orderedIndexes = Array.from(byIndex.keys()).sort((a, b) => a - b)
|
||||
return orderedIndexes.map((idx) => byIndex.get(idx) || '').join('')
|
||||
}
|
||||
|
||||
function mergeLatestChunkedSessionToken(chunks: SessionCookieChunk[]): string {
|
||||
if (chunks.length === 0) {
|
||||
return ''
|
||||
}
|
||||
|
||||
const requiredMaxIndex = chunks.reduce((max, chunk) => Math.max(max, chunk.index), 0)
|
||||
|
||||
const groupStarts: number[] = []
|
||||
chunks.forEach((chunk, idx) => {
|
||||
if (chunk.index === 0) {
|
||||
groupStarts.push(idx)
|
||||
}
|
||||
})
|
||||
|
||||
if (groupStarts.length === 0) {
|
||||
return mergeChunkSegment(chunks, requiredMaxIndex, false)
|
||||
}
|
||||
|
||||
for (let i = groupStarts.length - 1; i >= 0; i--) {
|
||||
const start = groupStarts[i]
|
||||
const end = i + 1 < groupStarts.length ? groupStarts[i + 1] : chunks.length
|
||||
const merged = mergeChunkSegment(chunks.slice(start, end), requiredMaxIndex, true)
|
||||
if (merged) {
|
||||
return merged
|
||||
}
|
||||
}
|
||||
|
||||
return mergeChunkSegment(chunks, requiredMaxIndex, false)
|
||||
}
|
||||
|
||||
// Final fallback pass: scans each non-empty trimmed line and classifies it as
// a session token, access token, or noise. The guard order below is
// deliberate — each `continue` shields the later, looser heuristics.
function collectPlainLines(
  raw: string,
  sessionTokens: string[],
  sessionSeen: Set<string>,
  accessTokens: string[],
  accessSeen: Set<string>
): void {
  const lines = raw
    .split('\n')
    .map((line) => line.trim())
    .filter((line) => line.length > 0)

  for (const line of lines) {
    const normalized = line.toLowerCase()
    // Skip known header names / labels (e.g. bare 'set-cookie' lines).
    if (ignoredPlainLines.has(normalized)) {
      continue
    }
    // Session cookies were already handled by collectFromSessionCookies.
    if (/^__secure-(next-auth|authjs)\.session-token(\.\d+)?=/i.test(line)) {
      continue
    }
    // Lines with ';' look like cookie/attribute lists, not bare tokens.
    if (line.includes(';')) {
      continue
    }

    // Explicit `key=value` lines: honor session/access key aliases.
    // NOTE(review): split('=', 2) keeps only the first two segments, so a
    // value containing '=' (e.g. base64 padding) is truncated — confirm
    // whether values can legitimately contain '='.
    if (/^[a-zA-Z_][a-zA-Z0-9_]*=/.test(line)) {
      const parts = line.split('=', 2)
      const key = parts[0]?.trim().toLowerCase()
      const value = parts[1]?.trim() || ''
      if (key && sessionKeyNames.has(key)) {
        addUnique(sessionTokens, sessionSeen, value)
        continue
      }
      if (key && accessKeyNames.has(key)) {
        addUnique(accessTokens, accessSeen, value)
        continue
      }
    }

    // Anything with braces, colons, or whitespace is structured text, not a token.
    if (line.includes('{') || line.includes('}') || line.includes(':') || /\s/.test(line)) {
      continue
    }

    // A bare line shaped like a JWT is treated as an access token;
    // every other bare line is assumed to be a session token.
    if (isLikelyJWT(line)) {
      addUnique(accessTokens, accessSeen, line)
      continue
    }
    addUnique(sessionTokens, sessionSeen, line)
  }
}
|
||||
|
||||
export function parseSoraRawTokens(rawInput: string): ParsedSoraTokens {
|
||||
const raw = rawInput.trim()
|
||||
if (!raw) {
|
||||
return {
|
||||
sessionTokens: [],
|
||||
accessTokens: []
|
||||
}
|
||||
}
|
||||
|
||||
const sessionTokens: string[] = []
|
||||
const accessTokens: string[] = []
|
||||
const sessionSeen = new Set<string>()
|
||||
const accessSeen = new Set<string>()
|
||||
|
||||
collectFromJSONString(raw, sessionTokens, sessionSeen, accessTokens, accessSeen)
|
||||
collectByRegex(raw, sessionRegexes, sessionTokens, sessionSeen)
|
||||
collectByRegex(raw, accessRegexes, accessTokens, accessSeen)
|
||||
collectFromSessionCookies(raw, sessionTokens, sessionSeen)
|
||||
collectPlainLines(raw, sessionTokens, sessionSeen, accessTokens, accessSeen)
|
||||
|
||||
return {
|
||||
sessionTokens,
|
||||
accessTokens
|
||||
}
|
||||
}
|
||||
33
frontend/src/utils/usageRequestType.ts
Normal file
33
frontend/src/utils/usageRequestType.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import type { UsageRequestType } from '@/types'
|
||||
|
||||
export interface UsageRequestTypeLike {
|
||||
request_type?: string | null
|
||||
stream?: boolean | null
|
||||
openai_ws_mode?: boolean | null
|
||||
}
|
||||
|
||||
const VALID_REQUEST_TYPES = new Set<UsageRequestType>(['unknown', 'sync', 'stream', 'ws_v2'])
|
||||
|
||||
export const isUsageRequestType = (value: unknown): value is UsageRequestType => {
|
||||
return typeof value === 'string' && VALID_REQUEST_TYPES.has(value as UsageRequestType)
|
||||
}
|
||||
|
||||
export const resolveUsageRequestType = (value: UsageRequestTypeLike): UsageRequestType => {
|
||||
if (isUsageRequestType(value.request_type)) {
|
||||
return value.request_type
|
||||
}
|
||||
if (value.openai_ws_mode) {
|
||||
return 'ws_v2'
|
||||
}
|
||||
return value.stream ? 'stream' : 'sync'
|
||||
}
|
||||
|
||||
export const requestTypeToLegacyStream = (requestType?: UsageRequestType | null): boolean | null | undefined => {
|
||||
if (!requestType || requestType === 'unknown') {
|
||||
return null
|
||||
}
|
||||
if (requestType === 'sync') {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
Reference in New Issue
Block a user