Commit

feat: use gpt-4o-mini
DIYgod committed Sep 5, 2024
1 parent d1b2f26 commit 7b22359
Showing 5 changed files with 9 additions and 148 deletions.
2 changes: 1 addition & 1 deletion src/app/api/score/route.ts
@@ -60,7 +60,7 @@ ${content}
 --------`,
       },
     ],
-    model: "gpt-3.5-turbo",
+    model: "gpt-4o-mini",
     temperature: 0,
     response_format: { type: "json_object" },
   })
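For context, the hunk above sits inside an OpenAI chat-completion call that asks for a JSON-formatted score. Below is a minimal sketch of that kind of call with the new model; the prompt text, the helper name, and everything other than the `model`, `temperature`, and `response_format` values visible in the diff are assumptions, not the repository's actual code.

import OpenAI from "openai"

const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY })

// Hypothetical helper: ask the model to score a post and reply as JSON.
async function scoreContent(content: string) {
  const completion = await client.chat.completions.create({
    messages: [
      {
        role: "user",
        // Placeholder prompt; the real prompt lives above the shown hunk.
        content: `Score the following post and answer in JSON:\n--------\n${content}\n--------`,
      },
    ],
    model: "gpt-4o-mini",
    temperature: 0,
    response_format: { type: "json_object" },
  })
  return JSON.parse(completion.choices[0].message.content ?? "{}")
}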
33 changes: 7 additions & 26 deletions src/app/api/translate-note/route.ts
@@ -8,7 +8,6 @@ import { Metadata } from "@prisma/client"
 import { languageNames } from "~/i18n"
 import { detectLanguage } from "~/lib/detect-lang"
 import { toGateway } from "~/lib/ipfs-parser"
-import { llmModelSwitcherByTextLength } from "~/lib/llm-model-switcher-by-text-length"
 import prisma from "~/lib/prisma.server"
 import { cacheGet } from "~/lib/redis.server"
 import { getQuery, NextServerResponse } from "~/lib/server-helper"
@@ -21,22 +20,17 @@ type ContentTranslation = {
   content?: string
 }
 
-let translationModel4K: OpenAI | undefined
-let translationModel16K: OpenAI | undefined
+let openai: OpenAI | undefined
 if (process.env.OPENAI_API_KEY) {
   const options = {
     openAIApiKey: process.env.OPENAI_API_KEY,
     temperature: 0.2,
     maxTokens: -1,
   }
-  translationModel4K = new OpenAI({ ...options, modelName: "gpt-3.5-turbo" })
-  translationModel16K = new OpenAI({
-    ...options,
-    modelName: "gpt-3.5-turbo-16k",
-  })
+  openai = new OpenAI({ ...options, modelName: "gpt-4o-mini" })
 }
 
-type ChainKeyType = `${4 | 16}k_${Language}` // e.g. "4k_en" | "4k_zh" | "4k_zh-TW" | "4k_ja" | "16k_en" | "16k_zh" | "16k_zh-TW" | "16k_ja"
+type ChainKeyType = Language // e.g. "en" | "zh" | "zh-TW" | "ja"
 const translationChains = new Map<ChainKeyType, LLMChain>()
 
 const getOriginalTranslation = async ({
@@ -50,7 +44,7 @@ const getOriginalTranslation = async ({
 }): Promise<ContentTranslation | undefined> => {
   if (fromLang === toLang) return
 
-  if (!translationModel4K || !translationModel16K) return
+  if (!openai) return
 
   try {
     const { title, content } = await (
@@ -60,18 +54,8 @@ const getOriginalTranslation = async ({
     if (!fromLang && detectLanguage(title + content) === toLang) return
 
     console.time(`fetching translation ${cid}, ${toLang}`)
-    const { modelSize, tokens } = llmModelSwitcherByTextLength(content, {
-      includeResponse: { lang: toLang },
-    })
-
-    if (!modelSize) {
-      console.error(
-        `|__ Error: Content too long for translation: ${cid}, ${toLang}. (Tokens: ${tokens})`,
-      )
-      return
-    }
 
-    let chain = translationChains.get(`${modelSize}_${toLang}`)
+    let chain = translationChains.get(toLang)
 
     if (!chain) {
       const template = `
@@ -101,12 +85,9 @@ Translate the following text to ${languageNames[toLang]} language:
         inputVariables: ["text"],
       })
 
-      const translateModel =
-        modelSize === "4k" ? translationModel4K : translationModel16K
-
-      chain = new LLMChain({ llm: translateModel, prompt })
+      chain = new LLMChain({ llm: openai, prompt })
 
-      translationChains.set(`${modelSize}_${toLang}`, chain)
+      translationChains.set(toLang, chain)
     }
 
     const t =
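Taken together, the changes to this file replace the two gpt-3.5-turbo variants (4K and 16K context, chosen by text length) with a single gpt-4o-mini model, and cache one translation chain per target language instead of per model-size-and-language pair. A condensed sketch of the resulting flow follows, assuming the LangChain APIs the diff already references (OpenAI, PromptTemplate, LLMChain); the import paths, the Language union, the prompt template, and the translate wrapper are assumptions rather than the file's exact contents.

import { OpenAI } from "langchain/llms/openai"
import { PromptTemplate } from "langchain/prompts"
import { LLMChain } from "langchain/chains"

type Language = "en" | "zh" | "zh-TW" | "ja" // assumed from the removed comment

// One model for every translation; no more switching between 4K and 16K variants.
const openai = new OpenAI({
  openAIApiKey: process.env.OPENAI_API_KEY,
  modelName: "gpt-4o-mini",
  temperature: 0.2,
  maxTokens: -1,
})

// Chains are now keyed by target language only, not by "<modelSize>_<lang>".
const translationChains = new Map<Language, LLMChain>()

async function translate(text: string, toLang: Language): Promise<string> {
  let chain = translationChains.get(toLang)
  if (!chain) {
    const prompt = new PromptTemplate({
      // Placeholder template; the real one is defined above the shown hunks.
      template: `Translate the following text to ${toLang}:\n\n{text}`,
      inputVariables: ["text"],
    })
    chain = new LLMChain({ llm: openai, prompt })
    translationChains.set(toLang, chain)
  }
  const result = await chain.call({ text })
  return result.text
}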
66 changes: 0 additions & 66 deletions src/lib/calculate-tokens.ts

This file was deleted.

54 changes: 0 additions & 54 deletions src/lib/llm-model-switcher-by-text-length.ts

This file was deleted.

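The two deleted files held the token counting and model-size switching that gpt-4o-mini's larger context window makes unnecessary. For orientation, here is a rough reconstruction of the switcher's shape, inferred only from its call site in the diff above; the real implementation differed (it used a proper tokenizer and also budgeted for the expected response length via the `includeResponse` option, which this sketch ignores).

type ModelSize = "4k" | "16k"

// Hypothetical reconstruction, not the deleted code: pick a gpt-3.5-turbo variant
// by estimated token count, or return no modelSize if the text fits neither.
function llmModelSwitcherByTextLength(
  text: string,
  options?: { includeResponse?: { lang: string } },
): { modelSize?: ModelSize; tokens: number } {
  const tokens = Math.ceil(text.length / 4) // crude estimate; the real file counted tokens properly
  if (tokens <= 4 * 1024) return { modelSize: "4k", tokens }
  if (tokens <= 16 * 1024) return { modelSize: "16k", tokens }
  return { tokens }
}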
2 changes: 1 addition & 1 deletion src/queries/page.server.ts
@@ -79,7 +79,7 @@ let model: OpenAI | undefined
 if (process.env.OPENAI_API_KEY) {
   model = new OpenAI({
     openAIApiKey: process.env.OPENAI_API_KEY,
-    modelName: "gpt-3.5-turbo",
+    modelName: "gpt-4o-mini",
     temperature: 0.3,
     maxTokens: 400,
   })

0 comments on commit 7b22359
