Mirrored from https://github.com/transitive-bullshit/chatgpt-api
old-agentic-v1^2
parent ae60410cc1
commit 3dbc154e6a
@@ -2,27 +2,37 @@ import { OpenAIClient } from '@agentic/openai-fetch'
 import 'dotenv/config'
 import { z } from 'zod'
 
-import { Agentic, SearchAndCrawlTool } from '@/index'
+import { Agentic, SearchAndCrawlTool, WeatherTool } from '@/index'
 
 async function main() {
   const openai = new OpenAIClient({ apiKey: process.env.OPENAI_API_KEY! })
   const agentic = new Agentic({ openai })
 
+  const topic = process.argv[2] || 'OpenAI'
+
   const res = await agentic
-    .gpt4(`Summarize the latest news on {{topic}} using markdown.`)
-    .modelParams({
+    .gpt4({
+      messages: [
+        {
+          role: 'system',
+          content: `You are a McKinsey analyst who is an expert at writing executive summaries. Always respond using markdown unless instructed to respond using JSON.`
+        },
+        {
+          role: 'user',
+          content: `Summarize the latest news on: {{topic}}`
+        }
+      ],
       model: 'gpt-4-32k'
     })
-    .tools([new SearchAndCrawlTool()])
+    .tools([new SearchAndCrawlTool(), new WeatherTool()])
     .input(
       z.object({
        topic: z.string()
      })
    )
-    .call({
-      topic: 'OpenAI'
-    })
+    .call({ topic })
 
   console.log('\n\n\n')
   console.log(res)
 }
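Note on the example above: `.input()` receives a zod schema, which presumably validates the object later passed to `.call({ topic })`. For readers unfamiliar with zod, a minimal standalone sketch of what that schema accepts (plain zod usage, independent of the Agentic builder):

import { z } from 'zod'

// Same shape as the schema passed to .input() in the example above.
const inputSchema = z.object({ topic: z.string() })

inputSchema.parse({ topic: 'OpenAI' }) // ok: returns { topic: 'OpenAI' }
// inputSchema.parse({ topic: 42 })    // would throw a ZodError (expected string, received number)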
@@ -1,4 +1,5 @@
 import defaultKy from 'ky'
+import { SetOptional } from 'type-fest'
 
 import * as types from './types'
 import { DEFAULT_OPENAI_MODEL } from './constants'
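The newly imported `SetOptional` comes from type-fest and is used in the signatures below to make `model` and `messages` optional. A standalone sketch of what the utility type does; the interface here is a simplified stand-in, not the library's actual `ChatCompletionParams`:

import { SetOptional } from 'type-fest'

// Simplified stand-in type for illustration only.
interface ChatParams {
  model: string
  messages: { role: string; content: string }[]
  temperature?: number
}

// Same keys, but 'model' and 'messages' may now be omitted.
type ModelOverrides = SetOptional<ChatParams, 'model' | 'messages'>

const overrides: ModelOverrides = { temperature: 0 } // compiles: model/messages are optional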
@@ -55,7 +56,7 @@ export class Agentic {
       modelParams: {},
       timeoutMs: 2 * 60000,
       retryConfig: {
-        retries: 3,
+        retries: 2,
         strategy: 'heal',
         ...opts.openaiModelDefaults?.retryConfig
       },
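Aside: `...opts.openaiModelDefaults?.retryConfig` works because spreading `undefined` in an object literal is a no-op, while a provided config wins by being spread last. A minimal sketch of that merge behavior (the names here are illustrative, not the library's):

// Illustrative only: default retry options merged with optional caller overrides.
const defaultRetryConfig = { retries: 2, strategy: 'heal' }

const noOverrides: { retries?: number } | undefined = undefined
const merged1 = { ...defaultRetryConfig, ...noOverrides } // { retries: 2, strategy: 'heal' }

const callerOverrides = { retries: 5 }
const merged2 = { ...defaultRetryConfig, ...callerOverrides } // { retries: 5, strategy: 'heal' }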
@@ -103,14 +104,17 @@ export class Agentic {
   }
 
   openaiChatCompletion(
-    promptOrChatCompletionParams:
-      | string
-      | Partial<types.openai.ChatCompletionParams> // TODO: make more strict
+    promptOrChatCompletionParams: string | types.openai.ChatCompletionParams, // TODO: make more strict?
+    modelParams?: SetOptional<
+      types.openai.ChatCompletionParams,
+      'model' | 'messages'
+    >
   ) {
     let options: Partial<types.openai.ChatCompletionParams>
 
     if (typeof promptOrChatCompletionParams === 'string') {
       options = {
+        ...modelParams,
         messages: [
           {
             role: 'user',
@@ -119,7 +123,7 @@ export class Agentic {
         ]
       }
     } else {
-      options = promptOrChatCompletionParams
+      options = { ...promptOrChatCompletionParams, ...modelParams }
 
       if (!options.messages) {
         throw new Error('messages must be provided')
@@ -139,12 +143,17 @@ export class Agentic {
   gpt3(
     promptOrChatCompletionParams:
       | string
-      | Omit<types.openai.ChatCompletionParams, 'model'>
+      | SetOptional<types.openai.ChatCompletionParams, 'model'>,
+    modelParams?: SetOptional<
+      types.openai.ChatCompletionParams,
+      'model' | 'messages'
+    >
   ) {
-    let options: Omit<types.openai.ChatCompletionParams, 'model'>
+    let options: SetOptional<types.openai.ChatCompletionParams, 'model'>
 
     if (typeof promptOrChatCompletionParams === 'string') {
       options = {
+        ...modelParams,
         messages: [
           {
             role: 'user',
@@ -153,7 +162,7 @@ export class Agentic {
         ]
       }
     } else {
-      options = promptOrChatCompletionParams
+      options = { ...promptOrChatCompletionParams, ...modelParams }
 
       if (!options.messages) {
         throw new Error('messages must be provided')
@@ -174,12 +183,17 @@ export class Agentic {
   gpt4(
     promptOrChatCompletionParams:
       | string
-      | Omit<types.openai.ChatCompletionParams, 'model'>
+      | SetOptional<types.openai.ChatCompletionParams, 'model'>,
+    modelParams?: SetOptional<
+      types.openai.ChatCompletionParams,
+      'model' | 'messages'
+    >
   ) {
-    let options: Omit<types.openai.ChatCompletionParams, 'model'>
+    let options: SetOptional<types.openai.ChatCompletionParams, 'model'>
 
     if (typeof promptOrChatCompletionParams === 'string') {
       options = {
+        ...modelParams,
         messages: [
           {
             role: 'user',
@@ -188,7 +202,7 @@ export class Agentic {
         ]
       }
     } else {
-      options = promptOrChatCompletionParams
+      options = { ...promptOrChatCompletionParams, ...modelParams }
 
       if (!options.messages) {
         throw new Error('messages must be provided')
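Net effect of the signature changes above: `openaiChatCompletion`, `gpt3`, and `gpt4` each accept either a bare prompt string plus optional model params, or a full params object, mirroring the example file earlier in this commit. A hedged sketch of the two call styles this appears to enable, assuming an `agentic` instance constructed as in that example and that `temperature` is one of the optional chat-completion params:

import { OpenAIClient } from '@agentic/openai-fetch'
import { Agentic } from '@/index'

const openai = new OpenAIClient({ apiKey: process.env.OPENAI_API_KEY! })
const agentic = new Agentic({ openai })

// Style 1: bare prompt string, with optional model params as the second argument.
const fromPrompt = agentic.gpt4(
  'Summarize the latest news on {{topic}} using markdown.',
  { temperature: 0.3 }
)

// Style 2: a full chat-completion params object, as in the updated example file.
const fromParams = agentic.gpt4({
  messages: [{ role: 'user', content: 'Summarize the latest news on: {{topic}}' }],
  model: 'gpt-4-32k'
})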
@@ -1,4 +1,5 @@
 import defaultKy from 'ky'
+import { AbortError } from 'p-retry'
 import pThrottle from 'p-throttle'
 import { P } from 'pino'
 
@@ -337,7 +338,7 @@ export class DiffbotClient {
    apiKey = process.env.DIFFBOT_API_KEY,
    apiBaseUrl = DIFFBOT_API_BASE_URL,
    apiKnowledgeGraphBaseUrl = DIFFBOT_KNOWLEDGE_GRAPH_API_BASE_URL,
-    timeoutMs = 60_000,
+    timeoutMs = 30_000,
    ky = defaultKy
  }: {
    apiKey?: string
@@ -388,13 +389,24 @@ export class DiffbotClient {
      }
    }
 
-    console.log(`DiffbotClient._extract: ${endpoint}`, searchParams)
+    // TODO
+    const { url } = searchParams
+    if (url) {
+      const parsedUrl = new URL(url)
+      if (parsedUrl.hostname.includes('theguardian.com')) {
+        throw new AbortError(
+          `Diffbot does not support URLs from domain "${parsedUrl.hostname}"`
+        )
+      }
+    }
+
+    // console.log(`DiffbotClient._extract: ${endpoint}`, searchParams)
 
    return this.api
      .get(endpoint, {
        searchParams,
        headers,
-        retry: 2
+        retry: 1
      })
      .json<T>()
  }
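The new guard throws p-retry's `AbortError`. In p-retry, throwing `AbortError` inside the retried function stops further attempts and rejects immediately, presumably so callers that wrap Diffbot requests in a retry loop do not keep retrying a domain Diffbot cannot handle. A minimal standalone sketch of that semantics (the fetch target and retry count here are illustrative, not the client's actual code path):

import pRetry, { AbortError } from 'p-retry'

// Illustrative only: a retried operation that gives up immediately on an unsupported domain.
const run = (url: string) =>
  pRetry(
    async () => {
      const hostname = new URL(url).hostname
      if (hostname.includes('theguardian.com')) {
        // AbortError short-circuits p-retry: no further attempts are made.
        throw new AbortError(`Diffbot does not support URLs from domain "${hostname}"`)
      }
      return fetch(url)
    },
    { retries: 2 }
  )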
@@ -4,7 +4,7 @@ import { DiffbotClient } from '@/services'
 
 import { isCI, ky } from '../_utils'
 
-test('Diffbot.extractAnalyze', async (t) => {
+test('Diffbot.extractAnalyze - transitivebullsh.it', async (t) => {
   if (!process.env.DIFFBOT_API_KEY || isCI) {
     return t.pass()
   }
@@ -20,6 +20,23 @@ test('Diffbot.extractAnalyze', async (t) => {
   t.is(result.objects?.length, 1)
 })
 
+// TODO
+// test.only('Diffbot.extractAnalyze - theguardian.com/world/ukraine', async (t) => {
+//   if (!process.env.DIFFBOT_API_KEY || isCI) {
+//     return t.pass()
+//   }
+
+//   t.timeout(2 * 60 * 1000)
+//   const client = new DiffbotClient({ ky })
+
+//   const result = await client.extractAnalyze({
+//     url: 'https://www.theguardian.com/world/ukraine'
+//   })
+//   console.log(JSON.stringify(result, null, 2))
+//   t.is(result.type, 'list')
+//   t.is(result.objects?.length, 1)
+// })
+
 test('Diffbot.extractArticle', async (t) => {
   if (!process.env.DIFFBOT_API_KEY || isCI) {
     return t.pass()
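The commented-out Guardian test above would hit the live API. Given the new domain guard in `DiffbotClient._extract`, a cheaper alternative would be to assert the rejection directly; this is only a sketch, assuming extractAnalyze routes through that guard and that ava is the test runner (as the t.* assertions suggest):

// Sketch only: asserts the new domain guard without a network round-trip.
test('Diffbot.extractAnalyze - rejects theguardian.com', async (t) => {
  if (!process.env.DIFFBOT_API_KEY || isCI) {
    return t.pass()
  }

  const client = new DiffbotClient({ ky })

  await t.throwsAsync(
    () => client.extractAnalyze({ url: 'https://www.theguardian.com/world/ukraine' }),
    { message: /does not support URLs from domain/ }
  )
})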