feat: add anthropic claude support

old-agentic-v1^2
Travis Fischer 2023-06-01 00:51:34 -07:00
rodzic cbacbc8bc5
commit f59bcafbbe
7 zmienionych plików z 199 dodań i 2 usunięć

Wyświetl plik

@@ -36,6 +36,7 @@
"test:prettier": "prettier '**/*.{js,jsx,ts,tsx}' --check"
},
"dependencies": {
"@anthropic-ai/sdk": "^0.4.3",
"handlebars": "^4.7.7",
"js-tiktoken": "^1.0.6",
"jsonrepair": "^3.1.0",

Wyświetl plik

@@ -1,6 +1,9 @@
lockfileVersion: '6.0'
dependencies:
'@anthropic-ai/sdk':
specifier: ^0.4.3
version: 0.4.3
handlebars:
specifier: ^4.7.7
version: 4.7.7
@@ -105,6 +108,15 @@ devDependencies:
packages:
/@anthropic-ai/sdk@0.4.3:
resolution: {integrity: sha512-SZrlXvjUUYT9rPmSzlTtmVk1OjVNpkCzILRluhiYwNcxXfQyvPJDi0CI6PyymygcgtqEF5EVqhKmC/PtPsNEIw==}
dependencies:
'@fortaine/fetch-event-source': 3.0.6
cross-fetch: 3.1.6
transitivePeerDependencies:
- encoding
dev: false
/@babel/code-frame@7.21.4:
resolution: {integrity: sha512-LYvhNKfwWSPpocw8GI7gpK2nq3HSDuEPC/uSYaALSJu9xjsalaaYFOq0Pwt5KmVqwEbZlDu81aLXwBOmD/Fv9g==}
engines: {node: '>=6.9.0'}
@@ -438,6 +450,11 @@ packages:
dev: true
optional: true
/@fortaine/fetch-event-source@3.0.6:
resolution: {integrity: sha512-621GAuLMvKtyZQ3IA6nlDWhV1V/7PGOTNIGLUifxt0KzM+dZIweJ6F3XvQF3QnqeNfS1N7WQ0Kil1Di/lhChEw==}
engines: {node: '>=16.15'}
dev: false
/@ioredis/commands@1.2.0:
resolution: {integrity: sha512-Sx1pU8EM64o2BrqNpEO1CNLtKQwyhuXuqyfH7oGKCk+1a33d2r5saW8zNwm3j6BTExtjrv2BxTgzzkMwts6vGg==}
dev: true
@@ -1007,6 +1024,14 @@ packages:
engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
dev: true
/cross-fetch@3.1.6:
resolution: {integrity: sha512-riRvo06crlE8HiqOwIpQhxwdOk4fOeR7FVM/wXoxchFEqMNUjvbs3bfo4OTgMEMHzppd4DxFBDbyySj8Cv781g==}
dependencies:
node-fetch: 2.6.11
transitivePeerDependencies:
- encoding
dev: false
/cross-spawn@6.0.5:
resolution: {integrity: sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==}
engines: {node: '>=4.8'}
@@ -2199,6 +2224,18 @@ packages:
path-to-regexp: 1.8.0
dev: true
/node-fetch@2.6.11:
resolution: {integrity: sha512-4I6pdBY1EthSqDmJkiNk3JIT8cswwR9nfeW/cPdUagJYEQG7R95WRH74wpz7ma8Gh/9dI9FP+OU+0E4FvtA55w==}
engines: {node: 4.x || >=6.0.0}
peerDependencies:
encoding: ^0.1.0
peerDependenciesMeta:
encoding:
optional: true
dependencies:
whatwg-url: 5.0.0
dev: false
/nofilter@3.1.0:
resolution: {integrity: sha512-l2NNj07e9afPnhAhvgVrCD/oy2Ai1yfLpuo3EpiO1jFTsB4sFz6oIfAfSZyQzVpkZQ9xS8ZS5g1jCBgq4Hwo0g==}
engines: {node: '>=12.19'}
@@ -3047,6 +3084,10 @@ packages:
is-number: 7.0.0
dev: true
/tr46@0.0.3:
resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==}
dev: false
/tr46@1.0.1:
resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==}
dependencies:
@@ -3185,6 +3226,10 @@ packages:
spdx-expression-parse: 3.0.1
dev: true
/webidl-conversions@3.0.1:
resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==}
dev: false
/webidl-conversions@4.0.2:
resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==}
dev: true
@@ -3194,6 +3239,13 @@ packages:
engines: {node: '>=6'}
dev: true
/whatwg-url@5.0.0:
resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==}
dependencies:
tr46: 0.0.3
webidl-conversions: 3.0.1
dev: false
/whatwg-url@7.1.0:
resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==}
dependencies:

Wyświetl plik

@@ -0,0 +1,83 @@
import * as anthropic from '@anthropic-ai/sdk'
import { type SetOptional } from 'type-fest'
import { ZodTypeAny, z } from 'zod'
import * as types from './types'
import { defaultAnthropicModel } from './constants'
import { BaseChatModelBuilder } from './llm'
// Default stop sequences for completions: stop once the model emits the
// next human-turn marker (anthropic.HUMAN_PROMPT). May be overridden by
// caller-supplied `stop_sequences` in modelParams.
const defaultStopSequences = [anthropic.HUMAN_PROMPT]
/**
 * Chat model builder backed by Anthropic's text-completion API
 * (`client.complete`).
 *
 * Chat messages are flattened into a single prompt string using the SDK's
 * HUMAN_PROMPT / AI_PROMPT turn markers before each request.
 */
export class AnthropicChatModelBuilder<
  TInput extends ZodTypeAny = ZodTypeAny,
  TOutput extends ZodTypeAny = z.ZodType<string>
> extends BaseChatModelBuilder<
  TInput,
  TOutput,
  SetOptional<
    Omit<anthropic.SamplingParameters, 'prompt'>,
    'model' | 'max_tokens_to_sample' | 'stop_sequences'
  >,
  anthropic.CompletionResponse
> {
  _client: anthropic.Client

  constructor(
    client: anthropic.Client,
    options: types.ChatModelOptions<
      TInput,
      TOutput,
      SetOptional<
        Omit<anthropic.SamplingParameters, 'prompt'>,
        'model' | 'max_tokens_to_sample' | 'stop_sequences'
      >
    >
  ) {
    // Fall back to the package-wide default Anthropic model when the caller
    // doesn't specify one via `modelParams`.
    super({
      provider: 'anthropic',
      model: options.modelParams?.model || defaultAnthropicModel,
      ...options
    })

    this._client = client
  }

  /**
   * Sends the chat transcript to Anthropic and returns the assistant reply
   * wrapped in the generic chat-completion envelope.
   */
  protected override async _createChatCompletion(
    messages: types.ChatMessage[]
  ): Promise<types.BaseChatCompletionResponse<anthropic.CompletionResponse>> {
    // Flatten the transcript into Anthropic's prompt format: user and
    // assistant turns get their respective markers, any other role (e.g.
    // "system") is passed through verbatim. The trailing AI_PROMPT cues the
    // model to respond as the assistant.
    const segments: string[] = []

    for (const message of messages) {
      if (message.role === 'user') {
        segments.push(`${anthropic.HUMAN_PROMPT} ${message.content}`)
      } else if (message.role === 'assistant') {
        segments.push(`${anthropic.AI_PROMPT} ${message.content}`)
      } else {
        segments.push(message.content)
      }
    }

    const prompt = segments.join('') + anthropic.AI_PROMPT

    // TODO: support streaming
    // TODO: support max_tokens_to_sample
    // TODO: support stop_sequences correctly
    // TODO: handle errors gracefully
    // Caller-supplied `modelParams` (spread below) may override the default
    // stop sequences and the placeholder token limit.
    const response = await this._client.complete({
      stop_sequences: defaultStopSequences,
      max_tokens_to_sample: 200, // TODO
      ...this._modelParams,
      model: this._model,
      prompt
    })

    return {
      message: {
        role: 'assistant',
        content: response.completion
      },
      response
    }
  }
}

Wyświetl plik

@@ -1 +1,2 @@
export const defaultOpenAIModel = 'gpt-3.5-turbo'
export const defaultAnthropicModel = 'claude-instant-v1'

Wyświetl plik

@@ -2,6 +2,7 @@ export * from './agentic'
export * from './task'
export * from './llm'
export * from './openai'
export * from './anthropic'
export * from './tokenizer'
export * from './services/metaphor'

Wyświetl plik

@@ -1,3 +1,4 @@
import * as anthropic from '@anthropic-ai/sdk'
import KeyvRedis from '@keyv/redis'
import 'dotenv/config'
import hashObject from 'hash-obj'
@@ -7,6 +8,8 @@ import { OpenAIClient } from 'openai-fetch'
import pMemoize from 'p-memoize'
export const fakeOpenAIAPIKey = 'fake-openai-api-key'
export const fakeAnthropicAPIKey = 'fake-anthropic-api-key'
export const env = process.env.NODE_ENV || 'development'
export const isTest = env === 'test'
export const isCI = process.env.CI === 'true'
@@ -38,6 +41,13 @@ export class OpenAITestClient extends OpenAIClient {
})
}
/**
 * Anthropic client for tests whose `complete` calls are memoized through the
 * shared keyv cache, so repeated runs can reuse recorded responses.
 */
export class AnthropicTestClient extends anthropic.Client {
  // Wrap the inherited `complete` with a cache keyed on the request params.
  complete = pMemoize(super.complete, {
    cache: keyv,
    cacheKey: (completeParams) =>
      getCacheKey('anthropic:complete', completeParams)
  })
}
export function getCacheKey(label: string, params: any): string {
const hash = hashObject(params, { algorithm: 'sha256' })
return `${label}:${hash}`
@@ -56,6 +66,21 @@ export function createOpenAITestClient() {
}
}
const client = new OpenAITestClient({ apiKey })
return client
return new OpenAITestClient({ apiKey })
}
/**
 * Creates an Anthropic client for tests.
 *
 * In CI — or locally when ANTHROPIC_API_KEY is unset — a fake key is used,
 * on the assumption that responses are served from the test cache.
 */
export function createAnthropicTestClient() {
  // Refreshing the test cache means hitting the real API, which needs a key.
  if (refreshTestCache && !process.env.ANTHROPIC_API_KEY) {
    throw new Error(
      'Cannot refresh test cache without ANTHROPIC_API_KEY environment variable.'
    )
  }

  const apiKey = isCI
    ? fakeAnthropicAPIKey
    : process.env.ANTHROPIC_API_KEY ?? fakeAnthropicAPIKey

  return new AnthropicTestClient(apiKey)
}

Wyświetl plik

@@ -0,0 +1,34 @@
import test from 'ava'
import { expectTypeOf } from 'expect-type'
import { z } from 'zod'
import { AnthropicChatModelBuilder } from '../src/anthropic'
import { createAnthropicTestClient } from './_utils'
test('AnthropicChatModel ⇒ string output', async (t) => {
  // Completions may hit the live API when the test cache is cold.
  t.timeout(2 * 60 * 1000)

  const anthropicClient = createAnthropicTestClient()
  const chatModel = new AnthropicChatModelBuilder(anthropicClient, {
    modelParams: {
      max_tokens_to_sample: 30,
      temperature: 0
    },
    messages: [
      {
        role: 'system',
        content:
          'You are a human, not a large language model. You are not an AI. Respond like a human would. Your name is Leeroy Jenkins.'
      },
      {
        role: 'user',
        content: 'what is your name?'
      }
    ]
  })

  const output = await chatModel.call()

  // Runtime check plus a compile-time check that `call()` yields a string.
  t.truthy(typeof output === 'string')
  expectTypeOf(output).toMatchTypeOf<string>()
})