Mirrored from https://github.com/transitive-bullshit/chatgpt-api
init (commit 94fb118c97)
@@ -0,0 +1,10 @@
root = true

[*]
indent_style = space
indent_size = 2
tab_width = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
@@ -0,0 +1,8 @@
# ------------------------------------------------------------------------------
# This is an example .env file.
#
# All of these environment vars must be defined either in your environment or in
# a local .env file in order to run this project.
# ------------------------------------------------------------------------------

OPENAI_API_KEY=
@@ -0,0 +1,4 @@
{
  "root": true,
  "extends": ["@fisch0920/eslint-config", "@fisch0920/eslint-config/node"]
}
@@ -0,0 +1 @@
github: [transitive-bullshit]
@@ -0,0 +1,50 @@
name: CI

on: [push, pull_request]

jobs:
  test:
    name: Test Node.js ${{ matrix.node-version }}
    runs-on: ubuntu-latest
    strategy:
      fail-fast: true
      matrix:
        node-version:
          - 20
          - 21
          - 22

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}

      - name: Install pnpm
        uses: pnpm/action-setup@v3
        id: pnpm-install
        with:
          version: 8
          run_install: false

      - name: Get pnpm store directory
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV

      - name: Setup pnpm cache
        uses: actions/cache@v4
        with:
          path: ${{ env.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: Install dependencies
        run: pnpm install --frozen-lockfile

      - name: Run test
        run: pnpm run test
@@ -0,0 +1,39 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
node_modules
/.pnp
.pnp.js

# testing
/coverage

# next.js
.next/
out/

# production
build/
dist/

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*

# local env files
.env*.local

# vercel
.vercel

# typescript
*.tsbuildinfo
next-env.d.ts

.env
@@ -0,0 +1 @@
npm run precommit
@@ -0,0 +1,2 @@
enable-pre-post-scripts=true
package-manager-strict=false
@@ -0,0 +1,11 @@
{
  "singleQuote": true,
  "jsxSingleQuote": true,
  "semi": false,
  "useTabs": false,
  "tabWidth": 2,
  "bracketSpacing": true,
  "bracketSameLine": false,
  "arrowParens": "always",
  "trailingComma": "none"
}
@@ -0,0 +1,5 @@
**PROPRIETARY LICENSE**

Copyright (c) 2024 Travis Fischer

All rights reserved.
@@ -0,0 +1,83 @@
{
  "name": "gptlint",
  "private": true,
  "version": "0.1.0",
  "description": "TODO",
  "author": "Travis Fischer <travis@transitivebullsh.it>",
  "license": "PROPRIETARY",
  "homepage": "https://trywalter.ai",
  "repository": {
    "type": "git",
    "url": "transitive-bullshit/walter"
  },
  "packageManager": "pnpm@8.15.7",
  "engines": {
    "node": ">=18"
  },
  "type": "module",
  "source": "./src/gptlint.ts",
  "types": "./dist/index.d.ts",
  "sideEffects": false,
  "exports": {
    ".": {
      "types": "./dist/src/index.d.ts",
      "import": "./dist/src/index.js",
      "default": "./dist/src/index.js"
    }
  },
  "files": [
    "dist"
  ],
  "scripts": {
    "build": "tsup",
    "dev": "tsup --watch",
    "clean": "del dist",
    "prebuild": "run-s clean",
    "predev": "run-s clean",
    "pretest": "run-s build",
    "prepare": "husky",
    "precommit": "lint-staged",
    "test": "run-s test:*",
    "test:format": "prettier --check \"**/*.{js,ts,tsx}\"",
    "test:lint": "eslint .",
    "test:typecheck": "tsc --noEmit",
    "test:unit": "vitest run"
  },
  "dependencies": {
    "@dexaai/dexter": "^2.0.0",
    "dotenv": "^16.4.5",
    "execa": "^8.0.1",
    "exit-hook": "^4.0.0",
    "jsonrepair": "^3.6.1",
    "ky": "^1.2.4",
    "openai": "^4.47.1",
    "p-map": "^7.0.2",
    "p-retry": "^6.2.0",
    "tiny-invariant": "^1.3.3",
    "type-fest": "^4.16.0",
    "zod": "^3.23.3"
  },
  "devDependencies": {
    "@fisch0920/eslint-config": "^1.3.1",
    "@total-typescript/ts-reset": "^0.5.1",
    "@types/node": "^20.12.7",
    "del-cli": "^5.1.0",
    "eslint": "^8.57.0",
    "husky": "^9.0.11",
    "lint-staged": "^15.2.2",
    "np": "^10.0.5",
    "npm-run-all2": "^6.1.2",
    "prettier": "^3.2.5",
    "tsup": "^8.0.2",
    "tsx": "^4.7.2",
    "typescript": "^5.4.5",
    "vite": "^5.2.10",
    "vitest": "^1.5.0"
  },
  "lint-staged": {
    "*.{ts,tsx}": [
      "eslint --fix",
      "prettier --ignore-unknown --write"
    ]
  }
}
(Diff too large to display)
@@ -0,0 +1,24 @@
<p align="center">
  <a href="https://trywalter.ai"><img alt="Walter" src="https://trywalter.ai/walter-logo.svg" width="256"></a>
</p>

<p align="center">
  <em>Agentic recommendations for AI-native products...</em>
</p>

<p align="center">
  <a href="https://github.com/transitive-bullshit/walter/actions/workflows/test.yml"><img alt="Build Status" src="https://github.com/transitive-bullshit/walter/actions/workflows/main.yml/badge.svg" /></a>
  <a href="https://github.com/transitive-bullshit/walter/blob/main/license"><img alt="Proprietary License" src="https://img.shields.io/badge/license-proprietary-blue" /></a>
  <a href="https://prettier.io"><img alt="Prettier Code Formatting" src="https://img.shields.io/badge/code_style-prettier-brightgreen.svg" /></a>
  <a href="https://twitter.com/transitive_bs"><img alt="Discuss on Twitter" src="https://img.shields.io/badge/twitter-discussion-blue" /></a>
</p>

# Walter <!-- omit from toc -->

**Coming soon**

## License

PROPRIETARY © [Travis Fischer](https://twitter.com/transitive_bs)

To stay up to date or learn more, follow [@transitive_bs](https://twitter.com/transitive_bs) on Twitter.
@@ -0,0 +1,5 @@
import dotenv from 'dotenv'

import type * as types from './types.js'

dotenv.config()
@@ -0,0 +1,5 @@
export { AbortError, type FailedAttemptError } from 'p-retry'

export class RetryableError extends Error {}

export class ParseError extends RetryableError {}
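A minimal sketch (not part of this commit) of how these error classes pair with `p-retry`: `RetryableError` subclasses are allowed to propagate so the attempt is retried, while anything else aborts the remaining attempts. The `callModel` helper and the retry count are hypothetical.

```ts
import pRetry, { AbortError } from 'p-retry'

import { ParseError, RetryableError } from './errors.js'

// Hypothetical LLM call whose output sometimes fails to parse.
declare function callModel(prompt: string): Promise<string>

async function getAnswerWithRetries(prompt: string): Promise<string> {
  return pRetry(
    async () => {
      const raw = await callModel(prompt)
      if (!raw.trim()) {
        // Retryable: p-retry will run the attempt again.
        throw new ParseError('empty model output')
      }
      return raw
    },
    {
      retries: 2,
      onFailedAttempt: (err) => {
        // Non-retryable errors stop the loop immediately.
        if (!(err instanceof RetryableError)) {
          throw new AbortError(err)
        }
      }
    }
  )
}
```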
@@ -0,0 +1,2 @@
export type * from './types.js'
export * from './utils.js'
@@ -0,0 +1,273 @@
import { assert, expect, test } from 'vitest'
import { z } from 'zod'

import {
  extractJSONFromString,
  parseArrayOutput,
  parseBooleanOutput,
  parseNumberOutput,
  parseObjectOutput,
  parseStructuredOutput
} from './parse-structured-output.js'

test('extractJSONFromString should extract JSON object from string', () => {
  let jsonStr = 'Some text {"name":"John Doe"} more text'
  let result = extractJSONFromString(jsonStr, 'object')
  assert.deepEqual(result[0], { name: 'John Doe' })

  jsonStr =
    'Some text {"name":"John Doe","age":42,"address":{"street":"Main Street","number":42}} more text'
  result = extractJSONFromString(jsonStr, 'object')
  assert.deepEqual(result[0], {
    name: 'John Doe',
    age: 42,
    address: { street: 'Main Street', number: 42 }
  })

  jsonStr = 'foo {"name":"John Doe","school":"St. John\'s"} bar'
  result = extractJSONFromString(jsonStr, 'object')
  assert.deepEqual(result[0], { name: 'John Doe', school: "St. John's" })
})

test('extractJSONFromString should extract an invalid JSON object from string', () => {
  let jsonStr = 'Some text {"name":\'John Doe\'} more text'
  let result = extractJSONFromString(jsonStr, 'object')
  assert.deepEqual(result[0], { name: 'John Doe' })

  jsonStr = 'Some text {"name":"John Doe","age":42,} more text'
  result = extractJSONFromString(jsonStr, 'object')
  assert.deepEqual(result[0], { name: 'John Doe', age: 42 })
})

test('extractJSONFromString should extract multiple JSON objects from string', () => {
  let jsonStr = 'Some text {"name":"John Doe"} more text {"name":"Jane Doe"}'
  let result = extractJSONFromString(jsonStr, 'object')
  assert.deepEqual(result[0], { name: 'John Doe' })
  assert.deepEqual(result[1], { name: 'Jane Doe' })

  jsonStr =
    'Some text {"name":"John Doe","age":42,"address":{"street":"Main Street","number":42}} more text {"name":"Jane Doe","age":42,"address":{"street":"Main Street","number":42}}'
  result = extractJSONFromString(jsonStr, 'object')
  assert.deepEqual(result[0], {
    name: 'John Doe',
    age: 42,
    address: { street: 'Main Street', number: 42 }
  })
  assert.deepEqual(result[1], {
    name: 'Jane Doe',
    age: 42,
    address: { street: 'Main Street', number: 42 }
  })
})

test('extractJSONFromString should extract JSON array from string', () => {
  let jsonString = 'Some text [1,2,3] more text'
  let result = extractJSONFromString(jsonString, 'array')
  assert.deepEqual(result[0], [1, 2, 3])

  jsonString = 'Some text ["foo","bar","\'quoted\'"] more text'
  result = extractJSONFromString(jsonString, 'array')
  assert.deepEqual(result[0], ['foo', 'bar', "'quoted'"])
})

test('extractJSONFromString should extract an invalid JSON array from string', () => {
  let jsonString = 'Some text [1,2,3,] more text'
  let result = extractJSONFromString(jsonString, 'array')
  assert.deepEqual(result[0], [1, 2, 3])

  jsonString = "Some text ['foo','bar'] more text"
  result = extractJSONFromString(jsonString, 'array')
  assert.deepEqual(result[0], ['foo', 'bar'])
})

test('extractJSONFromString should extract multiple JSON arrays from string', () => {
  const jsonString = 'Some text [1,2,3] more text [4,5,6]'
  const result = extractJSONFromString(jsonString, 'array')
  assert.deepEqual(result[0], [1, 2, 3])
  assert.deepEqual(result[1], [4, 5, 6])
})

test('extractJSONFromString should return an empty array if no JSON object is found', () => {
  const jsonString = 'Some text'
  const result = extractJSONFromString(jsonString, 'object')
  assert.deepEqual(result, [])
})

test('extractJSONFromString should return an empty array if no JSON array is found', () => {
  const jsonString = 'Some text'
  const result = extractJSONFromString(jsonString, 'array')
  assert.deepEqual(result, [])
})

test('parseArrayOutput - handles valid arrays correctly', () => {
  const output1 = parseArrayOutput('[1,2,3]')
  const output2 = parseArrayOutput('["a", "b", "c"]')
  const output3 = parseArrayOutput('[{"a": 1}, {"b": 2}]')

  expect(output1).toMatchSnapshot('should return [1, 2, 3] for "[1,2,3]"')
  expect(output2).toMatchSnapshot(
    'should return ["a", "b", "c"] for "["a", "b", "c"]'
  )
  expect(output3).toMatchSnapshot(
    'should return [{"a": 1}, {"b": 2}] for [{"a": 1}, {"b": 2}]'
  )
})

test('parseArrayOutput - handles arrays surrounded by text correctly', () => {
  const output1 = parseArrayOutput('The array is [1,2,3]')
  const output2 = parseArrayOutput('Array: ["a", "b", "c"]. That\'s all!')
  const output3 = parseArrayOutput(
    'This is the array [{"a": 1}, {"b": 2}] in the text'
  )

  expect(output1).toMatchSnapshot(
    'should return [1, 2, 3] for "The array is [1,2,3]"'
  )
  expect(output2).toMatchSnapshot(
    'should return ["a", "b", "c"] for "Array: ["a", "b", "c"]. That\'s all!"'
  )
  expect(output3).toMatchSnapshot(
    'should return [{"a": 1}, {"b": 2}] for "This is the array [{"a": 1}, {"b": 2}] in the text"'
  )
})

test('parseArrayOutput - throws error for invalid arrays', () => {
  assert.throws(() => {
    parseArrayOutput('not a valid array')
  })
})

test('parseObjectOutput - handles valid objects correctly', () => {
  const output1 = parseObjectOutput('{"a":1,"b":2,"c":3}')
  const output2 = parseObjectOutput(
    '{"name":"John","age":30,"city":"New York"}'
  )

  expect(output1).toMatchSnapshot(
    'should return {"a":1,"b":2,"c":3} for {"a":1,"b":2,"c":3}'
  )
  expect(output2).toMatchSnapshot(
    'should return {"name":"John","age":30,"city":"New York"} for {"name":"John","age":30,"city":"New York"}'
  )
})

test('parseObjectOutput - handles objects surrounded by text correctly', () => {
  const output1 = parseObjectOutput('The object is {"a":1,"b":2,"c":3}')
  const output2 = parseObjectOutput(
    'Object: {"name":"John","age":30,"city":"New York"}. That\'s all!'
  )

  expect(output1).toMatchSnapshot(
    'should return {"a":1,"b":2,"c":3} for "The object is {"a":1,"b":2,"c":3}"'
  )
  expect(output2).toMatchSnapshot(
    'should return {"name":"John","age":30,"city":"New York"} for "Object: {"name":"John","age":30,"city":"New York"}. That\'s all!"'
  )
})

test('parseObjectOutput - handles JSON array of objects', () => {
  const output = parseObjectOutput('[{"a":1,"b":2},{"c":3,"d":4}]')

  expect(output).toMatchSnapshot(
    'should return first object {"a":1,"b":2} for [{"a":1,"b":2},{"c":3,"d":4}]'
  )
})

test('parseObjectOutput - throws error for invalid objects', () => {
  assert.throws(() => {
    parseObjectOutput('not a valid object')
  })
})

test('parseBooleanOutput - handles `true` outputs correctly', () => {
  const output1 = parseBooleanOutput('True')
  const output2 = parseBooleanOutput('TRUE')
  const output3 = parseBooleanOutput('true.')

  expect(output1).toMatchSnapshot('should return true for "True"')
  expect(output2).toMatchSnapshot('should return true for "TRUE"')
  expect(output3).toMatchSnapshot('should return true for "true."')
})

test('parseBooleanOutput - handles `false` outputs correctly', () => {
  const output1 = parseBooleanOutput('False')
  const output2 = parseBooleanOutput('FALSE')
  const output3 = parseBooleanOutput('false!')

  expect(output1).toMatchSnapshot('should return false for "False"')
  expect(output2).toMatchSnapshot('should return false for "FALSE"')
  expect(output3).toMatchSnapshot('should return false for "false!"')
})

test('parseBooleanOutput - throws error for invalid outputs', () => {
  assert.throws(() => {
    parseBooleanOutput('NotBooleanValue')
  })
})

test('parseNumberOutput - handles integer outputs correctly', () => {
  const output1 = parseNumberOutput('42', z.number().int())
  const output2 = parseNumberOutput(' -5 ', z.number().int())

  expect(output1).toMatchSnapshot('should return 42 for "42"')
  expect(output2).toMatchSnapshot('should return -5 for " -5 "')
})

test('parseNumberOutput - handles float outputs correctly', () => {
  const output1 = parseNumberOutput('42.42', z.number())
  const output2 = parseNumberOutput(' -5.5 ', z.number())

  expect(output1).toMatchSnapshot('should return 42.42 for "42.42"')
  expect(output2).toMatchSnapshot('should return -5.5 for " -5.5 "')
})

test('parseNumberOutput - throws error for invalid outputs', () => {
  assert.throws(() => {
    parseNumberOutput('NotANumber', z.number())
  })
})

test('parseStructuredOutput - handles arrays correctly', () => {
  const arraySchema = z.array(z.number())
  const output = '[1, 2, 3]'
  const result = parseStructuredOutput(output, arraySchema)

  expect(result).toMatchSnapshot(
    'should parse and return [1, 2, 3] for "[1, 2, 3]"'
  )
})

test('parseStructuredOutput - handles objects correctly', () => {
  const objectSchema = z.object({ a: z.number(), b: z.string() })
  const output = '{"a": 1, "b": "two"}'
  const result = parseStructuredOutput(output, objectSchema)

  expect(result).toMatchSnapshot(
    'should parse and return {"a": 1, "b": "two"} for "{"a": 1, "b": "two"}"'
  )
})

test('parseStructuredOutput - handles booleans correctly', () => {
  const booleanSchema = z.boolean()
  const output = 'True'
  const result = parseStructuredOutput(output, booleanSchema)

  expect(result).toMatchSnapshot('should parse and return true for "True"')
})

test('parseStructuredOutput - handles numbers correctly', () => {
  const numberSchema = z.number()
  const output = '123.45'
  const result = parseStructuredOutput(output, numberSchema)

  expect(result).toMatchSnapshot('should parse and return 123.45 for "123.45"')
})

test('parseStructuredOutput - throws error for invalid data', () => {
  const numberSchema = z.number()
  const output = 'not a number'

  assert.throws(() => {
    parseStructuredOutput(output, numberSchema)
  })
})
@@ -0,0 +1,283 @@
import type { JsonValue } from 'type-fest'
import { jsonrepair, JSONRepairError } from 'jsonrepair'
import { z, type ZodType } from 'zod'

import { ParseError } from './errors.js'

export type SafeParseResult<T> =
  | {
      success: true
      data: T
      error?: never
    }
  | {
      success: false
      data?: never
      error: string
    }

/**
 * Parses a string which is expected to contain a structured JSON value.
 *
 * The JSON value is fuzzily parsed in order to support common issues like
 * missing commas, trailing commas, and unquoted keys.
 *
 * The JSON value is then parsed against a `zod` schema to enforce the shape of
 * the output.
 *
 * @param output - string to parse
 * @param outputSchema - zod schema
 *
 * @returns parsed output
 */
export function parseStructuredOutput<T>(
  output: string,
  outputSchema: ZodType<T>
): T {
  let result
  if (outputSchema instanceof z.ZodArray) {
    result = parseArrayOutput(output)
  } else if (outputSchema instanceof z.ZodObject) {
    result = parseObjectOutput(output)
  } else if (outputSchema instanceof z.ZodBoolean) {
    result = parseBooleanOutput(output)
  } else if (outputSchema instanceof z.ZodNumber) {
    result = parseNumberOutput(output, outputSchema)
  } else {
    // Default to string output...
    result = output
  }

  // TODO: fix typescript issue here with recursive types
  const safeResult = (outputSchema.safeParse as any)(result)

  if (!safeResult.success) {
    throw new ParseError(safeResult.error)
  }

  return safeResult.data
}

export function safeParseStructuredOutput<T>(
  output: string,
  outputSchema: ZodType<T>
): SafeParseResult<T> {
  try {
    const data = parseStructuredOutput<T>(output, outputSchema)
    return {
      success: true,
      data
    }
  } catch (err: any) {
    return {
      success: false,
      error: err.message
    }
  }
}

/**
 * Checks if character at the specified index in a string is escaped.
 *
 * @param str - string to check
 * @param i - index of the character to check
 * @returns whether the character is escaped
 */
function isEscaped(str: string, i: number): boolean {
  return i > 0 && str[i - 1] === '\\' && !(i > 1 && str[i - 2] === '\\')
}

/**
 * Extracts JSON objects or arrays from a string.
 *
 * @param input - string to extract JSON from
 * @param jsonStructureType - type of JSON structure to extract
 * @returns array of extracted JSON objects or arrays
 */
export function extractJSONFromString(
  input: string,
  jsonStructureType: 'object' | 'array'
) {
  const startChar = jsonStructureType === 'object' ? '{' : '['
  const endChar = jsonStructureType === 'object' ? '}' : ']'
  const extractedJSONValues: JsonValue[] = []
  let nestingLevel = 0
  let startIndex = 0
  const isInsideQuoted = { '"': false, "'": false }

  for (let i = 0; i < input.length; i++) {
    const ch = input.charAt(i)
    switch (ch) {
      case '"':
      case "'":
        if (!isInsideQuoted[ch === '"' ? "'" : '"'] && !isEscaped(input, i)) {
          isInsideQuoted[ch] = !isInsideQuoted[ch]
        }

        break

      default:
        if (!isInsideQuoted['"'] && !isInsideQuoted["'"]) {
          switch (ch) {
            case startChar:
              if (nestingLevel === 0) {
                startIndex = i
              }

              nestingLevel += 1

              break

            case endChar:
              nestingLevel -= 1
              if (nestingLevel === 0) {
                const candidate = input.slice(startIndex, i + 1)
                const parsed = JSON.parse(jsonrepair(candidate))
                if (parsed && typeof parsed === 'object') {
                  extractedJSONValues.push(parsed as JsonValue)
                }
              } else if (nestingLevel < 0) {
                throw new ParseError(
                  `Invalid JSON string: unexpected ${endChar} at position ${i}`
                )
              }
          }
        }
    }
  }

  if (nestingLevel !== 0) {
    throw new ParseError(
      'Invalid JSON string: unmatched ' + startChar + ' or ' + endChar
    )
  }

  return extractedJSONValues
}

const BOOLEAN_OUTPUTS: Record<string, boolean> = {
  true: true,
  false: false,
  t: true,
  f: false,
  yes: true,
  no: false,
  y: true,
  n: false,
  '1': true,
  '0': false
}

/**
 * Parses an array output from a string.
 *
 * @param output - string to parse
 * @returns parsed array
 */
export function parseArrayOutput(output: string): Array<any> {
  try {
    const arrayOutput = extractJSONFromString(output, 'array')
    if (arrayOutput.length === 0) {
      throw new ParseError(`Invalid JSON array: ${output}`)
    }

    const parsedOutput = arrayOutput[0]
    if (!Array.isArray(parsedOutput)) {
      throw new ParseError(
        `Invalid JSON array: ${JSON.stringify(parsedOutput)}`
      )
    }

    return parsedOutput
  } catch (err: any) {
    if (err instanceof JSONRepairError) {
      throw new ParseError(err.message, { cause: err })
    } else if (err instanceof SyntaxError) {
      throw new ParseError(`Invalid JSON array: ${err.message}`, { cause: err })
    } else {
      throw err
    }
  }
}

/**
 * Parses an object output from a string.
 *
 * @param output - string to parse
 * @returns parsed object
 */
export function parseObjectOutput(output: string) {
  try {
    const arrayOutput = extractJSONFromString(output, 'object')
    if (arrayOutput.length === 0) {
      throw new ParseError(`Invalid JSON object: ${output}`)
    }

    let parsedOutput = arrayOutput[0]
    if (Array.isArray(parsedOutput)) {
      // TODO
      parsedOutput = parsedOutput[0]
    } else if (typeof parsedOutput !== 'object') {
      throw new ParseError(
        `Invalid JSON object: ${JSON.stringify(parsedOutput)}`
      )
    }

    return parsedOutput
  } catch (err: any) {
    if (err instanceof JSONRepairError) {
      throw new ParseError(err.message, { cause: err })
    } else if (err instanceof SyntaxError) {
      throw new ParseError(`Invalid JSON object: ${err.message}`, {
        cause: err
      })
    } else {
      throw err
    }
  }
}

/**
 * Parses a boolean output from a string.
 *
 * @param output - string to parse
 * @returns parsed boolean
 */
export function parseBooleanOutput(output: string): boolean {
  output = output
    .toLowerCase()
    .trim()
    .replace(/[!.?]+$/, '')

  const booleanOutput = BOOLEAN_OUTPUTS[output]

  if (booleanOutput === undefined) {
    throw new ParseError(`Invalid boolean output: ${output}`)
  } else {
    return booleanOutput
  }
}

/**
 * Parses a number output from a string.
 *
 * @param output - string to parse
 * @param outputSchema - zod number schema
 * @returns parsed number
 */
export function parseNumberOutput(
  output: string,
  outputSchema: z.ZodNumber
): number {
  output = output.trim()

  const numberOutput = outputSchema.isInt
    ? Number.parseInt(output)
    : Number.parseFloat(output)

  if (Number.isNaN(numberOutput)) {
    throw new ParseError(`Invalid number output: ${output}`)
  }

  return numberOutput
}
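A short usage sketch (not part of the commit) of the helpers above; the model output string is made up, and everything else uses only the APIs defined in this file plus `zod`.

```ts
import { z } from 'zod'

import { safeParseStructuredOutput } from './parse-structured-output.js'

const PersonSchema = z.object({ name: z.string(), age: z.number() })

// Typical LLM output: JSON buried in prose, with a trailing comma that jsonrepair fixes.
const llmOutput =
  'Sure! Here you go: {"name": "Ada", "age": 36,} Let me know if that helps.'

const result = safeParseStructuredOutput(llmOutput, PersonSchema)

if (result.success) {
  // result.data is typed as { name: string; age: number }
  console.log(result.data.name, result.data.age)
} else {
  console.error('parse failed:', result.error)
}
```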
@@ -0,0 +1 @@
import '@total-typescript/ts-reset'
@@ -0,0 +1,37 @@
import { type Prompt } from '@dexaai/dexter'
import defaultKy, { type KyInstance } from 'ky'

import { assert, getEnv } from '../utils.js'

export class DexaClient {
  readonly apiKey: string
  readonly apiBaseUrl: string
  readonly ky: KyInstance

  constructor({
    apiKey = getEnv('DEXA_API_KEY'),
    apiBaseUrl = getEnv('DEXA_API_BASE_URL') ?? 'https://dexa.ai',
    ky = defaultKy
  }: {
    apiKey?: string
    apiBaseUrl?: string
    ky?: KyInstance
  } = {}) {
    assert(apiKey, 'DEXA_API_KEY is required')

    this.apiKey = apiKey
    this.apiBaseUrl = apiBaseUrl
    this.ky = ky.extend({ prefixUrl: this.apiBaseUrl, timeout: 60_000 })
  }

  async generateResponse({ messages }: { messages: Prompt.Msg[] }) {
    return this.ky
      .post('api/ask-dexa', {
        json: {
          secret: this.apiKey,
          messages
        }
      })
      .json<string>()
  }
}
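A hypothetical usage sketch (not part of the commit): it assumes `DEXA_API_KEY` is set in the environment, that `Prompt.Msg` follows the usual role/content shape, and the import path is illustrative.

```ts
import { DexaClient } from './services/dexa-client.js'

const dexa = new DexaClient()

// POSTs to {DEXA_API_BASE_URL}/api/ask-dexa and resolves with the answer string.
const answer = await dexa.generateResponse({
  messages: [{ role: 'user', content: 'What did the latest episode cover?' }]
})

console.log(answer)
```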
@@ -0,0 +1,5 @@
import '../config.js'

import { OpenAI } from 'openai'

export const openaiClient = new OpenAI()
@@ -0,0 +1,67 @@
import defaultKy, { type KyInstance } from 'ky'

export type ScrapeResult = {
  author: string
  byline: string
  /** The HTML for the main content of the page. */
  content: string
  description: string
  imageUrl: string
  lang: string
  length: number
  logoUrl: string
  /** The text for the main content of the page in markdown format. */
  markdownContent: string
  publishedTime: string
  /** The raw HTML response from the server. */
  rawHtml: string
  siteName: string
  /** The text for the main content of the page. */
  textContent: string
  title: string
}

/**
 * This is a single endpoint API for scraping websites. It returns the HTML,
 * markdown, and plaintext for main body content of the page, as well as
 * metadata like title and description.
 *
 * It tries the simplest and fastest methods first, and falls back to slower
 * proxies and JavaScript rendering if needed.
 */
export class ScraperClient {
  readonly apiBaseUrl: string
  readonly ky: KyInstance

  constructor({
    apiBaseUrl = process.env.SCRAPER_API_BASE_URL,
    ky = defaultKy
  }: {
    apiKey?: string
    apiBaseUrl?: string
    ky?: KyInstance
  } = {}) {
    if (!apiBaseUrl) {
      throw new Error('SCRAPER_API_BASE_URL is required')
    }

    this.apiBaseUrl = apiBaseUrl
    this.ky = ky.extend({ prefixUrl: this.apiBaseUrl })
  }

  async scrapeUrl(
    url: string,
    {
      timeout = 60_000
    }: {
      timeout?: number
    } = {}
  ): Promise<ScrapeResult> {
    return this.ky
      .post('scrape', {
        json: { url },
        timeout
      })
      .json()
  }
}
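A hypothetical usage sketch (not part of the commit), assuming `SCRAPER_API_BASE_URL` points at a running scraper service; the import path is illustrative.

```ts
import { ScraperClient } from './services/scraper-client.js'

const scraper = new ScraperClient()

// POSTs { url } to {SCRAPER_API_BASE_URL}/scrape and returns the parsed ScrapeResult.
const res = await scraper.scrapeUrl('https://example.com', { timeout: 30_000 })

console.log(res.title)
console.log(res.markdownContent.slice(0, 200))
```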
@ -0,0 +1,699 @@
|
||||||
|
import defaultKy, { type KyInstance } from 'ky'
|
||||||
|
import { z } from 'zod'
|
||||||
|
|
||||||
|
import { aiFunction, AIToolsProvider } from '../fns.js'
|
||||||
|
import { getEnv } from '../utils.js'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* All types have been exported from the `serpapi` package, which we're
|
||||||
|
* not using directly because it is bloated and has compatibility issues.
|
||||||
|
*/
|
||||||
|
|
||||||
|
export namespace serpapi {
|
||||||
|
export type BaseResponse<P = Record<string | number | symbol, never>> = {
|
||||||
|
search_metadata: {
|
||||||
|
id: string
|
||||||
|
status: string | 'Queued' | 'Processing' | 'Success'
|
||||||
|
json_endpoint: string
|
||||||
|
created_at: string
|
||||||
|
processed_at: string
|
||||||
|
raw_html_file: string
|
||||||
|
total_time_taken: number
|
||||||
|
}
|
||||||
|
search_parameters: {
|
||||||
|
engine: string
|
||||||
|
} & Omit<BaseParameters & P, 'api_key' | 'no_cache' | 'async' | 'timeout'>
|
||||||
|
serpapi_pagination?: {
|
||||||
|
next: string
|
||||||
|
}
|
||||||
|
pagination?: {
|
||||||
|
next: string
|
||||||
|
}
|
||||||
|
[key: string]: any
|
||||||
|
}
|
||||||
|
|
||||||
|
export type BaseParameters = {
|
||||||
|
/**
|
||||||
|
* Parameter defines the device to use to get the results. It can be set to
|
||||||
|
* `desktop` (default) to use a regular browser, `tablet` to use a tablet browser
|
||||||
|
* (currently using iPads), or `mobile` to use a mobile browser (currently
|
||||||
|
* using iPhones).
|
||||||
|
*/
|
||||||
|
device?: 'desktop' | 'tablet' | 'mobile'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parameter will force SerpApi to fetch the Google results even if a cached
|
||||||
|
* version is already present. A cache is served only if the query and all
|
||||||
|
* parameters are exactly the same. Cache expires after 1h. Cached searches
|
||||||
|
* are free, and are not counted towards your searches per month. It can be set
|
||||||
|
* to `false` (default) to allow results from the cache, or `true` to disallow
|
||||||
|
* results from the cache. `no_cache` and `async` parameters should not be used together.
|
||||||
|
*/
|
||||||
|
no_cache?: boolean
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parameter defines the way you want to submit your search to SerpApi. It can
|
||||||
|
* be set to `false` (default) to open an HTTP connection and keep it open until
|
||||||
|
* you get your search results, or `true` to just submit your search to SerpApi
|
||||||
|
* and retrieve them later. In this case, you'll need to use our
|
||||||
|
* [Searches Archive API](https://serpapi.com/search-archive-api) to retrieve
|
||||||
|
* your results. `async` and `no_cache` parameters should not be used together.
|
||||||
|
* `async` should not be used on accounts with
|
||||||
|
* [Ludicrous Speed](https://serpapi.com/plan) enabled.
|
||||||
|
*/
|
||||||
|
async?: boolean
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parameter defines the SerpApi private key to use.
|
||||||
|
*/
|
||||||
|
api_key?: string | null
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specify the client-side timeout of the request. In milliseconds.
|
||||||
|
*/
|
||||||
|
timeout?: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export type GoogleParameters = BaseParameters & {
|
||||||
|
/**
|
||||||
|
* Search Query
|
||||||
|
* Parameter defines the query you want to search. You can use anything that you
|
||||||
|
* would use in a regular Google search. e.g. `inurl:`, `site:`, `intitle:`. We
|
||||||
|
* also support advanced search query parameters such as as_dt and as_eq. See the
|
||||||
|
* [full list](https://serpapi.com/advanced-google-query-parameters) of supported
|
||||||
|
* advanced search query parameters.
|
||||||
|
*/
|
||||||
|
q: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Location
|
||||||
|
* Parameter defines from where you want the search to originate. If several
|
||||||
|
* locations match the location requested, we'll pick the most popular one. Head to
|
||||||
|
* the [/locations.json API](https://serpapi.com/locations-api) if you need more
|
||||||
|
* precise control. location and uule parameters can't be used together. Avoid
|
||||||
|
* utilizing location when setting the location outside the U.S. when using Google
|
||||||
|
* Shopping and/or Google Product API.
|
||||||
|
*/
|
||||||
|
location?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Encoded Location
|
||||||
|
* Parameter is the Google encoded location you want to use for the search. uule
|
||||||
|
* and location parameters can't be used together.
|
||||||
|
*/
|
||||||
|
uule?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Google Place ID
|
||||||
|
* Parameter defines the id (`CID`) of the Google My Business listing you want to
|
||||||
|
* scrape. Also known as Google Place ID.
|
||||||
|
*/
|
||||||
|
ludocid?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Additional Google Place ID
|
||||||
|
* Parameter that you might have to use to force the knowledge graph map view to
|
||||||
|
* show up. You can find the lsig ID by using our [Local Pack
|
||||||
|
* API](https://serpapi.com/local-pack) or [Places Results
|
||||||
|
* API](https://serpapi.com/places-results).
|
||||||
|
* lsig ID is also available via a redirect Google uses within [Google My
|
||||||
|
* Business](https://www.google.com/business/).
|
||||||
|
*/
|
||||||
|
lsig?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Google Knowledge Graph ID
|
||||||
|
* Parameter defines the id (`KGMID`) of the Google Knowledge Graph listing you
|
||||||
|
* want to scrape. Also known as Google Knowledge Graph ID. Searches with kgmid
|
||||||
|
* parameter will return results for the originally encrypted search parameters.
|
||||||
|
* For some searches, kgmid may override all other parameters except start, and num
|
||||||
|
* parameters.
|
||||||
|
*/
|
||||||
|
kgmid?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Google Cached Search Parameters ID
|
||||||
|
* Parameter defines the cached search parameters of the Google Search you want to
|
||||||
|
* scrape. Searches with si parameter will return results for the originally
|
||||||
|
* encrypted search parameters. For some searches, si may override all other
|
||||||
|
* parameters except start, and num parameters. si can be used to scrape Google
|
||||||
|
* Knowledge Graph Tabs.
|
||||||
|
*/
|
||||||
|
si?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Domain
|
||||||
|
* Parameter defines the Google domain to use. It defaults to `google.com`. Head to
|
||||||
|
* the [Google domains page](https://serpapi.com/google-domains) for a full list of
|
||||||
|
* supported Google domains.
|
||||||
|
*/
|
||||||
|
google_domain?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Country
|
||||||
|
* Parameter defines the country to use for the Google search. It's a two-letter
|
||||||
|
* country code. (e.g., `us` for the United States, `uk` for United Kingdom, or
|
||||||
|
* `fr` for France). Head to the [Google countries
|
||||||
|
* page](https://serpapi.com/google-countries) for a full list of supported Google
|
||||||
|
* countries.
|
||||||
|
*/
|
||||||
|
gl?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Language
|
||||||
|
* Parameter defines the language to use for the Google search. It's a two-letter
|
||||||
|
* language code. (e.g., `en` for English, `es` for Spanish, or `fr` for French).
|
||||||
|
* Head to the [Google languages page](https://serpapi.com/google-languages) for a
|
||||||
|
* full list of supported Google languages.
|
||||||
|
*/
|
||||||
|
hl?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set Multiple Languages
|
||||||
|
* Parameter defines one or multiple languages to limit the search to. It uses
|
||||||
|
* `lang_{two-letter language code}` to specify languages and `|` as a delimiter.
|
||||||
|
* (e.g., `lang_fr|lang_de` will only search French and German pages). Head to the
|
||||||
|
* [Google lr languages page](https://serpapi.com/google-lr-languages) for a full
|
||||||
|
* list of supported languages.
|
||||||
|
*/
|
||||||
|
lr?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* as_dt
|
||||||
|
* Parameter controls whether to include or exclude results from the site named in
|
||||||
|
* the as_sitesearch parameter.
|
||||||
|
*/
|
||||||
|
as_dt?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* as_epq
|
||||||
|
* Parameter identifies a phrase that all documents in the search results must
|
||||||
|
* contain. You can also use the [phrase
|
||||||
|
* search](https://developers.google.com/custom-search/docs/xml_results#PhraseSearchqt)
|
||||||
|
* query term to search for a phrase.
|
||||||
|
*/
|
||||||
|
as_epq?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* as_eq
|
||||||
|
* Parameter identifies a word or phrase that should not appear in any documents in
|
||||||
|
* the search results. You can also use the [exclude
|
||||||
|
* query](https://developers.google.com/custom-search/docs/xml_results#Excludeqt)
|
||||||
|
* term to ensure that a particular word or phrase will not appear in the documents
|
||||||
|
* in a set of search results.
|
||||||
|
*/
|
||||||
|
as_eq?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* as_lq
|
||||||
|
* Parameter specifies that all search results should contain a link to a
|
||||||
|
* particular URL. You can also use the
|
||||||
|
* [link:](https://developers.google.com/custom-search/docs/xml_results#BackLinksqt)
|
||||||
|
* query term for this type of query.
|
||||||
|
*/
|
||||||
|
as_lq?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* as_nlo
|
||||||
|
* Parameter specifies the starting value for a search range. Use as_nlo and as_nhi
|
||||||
|
* to append an inclusive search range.
|
||||||
|
*/
|
||||||
|
as_nlo?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* as_nhi
|
||||||
|
* Parameter specifies the ending value for a search range. Use as_nlo and as_nhi
|
||||||
|
* to append an inclusive search range.
|
||||||
|
*/
|
||||||
|
as_nhi?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* as_oq
|
||||||
|
* Parameter provides additional search terms to check for in a document, where
|
||||||
|
* each document in the search results must contain at least one of the additional
|
||||||
|
* search terms. You can also use the [Boolean
|
||||||
|
* OR](https://developers.google.com/custom-search/docs/xml_results#BooleanOrqt)
|
||||||
|
* query term for this type of query.
|
||||||
|
*/
|
||||||
|
as_oq?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* as_q
|
||||||
|
* Parameter provides search terms to check for in a document. This parameter is
|
||||||
|
* also commonly used to allow users to specify additional terms to search for
|
||||||
|
* within a set of search results.
|
||||||
|
*/
|
||||||
|
as_q?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* as_qdr
|
||||||
|
* Parameter requests search results from a specified time period (quick date
|
||||||
|
* range). The following values are supported:
|
||||||
|
* `d[number]`: requests results from the specified number of past days. Example
|
||||||
|
* for the past 10 days: `as_qdr=d10`
|
||||||
|
* `w[number]`: requests results from the specified number of past weeks.
|
||||||
|
* `m[number]`: requests results from the specified number of past months.
|
||||||
|
* `y[number]`: requests results from the specified number of past years. Example
|
||||||
|
* for the past year: `as_qdr=y`
|
||||||
|
*/
|
||||||
|
as_qdr?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* as_rq
|
||||||
|
* Parameter specifies that all search results should be pages that are related to
|
||||||
|
* the specified URL. The parameter value should be a URL. You can also use the
|
||||||
|
* [related:](https://developers.google.com/custom-search/docs/xml_results#RelatedLinksqt)
|
||||||
|
* query term for this type of query.
|
||||||
|
*/
|
||||||
|
as_rq?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* as_sitesearch
|
||||||
|
* Parameter allows you to specify that all search results should be pages from a
|
||||||
|
* given site. By setting the as_dt parameter, you can also use it to exclude pages
|
||||||
|
* from a given site from your search results.
|
||||||
|
*/
|
||||||
|
as_sitesearch?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Advanced Search Parameters
|
||||||
|
* (to be searched) parameter defines advanced search parameters that aren't
|
||||||
|
* possible in the regular query field. (e.g., advanced search for patents, dates,
|
||||||
|
* news, videos, images, apps, or text contents).
|
||||||
|
*/
|
||||||
|
tbs?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adult Content Filtering
|
||||||
|
* Parameter defines the level of filtering for adult content. It can be set to
|
||||||
|
* `active`, or `off` (default).
|
||||||
|
*/
|
||||||
|
safe?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Exclude Auto-corrected Results
|
||||||
|
* Parameter defines the exclusion of results from an auto-corrected query that is
|
||||||
|
* spelled wrong. It can be set to `1` to exclude these results, or `0` to include
|
||||||
|
* them (default).
|
||||||
|
*/
|
||||||
|
nfpr?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Results Filtering
|
||||||
|
* Parameter defines if the filters for 'Similar Results' and 'Omitted Results' are
|
||||||
|
* on or off. It can be set to `1` (default) to enable these filters, or `0` to
|
||||||
|
* disable these filters.
|
||||||
|
*/
|
||||||
|
filter?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Search Type
|
||||||
|
* (to be matched) parameter defines the type of search you want to do.
|
||||||
|
* It can be set to:
|
||||||
|
* `(no tbm parameter)`: regular Google Search,
|
||||||
|
* `isch`: [Google Images API](https://serpapi.com/images-results),
|
||||||
|
* `lcl` - [Google Local API](https://serpapi.com/local-results)
|
||||||
|
* `vid`: [Google Videos API](https://serpapi.com/videos-results),
|
||||||
|
* `nws`: [Google News API](https://serpapi.com/news-results),
|
||||||
|
* `shop`: [Google Shopping API](https://serpapi.com/shopping-results),
|
||||||
|
* or any other Google service.
|
||||||
|
*/
|
||||||
|
tbm?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result Offset
|
||||||
|
* Parameter defines the result offset. It skips the given number of results. It's
|
||||||
|
* used for pagination. (e.g., `0` (default) is the first page of results, `10` is
|
||||||
|
* the 2nd page of results, `20` is the 3rd page of results, etc.).
|
||||||
|
* Google Local Results only accepts multiples of `20`(e.g. `20` for the second
|
||||||
|
* page results, `40` for the third page results, etc.) as the start value.
|
||||||
|
*/
|
||||||
|
start?: number
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Number of Results
|
||||||
|
* Parameter defines the maximum number of results to return. (e.g., `10` (default)
|
||||||
|
* returns 10 results, `40` returns 40 results, and `100` returns 100 results).
|
||||||
|
*/
|
||||||
|
num?: number
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Page Number (images)
|
||||||
|
* Parameter defines the page number for [Google
|
||||||
|
* Images](https://serpapi.com/images-results). There are 100 images per page. This
|
||||||
|
* parameter is equivalent to start (offset) = ijn * 100. This parameter works only
|
||||||
|
* for [Google Images](https://serpapi.com/images-results) (set tbm to `isch`).
|
||||||
|
*/
|
||||||
|
ijn?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SearchResult extends BaseResponse<GoogleParameters> {
|
||||||
|
search_metadata: SearchMetadata
|
||||||
|
search_parameters: SearchParameters
|
||||||
|
search_information: SearchInformation
|
||||||
|
local_map?: LocalMap
|
||||||
|
local_results?: LocalResults
|
||||||
|
answer_box?: AnswerBox
|
||||||
|
knowledge_graph?: KnowledgeGraph
|
||||||
|
inline_images?: InlineImage[]
|
||||||
|
inline_people_also_search_for?: InlinePeopleAlsoSearchFor[]
|
||||||
|
related_questions?: SearchResultRelatedQuestion[]
|
||||||
|
organic_results?: OrganicResult[]
|
||||||
|
related_searches?: RelatedSearch[]
|
||||||
|
pagination: Pagination
|
||||||
|
serpapi_pagination: Pagination
|
||||||
|
twitter_results?: TwitterResults
|
||||||
|
}
|
||||||
|
|
||||||
|
interface TwitterResults {
|
||||||
|
title: string
|
||||||
|
link: string
|
||||||
|
displayed_link: string
|
||||||
|
tweets: Tweet[]
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Tweet {
|
||||||
|
link: string
|
||||||
|
snippet: string
|
||||||
|
published_date: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface AnswerBox {
|
||||||
|
type: string
|
||||||
|
title: string
|
||||||
|
link: string
|
||||||
|
displayed_link: string
|
||||||
|
snippet: string
|
||||||
|
snippet_highlighted_words: string[]
|
||||||
|
images: string[]
|
||||||
|
about_this_result: AboutThisResult
|
||||||
|
about_page_link: string
|
||||||
|
cached_page_link: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface InlineImage {
|
||||||
|
link: string
|
||||||
|
source: string
|
||||||
|
thumbnail: string
|
||||||
|
original: string
|
||||||
|
source_name: string
|
||||||
|
title?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface InlinePeopleAlsoSearchFor {
|
||||||
|
title: string
|
||||||
|
items: SearchItem[]
|
||||||
|
see_more_link: string
|
||||||
|
see_more_serpapi_link: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface SearchItem {
|
||||||
|
name: string
|
||||||
|
image: string
|
||||||
|
link: string
|
||||||
|
serpapi_link: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface KnowledgeGraph {
|
||||||
|
type: string
|
||||||
|
kgmid: string
|
||||||
|
knowledge_graph_search_link: string
|
||||||
|
serpapi_knowledge_graph_search_link: string
|
||||||
|
header_images: HeaderImage[]
|
||||||
|
description: string
|
||||||
|
source: Source
|
||||||
|
buttons: Button[]
|
||||||
|
people_also_search_for: SearchItem[]
|
||||||
|
people_also_search_for_link: string
|
||||||
|
people_also_search_for_stick: string
|
||||||
|
list: { [key: string]: string[] }
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Button {
|
||||||
|
text: string
|
||||||
|
subtitle: string
|
||||||
|
title: string
|
||||||
|
link: string
|
||||||
|
displayed_link: string
|
||||||
|
snippet?: string
|
||||||
|
snippet_highlighted_words?: string[]
|
||||||
|
answer?: string
|
||||||
|
thumbnail: string
|
||||||
|
search_link: string
|
||||||
|
serpapi_search_link: string
|
||||||
|
date?: string
|
||||||
|
list?: string[]
|
||||||
|
}
|
||||||
|
|
||||||
|
interface HeaderImage {
|
||||||
|
image: string
|
||||||
|
source: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Source {
|
||||||
|
name: string
|
||||||
|
link: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface LocalMap {
|
||||||
|
link: string
|
||||||
|
image: string
|
||||||
|
gps_coordinates: LocalMapGpsCoordinates
|
||||||
|
}
|
||||||
|
|
||||||
|
interface LocalMapGpsCoordinates {
|
||||||
|
latitude: number
|
||||||
|
longitude: number
|
||||||
|
altitude: number
|
||||||
|
}
|
||||||
|
|
||||||
|
interface LocalResults {
|
||||||
|
places: Place[]
|
||||||
|
more_locations_link: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Place {
|
||||||
|
position: number
|
||||||
|
title: string
|
||||||
|
rating?: number
|
||||||
|
reviews_original?: string
|
||||||
|
reviews?: number
|
||||||
|
place_id: string
|
||||||
|
place_id_search: string
|
||||||
|
lsig: string
|
||||||
|
thumbnail: string
|
||||||
|
gps_coordinates: PlaceGpsCoordinates
|
||||||
|
service_options: ServiceOptions
|
||||||
|
address?: string
|
||||||
|
type?: string
|
||||||
|
hours?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface PlaceGpsCoordinates {
|
||||||
|
latitude: number
|
||||||
|
longitude: number
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ServiceOptions {
|
||||||
|
dine_in?: boolean
|
||||||
|
takeout: boolean
|
    no_delivery?: boolean
  }

  interface OrganicResult {
    position: number
    title: string
    link: string
    displayed_link: string
    thumbnail?: string
    favicon?: string
    snippet: string
    snippet_highlighted_words: string[]
    sitelinks?: Sitelinks
    rich_snippet?: RichSnippet
    about_this_result: AboutThisResult
    cached_page_link: string
    related_pages_link?: string
    source: string
    related_results?: RelatedResult[]
    date?: string
    related_questions?: OrganicResultRelatedQuestion[]
  }

  interface AboutThisResult {
    keywords: string[]
    languages: string[]
    regions: string[]
  }

  interface OrganicResultRelatedQuestion {
    question: string
    snippet: string
    snippet_links: SnippetLink[]
  }

  interface SnippetLink {
    text: string
    link: string
  }

  interface RelatedResult {
    position: number
    title: string
    link: string
    displayed_link: string
    snippet: string
    snippet_highlighted_words: string[]
    about_this_result: AboutThisResult
    cached_page_link: string
  }

  interface RichSnippet {
    bottom: Bottom
  }

  interface Bottom {
    extensions?: string[]
    questions?: string[]
  }

  interface Sitelinks {
    inline: Inline[]
  }

  interface Inline {
    title: string
    link: string
  }

  interface Pagination {
    current: number
    next: string
    other_pages: { [key: string]: string }
    next_link?: string
  }

  interface SearchResultRelatedQuestion {
    question: string
    snippet: string
    title: string
    link: string
    displayed_link: string
    thumbnail: string
    next_page_token: string
    serpapi_link: string
    date?: string
  }

  interface RelatedSearch {
    query: string
    link: string
  }

  interface SearchInformation {
    organic_results_state: string
    query_displayed: string
    total_results: number
    time_taken_displayed: number
    menu_items: MenuItem[]
  }

  interface MenuItem {
    position: number
    title: string
    link: string
    serpapi_link?: string
  }

  interface SearchMetadata {
    id: string
    status: string
    json_endpoint: string
    created_at: string
    processed_at: string
    google_url: string
    raw_html_file: string
    total_time_taken: number
  }

  interface SearchParameters {
    engine: string
    q: string
    google_domain: string
    device?: 'desktop' | 'tablet' | 'mobile'
  }

  export type Params = Omit<GoogleParameters, 'q'>

  export interface ClientOptions extends Partial<Params> {
    apiKey?: string
    apiBaseUrl?: string
    ky?: KyInstance
  }

  export const BASE_URL = 'https://serpapi.com'
}

/**
 * Lightweight wrapper around SerpAPI for Google search.
 *
 * @see https://serpapi.com/search-api
 */
export class SerpAPIClient extends AIToolsProvider {
  protected api: KyInstance
  protected apiKey: string
  protected apiBaseUrl: string
  protected params: Partial<serpapi.Params>

  constructor({
    apiKey = getEnv('SERPAPI_API_KEY') ?? getEnv('SERP_API_KEY'),
    apiBaseUrl = serpapi.BASE_URL,
    ky = defaultKy,
    ...params
  }: serpapi.ClientOptions = {}) {
    if (!apiKey) {
      throw new Error(`SerpAPIClient missing required "apiKey"`)
    }

    super()

    this.apiKey = apiKey
    this.apiBaseUrl = apiBaseUrl
    this.params = params

    this.api = ky.extend({
      prefixUrl: this.apiBaseUrl
    })
  }

  @aiFunction({
    name: 'serpapiGoogleSearch',
    description:
      'Uses Google Search to return the most relevant web pages for a given query. Can also be used to find up-to-date news and information about many topics.',
    schema: z.object({
      q: z.string().describe('search query'),
      num: z.number().int().positive().default(5).optional()
    })
  })
  async search(queryOrOpts: string | serpapi.GoogleParameters) {
    const defaultGoogleParams: Partial<serpapi.GoogleParameters> = {}
    const options: serpapi.GoogleParameters =
      typeof queryOrOpts === 'string'
        ? { ...defaultGoogleParams, q: queryOrOpts }
        : queryOrOpts
    const { timeout, ...rest } = this.params

    // console.log('SerpAPIClient.search', options)
    return this.api
      .get('search', {
        searchParams: {
          ...rest,
          engine: 'google',
          api_key: this.apiKey,
          ...(options as any) // TODO
        },
        timeout
      })
      .json<serpapi.SearchResult>()
  }
}
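
A minimal usage sketch for SerpAPIClient, assuming SERPAPI_API_KEY (or SERP_API_KEY) is set in the environment; the import path below is a guess, not part of the diff.

// Hypothetical usage sketch; assumes SERPAPI_API_KEY (or SERP_API_KEY) is set.
import { SerpAPIClient } from './serpapi.js' // import path is an assumption

const serpapi = new SerpAPIClient()

// Pass either a raw query string or a full GoogleParameters object;
// the string form is expanded to { q: '...' } by the method above.
const res = await serpapi.search('latest TypeScript release')
console.log(JSON.stringify(res, null, 2))
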
@ -0,0 +1,274 @@
import defaultKy, { type KyInstance } from 'ky'
import { z } from 'zod'

import { aiFunction, AIToolsProvider } from '../fns.js'
import { getEnv } from '../utils.js'

export namespace serper {
  export const BASE_URL = 'https://google.serper.dev'

  export const SearchParamsSchema = z.object({
    q: z.string().describe('search query'),
    autocorrect: z.boolean().default(true).optional(),
    gl: z.string().default('us').optional(),
    hl: z.string().default('en').optional(),
    page: z.number().int().positive().default(1).optional(),
    num: z.number().int().positive().default(10).optional()
  })
  export type SearchParams = z.infer<typeof SearchParamsSchema>

  export interface SearchResponse {
    searchParameters: SearchParameters & { type: 'search' }
    organic: Organic[]
    answerBox?: AnswerBox
    knowledgeGraph?: KnowledgeGraph
    topStories?: TopStory[]
    peopleAlsoAsk?: PeopleAlsoAsk[]
    relatedSearches?: RelatedSearch[]
  }

  export interface SearchImagesResponse {
    searchParameters: SearchParameters & { type: 'images' }
    images: Image[]
  }

  export interface SearchVideosResponse {
    searchParameters: SearchParameters & { type: 'videos' }
    videos: Video[]
  }

  export interface SearchPlacesResponse {
    searchParameters: SearchParameters & { type: 'places' }
    places: Place[]
  }

  export interface SearchNewsResponse {
    searchParameters: SearchParameters & { type: 'news' }
    news: News[]
  }

  export interface SearchShoppingResponse {
    searchParameters: SearchParameters & { type: 'shopping' }
    shopping: Shopping[]
  }

  export type Response =
    | SearchResponse
    | SearchImagesResponse
    | SearchVideosResponse
    | SearchPlacesResponse
    | SearchNewsResponse
    | SearchShoppingResponse

  export interface KnowledgeGraph {
    title: string
    type: string
    website: string
    imageUrl: string
    description: string
    descriptionSource: string
    descriptionLink: string
    attributes: Record<string, string>
  }

  export interface Organic {
    title: string
    link: string
    snippet: string
    position: number
    imageUrl?: string
    sitelinks?: SiteLink[]
  }

  export interface AnswerBox {
    snippet: string
    snippetHighlighted?: string[]
    title: string
    link: string
    date?: string
    position?: number
  }

  export interface SiteLink {
    title: string
    link: string
  }

  export interface PeopleAlsoAsk {
    question: string
    snippet: string
    title: string
    link: string
  }

  export interface RelatedSearch {
    query: string
  }

  export interface SearchParameters {
    q: string
    gl: string
    hl: string
    num: number
    autocorrect: boolean
    page: number
    type: string
    engine: string
  }

  export interface TopStory {
    title: string
    link: string
    source: string
    date: string
    imageUrl: string
  }

  export interface Image {
    title: string
    imageUrl: string
    imageWidth: number
    imageHeight: number
    thumbnailUrl: string
    thumbnailWidth: number
    thumbnailHeight: number
    source: string
    domain: string
    link: string
    googleUrl: string
    position: number
  }

  export interface Video {
    title: string
    link: string
    snippet: string
    date: string
    imageUrl: string
    position: number
  }

  export interface Place {
    position: number
    title: string
    address: string
    latitude: number
    longitude: number
    category: string
    phoneNumber?: string
    website: string
    cid: string
    rating?: number
    ratingCount?: number
  }

  export interface News {
    title: string
    link: string
    snippet: string
    date: string
    source: string
    imageUrl: string
    position: number
  }

  export interface Shopping {
    title: string
    source: string
    link: string
    price: string
    imageUrl: string
    delivery?: Record<string, string>
    rating?: number
    ratingCount?: number
    offers?: string
    productId?: string
    position: number
  }

  export interface ClientOptions extends Omit<Partial<SearchParams>, 'q'> {
    apiKey?: string
    apiBaseUrl?: string
    ky?: KyInstance
  }
}

/**
 * Lightweight wrapper around Serper for Google search.
 *
 * @see https://serper.dev
 */
export class SerperClient extends AIToolsProvider {
  protected api: KyInstance
  protected apiKey: string
  protected apiBaseUrl: string
  protected params: Omit<Partial<serper.SearchParams>, 'q'>

  constructor({
    apiKey = getEnv('SERPER_API_KEY'),
    apiBaseUrl = serper.BASE_URL,
    ky = defaultKy,
    ...params
  }: serper.ClientOptions = {}) {
    if (!apiKey) {
      throw new Error(
        `SerperClient missing required "apiKey" (defaults to "SERPER_API_KEY" env var)`
      )
    }

    super()

    this.apiKey = apiKey
    this.apiBaseUrl = apiBaseUrl
    this.params = params

    this.api = ky.extend({
      prefixUrl: this.apiBaseUrl,
      headers: {
        'X-API-KEY': this.apiKey
      }
    })
  }

  @aiFunction({
    name: 'serperGoogleSearch',
    description:
      'Uses Google Search to return the most relevant web pages for a given query. Can also be used to find up-to-date news and information about many topics.',
    schema: serper.SearchParamsSchema
  })
  async search(queryOrOpts: string | serper.SearchParams) {
    return this._fetch<serper.SearchResponse>('search', queryOrOpts)
  }

  async searchImages(queryOrOpts: string | serper.SearchParams) {
    return this._fetch<serper.SearchImagesResponse>('images', queryOrOpts)
  }

  async searchVideos(queryOrOpts: string | serper.SearchParams) {
    return this._fetch<serper.SearchVideosResponse>('videos', queryOrOpts)
  }

  async searchPlaces(queryOrOpts: string | serper.SearchParams) {
    return this._fetch<serper.SearchPlacesResponse>('places', queryOrOpts)
  }

  async searchNews(queryOrOpts: string | serper.SearchParams) {
    return this._fetch<serper.SearchNewsResponse>('news', queryOrOpts)
  }

  async searchProducts(queryOrOpts: string | serper.SearchParams) {
    return this._fetch<serper.SearchShoppingResponse>('shopping', queryOrOpts)
  }

  protected async _fetch<T extends serper.Response>(
    endpoint: string,
    queryOrOpts: string | serper.SearchParams
  ) {
    const params = {
      ...this.params,
      ...(typeof queryOrOpts === 'string' ? { q: queryOrOpts } : queryOrOpts)
    }

    return this.api.post(endpoint, { json: params }).json<T>()
  }
}
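
A minimal usage sketch for SerperClient, assuming SERPER_API_KEY is set; the import path is hypothetical, and the response fields follow the interfaces defined above.

// Hypothetical usage sketch; assumes SERPER_API_KEY is set.
import { SerperClient } from './serper.js' // import path is an assumption

const serper = new SerperClient({ num: 5 })

const web = await serper.search('what is the capital of France?')
// Prefer the answer box when present, otherwise the top organic snippet.
console.log(web.answerBox?.snippet ?? web.organic[0]?.snippet)

const news = await serper.searchNews('typescript 5.5')
console.log(news.news.map((n) => n.title))
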
@ -0,0 +1,95 @@
import { Nango } from '@nangohq/node'
import { auth, Client as TwitterClient } from 'twitter-api-sdk'

import * as config from '../config.js'
import { assert } from '../utils.js'

// The Twitter+Nango client auth connection key
const nangoTwitterProviderConfigKey = 'twitter-v2'

// The Twitter OAuth2User class requires a client id, which we don't have
// since we're using Nango for auth, so instead we just pass a dummy value
// and allow Nango to handle all auth/refresh/access token management.
const twitterClientId = 'xbot'

const defaultRequiredTwitterOAuthScopes = new Set<string>([
  'tweet.read',
  'users.read',
  'offline.access',
  'tweet.write'
])

let _nango: Nango | null = null

function getNango(): Nango {
  if (!_nango) {
    const secretKey = process.env.NANGO_SECRET_KEY?.trim()
    if (!secretKey) {
      throw new Error(`Missing required "NANGO_SECRET_KEY"`)
    }

    _nango = new Nango({ secretKey })
  }

  return _nango
}

async function getTwitterAuth({
  scopes = defaultRequiredTwitterOAuthScopes
}: { scopes?: Set<string> } = {}): Promise<auth.OAuth2User> {
  const nango = getNango()
  const connection = await nango.getConnection(
    nangoTwitterProviderConfigKey,
    config.nangoConnectionId
  )

  // console.debug('nango twitter connection', connection)
  // connection.credentials.raw
  // {
  //   token_type: 'bearer',
  //   expires_in: number,
  //   access_token: string,
  //   scope: string,
  //   expires_at: string
  // }
  const connectionScopes = new Set<string>(
    connection.credentials.raw.scope.split(' ')
  )
  const missingScopes = new Set<string>()

  for (const scope of scopes) {
    if (!connectionScopes.has(scope)) {
      missingScopes.add(scope)
    }
  }

  if (missingScopes.size > 0) {
    throw new Error(
      `Nango connection ${
        config.nangoConnectionId
      } is missing required OAuth scopes: ${[...missingScopes.values()].join(
        ', '
      )}`
    )
  }

  const token = connection.credentials.raw
  assert(token)

  return new auth.OAuth2User({
    client_id: twitterClientId,
    callback: config.nangoCallbackUrl,
    scopes: [...scopes.values()] as any,
    token
  })
}

export async function getTwitterClient({
  scopes = defaultRequiredTwitterOAuthScopes
}: { scopes?: Set<string> } = {}): Promise<TwitterClient> {
  // NOTE: Nango handles refreshing the oauth access token for us
  const twitterAuth = await getTwitterAuth({ scopes })

  // Twitter API v2 using OAuth 2.0
  return new TwitterClient(twitterAuth)
}
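
A minimal usage sketch, assuming NANGO_SECRET_KEY and the Nango Twitter connection referenced by config.nangoConnectionId are already configured; the import path is hypothetical, and findMyUser() is a standard twitter-api-sdk read call covered by the 'users.read' scope required above.

// Hypothetical usage sketch; assumes the Nango Twitter connection is set up.
import { getTwitterClient } from './twitter.js' // import path is an assumption

const client = await getTwitterClient()

// Simple read call to verify the authenticated user (twitter-api-sdk).
const me = await client.users.findMyUser()
console.log(me.data?.username)
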
@ -0,0 +1,143 @@
import defaultKy, { type KyInstance } from 'ky'
import { z } from 'zod'

import { getEnv } from '../../utils/helpers.js'
import { aiFunction, AIToolsProvider } from '../fns.js'

export namespace weatherapi {
  export const BASE_URL = 'https://api.weatherapi.com/v1'

  export interface CurrentWeatherResponse {
    current: CurrentWeather
    location: WeatherLocation
  }

  export interface CurrentWeather {
    cloud: number
    condition: WeatherCondition
    feelslike_c: number
    feelslike_f: number
    gust_kph: number
    gust_mph: number
    humidity: number
    is_day: number
    last_updated: string
    last_updated_epoch: number
    precip_in: number
    precip_mm: number
    pressure_in: number
    pressure_mb: number
    temp_c: number
    temp_f: number
    uv: number
    vis_km: number
    vis_miles: number
    wind_degree: number
    wind_dir: string
    wind_kph: number
    wind_mph: number
  }

  export interface WeatherCondition {
    code: number
    icon: string
    text: string
  }

  export interface WeatherLocation {
    country: string
    lat: number
    localtime: string
    localtime_epoch: number
    lon: number
    name: string
    region: string
    tz_id: string
  }

  export interface WeatherIPInfoResponse {
    ip: string
    type: string
    continent_code: string
    continent_name: string
    country_code: string
    country_name: string
    is_eu: string
    geoname_id: number
    city: string
    region: string
    lat: number
    lon: number
    tz_id: string
    localtime_epoch: number
    localtime: string
  }
}

export class WeatherClient extends AIToolsProvider {
  protected api: KyInstance
  protected apiKey: string
  protected apiBaseUrl: string

  constructor({
    apiKey = getEnv('WEATHER_API_KEY'),
    apiBaseUrl = weatherapi.BASE_URL,
    ky = defaultKy
  }: {
    apiKey?: string
    apiBaseUrl?: string
    ky?: KyInstance
  } = {}) {
    if (!apiKey) {
      throw new Error(`WeatherClient missing required "apiKey"`)
    }

    super()

    this.apiKey = apiKey
    this.apiBaseUrl = apiBaseUrl

    this.api = ky.extend({ prefixUrl: apiBaseUrl })
  }

  @aiFunction({
    name: 'getCurrentWeather',
    description: 'Gets info about the current weather at a given location.',
    schema: z.object({
      q: z
        .string()
        .describe(
          'Location to get the weather for. May be a city name, zipcode, IP address, or lat/lng coordinates. Example: "London"'
        )
    })
  })
  async getCurrentWeather(queryOrOptions: string | { q: string }) {
    const options =
      typeof queryOrOptions === 'string'
        ? { q: queryOrOptions }
        : queryOrOptions

    return this.api
      .get('current.json', {
        searchParams: {
          key: this.apiKey,
          ...options
        }
      })
      .json<weatherapi.CurrentWeatherResponse>()
  }

  async ipInfo(ipOrOptions: string | { q: string }) {
    const options =
      typeof ipOrOptions === 'string' ? { q: ipOrOptions } : ipOrOptions

    return this.api
      .get('ip.json', {
        searchParams: {
          key: this.apiKey,
          ...options
        }
      })
      .json<weatherapi.WeatherIPInfoResponse>()
  }
}
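
A minimal usage sketch for WeatherClient, assuming WEATHER_API_KEY is set; the import path is hypothetical, and the fields used come from the interfaces above.

// Hypothetical usage sketch; assumes WEATHER_API_KEY is set.
import { WeatherClient } from './weather.js' // import path is an assumption

const weather = new WeatherClient()

const { location, current } = await weather.getCurrentWeather('London')
console.log(`${location.name}: ${current.temp_c}°C, ${current.condition.text}`)

// ipInfo() resolves geo/timezone info for an IP address via the same API key.
const ip = await weather.ipInfo('8.8.8.8')
console.log(ip.country_name, ip.tz_id)
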
@ -0,0 +1,2 @@
// TODO
export type TODO = 'TODO'
@ -0,0 +1,17 @@
import { expect, test } from 'vitest'

import { omit, pick } from './utils.js'

test('pick', () => {
  expect(pick({ a: 1, b: 2, c: 3 }, 'a', 'c')).toEqual({ a: 1, c: 3 })
  expect(
    pick({ a: { b: 'foo' }, d: -1, foo: null } as any, 'b', 'foo')
  ).toEqual({ foo: null })
})

test('omit', () => {
  expect(omit({ a: 1, b: 2, c: 3 }, 'a', 'c')).toEqual({ b: 2 })
  expect(
    omit({ a: { b: 'foo' }, d: -1, foo: null } as any, 'b', 'foo')
  ).toEqual({ a: { b: 'foo' }, d: -1 })
})
@ -0,0 +1,62 @@
export { default as assert } from 'tiny-invariant'

/**
 * From `inputObj`, create a new object that does not include `keys`.
 *
 * @example
 * ```js
 * omit({ a: 1, b: 2, c: 3 }, 'a', 'c') // { b: 2 }
 * ```
 */
export const omit = <
  T extends Record<any, unknown>,
  K extends keyof T = keyof T
>(
  inputObj: T,
  ...keys: K[]
): Omit<T, K> => {
  const keysSet = new Set(keys)
  return Object.fromEntries(
    Object.entries(inputObj).filter(([k]) => !keysSet.has(k as any))
  ) as any
}

/**
 * From `inputObj`, create a new object that only includes `keys`.
 *
 * @example
 * ```js
 * pick({ a: 1, b: 2, c: 3 }, 'a', 'c') // { a: 1, c: 3 }
 * ```
 */
export const pick = <
  T extends Record<any, unknown>,
  K extends keyof T = keyof T
>(
  inputObj: T,
  ...keys: K[]
): Pick<T, K> => {
  const keysSet = new Set(keys)
  return Object.fromEntries(
    Object.entries(inputObj).filter(([k]) => keysSet.has(k as any))
  ) as any
}

export function pruneUndefined<T extends Record<string, any>>(
  obj: T
): NonNullable<T> {
  return Object.fromEntries(
    Object.entries(obj).filter(([, value]) => value !== undefined)
  ) as NonNullable<T>
}

export function getEnv(name: string): string | undefined {
  try {
    return typeof process !== 'undefined'
      ? // eslint-disable-next-line no-process-env
        process.env?.[name]
      : undefined
  } catch {
    return undefined
  }
}
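
pick and omit are exercised by the test file above; a brief illustrative sketch of the remaining helpers, assuming the same module path as the tests:

// Illustrative only; mirrors the helpers defined above.
import { getEnv, pruneUndefined } from './utils.js'

// pruneUndefined drops keys whose value is exactly undefined (null is kept).
pruneUndefined({ a: 1, b: undefined, c: null }) // => { a: 1, c: null }

// getEnv reads process.env safely and returns undefined outside Node.js
// or when the variable is unset.
const apiKey = getEnv('SERPAPI_API_KEY') ?? 'fallback'
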
@ -0,0 +1,25 @@
{
  "compilerOptions": {
    "target": "ES2020",
    "lib": ["ESNext"],
    "esModuleInterop": true,
    "skipLibCheck": true,
    "verbatimModuleSyntax": true,
    "allowJs": true,
    "resolveJsonModule": true,
    "moduleDetection": "force",
    "isolatedModules": true,
    "useDefineForClassFields": true,
    "jsx": "preserve",

    "strict": true,
    "noUncheckedIndexedAccess": true,
    "forceConsistentCasingInFileNames": true,

    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "outDir": "dist",
    "sourceMap": true
  },
  "include": ["src", "bin"]
}
@ -0,0 +1,16 @@
import { defineConfig } from 'tsup'

export default defineConfig([
  {
    entry: ['src/index.ts'],
    outDir: 'dist',
    target: 'node18',
    platform: 'node',
    format: ['esm'],
    splitting: false,
    sourcemap: true,
    minify: false,
    shims: true,
    dts: true
  }
])