Lol, I was bored and decided to reverse engineer Vercel AI. I had to figure out their custom-encoding header, but it was pretty easy, so here you go:
EDIT: Actually, I can’t say I was bored. I was making an API for a friend and decided to reverse engineer Vercel, then said screw it and reversed another site instead because it had GPT-4. (Vercel has GPT-4 too, but only in the Pro version, not the free one.)
from typing import Any, TypedDict
from tls_client import Session
import random
import execjs
import base64
import uuid
import json
class ModelInfo(TypedDict):
    id: str
    default_params: dict[str, Any]
# Model ids and default parameters exposed by the sdk.vercel.ai playground.
model_info: dict[str, ModelInfo] = {
    'code-davinci-002': {
        'id': 'openai:code-davinci-002',
        'default_params': {
            'temperature': 0.5,
            'maximumLength': 1024,
            'topP': 1,
            'presencePenalty': 0,
            'frequencyPenalty': 0,
            'stopSequences': [],
        },
    },
    'gpt-3.5-turbo': {
        'id': 'openai:gpt-3.5-turbo',
        'default_params': {
            'temperature': 0.7,
            'maximumLength': 4096,
            'topP': 1,
            'topK': 1,
            'presencePenalty': 1,
            'frequencyPenalty': 1,
            'stopSequences': [],
        },
    },
    'gpt-3.5-turbo-16k': {
        'id': 'openai:gpt-3.5-turbo-16k',
        'default_params': {
            'temperature': 0.7,
            'maximumLength': 16280,
            'topP': 1,
            'topK': 1,
            'presencePenalty': 1,
            'frequencyPenalty': 1,
            'stopSequences': [],
        },
    },
    'gpt-3.5-turbo-16k-0613': {
        'id': 'openai:gpt-3.5-turbo-16k-0613',
        'default_params': {
            'temperature': 0.7,
            'maximumLength': 16280,
            'topP': 1,
            'topK': 1,
            'presencePenalty': 1,
            'frequencyPenalty': 1,
            'stopSequences': [],
        },
    },
    'text-ada-001': {
        'id': 'openai:text-ada-001',
        'default_params': {
            'temperature': 0.5,
            'maximumLength': 1024,
            'topP': 1,
            'presencePenalty': 0,
            'frequencyPenalty': 0,
            'stopSequences': [],
        },
    },
    'text-babbage-001': {
        'id': 'openai:text-babbage-001',
        'default_params': {
            'temperature': 0.5,
            'maximumLength': 1024,
            'topP': 1,
            'presencePenalty': 0,
            'frequencyPenalty': 0,
            'stopSequences': [],
        },
    },
    'text-curie-001': {
        'id': 'openai:text-curie-001',
        'default_params': {
            'temperature': 0.5,
            'maximumLength': 1024,
            'topP': 1,
            'presencePenalty': 0,
            'frequencyPenalty': 0,
            'stopSequences': [],
        },
    },
    'text-davinci-002': {
        'id': 'openai:text-davinci-002',
        'default_params': {
            'temperature': 0.5,
            'maximumLength': 1024,
            'topP': 1,
            'presencePenalty': 0,
            'frequencyPenalty': 0,
            'stopSequences': [],
        },
    },
    'text-davinci-003': {
        'id': 'openai:text-davinci-003',
        'default_params': {
            'temperature': 0.5,
            'maximumLength': 4097,
            'topP': 1,
            'presencePenalty': 0,
            'frequencyPenalty': 0,
            'stopSequences': [],
        },
    },
}
class VercelAI:
    def __init__(self):
        # tls-client mimics a real Chrome TLS fingerprint, which helps get past Vercel's bot checks.
        self.http_session = Session(client_identifier='chrome_109')
    def _get_anti_bot_token(self):
        headers = {
            'authority': 'sdk.vercel.ai',
            'accept': '*/*',
            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
            'cache-control': 'no-cache',
            'pragma': 'no-cache',
            'referer': 'https://sdk.vercel.ai/',
            'sec-ch-ua': '"Google Chrome";v="117", "Not;A=Brand";v="8", "Chromium";v="117"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-origin',
            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.%s.%s Safari/537.36' % (
                random.randint(99, 999),
                random.randint(99, 999)
            )
        }
        # The challenge comes back as base64-encoded JSON (despite the .jpeg name in the URL).
        response = self.http_session.get('https://sdk.vercel.ai/openai.jpeg', headers=headers)
        raw = json.loads(base64.b64decode(response.text, validate=True))
        # 'c' is a JS function and 'a' is its argument; evaluate (c)(a) with a couple of browser globals stubbed out.
        js_script = '''const globalThis={marker:"mark"};String.prototype.fontcolor=function(){return `<font>${this}</font>`};
        return (%s)(%s)''' % (raw['c'], raw['a'])
        raw_token = json.dumps({'r': execjs.compile(js_script).call(''), 't': raw['t']}, separators=(",", ":"))
        # The custom-encoding header is the challenge result plus the server's 't' value, UTF-16LE encoded, then base64'd.
        return base64.b64encode(raw_token.encode('utf-16le')).decode()
    def generate_response(self, model: str, messages: list) -> str:
        headers = {
            'authority': 'sdk.vercel.ai',
            'accept': '*/*',
            'accept-language': 'en-US,en;q=0.9',
            'content-type': 'application/json',
            'custom-encoding': self._get_anti_bot_token(),
            'origin': 'https://sdk.vercel.ai',
            'referer': 'https://sdk.vercel.ai/',
            'sec-ch-ua': '"Chromium";v="116", "Not)A;Brand";v="24", "Google Chrome";v="116"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"Chrome OS"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-origin',
            'user-agent': 'Mozilla/5.0 (X11; CrOS x86_64 14541.0.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36',
        }
        # Merge the playground payload with the model's default parameters (dict union needs Python 3.9+).
        json_data = {
            'model': model_info[model]['id'],
            'messages': messages,
            'playgroundId': str(uuid.uuid4()),
            'chatIndex': 0
        } | model_info[model]['default_params']
        for _ in range(10):  # the endpoint 500s intermittently, so retry a few times
            response = self.http_session.post('https://sdk.vercel.ai/api/generate', headers=headers, json=json_data)
            if response.status_code == 500:
                continue
            return response.text
        return response.text  # still 500 after 10 tries; hand back the last body anyway
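A minimal usage sketch, assuming tls-client, PyExecJS, and a Node runtime are installed. The message shape here is my assumption (the usual role/content chat list), so adjust if the endpoint wants something else:

if __name__ == '__main__':
    client = VercelAI()
    # Assumed message format, standard chat style; not confirmed against the endpoint.
    messages = [{'role': 'user', 'content': 'Say hi in five words.'}]
    print(client.generate_response('gpt-3.5-turbo', messages))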
EDIT: Probably broken by now, I’m not sure. You might need to add proxies. If the code returns a 429 status code, that doesn’t necessarily mean you’re rate-limited; it’s most likely the signature/anti-bot token being rejected. Proxies might still work, though, so give it a try, lmao.
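If you do try proxies, tls-client takes a per-request proxy argument (that’s how I remember its API from the README; double-check against the version you have installed). Something like this, with a placeholder proxy URL:

# Sketch only: the 'proxy' kwarg and the URL below are assumptions, swap in your own proxy.
response = self.http_session.post(
    'https://sdk.vercel.ai/api/generate',
    headers=headers,
    json=json_data,
    proxy='http://user:pass@proxy-host:port',
)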