feat: Add Anthropic Messages API compatibility layer (#2704)

* feat: add Anthropic Messages API compatibility layer

Add a new endpoint at /puterai/anthropic/v1/messages that implements the
Anthropic Messages API wire format, allowing clients using the Anthropic
SDK to point directly at Puter.

- New router translates between Anthropic format and Puter's internal
  svcAiChat.complete() pipeline
- Supports non-streaming, SSE streaming (proper Anthropic event sequence),
  and tool use round-trips
- Translates system field, tool definitions (input_schema -> parameters),
  and tool_result content blocks
- Integration tests covering non-streaming, streaming, tool use,
  Anthropic SDK compatibility, and system parameter

Closes #2554

* Fix authentication middleware

---------

Co-authored-by: ProgrammerIn-wonderland <30693865+ProgrammerIn-wonderland@users.noreply.github.com>
This commit is contained in:
iamsrishanth
2026-03-24 15:38:25 +05:30
committed by GitHub
parent bed84cad76
commit 4fe255347a
3 changed files with 695 additions and 8 deletions
@@ -0,0 +1,442 @@
/*
* Copyright (C) 2024-present Puter Technologies Inc.
*
* This file is part of Puter.
*
* Puter is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
'use strict';
const crypto = require('node:crypto');
const APIError = require('../../../api/APIError.js');
const eggspress = require('../../../api/eggspress.js');
const { TypedValue } = require('../../../services/drivers/meta/Runtime.js');
const { Context } = require('../../../util/context.js');
const auth2 = require('../../../middleware/auth2.js');
// Provider used to resolve a default model when the request omits `model`.
const DEFAULT_PROVIDER = 'claude';
/**
 * Convert Anthropic tool definitions ({ name, description, input_schema })
 * into OpenAI-style { type: 'function', function: {...} } entries so that
 * `svcAiChat.complete()` can consume them uniformly.
 *
 * @param {Array<object>|undefined} tools tool definitions from the request
 * @returns {Array<object>|undefined} normalized tools, or undefined when
 *          no tools were supplied
 */
const normalizeTools = (tools) => {
    if ( !Array.isArray(tools) || tools.length === 0 ) return undefined;
    const normalized = [];
    for ( const tool of tools ) {
        // Passthrough: already in OpenAI format
        if ( tool.type === 'function' && tool.function ) {
            normalized.push(tool);
            continue;
        }
        // Anthropic format: { name, description, input_schema }
        normalized.push({
            type: 'function',
            function: {
                name: tool.name,
                description: tool.description || '',
                parameters: tool.input_schema || { type: 'object', properties: {} },
            },
        });
    }
    return normalized;
};
/**
 * Extract plain text from a Puter/OpenAI-style message content field.
 * Accepts a string, an array of parts (strings or objects with a `text`
 * or `content` string), or a single object; anything else yields ''.
 */
const extractTextContent = (content) => {
    if ( content === undefined || content === null ) return '';
    if ( typeof content === 'string' ) return content;
    if ( Array.isArray(content) ) {
        const pieces = [];
        for ( const part of content ) {
            if ( typeof part === 'string' ) {
                pieces.push(part);
            } else if ( part && typeof part.text === 'string' ) {
                pieces.push(part.text);
            } else if ( part && typeof part.content === 'string' ) {
                pieces.push(part.content);
            } else {
                // Unknown part shape contributes nothing
                pieces.push('');
            }
        }
        return pieces.join('');
    }
    if ( typeof content === 'object' ) {
        if ( typeof content.text === 'string' ) return content.text;
        if ( typeof content.content === 'string' ) return content.content;
    }
    return '';
};
/**
 * Build an Anthropic-style usage object from internal usage data.
 * Accepts either Anthropic field names (input_tokens/output_tokens) or
 * OpenAI field names (prompt_tokens/completion_tokens); defaults to 0.
 */
const buildUsage = (usage) => {
    const input = usage?.input_tokens ?? usage?.prompt_tokens ?? 0;
    const output = usage?.output_tokens ?? usage?.completion_tokens ?? 0;
    return { input_tokens: input, output_tokens: output };
};
/**
 * Extract tool_use blocks from an internal message result and return them
 * as Anthropic content blocks. Tool calls may appear in two places:
 * OpenAI-style `tool_calls` on the message object, and `tool_use` parts
 * inside array-style content.
 *
 * Improvement: the inline try/catch JSON-parse IIFE was duplicated in both
 * branches; it is now a single `parseInput` helper.
 */
const extractToolUseBlocks = (message) => {
    // Tool inputs may arrive as a JSON string or an already-parsed object;
    // normalize both to an object, falling back to {} on malformed JSON.
    const parseInput = (input) => {
        if ( typeof input !== 'string' ) return input ?? {};
        try {
            return JSON.parse(input);
        } catch {
            return {};
        }
    };
    const blocks = [];
    // Check for OpenAI-style tool_calls on the message object
    if ( message.tool_calls && Array.isArray(message.tool_calls) ) {
        for ( const tc of message.tool_calls ) {
            blocks.push({
                type: 'tool_use',
                id: tc.id,
                name: tc.function?.name ?? '',
                input: parseInput(tc.function?.arguments),
            });
        }
    }
    // Check for tool_use blocks inside array-style content
    if ( Array.isArray(message.content) ) {
        for ( const part of message.content ) {
            if ( !part || typeof part !== 'object' ) continue;
            if ( part.type !== 'tool_use' ) continue;
            blocks.push({
                type: 'tool_use',
                id: part.id,
                name: part.name,
                input: parseInput(part.input),
            });
        }
    }
    return blocks;
};
/**
 * Translate Anthropic-format messages into Puter/OpenAI-format messages.
 *
 * - Injects the Anthropic `system` field (string, or array of text blocks)
 *   as a leading `role: 'system'` message.
 * - Converts `tool_result` content blocks — which Anthropic nests inside
 *   user messages — into separate `role: 'tool'` messages that Puter's
 *   internal pipeline expects.
 *
 * Bug fix: the previous implementation pushed a user message with
 * array-style content TWICE when it contained no tool_result blocks
 * (once as the rebuilt `otherParts` message, then again as the original,
 * because neither `continue` guard fired). Messages without tool_results
 * are now passed through exactly once, unchanged.
 *
 * @param {Array<object>} messages Anthropic-style messages
 * @param {string|Array<object>|undefined} system Anthropic `system` field
 * @returns {Array<object>} messages in Puter/OpenAI internal format
 */
const normalizeMessages = (messages, system) => {
    const result = [];
    // Inject system message at the start if supplied
    if ( system ) {
        if ( typeof system === 'string' ) {
            result.push({ role: 'system', content: system });
        } else if ( Array.isArray(system) ) {
            const text = system.map((s) => {
                if ( typeof s === 'string' ) return s;
                if ( s && typeof s.text === 'string' ) return s.text;
                return '';
            }).join('\n');
            if ( text ) result.push({ role: 'system', content: text });
        }
    }
    for ( const msg of messages ) {
        // Anthropic places tool_result blocks inside user messages.
        // Convert each to a separate `role: 'tool'` message.
        if ( msg.role === 'user' && Array.isArray(msg.content) ) {
            const toolResults = msg.content.filter(
                (part) => part && part.type === 'tool_result');
            if ( toolResults.length > 0 ) {
                const otherParts = msg.content.filter(
                    (part) => !(part && part.type === 'tool_result'));
                // Push non-tool content first (if any)
                if ( otherParts.length > 0 ) {
                    result.push({ role: 'user', content: otherParts });
                }
                // Convert each tool_result to a `tool` message
                for ( const tr of toolResults ) {
                    let contentStr = '';
                    if ( typeof tr.content === 'string' ) {
                        contentStr = tr.content;
                    } else if ( Array.isArray(tr.content) ) {
                        contentStr = tr.content.map((p) => {
                            if ( typeof p === 'string' ) return p;
                            if ( p && typeof p.text === 'string' ) return p.text;
                            return '';
                        }).join('');
                    }
                    result.push({
                        role: 'tool',
                        tool_call_id: tr.tool_use_id,
                        content: contentStr,
                    });
                }
                continue;
            }
        }
        // No translation needed; pass the message through unchanged.
        result.push(msg);
    }
    return result;
};
// Allow requests without an Origin header (e.g. server-side Anthropic SDK
// clients) to reach this endpoint.
const svc_web = Context.get('services').get('web-server');
svc_web.allow_undefined_origin(/^\/puterai\/anthropic\/v1\/messages(\/.*)?$/);

/**
 * POST /puterai/anthropic/v1/messages
 *
 * Anthropic Messages API compatibility endpoint. Accepts requests in the
 * Anthropic wire format, translates them for Puter's internal chat
 * pipeline (`svcAiChat.complete()`), and translates the result back into
 * either a non-streaming Anthropic message object or the Anthropic SSE
 * event sequence when `stream: true`.
 */
module.exports = eggspress('/anthropic/v1/messages', {
    json: true,
    jsonCanBeLarge: true,
    allowedMethods: ['POST'],
    mw: [(req, _res, next) => {
        // Anthropic SDKs authenticate via an `x-api-key` header; map it
        // onto the `Authorization: Bearer` header that auth2 expects.
        if ( !req.headers.authorization && req.headers['x-api-key'] ) {
            req.headers.authorization = `Bearer ${req.headers['x-api-key']}`;
        }
        next();
    }, auth2],
}, async (req, res) => {
    // We don't allow apps
    if ( Context.get('actor').type.app ) {
        throw APIError.create('permission_denied');
    }

    const body = req.body || {};
    const stream = !!body.stream;

    // `messages` is the only strictly required field.
    if ( ! Array.isArray(body.messages) ) {
        throw APIError.create('field_invalid', {
            key: 'messages',
            expected: 'an array of chat messages',
            got: typeof body.messages,
        });
    }

    const ctx = Context.get();
    const services = ctx.get('services');
    const svcAiChat = services.get('ai-chat');

    // Resolve the model: an explicit `model` wins; otherwise fall back to
    // the default model of the requested (or default) provider.
    let model = body.model;
    if ( ! model ) {
        const providerName = body.provider || DEFAULT_PROVIDER;
        const provider = svcAiChat.getProvider(providerName);
        if ( ! provider ) {
            throw APIError.create('field_missing', { key: 'model' });
        }
        model = provider.getDefaultModel();
    }

    // Translate messages from Anthropic format to Puter internal format
    const normalizedMessages = normalizeMessages(body.messages, body.system);
    const tools = normalizeTools(body.tools);

    // Only forward optional fields that were actually supplied.
    const completeArgs = {
        messages: normalizedMessages,
        model,
        stream,
        ...(tools ? { tools } : {}),
        ...(body.temperature !== undefined ? { temperature: body.temperature } : {}),
        ...(body.max_tokens !== undefined ? { max_tokens: body.max_tokens } : {}),
        ...(body.provider ? { provider: body.provider } : {}),
    };

    // Anthropic-style message id: "msg_" + UUID with dashes stripped.
    const messageId = `msg_${crypto.randomUUID().replace(/-/g, '')}`;

    const result = await svcAiChat.complete(completeArgs);

    // ================================================================
    // STREAMING RESPONSE — Anthropic SSE format
    // ================================================================
    if ( stream ) {
        if ( ! (result instanceof TypedValue) ) {
            throw APIError.create('internal_error', { message: 'expected streaming response' });
        }

        res.setHeader('Content-Type', 'text/event-stream; charset=utf-8');
        res.setHeader('Cache-Control', 'no-cache, no-transform');
        res.setHeader('Connection', 'keep-alive');

        // Emit one SSE frame: `event: <type>` line plus a JSON data line.
        const sendEvent = (eventType, data) => {
            res.write(`event: ${eventType}\ndata: ${JSON.stringify(data)}\n\n`);
        };

        // message_start
        sendEvent('message_start', {
            type: 'message_start',
            message: {
                id: messageId,
                type: 'message',
                role: 'assistant',
                content: [],
                model,
                stop_reason: null,
                stop_sequence: null,
                usage: { input_tokens: 0, output_tokens: 0 },
            },
        });

        // The internal stream emits newline-delimited JSON events; buffer
        // partial lines across chunk boundaries.
        let buffer = '';
        let usage = null;
        let contentIndex = 0;   // index of the current Anthropic content block
        let blockOpen = false;  // a content_block_start is awaiting its stop
        let sawToolCalls = false;

        const openTextBlock = () => {
            if ( blockOpen ) return;
            sendEvent('content_block_start', {
                type: 'content_block_start',
                index: contentIndex,
                content_block: { type: 'text', text: '' },
            });
            blockOpen = true;
        };

        const closeBlock = () => {
            if ( ! blockOpen ) return;
            sendEvent('content_block_stop', {
                type: 'content_block_stop',
                index: contentIndex,
            });
            blockOpen = false;
            contentIndex++;
        };

        const streamValue = result.value;

        streamValue.on('data', (chunk) => {
            buffer += chunk.toString('utf8');
            let newlineIndex;
            // Process every complete line currently in the buffer.
            while ( (newlineIndex = buffer.indexOf('\n')) >= 0 ) {
                const line = buffer.slice(0, newlineIndex).trim();
                buffer = buffer.slice(newlineIndex + 1);
                if ( ! line ) continue;
                let event;
                try {
                    event = JSON.parse(line);
                } catch {
                    // Skip malformed lines rather than aborting the stream.
                    continue;
                }
                if ( event.type === 'text' && typeof event.text === 'string' ) {
                    openTextBlock();
                    sendEvent('content_block_delta', {
                        type: 'content_block_delta',
                        index: contentIndex,
                        delta: { type: 'text_delta', text: event.text },
                    });
                }
                if ( event.type === 'tool_use' ) {
                    sawToolCalls = true;
                    closeBlock(); // close any open text block first
                    sendEvent('content_block_start', {
                        type: 'content_block_start',
                        index: contentIndex,
                        content_block: {
                            type: 'tool_use',
                            id: event.id,
                            name: event.name,
                            input: {},
                        },
                    });
                    blockOpen = true;
                    // Emit the input as a single JSON delta
                    const inputStr = typeof event.input === 'string'
                        ? event.input
                        : JSON.stringify(event.input ?? {});
                    sendEvent('content_block_delta', {
                        type: 'content_block_delta',
                        index: contentIndex,
                        delta: { type: 'input_json_delta', partial_json: inputStr },
                    });
                    closeBlock();
                }
                if ( event.type === 'usage' ) {
                    usage = event.usage;
                }
            }
        });

        streamValue.on('end', () => {
            // Flush any open block, then emit the closing message_delta /
            // message_stop pair required by the Anthropic event sequence.
            closeBlock();
            const stopReason = sawToolCalls ? 'tool_use' : 'end_turn';
            const resolvedUsage = buildUsage(usage || {});
            sendEvent('message_delta', {
                type: 'message_delta',
                delta: { stop_reason: stopReason, stop_sequence: null },
                usage: { output_tokens: resolvedUsage.output_tokens },
            });
            sendEvent('message_stop', { type: 'message_stop' });
            res.end();
        });

        streamValue.on('error', (err) => {
            // Report the failure as an Anthropic-style error event, then
            // terminate the SSE stream.
            sendEvent('error', {
                type: 'error',
                error: {
                    type: 'api_error',
                    message: err?.message || 'stream error',
                },
            });
            res.end();
        });

        return;
    }

    // ================================================================
    // NON-STREAMING RESPONSE — Anthropic message object
    // ================================================================
    const message = result.message || {};
    const toolUseBlocks = extractToolUseBlocks(message);
    const textContent = extractTextContent(message.content);

    const contentBlocks = [];
    if ( textContent ) {
        contentBlocks.push({ type: 'text', text: textContent });
    }
    contentBlocks.push(...toolUseBlocks);

    // If there's no content at all, include an empty text block
    if ( contentBlocks.length === 0 ) {
        contentBlocks.push({ type: 'text', text: '' });
    }

    const stopReason = toolUseBlocks.length > 0 ? 'tool_use' : 'end_turn';

    res.json({
        id: messageId,
        type: 'message',
        role: 'assistant',
        content: contentBlocks,
        model,
        stop_reason: stopReason,
        stop_sequence: null,
        usage: buildUsage(result.usage),
    });
});
+9 -8
View File
@@ -40,7 +40,7 @@ class ChatAPIService extends BaseService {
* @param {Express} options.app Express application instance to install routes on
* @returns {Promise<void>}
*/
async '__on_install.routes' (_, { app }) {
async '__on_install.routes'(_, { app }) {
// Create a router for chat API endpoints
const router = (() => {
const require = this.require;
@@ -61,10 +61,11 @@ class ChatAPIService extends BaseService {
* @param {express.Router} options.router Express router to install endpoints on
* @private
*/
install_chat_endpoints_ ({ router }) {
install_chat_endpoints_({ router }) {
const Endpoint = this.require('Endpoint');
router.use(require('../routers/puterai/openai/completions'));
router.use(require('../routers/puterai/openai/chat_completions'));
router.use(require('../routers/puterai/anthropic/messages'));
// Endpoint to list available AI chat models
Endpoint({
route: '/chat/models',
@@ -81,7 +82,7 @@ class ChatAPIService extends BaseService {
// Return the list of models
res.json({ models: models.filter(e => !['costly', 'fake', 'abuse', 'model-fallback-test-1'].includes(e)) });
} catch ( error ) {
} catch (error) {
this.log.error('Error fetching models:', error);
throw APIError.create('internal_server_error');
}
@@ -104,7 +105,7 @@ class ChatAPIService extends BaseService {
// Return the detailed list of models
res.json({ models: models.filter((e) => !['costly', 'fake', 'abuse', 'model-fallback-test-1'].includes(e.id)) });
} catch ( error ) {
} catch (error) {
this.log.error('Error fetching model details:', error);
throw APIError.create('internal_server_error');
}
@@ -125,7 +126,7 @@ class ChatAPIService extends BaseService {
});
// Return the list of models
res.json({ models });
} catch ( error ) {
} catch (error) {
this.log.error('Error fetching image models:', error);
throw APIError.create('internal_server_error');
}
@@ -146,7 +147,7 @@ class ChatAPIService extends BaseService {
});
// Return the detailed list of models
res.json({ models });
} catch ( error ) {
} catch (error) {
this.log.error('Error fetching image model details:', error);
throw APIError.create('internal_server_error');
}
@@ -164,7 +165,7 @@ class ChatAPIService extends BaseService {
return svc_video.models();
});
res.json({ models });
} catch ( error ) {
} catch (error) {
this.log.error('Error fetching video model details:', error);
throw APIError.create('internal_server_error');
}
@@ -182,7 +183,7 @@ class ChatAPIService extends BaseService {
return svc_video.list();
});
res.json({ models });
} catch ( error ) {
} catch (error) {
this.log.error('Error fetching video models:', error);
throw APIError.create('internal_server_error');
}
@@ -0,0 +1,244 @@
import { describe, expect, it } from 'vitest';
import * as fs from 'fs';
import * as path from 'path';
import * as yaml from 'yaml';
import Anthropic from '@anthropic-ai/sdk';
/** Shape of tests/client-config.yaml (or the equivalent env vars). */
interface ClientConfig {
    // Base URL of the Puter API under test, e.g. http://localhost:4100
    api_url: string;
    // Bearer token for authenticated requests; optional for public routes
    auth_token?: string;
    // When falsy, tests that spend real AI credits are skipped
    do_expensive_ai_tests?: boolean;
}
/**
 * Resolve test configuration. Environment variables (PUTER_API_URL etc.)
 * take precedence; otherwise tests/client-config.yaml is read. Throws
 * when neither source is available.
 */
const loadConfig = (): ClientConfig => {
    const apiUrlFromEnv = process.env.PUTER_API_URL;
    const authTokenFromEnv = process.env.PUTER_AUTH_TOKEN;
    if (apiUrlFromEnv) {
        return {
            api_url: apiUrlFromEnv,
            auth_token: authTokenFromEnv,
            do_expensive_ai_tests: process.env.PUTER_DO_EXPENSIVE_AI_TESTS === 'true',
        };
    }
    const configPath = path.join(__dirname, '../client-config.yaml');
    if (!fs.existsSync(configPath)) {
        throw new Error('Missing client-config.yaml. Create tests/client-config.yaml ' +
            'or set PUTER_API_URL and PUTER_AUTH_TOKEN.');
    }
    const rawYaml = fs.readFileSync(configPath, 'utf8');
    return yaml.parse(rawYaml) as ClientConfig;
};
/** Build JSON request headers, attaching a Bearer token when supplied. */
const buildHeaders = (authToken?: string) => {
    const base: Record<string, string> = {
        'Content-Type': 'application/json',
    };
    return authToken
        ? { ...base, Authorization: `Bearer ${authToken}` }
        : base;
};
/**
 * POST a raw request body to the Anthropic-compatible messages endpoint.
 * Returns both the fetch Response and the resolved client config so
 * callers can inspect config flags alongside the response.
 */
const postMessages = async (body: unknown) => {
    const config = loadConfig();
    const endpoint = `${config.api_url}/puterai/anthropic/v1/messages`;
    const requestInit = {
        method: 'POST',
        headers: buildHeaders(config.auth_token),
        body: JSON.stringify(body),
    };
    const response = await fetch(endpoint, requestInit);
    return { response, config };
};
// Integration tests for the Anthropic-compatible messages endpoint.
// NOTE: every test early-returns (effectively skipping) unless
// do_expensive_ai_tests is enabled, because these calls spend real AI credits.
describe('Puter Anthropic-Compatible Messages API', () => {
    // Happy path: a single-turn, non-streaming request returns an
    // Anthropic-shaped message object with usage accounting.
    it('returns a well-formed non-streaming message', async () => {
        const config = loadConfig();
        if (!config.do_expensive_ai_tests) return;
        const { response } = await postMessages({
            model: 'claude-haiku-4-5',
            max_tokens: 256,
            messages: [
                { role: 'user', content: 'Say hello in exactly three words.' },
            ],
        });
        expect(response.status).toBe(200);
        const json = await response.json() as any;
        expect(json.type).toBe('message');
        expect(json.role).toBe('assistant');
        expect(json.id).toMatch(/^msg_/);
        expect(Array.isArray(json.content)).toBe(true);
        expect(json.content.length).toBeGreaterThan(0);
        expect(json.content[0].type).toBe('text');
        expect(typeof json.content[0].text).toBe('string');
        expect(json.stop_reason).toBe('end_turn');
        expect(typeof json.usage?.input_tokens).toBe('number');
        expect(typeof json.usage?.output_tokens).toBe('number');
    }, 20000);

    // Streaming: collect SSE frame names and verify the full Anthropic
    // event sequence is emitted.
    it('streams and returns well-formed SSE events', async () => {
        const config = loadConfig();
        if (!config.do_expensive_ai_tests) return;
        const { response } = await postMessages({
            model: 'claude-haiku-4-5',
            max_tokens: 256,
            stream: true,
            messages: [
                { role: 'user', content: 'What is 2 + 2?' },
            ],
        });
        expect(response.status).toBe(200);
        const reader = response.body?.getReader();
        expect(reader).toBeTruthy();
        if (!reader) return;
        const decoder = new TextDecoder();
        let buffer = '';
        const events: string[] = [];
        // SSE frames are separated by a blank line; extract each frame's
        // `event:` name as it completes.
        while (true) {
            const { value, done } = await reader.read();
            if (done) break;
            buffer += decoder.decode(value, { stream: true });
            let idx;
            while ((idx = buffer.indexOf('\n\n')) >= 0) {
                const block = buffer.slice(0, idx);
                buffer = buffer.slice(idx + 2);
                const eventMatch = block.match(/^event: (\S+)/m);
                if (eventMatch) events.push(eventMatch[1]);
            }
        }
        expect(events).toContain('message_start');
        expect(events).toContain('content_block_start');
        expect(events).toContain('content_block_delta');
        expect(events).toContain('content_block_stop');
        expect(events).toContain('message_delta');
        expect(events).toContain('message_stop');
    }, 20000);

    // Two-turn tool-use round trip: the model requests the tool, we reply
    // with a tool_result block, and the model produces a final text answer.
    it('handles tool use round-trip (non-streaming)', async () => {
        const config = loadConfig();
        if (!config.do_expensive_ai_tests) return;
        const tools = [
            {
                name: 'calculate',
                description: 'Perform a mathematical calculation',
                input_schema: {
                    type: 'object',
                    properties: {
                        expression: {
                            type: 'string',
                            description: 'Mathematical expression to evaluate',
                        },
                    },
                    required: ['expression'],
                },
            },
        ];
        // First turn: ask the model to use the tool
        const { response: firstResponse } = await postMessages({
            model: 'claude-haiku-4-5',
            max_tokens: 1024,
            messages: [
                { role: 'user', content: 'Use the calculate tool to compute 2 + 2.' },
            ],
            tools,
        });
        expect(firstResponse.status).toBe(200);
        const firstJson = await firstResponse.json() as any;
        const toolUseBlocks = (firstJson.content || []).filter(
            (b: any) => b.type === 'tool_use',
        );
        if (toolUseBlocks.length === 0) {
            // Model did not call tools — inconclusive, skip
            return;
        }
        expect(toolUseBlocks[0].name).toBe('calculate');
        expect(toolUseBlocks[0].id).toBeTruthy();
        // Second turn: provide tool result and get final answer
        const { response: secondResponse } = await postMessages({
            model: 'claude-haiku-4-5',
            max_tokens: 1024,
            messages: [
                { role: 'user', content: 'Use the calculate tool to compute 2 + 2.' },
                { role: 'assistant', content: firstJson.content },
                {
                    role: 'user',
                    content: [
                        {
                            type: 'tool_result',
                            tool_use_id: toolUseBlocks[0].id,
                            content: JSON.stringify({ expression: '2 + 2', result: 4 }),
                        },
                    ],
                },
            ],
            tools,
        });
        expect(secondResponse.status).toBe(200);
        const secondJson = await secondResponse.json() as any;
        expect(secondJson.type).toBe('message');
        const textBlocks = (secondJson.content || []).filter(
            (b: any) => b.type === 'text',
        );
        expect(textBlocks.length).toBeGreaterThan(0);
        expect(textBlocks[0].text).toBeTruthy();
    }, 30000);

    // SDK compatibility: the official Anthropic client pointed at Puter's
    // baseURL should work end-to-end (x-api-key auth path).
    it('works with the Anthropic SDK', async () => {
        const config = loadConfig();
        if (!config.do_expensive_ai_tests) return;
        const apiKey = config.auth_token;
        if (!apiKey) throw new Error('Missing auth token for Anthropic SDK test');
        const client = new Anthropic({
            apiKey,
            baseURL: `${config.api_url}/puterai/anthropic/v1`,
        });
        const message = await client.messages.create({
            model: 'claude-haiku-4-5',
            max_tokens: 256,
            messages: [
                { role: 'user', content: 'Say hello.' },
            ],
        });
        expect(message.type).toBe('message');
        expect(message.role).toBe('assistant');
        expect(message.content.length).toBeGreaterThan(0);
        expect(message.content[0].type).toBe('text');
    }, 20000);

    // The Anthropic `system` field should be accepted and forwarded.
    it('accepts a system parameter', async () => {
        const config = loadConfig();
        if (!config.do_expensive_ai_tests) return;
        const { response } = await postMessages({
            model: 'claude-haiku-4-5',
            max_tokens: 256,
            system: 'You are a pirate. Always respond in pirate speak.',
            messages: [
                { role: 'user', content: 'Say hello.' },
            ],
        });
        expect(response.status).toBe(200);
        const json = await response.json() as any;
        expect(json.type).toBe('message');
        expect(json.content[0].text).toBeTruthy();
    }, 20000);
});