Add gemini-robotics-er-1.6 model
This commit is contained in:
@@ -4,6 +4,7 @@ import { createAnthropic } from '@ai-sdk/anthropic';
|
||||
import { createOpenAI } from '@ai-sdk/openai';
|
||||
import { createGoogleGenerativeAI } from '@ai-sdk/google';
|
||||
import { createOpenAICompatible } from '@ai-sdk/openai-compatible';
|
||||
import { GoogleGenAI } from '@google/genai';
|
||||
import { Job } from '../db/models.js';
|
||||
import { broadcast } from '../ws/broadcast.js';
|
||||
import { findModel, DEFAULT_MODEL_ID, normalizeModelId } from '../models.js';
|
||||
@@ -23,6 +24,10 @@ const google = createGoogleGenerativeAI({
|
||||
apiKey: process.env.GOOGLE_API_KEY,
|
||||
});
|
||||
|
||||
const geminiApi = new GoogleGenAI({
|
||||
apiKey: process.env.GOOGLE_API_KEY,
|
||||
});
|
||||
|
||||
// Ollama Cloud exposes an OpenAI-compatible /v1 endpoint.
|
||||
// Using @ai-sdk/openai-compatible avoids the local-Ollama schema validation
|
||||
// in ollama-ai-provider which requires fields (eval_duration etc.) that
|
||||
@@ -48,6 +53,29 @@ function resolveModel(modelId) {
|
||||
return PROVIDERS[meta.provider](meta.id);
|
||||
}
|
||||
|
||||
/**
 * Normalize a requested model id and look up its catalog entry, falling back
 * to the default model's entry when the normalized id is unknown.
 *
 * @param {string} modelId - raw model id as stored on the job.
 * @returns {{ normalized: string, meta: object }} the normalized id together
 *   with the resolved model metadata (default model's metadata on no match).
 */
function resolveModelMeta(modelId) {
  const normalized = normalizeModelId(modelId);
  let meta = findModel(normalized);
  if (meta == null) {
    meta = findModel(DEFAULT_MODEL_ID);
  }
  return { normalized, meta };
}
|
||||
|
||||
/**
 * Parse a base64 data URL ("data:<mime>;base64,<payload>") into the
 * inlineData shape ({ mimeType, data }) passed to the Gemini API.
 *
 * @param {string} dataUrl - e.g. "data:image/png;base64,iVBORw0...".
 * @returns {{ mimeType: string, data: string } | null} null when the input is
 *   missing, not a string, not a data URL, not base64-encoded, or has an
 *   empty payload.
 */
function dataUrlToInlineData(dataUrl) {
  // typeof check alone rejects null/undefined; '' fails startsWith below.
  if (typeof dataUrl !== 'string' || !dataUrl.startsWith('data:')) return null;

  const comma = dataUrl.indexOf(',');
  if (comma < 0) return null;

  const header = dataUrl.slice(5, comma); // between "data:" and the comma
  const data = dataUrl.slice(comma + 1);

  // RFC 2397 spells the marker ";base64" in lowercase, but producers vary in
  // casing, so match case-insensitively rather than rejecting valid images.
  if (!header.toLowerCase().includes(';base64') || !data) return null;

  // Media type is everything before the first ";" parameter. Empty header
  // (e.g. "data:;base64,...") falls back to a generic binary type.
  const mimeType = header.split(';')[0] || 'application/octet-stream';
  return { mimeType, data };
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// p-queue: in-process queue, no external server needed
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -81,6 +109,37 @@ async function runJob(jobId) {
|
||||
await setStatus(job, 'running');
|
||||
|
||||
try {
|
||||
const { meta } = resolveModelMeta(job.model);
|
||||
|
||||
// Robotics-ER models are available via the Gemini API but may not be
|
||||
// exposed through all provider wrappers. Route them through @google/genai.
|
||||
if (String(meta.id).startsWith('gemini-robotics-er-')) {
|
||||
const inline = dataUrlToInlineData(job.imageDataUrl);
|
||||
const contents = [
|
||||
{
|
||||
role: 'user',
|
||||
parts: [
|
||||
...(inline ? [{ inlineData: inline }] : []),
|
||||
{ text: job.prompt ?? '' },
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
const res = await geminiApi.models.generateContent({
|
||||
model: meta.id,
|
||||
contents,
|
||||
});
|
||||
|
||||
const text = res?.text ?? '';
|
||||
|
||||
await setStatus(job, 'done', {
|
||||
result: text,
|
||||
inputTokens: null,
|
||||
outputTokens: null,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const { text, usage } = await generateText({
|
||||
model: resolveModel(job.model),
|
||||
messages: [
|
||||
|
||||
Reference in New Issue
Block a user