Follow this guide on how to integrate CogCache using Node.js
1️⃣ Create a CogCache account
If you don't have a CogCache account you can create a CogCache account through the Microsoft Azure store. You can find the CogCache listing here.
2️⃣ Generate a CogCache API key
To authenticate the CogCache Proxy API, you need a CogCache API key. You can easily generate an API key during onboarding. Alternatively, you can go to the Keys page to generate it there.
3️⃣ Integrate CogCache with your setup
Integration steps:
- Set the `base_url` and use a value of `https://proxy-api.cogcache.com/v1/`.
- Add the CogCache authorization header and set the CogCache API key as its value.
- Since you're not using your own LLM, also set the value of the `api_key` to the CogCache API key.
- Choose the right model for you from this table by setting the `COGCACHE_LLM_MODEL` value.
import OpenAI from "openai";
// Fill these in before running the sample.
const COGCACHE_LLM_MODEL = ""; // the model of choice
const COGCACHE_API_KEY = ""; // the generated CogCache API key
// Route all OpenAI SDK traffic through the CogCache proxy.
// The CogCache API key is supplied twice on purpose: once as the SDK's
// apiKey and once as an explicit Authorization header, per the
// integration steps above.
const openai = new OpenAI({
baseURL: "https://proxy-api.cogcache.com/v1/",
apiKey: COGCACHE_API_KEY,
defaultHeaders: {
Authorization: `Bearer ${COGCACHE_API_KEY}`,
},
});
/**
 * Request a streamed chat completion through the CogCache proxy and
 * print the generated text as it arrives.
 *
 * Fix: the original printed every streamed delta with console.log,
 * which appends a newline after each fragment and breaks the reply
 * text onto a new line per chunk. process.stdout.write emits the
 * fragments contiguously; a single trailing newline is added when the
 * stream ends.
 */
async function main() {
  try {
    const response = await openai.chat.completions.create({
      messages: [
        {
          role: "system",
          content: "Assistant is a large language model trained by OpenAI.",
        },
        {
          role: "user",
          content: "Write a blog post about Generative AI",
        },
      ],
      model: COGCACHE_LLM_MODEL,
      stream: true, // deltas are delivered incrementally
    });

    // With stream: true the SDK returns an async-iterable stream of
    // chunks; guard in case a non-iterable body comes back.
    if (response[Symbol.asyncIterator]) {
      for await (const chunk of response) {
        // Each chunk carries at most a small fragment of the reply.
        const text = chunk.choices[0]?.delta?.content; // Adjust based on actual response structure
        if (text) {
          process.stdout.write(text); // no per-chunk newline
        }
      }
      process.stdout.write("\n"); // terminate the streamed output cleanly
    } else {
      console.log('Response is not an async iterable');
    }
  } catch (error) {
    console.error('An error occurred:', error);
  }
}

main();