Deepseek API Examples
This page provides examples of using the Agentsflare Deepseek API to help you quickly integrate and use our AI services.
Basic Configuration
Before starting to use the API, please ensure you have obtained an API Key. If not, please refer to Create API Key.
Basic Information
- API Base URL:
https://api.agentsflare.com/v1/chat/completions
- Authentication Method: Bearer Token
- Content Type:
application/json
Request Examples
bash
# Minimal chat-completion request against the Agentsflare gateway.
# Replace YOUR_API_KEY with the key created in the dashboard.
curl -X POST "https://api.agentsflare.com/v1/chat/completions" \
  -H "Authorization: Bearer YOUR_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{
    "model": "deepseek-chat",
    "messages": [
      {
        "role": "user",
        "content": "Hello, how are you?"
      }
    ],
    "max_tokens": 100,
    "temperature": 0.7
  }'
python
import os

from openai import OpenAI

# Point the OpenAI-compatible client at the Agentsflare gateway
# instead of api.openai.com.
client = OpenAI(
    base_url="https://api.agentsflare.com/v1",
    api_key=os.environ["AGENTSFLARE_API_KEY"],  # keep keys out of source code
)

completion = client.chat.completions.create(
    model="deepseek-chat",
    messages=[
        # "You are a helpful assistant." is a system prompt, not a user turn.
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello, how are you?"},
    ],
    max_tokens=100,
    temperature=0.7,
)

# Print only the reply text rather than the whole message object.
print(completion.choices[0].message.content)
javascript
import OpenAI from "openai";

// Point the OpenAI SDK at the Agentsflare gateway instead of api.openai.com.
const client = new OpenAI({
  apiKey: process.env.AGENTSFLARE_API_KEY, // keep keys out of source code
  baseURL: "https://api.agentsflare.com/v1"
});

async function main() {
  try {
    const res = await client.chat.completions.create({
      model: "deepseek-chat",
      messages: [{ role: "user", content: "Hello, how are you?" }],
      max_tokens: 100,
      temperature: 0.7
    });

    // Only the reply text:
    console.log(res.choices?.[0]?.message?.content);
    // Or print the full response:
    // console.log(res);
  } catch (err) {
    // The openai v4 SDK throws OpenAI.APIError with status/message fields;
    // it does NOT use the axios-style err.response.data shape.
    if (err instanceof OpenAI.APIError) {
      console.error(`API error ${err.status}: ${err.message}`);
    } else {
      console.error(err);
    }
  }
}

main();
java
import com.openai.client.OpenAIClient;
import com.openai.client.okhttp.OpenAIOkHttpClient;
import com.openai.models.chat.completions.ChatCompletionCreateParams;
import com.openai.models.chat.completions.ChatCompletion;

public class Main {
    public static void main(String[] args) {
        // Keep the key out of source code.
        String apiKey = System.getenv("AGENTSFLARE_API_KEY");
        if (apiKey == null || apiKey.isBlank()) {
            throw new IllegalStateException("Missing AGENTSFLARE_API_KEY env var");
        }

        // Point the SDK at the Agentsflare gateway instead of api.openai.com.
        OpenAIClient client = OpenAIOkHttpClient.builder()
                .apiKey(apiKey)
                .baseUrl("https://api.agentsflare.com/v1")
                .build();

        // openai-java adds plain user messages via addUserMessage(...);
        // there is no public ChatCompletionCreateParams.Message builder type.
        ChatCompletionCreateParams params = ChatCompletionCreateParams.builder()
                .model("deepseek-chat")
                .addUserMessage("Hello, how are you?")
                .maxTokens(100)
                .temperature(0.7)
                .build();

        ChatCompletion res = client.chat().completions().create(params);

        // message().content() is an Optional<String> in openai-java;
        // fall back to an empty string if the model returned no text.
        String content = res.choices().get(0).message().content().orElse("");
        System.out.println(content);
    }
}
go
package main
import (
"context"
"fmt"
"log"
"os"
openai "github.com/openai/openai-go"
"github.com/openai/openai-go/option"
)
func main() {
apiKey := os.Getenv("AGENTSFLARE_API_KEY") // Recommended to use environment variables
if apiKey == "" {
log.Fatal("missing env AGENTSFLARE_API_KEY")
}
client := openai.NewClient(
option.WithAPIKey(apiKey),
// Key: point the SDK's base url to agentsflare
option.WithBaseURL("https://api.agentsflare.com/v1"),
)
ctx := context.Background()
resp, err := client.Chat.Completions.New(ctx, openai.ChatCompletionNewParams{
Model: openai.F("deepseek-chat"),
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
openai.UserMessage("Hello, how are you?"),
}),
MaxTokens: openai.F(int64(100)),
Temperature: openai.F(0.7),
})
if err != nil {
log.Fatalf("chat completion failed: %v", err)
}
// Print the reply text
if len(resp.Choices) > 0 && resp.Choices[0].Message.Content != "" {
fmt.Println(resp.Choices[0].Message.Content)
} else {
fmt.Printf("empty response: %+v\n", resp)
}
}javascript
const { OpenAI } = require("openai");
const client = new OpenAI({
apiKey: process.env.AGENTSFLARE_API_KEY,
baseURL: "https://api.agentsflare.com/v1"
});
async function main() {
try {
const res = await client.chat.completions.create({
model: "deepseek-chat",
messages: [{ role: "user", content: "Hello, how are you?" }],
max_tokens: 100,
temperature: 0.7
});
// You can also get only the text
console.log(res.choices?.[0]?.message?.content);
// Or print the full response
// console.log(res);
} catch (err) {
// The OpenAI SDK error object usually has more detailed response
console.error(err?.response?.data ?? err);
}
}
main();Response Example
{
"choices": [
{
"content_filter_results": {
"hate": {
"filtered": false,
"severity": "safe"
},
"protected_material_code": {
"filtered": false,
"detected": false
},
"protected_material_text": {
"filtered": false,
"detected": false
},
"self_harm": {
"filtered": false,
"severity": "safe"
},
"sexual": {
"filtered": false,
"severity": "safe"
},
"violence": {
"filtered": false,
"severity": "safe"
}
},
"finish_reason": "stop",
"index": 0,
"logprobs": null,
"message": {
"annotations": [
],
"content": "Hello! How can I help you today?",
"refusal": null,
"role": "assistant"
}
}
],
"created": 1767765293,
"id": "chatcmpl-CvGnN6pDPthK2Aw5pToFy1K098dhV",
"model": "deepseek-chat",
"object": "chat.completion",
"prompt_filter_results": [
{
"prompt_index": 0,
"content_filter_results": {
"hate": {
"filtered": false,
"severity": "safe"
},
"jailbreak": {
"filtered": false,
"detected": false
},
"self_harm": {
"filtered": false,
"severity": "safe"
},
"sexual": {
"filtered": false,
"severity": "safe"
},
"violence": {
"filtered": false,
"severity": "safe"
}
}
}
],
"system_fingerprint": "fp_3dcd5944f5",
"usage": {
"completion_tokens": 11,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens": 10,
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
},
"total_tokens": 21
}
}
Request Parameters
For the full list of supported request parameters, see the Chat Completions API reference.