Google Gemini
A multimodal AI model from Google, designed to handle a wide range of data types, including text, images, audio, video, and code.
This API is compatible with the OpenAI interface format.
gemini-1.5-pro
gemini-1.5-pro-001
gemini-1.5-pro-002
gemini-1.5-flash-001
gemini-1.5-flash-002
gemini-1.5-flash-8b
gemini-1.5-pro-latest
gemini-2.0-flash
gemini-2.0-flash-lite-preview-02-05
gemini-2.5-flash-preview-04-17
gemini-2.5-flash-preview-05-20
gemini-2.5-flash
gemini-2.5-pro-preview-05-06
gemini-2.5-pro-preview-06-05
gemini-2.5-pro
gemini-2.5-flash-lite-preview-06-17
Warning: models whose names contain exp are experimental and relatively unstable; they are recommended for experimental testing only.
https://gateway.theturbo.ai/v1/chat/completions
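Because the API follows the OpenAI interface format, any OpenAI-compatible SDK can be pointed at the endpoint above by overriding its base URL. The following is a minimal sketch, assuming the third-party github.com/sashabaranov/go-openai client library (an assumption for illustration, not part of this service):

package main

import (
    "context"
    "fmt"

    openai "github.com/sashabaranov/go-openai" // assumed third-party OpenAI-compatible client
)

func main() {
    // Point the OpenAI-style client at the gateway instead of api.openai.com.
    cfg := openai.DefaultConfig("YOUR_API_KEY") // replace with your real key
    cfg.BaseURL = "https://gateway.theturbo.ai/v1"
    client := openai.NewClientWithConfig(cfg)

    resp, err := client.CreateChatCompletion(context.Background(), openai.ChatCompletionRequest{
        Model: "gemini-1.5-pro",
        Messages: []openai.ChatCompletionMessage{
            {Role: openai.ChatMessageRoleUser, Content: "Hello"},
        },
    })
    if err != nil {
        fmt.Println("request failed:", err)
        return
    }
    fmt.Println(resp.Choices[0].Message.Content)
}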
3.1 Header Parameters
Parameter | Type | Required | Description | Example
Content-Type | string | Yes | Request content type; must be application/json | application/json
Accept | string | Yes | Response type; application/json is recommended | application/json
Authorization | string | Yes | API key used for authentication, in the format Bearer $YOUR_API_KEY | Bearer $YOUR_API_KEY
3.2 Body Parameters (application/json)
Parameter | Type | Required | Description | Example
model | string | Yes | ID of the model to use. See the available versions listed in the overview, e.g. gemini-1.5-pro. | gemini-1.5-pro
messages | array | Yes | List of chat messages. Each object in the array contains role and content. | [{"role": "user","content": "Hello"}]
role | string | No | Message role. Allowed values: system, user, assistant. | user
content | string | No | The actual message content. | Hello, please tell me a joke.
temperature | number | No | Sampling temperature, range 0~2. Higher values make the output more random; lower values make it more focused and deterministic. | 0.7
top_p | number | No | An alternative way to shape the sampling distribution, range 0~1. Usually set either this or temperature, not both. | 0.9
n | number | No | Number of replies to generate for each input message. | 1
stream | boolean | No | Whether to enable streaming output. When set to true, data is returned as a ChatGPT-style stream. | false
stop | string | No | Up to 4 strings may be specified; generation stops as soon as one of them appears in the output. | \n
max_tokens | number | No | Maximum number of tokens a single reply may generate, limited by the model's context length. | 1024
presence_penalty | number | No | -2.0 to 2.0. Positive values encourage the model to introduce new topics; negative values reduce the probability of new topics. | 0
frequency_penalty | number | No | -2.0 to 2.0. Positive values reduce the model's tendency to repeat the same phrases; negative values increase repetition. | 0
reasoning_effort | string | No | Controls how much "reasoning effort" the model spends on a task. Currently only supported by gemini-2.5-flash-preview-04-17. Allowed values: low, medium, high, none. Default is low. | low
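For statically typed clients it can be convenient to model the request body directly. The following is a minimal sketch in Go; the ChatRequest and ChatMessage type names are illustrative, not defined by the API:

package main

import (
    "encoding/json"
    "fmt"
)

// ChatMessage and ChatRequest mirror the body parameters listed above.
type ChatMessage struct {
    Role    string `json:"role"`    // system, user or assistant
    Content string `json:"content"` // plain-text message content
}

type ChatRequest struct {
    Model            string        `json:"model"`
    Messages         []ChatMessage `json:"messages"`
    Temperature      float64       `json:"temperature,omitempty"`
    TopP             float64       `json:"top_p,omitempty"`
    N                int           `json:"n,omitempty"`
    Stream           bool          `json:"stream,omitempty"`
    Stop             string        `json:"stop,omitempty"`
    MaxTokens        int           `json:"max_tokens,omitempty"`
    PresencePenalty  float64       `json:"presence_penalty,omitempty"`
    FrequencyPenalty float64       `json:"frequency_penalty,omitempty"`
    ReasoningEffort  string        `json:"reasoning_effort,omitempty"` // low, medium, high or none
}

func main() {
    body, _ := json.Marshal(ChatRequest{
        Model:       "gemini-1.5-pro",
        Messages:    []ChatMessage{{Role: "user", Content: "Hello"}},
        Temperature: 0.7,
        MaxTokens:   1024,
    })
    fmt.Println(string(body)) // ready to be sent as the POST body
}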
4.1 Text Generation

POST /v1/chat/completions
Content-Type: application/json
Accept: application/json
Authorization: Bearer $YOUR_API_KEY
{
  "model": "gemini-1.5-pro",
  "messages": [
    {
      "role": "user",
      "content": "Hello, please give me a brief introduction to quantum mechanics"
    }
  ],
  "temperature": 0.7,
  "max_tokens": 1024
}
curl https://gateway.theturbo.ai/v1/chat/completions \
  -H "Content-Type: application/json" \
  -H "Accept: application/json" \
  -H "Authorization: Bearer $YOUR_API_KEY" \
  -d "{
    \"model\": \"gemini-1.5-pro\",
    \"messages\": [{
      \"role\": \"user\",
      \"content\": \"Hello, please give me a brief introduction to quantum mechanics\"
    }]
  }"
package main

import (
    "fmt"
    "io"
    "net/http"
    "strings"
)

const (
    // Replace with your real API key.
    YOUR_API_KEY = "sk-123456789012345678901234567890123456789012345678"

    REQUEST_PAYLOAD = `{
        "model": "gemini-1.5-pro",
        "messages": [{
            "role": "user",
            "content": "Hello, please give me a brief introduction to quantum mechanics"
        }],
        "temperature": 0.7,
        "max_tokens": 1024
    }`
)

func main() {
    requestURL := "https://gateway.theturbo.ai/v1/chat/completions"
    requestMethod := "POST"
    requestPayload := strings.NewReader(REQUEST_PAYLOAD)

    req, err := http.NewRequest(requestMethod, requestURL, requestPayload)
    if err != nil {
        fmt.Println("Create request failed, err: ", err)
        return
    }
    req.Header.Add("Content-Type", "application/json")
    req.Header.Add("Accept", "application/json")
    req.Header.Add("Authorization", "Bearer "+YOUR_API_KEY)

    client := &http.Client{}
    resp, err := client.Do(req)
    if err != nil {
        fmt.Println("Do request failed, err: ", err)
        return
    }
    defer resp.Body.Close()

    respBodyBytes, err := io.ReadAll(resp.Body)
    if err != nil {
        fmt.Println("Read response body failed, err: ", err)
        return
    }
    fmt.Println(string(respBodyBytes))
}
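When stream is set to true, the gateway returns ChatGPT-style streaming data. The following is a rough sketch of consuming such a stream in Go, assuming the usual OpenAI-style server-sent-events framing (each payload line prefixed with "data: " and the stream terminated by "data: [DONE]"); verify the exact framing against real responses:

package main

import (
    "bufio"
    "fmt"
    "net/http"
    "strings"
)

func main() {
    // Same request as above, but with "stream": true in the payload.
    payload := `{
        "model": "gemini-1.5-pro",
        "messages": [{"role": "user", "content": "Hello"}],
        "stream": true
    }`

    req, err := http.NewRequest("POST", "https://gateway.theturbo.ai/v1/chat/completions", strings.NewReader(payload))
    if err != nil {
        fmt.Println("Create request failed, err: ", err)
        return
    }
    req.Header.Add("Content-Type", "application/json")
    req.Header.Add("Authorization", "Bearer YOUR_API_KEY") // replace with your real key

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        fmt.Println("Do request failed, err: ", err)
        return
    }
    defer resp.Body.Close()

    // Read the stream line by line and print each chunk as it arrives.
    scanner := bufio.NewScanner(resp.Body)
    for scanner.Scan() {
        line := strings.TrimSpace(scanner.Text())
        if !strings.HasPrefix(line, "data: ") {
            continue
        }
        data := strings.TrimPrefix(line, "data: ")
        if data == "[DONE]" {
            break
        }
        fmt.Println(data) // each chunk is typically a chat.completion.chunk JSON object
    }
}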
4.2 Media File Understanding
Media file understanding supports documents, images, audio, and video. See the "Supported Media Types" list below for the full set of accepted formats.
POST /v1/chat/completions
Content-Type: application/json
Accept: application/json
Authorization: Bearer $YOUR_API_KEY
{
  "model": "gemini-1.5-pro",
  "messages": [
    {
      "role": "user",
      "content": [
        {
          "type": "text",
          "text": "What is in this picture?"
        },
        {
          "type": "image_url",
          "image_url": {
            "url": "data:image/jpeg;base64,${base64_image}"
          }
        }
      ]
    }
  ],
  "temperature": 0.7,
  "max_tokens": 1024
}
base64_image=$(base64 -i "Path/to/agi/image.jpeg");
curl https://gateway.theturbo.ai/v1/chat/completions \
  -H "Content-Type: application/json" \
  -H "Accept: application/json" \
  -H "Authorization: Bearer $YOUR_API_KEY" \
  -d "{
    \"model\": \"gemini-1.5-pro\",
    \"messages\": [{
      \"role\": \"user\",
      \"content\": [{
        \"type\": \"text\",
        \"text\": \"What is in this picture?\"
      },
      {
        \"type\": \"image_url\",
        \"image_url\": {
          \"url\": \"data:image/jpeg;base64,${base64_image}\"
        }
      }]
    }]
  }"
package main

import (
    "fmt"
    "io"
    "net/http"
    "strings"
)

const (
    // Replace with your real API key.
    YOUR_API_KEY = "sk-123456789012345678901234567890123456789012345678"

    // Replace ${base64_image} with the base64-encoded contents of your image.
    REQUEST_PAYLOAD = `{
        "model": "gemini-1.5-pro",
        "messages": [{
            "role": "user",
            "content": [{
                "type": "text",
                "text": "What is in this picture?"
            },
            {
                "type": "image_url",
                "image_url": {
                    "url": "data:image/jpeg;base64,${base64_image}"
                }
            }]
        }],
        "temperature": 0.7,
        "max_tokens": 1024
    }`
)

func main() {
    requestURL := "https://gateway.theturbo.ai/v1/chat/completions"
    requestMethod := "POST"
    requestPayload := strings.NewReader(REQUEST_PAYLOAD)

    req, err := http.NewRequest(requestMethod, requestURL, requestPayload)
    if err != nil {
        fmt.Println("Create request failed, err: ", err)
        return
    }
    req.Header.Add("Content-Type", "application/json")
    req.Header.Add("Accept", "application/json")
    req.Header.Add("Authorization", "Bearer "+YOUR_API_KEY)

    client := &http.Client{}
    resp, err := client.Do(req)
    if err != nil {
        fmt.Println("Do request failed, err: ", err)
        return
    }
    defer resp.Body.Close()

    respBodyBytes, err := io.ReadAll(resp.Body)
    if err != nil {
        fmt.Println("Read response body failed, err: ", err)
        return
    }
    fmt.Println(string(respBodyBytes))
}
Supported Media Types:

image/png
image/jpeg
image/webp
image/heic
image/heif
audio/wav
audio/mp3
audio/aiff
audio/aac
audio/ogg
audio/flac
video/mp4
video/mpeg
video/mov
video/avi
video/x-flv
video/mpg
video/webm
video/wmv
video/3gpp
application/pdf
application/x-javascript
text/javascript
application/x-python
text/x-python
text/plain
text/html
text/css
text/md
text/csv
text/xml
text/rtf
This API is compatible with the OpenAI interface format and uses OpenAI's image_url field to carry all media types.
Note: media files can only be submitted as base64-encoded data; HTTP URLs are not supported.
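Because only base64 upload is accepted, the data: URL has to be assembled from the file contents before the request is sent. The following is a minimal sketch in Go; the file path and MIME type below are placeholders for illustration, not values defined by the API:

package main

import (
    "encoding/base64"
    "fmt"
    "os"
)

func main() {
    // Read the local media file; replace the path with your own file.
    raw, err := os.ReadFile("path/to/image.jpeg")
    if err != nil {
        fmt.Println("Read file failed, err: ", err)
        return
    }

    // Build the data: URL expected by the image_url field.
    // The MIME type must match the file and be one of the supported media types above.
    dataURL := "data:image/jpeg;base64," + base64.StdEncoding.EncodeToString(raw)
    fmt.Printf("data URL length: %d\n", len(dataURL)) // use dataURL as the "url" value in the request body
}

The next set of examples shows function calling: the request declares a get_current_weather tool in the tools array and lets the model decide whether to call it via tool_choice set to auto.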
POST /v1/chat/completions
Content-Type: application/json
Accept: application/json
Authorization: Bearer $YOUR_API_KEY
{
  "model": "gemini-1.5-pro",
  "messages": [{
    "role": "user",
    "content": "What's the weather like in Boston today?"
  }],
  "tools": [{
    "type": "function",
    "function": {
      "name": "get_current_weather",
      "description": "Get the current weather in a given location",
      "parameters": {
        "type": "object",
        "properties": {
          "location": {
            "type": "string",
            "description": "The city and state, e.g. San Francisco, CA"
          },
          "unit": {
            "type": "string",
            "enum": ["celsius", "fahrenheit"]
          }
        },
        "required": ["location"]
      }
    }
  }],
  "tool_choice": "auto"
}
curl https://gateway.theturbo.ai/v1/chat/completions \
  -H "Content-Type: application/json" \
  -H "Accept: application/json" \
  -H "Authorization: Bearer $YOUR_API_KEY" \
  -d "{
    \"model\": \"gemini-1.5-pro\",
    \"messages\": [{
      \"role\": \"user\",
      \"content\": \"What's the weather like in Boston today?\"
    }],
    \"tools\": [{
      \"type\": \"function\",
      \"function\": {
        \"name\": \"get_current_weather\",
        \"description\": \"Get the current weather in a given location\",
        \"parameters\": {
          \"type\": \"object\",
          \"properties\": {
            \"location\": {
              \"type\": \"string\",
              \"description\": \"The city and state, e.g. San Francisco, CA\"
            },
            \"unit\": {
              \"type\": \"string\",
              \"enum\": [\"celsius\", \"fahrenheit\"]
            }
          },
          \"required\": [\"location\"]
        }
      }
    }],
    \"tool_choice\": \"auto\"
  }"
package main

import (
    "fmt"
    "io"
    "net/http"
    "strings"
)

const (
    // Replace with your real API key.
    YOUR_API_KEY = "sk-123456789012345678901234567890123456789012345678"

    REQUEST_PAYLOAD = `{
        "model": "gemini-1.5-pro",
        "messages": [{
            "role": "user",
            "content": "What's the weather like in Boston today?"
        }],
        "tools": [{
            "type": "function",
            "function": {
                "name": "get_current_weather",
                "description": "Get the current weather in a given location",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string",
                            "description": "The city and state, e.g. San Francisco, CA"
                        },
                        "unit": {
                            "type": "string",
                            "enum": ["celsius", "fahrenheit"]
                        }
                    },
                    "required": ["location"]
                }
            }
        }],
        "tool_choice": "auto"
    }`
)

func main() {
    requestURL := "https://gateway.theturbo.ai/v1/chat/completions"
    requestMethod := "POST"
    requestPayload := strings.NewReader(REQUEST_PAYLOAD)

    req, err := http.NewRequest(requestMethod, requestURL, requestPayload)
    if err != nil {
        fmt.Println("Create request failed, err: ", err)
        return
    }
    req.Header.Add("Content-Type", "application/json")
    req.Header.Add("Accept", "application/json")
    req.Header.Add("Authorization", "Bearer "+YOUR_API_KEY)

    client := &http.Client{}
    resp, err := client.Do(req)
    if err != nil {
        fmt.Println("Do request failed, err: ", err)
        return
    }
    defer resp.Body.Close()

    respBodyBytes, err := io.ReadAll(resp.Body)
    if err != nil {
        fmt.Println("Read response body failed, err: ", err)
        return
    }
    fmt.Println(string(respBodyBytes))
}
A successful non-streaming response follows the standard OpenAI chat.completion format:

{
  "id": "chatcmpl-1234567890",
  "object": "chat.completion",
  "created": 1699999999,
  "model": "gemini-1.5-pro",
  "choices": [
    {
      "message": {
        "role": "assistant",
        "content": "Quantum mechanics is the branch of physics that studies the microscopic world..."
      },
      "finish_reason": "stop"
    }
  ],
  "usage": {
    "prompt_tokens": 10,
    "completion_tokens": 30,
    "total_tokens": 40
  }
}
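For programmatic use, this response can be decoded into plain structs. The following is a minimal sketch in Go covering only the fields shown in the example above; the type names are illustrative, not defined by the API:

package main

import (
    "encoding/json"
    "fmt"
)

// ChatResponse mirrors the response fields shown in the example above.
type ChatResponse struct {
    ID      string `json:"id"`
    Object  string `json:"object"`
    Created int64  `json:"created"`
    Model   string `json:"model"`
    Choices []struct {
        Message struct {
            Role    string `json:"role"`
            Content string `json:"content"`
        } `json:"message"`
        FinishReason string `json:"finish_reason"`
    } `json:"choices"`
    Usage struct {
        PromptTokens     int `json:"prompt_tokens"`
        CompletionTokens int `json:"completion_tokens"`
        TotalTokens      int `json:"total_tokens"`
    } `json:"usage"`
}

func main() {
    // Sample body standing in for the bytes read from the HTTP response.
    raw := []byte(`{"id":"chatcmpl-1234567890","object":"chat.completion","created":1699999999,"model":"gemini-1.5-pro","choices":[{"message":{"role":"assistant","content":"hello"},"finish_reason":"stop"}],"usage":{"prompt_tokens":10,"completion_tokens":30,"total_tokens":40}}`)

    var resp ChatResponse
    if err := json.Unmarshal(raw, &resp); err != nil {
        fmt.Println("Decode response failed, err: ", err)
        return
    }
    fmt.Println(resp.Choices[0].Message.Content, resp.Usage.TotalTokens)
}

For function-calling requests, given the OpenAI compatibility noted above, the assistant message is expected to carry an OpenAI-style tool_calls array instead of plain content; check the actual response before relying on its exact shape.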