Skip to content

Commit a3cfd23

Browse files
tomsun28 and haiyang679 authored
feat: support glm-4.6 (#57)
Co-authored-by: haiyang679 <[email protected]>
1 parent 3120753 commit a3cfd23

File tree

10 files changed

+45
-27
lines changed

10 files changed

+45
-27
lines changed

README.md

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ Add the following dependency to your `pom.xml`:
3030
<dependency>
3131
<groupId>ai.z.openapi</groupId>
3232
<artifactId>zai-sdk</artifactId>
33-
<version>0.0.5</version>
33+
<version>0.0.6</version>
3434
</dependency>
3535
```
3636

@@ -39,7 +39,7 @@ Add the following dependency to your `build.gradle` (for Groovy DSL):
3939

4040
```groovy
4141
dependencies {
42-
implementation 'ai.z.openapi:zai-sdk:0.0.5'
42+
implementation 'ai.z.openapi:zai-sdk:0.0.6'
4343
}
4444
```
4545

@@ -124,7 +124,7 @@ ZaiClient client = ZaiClient.builder()
124124

125125
// Create chat request
126126
ChatCompletionCreateParams request = ChatCompletionCreateParams.builder()
127-
.model(Constants.ModelChatGLM4)
127+
.model("glm-4.6")
128128
.messages(Arrays.asList(
129129
ChatMessage.builder()
130130
.role(ChatMessageRole.USER.value())
@@ -152,7 +152,7 @@ if (response.isSuccess()) {
152152
```java
153153
// Create streaming request
154154
ChatCompletionCreateParams streamRequest = ChatCompletionCreateParams.builder()
155-
.model(Constants.ModelChatGLM4)
155+
.model("glm-4.6")
156156
.messages(Arrays.asList(
157157
ChatMessage.builder()
158158
.role(ChatMessageRole.USER.value())
@@ -281,7 +281,7 @@ public class AIController {
281281
@PostMapping("/chat")
282282
public ResponseEntity<String> chat(@RequestBody ChatRequest request) {
283283
ChatCompletionCreateParams params = ChatCompletionCreateParams.builder()
284-
.model(Constants.ModelChatGLM4)
284+
.model("glm-4.6")
285285
.messages(Arrays.asList(
286286
ChatMessage.builder()
287287
.role(ChatMessageRole.USER.value())

README_CN.md

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ Z.ai AI 平台官方 Java SDK,提供统一接口访问强大的AI能力,包
3030
<dependency>
3131
<groupId>ai.z.openapi</groupId>
3232
<artifactId>zai-sdk</artifactId>
33-
<version>0.0.5</version>
33+
<version>0.0.6</version>
3434
</dependency>
3535
```
3636

@@ -39,7 +39,7 @@ Z.ai AI 平台官方 Java SDK,提供统一接口访问强大的AI能力,包
3939

4040
```groovy
4141
dependencies {
42-
implementation 'ai.z.openapi:zai-sdk:0.0.5'
42+
implementation 'ai.z.openapi:zai-sdk:0.0.6'
4343
}
4444
```
4545

@@ -123,7 +123,7 @@ ZaiClient client = ZaiClient.builder()
123123

124124
// 创建对话请求
125125
ChatCompletionCreateParams request = ChatCompletionCreateParams.builder()
126-
.model(Constants.ModelChatGLM4)
126+
.model("glm-4.6")
127127
.messages(Arrays.asList(
128128
ChatMessage.builder()
129129
.role(ChatMessageRole.USER.value())
@@ -151,7 +151,7 @@ if (response.isSuccess()) {
151151
```java
152152
// 创建流式请求
153153
ChatCompletionCreateParams streamRequest = ChatCompletionCreateParams.builder()
154-
.model(Constants.ModelChatGLM4)
154+
.model("glm-4.6")
155155
.messages(Arrays.asList(
156156
ChatMessage.builder()
157157
.role(ChatMessageRole.USER.value())
@@ -282,7 +282,7 @@ public class AIController {
282282
@PostMapping("/chat")
283283
public ResponseEntity<String> chat(@RequestBody ChatRequest request) {
284284
ChatCompletionCreateParams params = ChatCompletionCreateParams.builder()
285-
.model(Constants.ModelChatGLM4)
285+
.model("glm-4.6")
286286
.messages(Arrays.asList(
287287
ChatMessage.builder()
288288
.role(ChatMessageRole.USER.value())

core/src/main/java/ai/z/openapi/core/Constants.java

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,21 @@ private Constants() {
3535
// Text Generation Models
3636
// =============================================================================
3737

38+
/**
39+
* GLM-4.6 model code
40+
*/
41+
public static final String ModelGLM4_6 = "glm-4.6";
42+
43+
/**
44+
* GLM-4.6-air model code
45+
*/
46+
public static final String ModelGLM4_6_AIR = "glm-4.6-air";
47+
48+
/**
49+
* GLM-4.6-flash model code
50+
*/
51+
public static final String ModelGLM4_6_FLASH = "glm-4.6-flash";
52+
3853
/**
3954
* GLM-4.5 model code
4055
*/

core/src/main/java/ai/z/openapi/core/token/HttpRequestInterceptor.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ public Response intercept(Chain chain) throws IOException {
4040
.newBuilder()
4141
.header("Authorization", "Bearer " + accessToken)
4242
.header("x-source-channel", source_channel)
43-
.header("Zai-SDK-Ver", "0.0.5")
43+
.header("Zai-SDK-Ver", "0.0.6")
4444
.header("Accept-Language", "en-US,en");
4545
if (Objects.nonNull(config.getCustomHeaders())) {
4646
for (Map.Entry<String, String> entry : config.getCustomHeaders().entrySet()) {

core/src/main/java/ai/z/openapi/service/model/ChatCompletionCreateParams.java

Lines changed: 14 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -42,21 +42,19 @@ public class ChatCompletionCreateParams extends CommonRequest implements ClientR
4242
private Boolean stream;
4343

4444
/**
45-
* Sampling temperature, controls output randomness, must be positive Range:
46-
* (0.0,1.0], cannot equal 0, default value is 0.95 Higher values make output more
47-
* random and creative; lower values make output more stable or deterministic It's
48-
* recommended to adjust either top_p or temperature parameter based on your use case,
49-
* but not both simultaneously
45+
* Sampling temperature, controls output randomness, must be positive Range: [0.0,1.0]
46+
* default value is 0.95 Higher values make output more random and creative; lower
47+
* values make output more stable or deterministic It's recommended to adjust either
48+
* top_p or temperature parameter based on your use case, but not both simultaneously
5049
*/
5150
private Float temperature;
5251

5352
/**
54-
* Another method for temperature sampling, called nucleus sampling Range: (0.0, 1.0)
55-
* open interval, cannot equal 0 or 1, default value is 0.7 Model considers results
56-
* with top_p probability mass tokens For example: 0.1 means the model decoder only
57-
* considers tokens from the top 10% probability candidate set It's recommended to
58-
* adjust either top_p or temperature parameter based on your use case, but not both
59-
* simultaneously
53+
* Another method for temperature sampling, called nucleus sampling Range: (0.0, 1.0]
54+
* Model considers results with top_p probability mass tokens For example: 0.1 means
55+
* the model decoder only considers tokens from the top 10% probability candidate set
56+
* It's recommended to adjust either top_p or temperature parameter based on your use
57+
* case, but not both simultaneously
6058
*/
6159
@JsonProperty("top_p")
6260
private Float topP;
@@ -118,6 +116,11 @@ public class ChatCompletionCreateParams extends CommonRequest implements ClientR
118116
*/
119117
private ChatThinking thinking;
120118

119+
/**
120+
* Whether to stream tool calls
121+
*/
122+
private Boolean tool_stream;
123+
121124
/**
122125
* Forced watermark switch
123126
*/

pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@
4545
</scm>
4646

4747
<properties>
48-
<revision>0.0.5.1</revision>
48+
<revision>0.0.6</revision>
4949
<java.version>8</java.version>
5050
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
5151
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>

samples/src/main/ai.z.openapi.samples/ChatAsyncCompletionExample.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ public static void main(String[] args) {
3030

3131
// Create chat request
3232
ChatCompletionCreateParams request = ChatCompletionCreateParams.builder()
33-
.model(Constants.ModelChatGLM4_5)
33+
.model("glm-4.6")
3434
.messages(Arrays.asList(
3535
ChatMessage.builder()
3636
.role(ChatMessageRole.USER.value())

samples/src/main/ai.z.openapi.samples/ChatCompletionExample.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ public static void main(String[] args) {
2525

2626
// Create chat request
2727
ChatCompletionCreateParams request = ChatCompletionCreateParams.builder()
28-
.model(Constants.ModelChatGLM4_5)
28+
.model("glm-4.6")
2929
.messages(Arrays.asList(
3030
ChatMessage.builder()
3131
.role(ChatMessageRole.USER.value())

samples/src/main/ai.z.openapi.samples/ChatCompletionStreamExample.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ public static void main(String[] args) {
1818

1919
// Create chat request
2020
ChatCompletionCreateParams streamRequest = ChatCompletionCreateParams.builder()
21-
.model(Constants.ModelChatGLM4_5)
21+
.model("glm-4.6")
2222
.messages(Arrays.asList(
2323
ChatMessage.builder()
2424
.role(ChatMessageRole.USER.value())

samples/src/main/ai.z.openapi.samples/ChatCompletionWithCustomHeadersExample.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ public static void main(String[] args) {
2222

2323
// Create chat request
2424
ChatCompletionCreateParams request = ChatCompletionCreateParams.builder()
25-
.model(Constants.ModelChatGLM4_5)
25+
.model("glm-4.6")
2626
.messages(Arrays.asList(
2727
ChatMessage.builder()
2828
.role(ChatMessageRole.USER.value())

0 commit comments

Comments (0)