diff --git a/spring-ai-commons/src/main/java/org/springframework/ai/embedding/TokenCountBatchingStrategy.java b/spring-ai-commons/src/main/java/org/springframework/ai/embedding/TokenCountBatchingStrategy.java
index f421b11591e..584a471a721 100644
--- a/spring-ai-commons/src/main/java/org/springframework/ai/embedding/TokenCountBatchingStrategy.java
+++ b/spring-ai-commons/src/main/java/org/springframework/ai/embedding/TokenCountBatchingStrategy.java
@@ -32,8 +32,8 @@
 /**
  * Token count based strategy implementation for {@link BatchingStrategy}. Using openai
- * max input token as the default:
- * https://platform.openai.com/docs/guides/embeddings/embedding-models.
+ * max input token as the default: <a href=
+ * "https://platform.openai.com/docs/guides/embeddings/embedding-models">embedding-models</a>.
  *
  * This strategy incorporates a reserve percentage to provide a buffer for potential
  * overhead or unexpected increases in token count during processing. The actual max input