Skip to content

Commit b6af173

Browse files
stainless-app[bot] authored and stainless-bot committed
feat(api): OpenAPI spec update via Stainless API (#62)
1 parent bad19ff commit b6af173

File tree

5 files changed

+42
-49
lines changed

5 files changed

+42
-49
lines changed

.stats.yml

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -1,2 +1,2 @@
11
configured_endpoints: 15
2-
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/togetherai%2FTogetherAI-0e56a0b2cfbbe66858d33d487ae2bef80a5acdaf99bf16029ef2006d5804b27d.yml
2+
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/togetherai%2FTogetherAI-4365478ddf8533c4aeced4c0be4bde57b5930a13385c7500da6e4eb493b2cbcc.yml

src/core.ts

Lines changed: 0 additions & 5 deletions
Original file line number · Diff line number · Diff line change
@@ -994,11 +994,6 @@ const validatePositiveInteger = (name: string, n: unknown): number => {
994994

995995
export const castToError = (err: any): Error => {
996996
if (err instanceof Error) return err;
997-
if (typeof err === 'object' && err !== null) {
998-
try {
999-
return new Error(JSON.stringify(err));
1000-
} catch {}
1001-
}
1002997
return new Error(err);
1003998
};
1004999

src/error.ts

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -49,7 +49,7 @@ export class APIError extends TogetherError {
4949
headers: Headers | undefined,
5050
) {
5151
if (!status) {
52-
return new APIConnectionError({ message, cause: castToError(errorResponse) });
52+
return new APIConnectionError({ cause: castToError(errorResponse) });
5353
}
5454

5555
const error = errorResponse as Record<string, any>;
@@ -101,7 +101,7 @@ export class APIUserAbortError extends APIError {
101101
export class APIConnectionError extends APIError {
102102
override readonly status: undefined = undefined;
103103

104-
constructor({ message, cause }: { message?: string | undefined; cause?: Error | undefined }) {
104+
constructor({ message, cause }: { message?: string; cause?: Error | undefined }) {
105105
super(undefined, undefined, message || 'Connection error.', undefined);
106106
// in some environments the 'cause' property is already declared
107107
// @ts-ignore

src/resources/fine-tune.ts

Lines changed: 38 additions & 36 deletions
Original file line number · Diff line number · Diff line change
@@ -79,16 +79,6 @@ export interface FineTune {
7979

8080
learning_rate?: number;
8181

82-
lora?: boolean;
83-
84-
lora_alpha?: number;
85-
86-
lora_dropout?: number;
87-
88-
lora_r?: number;
89-
90-
lora_trainable_modules?: string;
91-
9282
model?: string;
9383

9484
model_output_name?: string;
@@ -111,6 +101,8 @@ export interface FineTune {
111101

112102
training_file?: string;
113103

104+
training_type?: FineTune.FullTrainingType | FineTune.LoRaTrainingType;
105+
114106
trainingfile_numlines?: number;
115107

116108
trainingfile_size?: number;
@@ -169,6 +161,22 @@ export namespace FineTune {
169161

170162
wandb_url?: string;
171163
}
164+
165+
export interface FullTrainingType {
166+
type: 'Full';
167+
}
168+
169+
export interface LoRaTrainingType {
170+
lora_alpha: number;
171+
172+
lora_r: number;
173+
174+
type: 'Lora';
175+
176+
lora_dropout?: number;
177+
178+
lora_trainable_modules?: string;
179+
}
172180
}
173181

174182
export interface FineTuneEvent {
@@ -269,32 +277,6 @@ export interface FineTuneCreateParams {
269277
*/
270278
learning_rate?: number;
271279

272-
/**
273-
* Whether to enable LoRA training. If not provided, full fine-tuning will be
274-
* applied.
275-
*/
276-
lora?: boolean;
277-
278-
/**
279-
* The alpha value for LoRA adapter training.
280-
*/
281-
lora_alpha?: number;
282-
283-
/**
284-
* The dropout probability for Lora layers.
285-
*/
286-
lora_dropout?: number;
287-
288-
/**
289-
* Rank for LoRA adapter weights
290-
*/
291-
lora_r?: number;
292-
293-
/**
294-
* A list of LoRA trainable modules, separated by a comma
295-
*/
296-
lora_trainable_modules?: string;
297-
298280
/**
299281
* Number of checkpoints to save during fine-tuning
300282
*/
@@ -315,6 +297,8 @@ export interface FineTuneCreateParams {
315297
*/
316298
suffix?: string;
317299

300+
training_type?: FineTuneCreateParams.FullTrainingType | FineTuneCreateParams.LoRaTrainingType;
301+
318302
/**
319303
* File-ID of a validation file uploaded to the Together API
320304
*/
@@ -326,6 +310,24 @@ export interface FineTuneCreateParams {
326310
wandb_api_key?: string;
327311
}
328312

313+
export namespace FineTuneCreateParams {
314+
export interface FullTrainingType {
315+
type: 'Full';
316+
}
317+
318+
export interface LoRaTrainingType {
319+
lora_alpha: number;
320+
321+
lora_r: number;
322+
323+
type: 'Lora';
324+
325+
lora_dropout?: number;
326+
327+
lora_trainable_modules?: string;
328+
}
329+
}
330+
329331
export interface FineTuneDownloadParams {
330332
/**
331333
* Fine-tune ID to download. A string that starts with `ft-`.

tests/api-resources/fine-tune.test.ts

Lines changed: 1 addition & 5 deletions
Original file line number · Diff line number · Diff line change
@@ -26,15 +26,11 @@ describe('resource fineTune', () => {
2626
training_file: 'training_file',
2727
batch_size: 0,
2828
learning_rate: 0,
29-
lora: true,
30-
lora_alpha: 0,
31-
lora_dropout: 0,
32-
lora_r: 0,
33-
lora_trainable_modules: 'lora_trainable_modules',
3429
n_checkpoints: 0,
3530
n_epochs: 0,
3631
n_evals: 0,
3732
suffix: 'suffix',
33+
training_type: { type: 'Full' },
3834
validation_file: 'validation_file',
3935
wandb_api_key: 'wandb_api_key',
4036
});

0 commit comments

Comments (0)