11import type { NextApiRequest , NextApiResponse } from 'next' ;
2+ import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller' ;
23import { getUploadModel } from '@fastgpt/service/common/file/multer' ;
34import { authDataset } from '@fastgpt/service/support/permission/dataset/auth' ;
45import { type FileCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api' ;
6+ import { removeFilesByPaths } from '@fastgpt/service/common/file/utils' ;
57import { createCollectionAndInsertData } from '@fastgpt/service/core/dataset/collection/controller' ;
68import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants' ;
9+ import { getNanoid } from '@fastgpt/global/common/string/tools' ;
10+ import { BucketNameEnum } from '@fastgpt/global/common/file/constants' ;
711import { NextAPI } from '@/service/middleware/entry' ;
812import { WritePermissionVal } from '@fastgpt/global/support/permission/constant' ;
913import { type CreateCollectionResponse } from '@/global/core/dataset/api' ;
10- import { getS3DatasetSource } from '@fastgpt/service/common/s3/sources/dataset' ;
11- import { removeS3TTL } from '@fastgpt/service/common/s3/utils' ;
1214
// NOTE(review): this span is a unified-diff paste (GitHub view) — lines whose embedded
// marker ends in `-` are the REMOVED S3-based implementation, lines ending in `+` are the
// ADDED multer/GridFS implementation, unmarked lines are shared context. The `@@` hunk
// headers below hide some lines of the function (the remaining authDataset options and
// the tail of createCollectionParams), so the full definition is not visible here.
//
// Purpose (of the added version, as visible): receive a multipart file upload for a
// dataset, authenticate write permission on the dataset, persist the file via
// uploadFile(...) into the `dataset` bucket, delete the multer temp file, then create a
// dataset collection from it via createCollectionAndInsertData(...).
1315async function handler ( req : NextApiRequest , res : NextApiResponse < any > ) : CreateCollectionResponse {
// Temp-file paths collected so the catch block can clean them up on failure.
16+ let filePaths : string [ ] = [ ] ;
17+
1418 try {
// --- removed (old S3 flow): read the whole upload into a buffer and hand-parse
// --- the JSON `data` field, swallowing parse errors into `{}`.
15- const upload = getUploadModel ( { maxSize : global . feConfigs ?. uploadFileMaxSize } ) ;
16- const { buffer, originalname } = await upload . getFileBuffer ( req , res ) ;
17- const data = ( ( ) => {
18- try {
19- return JSON . parse ( req . body ?. data || '{}' ) ;
20- } catch ( error ) {
21- return { } ;
22- }
23- } ) ( ) as FileCreateDatasetCollectionParams ;
// --- added (new flow): multer writes the upload to a temp file on disk; getUploadFile
// --- also returns the parsed `data` payload and the target bucket name.
19+ // Create multer uploader
20+ const upload = getUploadModel ( {
21+ maxSize : global . feConfigs ?. uploadFileMaxSize
22+ } ) ;
23+ const { file, data, bucketName } =
24+ await upload . getUploadFile < FileCreateDatasetCollectionParams > (
25+ req ,
26+ res ,
27+ BucketNameEnum . dataset
28+ ) ;
// BUG(review): `file.path` is dereferenced here, BEFORE the `!file` guard below.
// If `file` were ever undefined this line would throw a TypeError first, making the
// guard unreachable — the check should precede this assignment.
29+ filePaths = [ file . path ] ;
30+
31+ if ( ! file || ! bucketName ) {
32+ throw new Error ( 'file is empty' ) ;
33+ }
2434
// Authenticate and authorize against the target dataset (further options — e.g. the
// use of the imported WritePermissionVal — are hidden inside the hunk gap below;
// TODO confirm against the full file).
2535 const { teamId, tmbId, dataset } = await authDataset ( {
2636 req,
@@ -30,24 +40,29 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>): CreateCo
3040 datasetId : data . datasetId
3141 } ) ;
3242
33- const s3DatasetSource = getS3DatasetSource ( ) ;
// Split payload: fileMetadata goes onto the stored file, collectionMetadata and the
// rest feed collection creation; the collection is named after the uploaded file.
43+ const { fileMetadata, collectionMetadata, ...collectionData } = data ;
44+ const collectionName = file . originalname ;
3445
// --- removed: old S3 upload from the in-memory buffer …
35- const fileId = await s3DatasetSource . uploadDatasetFileByBuffer ( {
36- datasetId : String ( dataset . _id ) ,
37- buffer,
38- filename : originalname
// --- added: upload the temp file into the storage bucket, attaching fileMetadata.
46+ // 1. upload file
47+ const fileId = await uploadFile ( {
48+ teamId,
49+ uid : tmbId ,
50+ bucketName,
51+ path : file . path ,
52+ filename : file . originalname ,
53+ contentType : file . mimetype ,
54+ metadata : fileMetadata
3955 } ) ;
4056
// --- removed: the S3 flow cleared the TTL so the uploaded object became permanent.
41- await removeS3TTL ( { key : fileId , bucketName : 'private' } ) ;
42-
43- const { fileMetadata, collectionMetadata, ...collectionData } = data ;
// NOTE(review): called without `await` — if removeFilesByPaths is async, a rejection
// here is unhandled (fire-and-forget cleanup). TODO confirm its signature.
57+ // 2. delete tmp file
58+ removeFilesByPaths ( filePaths ) ;
4459
4560 // 3. Create collection
// Tail of createCollectionParams (presumably fileId / collectionMetadata) is hidden in
// the hunk gap below — TODO confirm against the full file.
4661 const { collectionId, insertResults } = await createCollectionAndInsertData ( {
4762 dataset,
4863 createCollectionParams : {
4964 ...collectionData ,
50- name : originalname ,
65+ name : collectionName ,
5166 teamId,
5267 tmbId,
5368 type : DatasetCollectionTypeEnum . file ,
@@ -61,6 +76,8 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>): CreateCo
6176
6277 return { collectionId, results : insertResults } ;
6378 } catch ( error ) {
// Added: best-effort temp-file cleanup on any failure path (same un-awaited call as
// above), then propagate the error to the NextAPI middleware.
79+ removeFilesByPaths ( filePaths ) ;
80+
6481 return Promise . reject ( error ) ;
6582 }
6683}
0 commit comments