import type { NextApiRequest, NextApiResponse } from 'next';
import { type FileCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { getUploadModel } from '@fastgpt/service/common/file/multer';
import { getS3DatasetSource } from '@fastgpt/service/common/s3/sources/dataset';
import { removeS3TTL } from '@fastgpt/service/common/s3/utils';
import { createCollectionAndInsertData } from '@fastgpt/service/core/dataset/collection/controller';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { type CreateCollectionResponse } from '@/global/core/dataset/api';
import { NextAPI } from '@/service/middleware/entry';

1513async function handler ( req : NextApiRequest , res : NextApiResponse < any > ) : CreateCollectionResponse {
16- let filePaths : string [ ] = [ ] ;
17-
1814 try {
19- // Create multer uploader
20- const upload = getUploadModel ( {
21- maxSize : global . feConfigs ?. uploadFileMaxSize
22- } ) ;
23- const { file, data, bucketName } =
24- await upload . getUploadFile < FileCreateDatasetCollectionParams > (
25- req ,
26- res ,
27- BucketNameEnum . dataset
28- ) ;
29- filePaths = [ file . path ] ;
30-
31- if ( ! file || ! bucketName ) {
32- throw new Error ( 'file is empty' ) ;
33- }
15+ const upload = getUploadModel ( { maxSize : global . feConfigs ?. uploadFileMaxSize } ) ;
16+ const { buffer, originalname } = await upload . getFileBuffer ( req , res ) ;
17+ const data = ( ( ) => {
18+ try {
19+ return JSON . parse ( req . body ?. data || '{}' ) ;
20+ } catch ( error ) {
21+ return { } ;
22+ }
23+ } ) ( ) as FileCreateDatasetCollectionParams ;
3424
3525 const { teamId, tmbId, dataset } = await authDataset ( {
3626 req,
@@ -40,29 +30,24 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>): CreateCo
4030 datasetId : data . datasetId
4131 } ) ;
4232
43- const { fileMetadata, collectionMetadata, ...collectionData } = data ;
44- const collectionName = file . originalname ;
33+ const s3DatasetSource = getS3DatasetSource ( ) ;
4534
46- // 1. upload file
47- const fileId = await uploadFile ( {
48- teamId,
49- uid : tmbId ,
50- bucketName,
51- path : file . path ,
52- filename : file . originalname ,
53- contentType : file . mimetype ,
54- metadata : fileMetadata
35+ const fileId = await s3DatasetSource . uploadDatasetFileByBuffer ( {
36+ datasetId : String ( dataset . _id ) ,
37+ buffer,
38+ filename : originalname
5539 } ) ;
5640
57- // 2. delete tmp file
58- removeFilesByPaths ( filePaths ) ;
41+ await removeS3TTL ( { key : fileId , bucketName : 'private' } ) ;
42+
43+ const { fileMetadata, collectionMetadata, ...collectionData } = data ;
5944
6045 // 3. Create collection
6146 const { collectionId, insertResults } = await createCollectionAndInsertData ( {
6247 dataset,
6348 createCollectionParams : {
6449 ...collectionData ,
65- name : collectionName ,
50+ name : originalname ,
6651 teamId,
6752 tmbId,
6853 type : DatasetCollectionTypeEnum . file ,
@@ -76,8 +61,6 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>): CreateCo
7661
7762 return { collectionId, results : insertResults } ;
7863 } catch ( error ) {
79- removeFilesByPaths ( filePaths ) ;
80-
8164 return Promise . reject ( error ) ;
8265 }
8366}