diff --git a/frontend/src/network/network.ts b/frontend/src/network/network.ts
index 37ea139..8045a89 100644
--- a/frontend/src/network/network.ts
+++ b/frontend/src/network/network.ts
@@ -520,7 +520,7 @@ class Network {
     }

     async initFileUpload(filename: string, description: string, fileSize: number,
-        resourceId: number, storageId: number, md5: string): Promise> {
+        resourceId: number, storageId: number): Promise> {
         try {
             const response = await axios.post(`${this.apiBaseUrl}/files/upload/init`, {
                 filename,
@@ -528,7 +528,6 @@ class Network {
                 file_size: fileSize,
                 resource_id: resourceId,
                 storage_id: storageId,
-                md5
             });
             return response.data;
         } catch (e: any) {
@@ -561,9 +560,9 @@ class Network {
         }
     }

-    async finishFileUpload(fileId: number): Promise> {
+    async finishFileUpload(fileId: number, md5: string): Promise> {
         try {
-            const response = await axios.post(`${this.apiBaseUrl}/files/upload/finish/${fileId}`);
+            const response = await axios.post(`${this.apiBaseUrl}/files/upload/finish/${fileId}?md5=${md5}`);
             return response.data;
         } catch (e: any) {
             console.error(e);
diff --git a/frontend/src/network/uploading.ts b/frontend/src/network/uploading.ts
index f2817a1..cebeb75 100644
--- a/frontend/src/network/uploading.ts
+++ b/frontend/src/network/uploading.ts
@@ -58,6 +58,33 @@ export class UploadingTask extends Listenable {
         this.onFinished = onFinished;
     }

+    async calculateMd5(file: File): Promise<string> {
+        return new Promise((resolve, reject) => {
+            const reader = new FileReader();
+            const spark = new SparkMD5.ArrayBuffer();
+            const chunkSize = 4 * 1024 * 1024;
+            let offset = 0;
+            reader.onload = (e) => {
+                spark.append(e.target!.result as ArrayBuffer);
+                offset += chunkSize;
+                if (offset < file.size) {
+                    readSlice(offset);
+                } else {
+                    resolve(spark.end());
+                }
+            };
+            reader.onerror = (e) => {
+                reject(e);
+            };
+            const readSlice = (o: number) => {
+                const end = o + chunkSize >= file.size ? file.size : o + chunkSize;
+                const slice = file.slice(o, end);
+                reader.readAsArrayBuffer(slice);
+            };
+            readSlice(0);
+        });
+    }
+
     async upload() {
         let index = 0;
         while (index < this.blocks.length) {
@@ -108,7 +135,17 @@ export class UploadingTask extends Listenable {
         if (this.status !== UploadingStatus.UPLOADING) {
             return;
         }
-        const res = await network.finishFileUpload(this.id);
+        let md5 = "";
+        try {
+            md5 = await this.calculateMd5(this.file);
+        }
+        catch (e) {
+            this.status = UploadingStatus.ERROR;
+            this.errorMessage = "Failed to calculate md5";
+            this.notifyListeners();
+            return;
+        }
+        const res = await network.finishFileUpload(this.id, md5);
         if (res.success) {
             this.status = UploadingStatus.DONE;
             this.notifyListeners();
@@ -150,43 +187,12 @@ class UploadingManager extends Listenable {
     }

     async addTask(file: File, resourceID: number, storageID: number, description: string, onFinished: () => void): Promise> {
-        // Calculate hash of the file
-        async function calculateMd5(file: File): Promise<string> {
-            return new Promise((resolve, reject) => {
-                const reader = new FileReader();
-                const spark = new SparkMD5.ArrayBuffer();
-                const chunkSize = 4 * 1024 * 1024;
-                let offset = 0;
-                reader.onload = (e) => {
-                    spark.append(e.target!.result as ArrayBuffer);
-                    offset += chunkSize;
-                    if (offset < file.size) {
-                        readSlice(offset);
-                    } else {
-                        resolve(spark.end());
-                    }
-                };
-                reader.onerror = (e) => {
-                    reject(e);
-                };
-                const readSlice = (o: number) => {
-                    const end = o + chunkSize >= file.size ? file.size : o + chunkSize;
-                    const slice = file.slice(o, end);
-                    reader.readAsArrayBuffer(slice);
-                };
-                readSlice(0);
-            });
-        }
-
-        const md5 = await calculateMd5(file);
-
         const res = await network.initFileUpload(
             file.name,
             description,
             file.size,
             resourceID,
             storageID,
-            md5,
         )
         if (!res.success) {
             return {
diff --git a/server/api/file.go b/server/api/file.go
index 96464a4..a4d5194 100644
--- a/server/api/file.go
+++ b/server/api/file.go
@@ -34,7 +34,6 @@ func initUpload(c fiber.Ctx) error {
         FileSize   int64  `json:"file_size"`
         ResourceID uint   `json:"resource_id"`
         StorageID  uint   `json:"storage_id"`
-        Md5        string `json:"md5"`
     }

     var req InitUploadRequest
@@ -42,7 +41,7 @@ func initUpload(c fiber.Ctx) error {
         return model.NewRequestError("Invalid request parameters")
     }

-    result, err := service.CreateUploadingFile(uid, req.Filename, req.Description, req.FileSize, req.ResourceID, req.StorageID, req.Md5)
+    result, err := service.CreateUploadingFile(uid, req.Filename, req.Description, req.FileSize, req.ResourceID, req.StorageID)
     if err != nil {
         return err
     }
@@ -86,7 +85,12 @@ func finishUpload(c fiber.Ctx) error {
         return model.NewRequestError("Invalid file ID")
     }

-    result, err := service.FinishUploadingFile(uid, uint(id))
+    md5 := c.Query("md5")
+    if md5 == "" {
+        return model.NewRequestError("MD5 checksum is required")
+    }
+
+    result, err := service.FinishUploadingFile(uid, uint(id), md5)
     if err != nil {
         return err
     }
diff --git a/server/dao/file.go b/server/dao/file.go
index 471c8c0..8b0a6cf 100644
--- a/server/dao/file.go
+++ b/server/dao/file.go
@@ -10,7 +10,7 @@ import (
     "gorm.io/gorm/clause"
 )

-func CreateUploadingFile(filename string, description string, fileSize int64, blockSize int64, tempPath string, resourceID, storageID, userID uint, md5 string) (*model.UploadingFile, error) {
+func CreateUploadingFile(filename string, description string, fileSize int64, blockSize int64, tempPath string, resourceID, storageID, userID uint) (*model.UploadingFile, error) {
     blocksCount := (fileSize + blockSize - 1) / blockSize
     uf := &model.UploadingFile{
         Filename:    filename,
@@ -22,7 +22,6 @@ func CreateUploadingFile(filename string, description string, fileSize int64, bl
         TargetResourceID: resourceID,
         TargetStorageID:  storageID,
         UserID:           userID,
-        Md5:              md5,
     }
     if err := db.Create(uf).Error; err != nil {
         return nil, err
@@ -40,7 +39,6 @@ func GetUploadingFile(id uint) (*model.UploadingFile, error) {
 func UpdateUploadingBlock(id uint, blockIndex int) error {
     return db.Transaction(func(tx *gorm.DB) error {
-        // Fetch the record with a FOR UPDATE lock
         uf := &model.UploadingFile{}
         if err := tx.Clauses(clause.Locking{Strength: "UPDATE"}).Where("id = ?", id).First(uf).Error; err != nil {
             return err
         }
@@ -52,7 +50,6 @@ func UpdateUploadingBlock(id uint, blockIndex int) error {

         uf.Blocks[blockIndex] = true

-        // Save the change immediately within the transaction
         return tx.Save(uf).Error
     })
 }
diff --git a/server/model/uploading_file.go b/server/model/uploading_file.go
index 9791e72..b68db7b 100644
--- a/server/model/uploading_file.go
+++ b/server/model/uploading_file.go
@@ -21,7 +21,6 @@ type UploadingFile struct {
     TempPath string
     Resource Resource `gorm:"foreignKey:TargetResourceID"`
     Storage  Storage  `gorm:"foreignKey:TargetStorageID"`
-    Md5      string
 }

 func (uf *UploadingFile) BlocksCount() int {
@@ -86,7 +85,6 @@ type UploadingFileView struct {
     BlocksCount int    `json:"blocksCount"`
     StorageID   uint   `json:"storageId"`
     ResourceID  uint   `json:"resourceId"`
-    Md5         string `json:"md5"`
 }

 func (uf *UploadingFile) ToView() *UploadingFileView {
@@ -99,6 +97,5 @@ func (uf *UploadingFile) ToView() *UploadingFileView {
         BlocksCount: uf.BlocksCount(),
         StorageID:   uf.TargetStorageID,
         ResourceID:  uf.TargetResourceID,
-        Md5:         uf.Md5,
     }
 }
diff --git a/server/service/file.go b/server/service/file.go
index afc5923..287ec22 100644
--- a/server/service/file.go
+++ b/server/service/file.go
@@ -84,13 +84,10 @@ func init() {
     }()
 }

-func CreateUploadingFile(uid uint, filename string, description string, fileSize int64, resourceID, storageID uint, md5Str string) (*model.UploadingFileView, error) {
+func CreateUploadingFile(uid uint, filename string, description string, fileSize int64, resourceID, storageID uint) (*model.UploadingFileView, error) {
     if filename == "" {
         return nil, model.NewRequestError("filename is empty")
     }
-    if md5Str == "" {
-        return nil, model.NewRequestError("md5 is empty")
-    }
     if len([]rune(filename)) > 128 {
         return nil, model.NewRequestError("filename is too long")
     }
@@ -118,7 +115,7 @@ func CreateUploadingFile(uid uint, filename string, description string, fileSize
         log.Error("failed to create temp dir: ", err)
         return nil, model.NewInternalServerError("failed to create temp dir")
     }
-    uploadingFile, err := dao.CreateUploadingFile(filename, description, fileSize, blockSize, tempPath, resourceID, storageID, uid, md5Str)
+    uploadingFile, err := dao.CreateUploadingFile(filename, description, fileSize, blockSize, tempPath, resourceID, storageID, uid)
     if err != nil {
         log.Error("failed to create uploading file: ", err)
         _ = os.Remove(tempPath)
@@ -165,7 +162,7 @@ func UploadBlock(uid uint, fid uint, index int, data []byte) error {
     return nil
 }

-func FinishUploadingFile(uid uint, fid uint) (*model.FileView, error) {
+func FinishUploadingFile(uid uint, fid uint, md5Str string) (*model.FileView, error) {
     uploadingFile, err := dao.GetUploadingFile(fid)
     if err != nil {
         log.Error("failed to get uploading file: ", err)
@@ -234,7 +231,7 @@ func FinishUploadingFile(uid uint, fid uint) (*model.FileView, error) {
     sum := h.Sum(nil)
     sumStr := hex.EncodeToString(sum)

-    if sumStr != uploadingFile.Md5 {
+    if sumStr != md5Str {
         _ = os.Remove(resultFilePath)
         return nil, model.NewRequestError("md5 checksum is not correct")
     }
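
Taken together, these changes move the MD5 handling from upload initialization to upload completion: the client no longer sends a checksum to `/files/upload/init`, computes it in `UploadingTask.calculateMd5` only after the blocks are uploaded, and submits it as an `md5` query parameter to `/files/upload/finish/:id`, where `FinishUploadingFile` compares it against the hash of the assembled file. The sketch below illustrates the resulting call order against the two endpoints shown in the diff. It is a minimal illustration rather than code from this repository: the `apiBaseUrl` value, the `{ success, data }` response envelope, and the returned `data.id` field are assumptions, and the per-block upload step (whose endpoint is not part of this diff) is elided.

```ts
// Minimal sketch of the reworked flow (illustrative only; base URL and response shape are assumed).
import axios from "axios";
import SparkMD5 from "spark-md5";

const apiBaseUrl = "http://localhost:8080/api"; // assumed base URL

// Chunked MD5 over the whole file, in the same spirit as UploadingTask.calculateMd5 above.
async function md5OfFile(file: File): Promise<string> {
    const spark = new SparkMD5.ArrayBuffer();
    const chunkSize = 4 * 1024 * 1024;
    for (let offset = 0; offset < file.size; offset += chunkSize) {
        const end = Math.min(offset + chunkSize, file.size);
        spark.append(await file.slice(offset, end).arrayBuffer());
    }
    return spark.end();
}

async function uploadFlow(file: File, resourceId: number, storageId: number): Promise<void> {
    // 1. Init no longer carries an md5 field.
    const init = await axios.post(`${apiBaseUrl}/files/upload/init`, {
        filename: file.name,
        description: "",
        file_size: file.size,
        resource_id: resourceId,
        storage_id: storageId,
    });
    const fileId = init.data.data.id; // assumed response envelope

    // 2. ...upload each block here (endpoint not shown in this diff)...

    // 3. The checksum is computed only now and sent as a query parameter;
    //    the server recomputes the hash of the assembled file and compares (sumStr != md5Str).
    const md5 = await md5OfFile(file);
    await axios.post(`${apiBaseUrl}/files/upload/finish/${fileId}?md5=${md5}`);
}
```

A practical effect visible in the diff: the init request no longer requires reading the whole file up front, while the integrity check against the assembled file is preserved at finish time.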