mirror of https://github.com/wgh136/nysoure.git

Calculate md5 checksum when finishing uploading.
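In short: the md5 field moves out of the upload-init request, and the checksum is instead computed on the client when the upload finishes and passed to the finish endpoint as a query parameter, where the server re-hashes the assembled file and compares. Below is a minimal sketch of the new finish step, assuming the spark-md5 and axios packages; the one-shot hash shown here is only suitable for small files, whereas the commit's own calculateMd5 (further down) streams 4 MiB chunks.

import axios from "axios";
import SparkMD5 from "spark-md5";

// Sketch only: hash the whole file in one shot and send the digest to the
// finish endpoint as a query parameter. The server recomputes the MD5 of the
// assembled file and rejects the upload if the digests differ.
async function finishWithChecksum(apiBaseUrl: string, fileId: number, file: File) {
    const md5 = SparkMD5.ArrayBuffer.hash(await file.arrayBuffer());
    const response = await axios.post(`${apiBaseUrl}/files/upload/finish/${fileId}?md5=${md5}`);
    return response.data;
}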
@@ -520,7 +520,7 @@ class Network {
     }
 
     async initFileUpload(filename: string, description: string, fileSize: number,
-        resourceId: number, storageId: number, md5: string): Promise<Response<UploadingFile>> {
+        resourceId: number, storageId: number): Promise<Response<UploadingFile>> {
         try {
             const response = await axios.post(`${this.apiBaseUrl}/files/upload/init`, {
                 filename,
@@ -528,7 +528,6 @@ class Network {
                 file_size: fileSize,
                 resource_id: resourceId,
                 storage_id: storageId,
-                md5
             });
             return response.data;
         } catch (e: any) {
@@ -561,9 +560,9 @@ class Network {
         }
     }
 
-    async finishFileUpload(fileId: number): Promise<Response<RFile>> {
+    async finishFileUpload(fileId: number, md5: string): Promise<Response<RFile>> {
         try {
-            const response = await axios.post(`${this.apiBaseUrl}/files/upload/finish/${fileId}`);
+            const response = await axios.post(`${this.apiBaseUrl}/files/upload/finish/${fileId}?md5=${md5}`);
             return response.data;
         } catch (e: any) {
             console.error(e);
@@ -58,6 +58,33 @@ export class UploadingTask extends Listenable {
         this.onFinished = onFinished;
     }
 
+    async calculateMd5(file: File): Promise<string> {
+        return new Promise((resolve, reject) => {
+            const reader = new FileReader();
+            const spark = new SparkMD5.ArrayBuffer();
+            const chunkSize = 4 * 1024 * 1024;
+            let offset = 0;
+            reader.onload = (e) => {
+                spark.append(e.target!.result as ArrayBuffer);
+                offset += chunkSize;
+                if (offset < file.size) {
+                    readSlice(offset);
+                } else {
+                    resolve(spark.end());
+                }
+            };
+            reader.onerror = (e) => {
+                reject(e);
+            };
+            const readSlice = (o: number) => {
+                const end = o + chunkSize >= file.size ? file.size : o + chunkSize;
+                const slice = file.slice(o, end);
+                reader.readAsArrayBuffer(slice);
+            };
+            readSlice(0);
+        });
+    }
+
     async upload() {
         let index = 0;
         while (index < this.blocks.length) {
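The new calculateMd5 reads the file in 4 MiB slices via FileReader and feeds each slice into an incremental SparkMD5.ArrayBuffer hasher, so large files never need to be held in memory at once. A small usage sketch follows; only calculateMd5 itself comes from this commit, the task and file values are assumed, and a structural type stands in for UploadingTask.

// `task` stands in for a constructed UploadingTask and `file` for a
// user-selected File. spark-md5's end() returns a 32-character lowercase hex
// digest, the same format hex.EncodeToString produces on the Go side.
async function logDigest(task: { calculateMd5(f: File): Promise<string> }, file: File) {
    const digest = await task.calculateMd5(file);
    console.log(digest); // e.g. "9e107d9d372bb6826bd81d3542a419d6"
}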
@@ -108,7 +135,17 @@ export class UploadingTask extends Listenable {
         if (this.status !== UploadingStatus.UPLOADING) {
             return;
         }
-        const res = await network.finishFileUpload(this.id);
+        let md5 = "";
+        try {
+            md5 = await this.calculateMd5(this.file);
+        }
+        catch (e) {
+            this.status = UploadingStatus.ERROR;
+            this.errorMessage = "Failed to calculate md5";
+            this.notifyListeners();
+            return;
+        }
+        const res = await network.finishFileUpload(this.id, md5);
         if (res.success) {
             this.status = UploadingStatus.DONE;
             this.notifyListeners();
@@ -150,43 +187,12 @@ class UploadingManager extends Listenable {
     }
 
     async addTask(file: File, resourceID: number, storageID: number, description: string, onFinished: () => void): Promise<Response<void>> {
-        // Calculate hash of the file
-        async function calculateMd5(file: File): Promise<string> {
-            return new Promise((resolve, reject) => {
-                const reader = new FileReader();
-                const spark = new SparkMD5.ArrayBuffer();
-                const chunkSize = 4 * 1024 * 1024;
-                let offset = 0;
-                reader.onload = (e) => {
-                    spark.append(e.target!.result as ArrayBuffer);
-                    offset += chunkSize;
-                    if (offset < file.size) {
-                        readSlice(offset);
-                    } else {
-                        resolve(spark.end());
-                    }
-                };
-                reader.onerror = (e) => {
-                    reject(e);
-                };
-                const readSlice = (o: number) => {
-                    const end = o + chunkSize >= file.size ? file.size : o + chunkSize;
-                    const slice = file.slice(o, end);
-                    reader.readAsArrayBuffer(slice);
-                };
-                readSlice(0);
-            });
-        }
-
-        const md5 = await calculateMd5(file);
-
         const res = await network.initFileUpload(
             file.name,
             description,
             file.size,
             resourceID,
             storageID,
-            md5,
         )
         if (!res.success) {
             return {
@@ -34,7 +34,6 @@ func initUpload(c fiber.Ctx) error {
         FileSize int64 `json:"file_size"`
         ResourceID uint `json:"resource_id"`
         StorageID uint `json:"storage_id"`
-        Md5 string `json:"md5"`
     }
 
     var req InitUploadRequest
@@ -42,7 +41,7 @@ func initUpload(c fiber.Ctx) error {
         return model.NewRequestError("Invalid request parameters")
     }
 
-    result, err := service.CreateUploadingFile(uid, req.Filename, req.Description, req.FileSize, req.ResourceID, req.StorageID, req.Md5)
+    result, err := service.CreateUploadingFile(uid, req.Filename, req.Description, req.FileSize, req.ResourceID, req.StorageID)
    if err != nil {
        return err
    }
@@ -86,7 +85,12 @@ func finishUpload(c fiber.Ctx) error {
        return model.NewRequestError("Invalid file ID")
    }
 
-    result, err := service.FinishUploadingFile(uid, uint(id))
+    md5 := c.Query("md5")
+    if md5 == "" {
+        return model.NewRequestError("MD5 checksum is required")
+    }
+
+    result, err := service.FinishUploadingFile(uid, uint(id), md5)
    if err != nil {
        return err
    }
@@ -10,7 +10,7 @@ import (
     "gorm.io/gorm/clause"
 )
 
-func CreateUploadingFile(filename string, description string, fileSize int64, blockSize int64, tempPath string, resourceID, storageID, userID uint, md5 string) (*model.UploadingFile, error) {
+func CreateUploadingFile(filename string, description string, fileSize int64, blockSize int64, tempPath string, resourceID, storageID, userID uint) (*model.UploadingFile, error) {
     blocksCount := (fileSize + blockSize - 1) / blockSize
     uf := &model.UploadingFile{
         Filename: filename,
@@ -22,7 +22,6 @@ func CreateUploadingFile(filename string, description string, fileSize int64, bl
         TargetResourceID: resourceID,
         TargetStorageID: storageID,
         UserID: userID,
-        Md5: md5,
     }
     if err := db.Create(uf).Error; err != nil {
         return nil, err
@@ -40,7 +39,6 @@ func GetUploadingFile(id uint) (*model.UploadingFile, error) {
 
 func UpdateUploadingBlock(id uint, blockIndex int) error {
     return db.Transaction(func(tx *gorm.DB) error {
-        // Fetch the record with a FOR UPDATE lock
         uf := &model.UploadingFile{}
         if err := tx.Clauses(clause.Locking{Strength: "UPDATE"}).Where("id = ?", id).First(uf).Error; err != nil {
             return err
@@ -52,7 +50,6 @@ func UpdateUploadingBlock(id uint, blockIndex int) error {
 
         uf.Blocks[blockIndex] = true
 
-        // Persist the change immediately inside the transaction
         return tx.Save(uf).Error
     })
 }
@@ -21,7 +21,6 @@ type UploadingFile struct {
     TempPath string
     Resource Resource `gorm:"foreignKey:TargetResourceID"`
     Storage Storage `gorm:"foreignKey:TargetStorageID"`
-    Md5 string
 }
 
 func (uf *UploadingFile) BlocksCount() int {
@@ -86,7 +85,6 @@ type UploadingFileView struct {
     BlocksCount int `json:"blocksCount"`
     StorageID uint `json:"storageId"`
     ResourceID uint `json:"resourceId"`
-    Md5 string `json:"md5"`
 }
 
 func (uf *UploadingFile) ToView() *UploadingFileView {
@@ -99,6 +97,5 @@ func (uf *UploadingFile) ToView() *UploadingFileView {
         BlocksCount: uf.BlocksCount(),
         StorageID: uf.TargetStorageID,
         ResourceID: uf.TargetResourceID,
-        Md5: uf.Md5,
     }
 }
@@ -84,13 +84,10 @@ func init() {
     }()
 }
 
-func CreateUploadingFile(uid uint, filename string, description string, fileSize int64, resourceID, storageID uint, md5Str string) (*model.UploadingFileView, error) {
+func CreateUploadingFile(uid uint, filename string, description string, fileSize int64, resourceID, storageID uint) (*model.UploadingFileView, error) {
     if filename == "" {
         return nil, model.NewRequestError("filename is empty")
     }
-    if md5Str == "" {
-        return nil, model.NewRequestError("md5 is empty")
-    }
     if len([]rune(filename)) > 128 {
         return nil, model.NewRequestError("filename is too long")
     }
@@ -118,7 +115,7 @@ func CreateUploadingFile(uid uint, filename string, description string, fileSize
         log.Error("failed to create temp dir: ", err)
         return nil, model.NewInternalServerError("failed to create temp dir")
     }
-    uploadingFile, err := dao.CreateUploadingFile(filename, description, fileSize, blockSize, tempPath, resourceID, storageID, uid, md5Str)
+    uploadingFile, err := dao.CreateUploadingFile(filename, description, fileSize, blockSize, tempPath, resourceID, storageID, uid)
     if err != nil {
         log.Error("failed to create uploading file: ", err)
         _ = os.Remove(tempPath)
@@ -165,7 +162,7 @@ func UploadBlock(uid uint, fid uint, index int, data []byte) error {
     return nil
 }
 
-func FinishUploadingFile(uid uint, fid uint) (*model.FileView, error) {
+func FinishUploadingFile(uid uint, fid uint, md5Str string) (*model.FileView, error) {
     uploadingFile, err := dao.GetUploadingFile(fid)
     if err != nil {
         log.Error("failed to get uploading file: ", err)
@@ -234,7 +231,7 @@ func FinishUploadingFile(uid uint, fid uint) (*model.FileView, error) {
 
     sum := h.Sum(nil)
     sumStr := hex.EncodeToString(sum)
-    if sumStr != uploadingFile.Md5 {
+    if sumStr != md5Str {
         _ = os.Remove(resultFilePath)
         return nil, model.NewRequestError("md5 checksum is not correct")
     }
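One detail worth noting: the check in FinishUploadingFile is a plain string comparison, which works because both sides emit lowercase hex digests — spark-md5's end() on the client and hex.EncodeToString(h.Sum(nil)) on the server.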