refactor file upload process to use MD5 instead of SHA-1 for hash calculation and update related API and model structures

nyne
2025-05-16 16:55:45 +08:00
parent 313e696248
commit 7bfd33c714
7 changed files with 61 additions and 139 deletions

View File

@@ -8,7 +8,6 @@
"name": "frontend",
"version": "0.0.0",
"dependencies": {
"@aws-crypto/sha1-browser": "^5.2.0",
"@marsidev/react-turnstile": "^1.1.0",
"@tailwindcss/vite": "^4.1.5",
"axios": "^1.9.0",
@@ -21,12 +20,14 @@
"react-icons": "^5.5.0",
"react-markdown": "^10.1.0",
"react-router": "^7.5.3",
"spark-md5": "^3.0.2",
"tailwindcss": "^4.1.5"
},
"devDependencies": {
"@eslint/js": "^9.22.0",
"@types/react": "^19.0.10",
"@types/react-dom": "^19.0.4",
"@types/spark-md5": "^3.0.5",
"@vitejs/plugin-react": "^4.3.4",
"daisyui": "^5.0.35",
"eslint": "^9.22.0",
@@ -52,60 +53,6 @@
"node": ">=6.0.0"
}
},
"node_modules/@aws-crypto/sha1-browser": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/sha1-browser/-/sha1-browser-5.2.0.tgz",
"integrity": "sha512-OH6lveCFfcDjX4dbAvCFSYUjJZjDr/3XJ3xHtjn3Oj5b9RjojQo8npoLeA/bNwkOkrSQ0wgrHzXk4tDRxGKJeg==",
"dependencies": {
"@aws-crypto/supports-web-crypto": "^5.2.0",
"@aws-crypto/util": "^5.2.0",
"@aws-sdk/types": "^3.222.0",
"@aws-sdk/util-locate-window": "^3.0.0",
"@smithy/util-utf8": "^2.0.0",
"tslib": "^2.6.2"
}
},
"node_modules/@aws-crypto/supports-web-crypto": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz",
"integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==",
"dependencies": {
"tslib": "^2.6.2"
}
},
"node_modules/@aws-crypto/util": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz",
"integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==",
"dependencies": {
"@aws-sdk/types": "^3.222.0",
"@smithy/util-utf8": "^2.0.0",
"tslib": "^2.6.2"
}
},
"node_modules/@aws-sdk/types": {
"version": "3.804.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.804.0.tgz",
"integrity": "sha512-A9qnsy9zQ8G89vrPPlNG9d1d8QcKRGqJKqwyGgS0dclJpwy6d1EWgQLIolKPl6vcFpLoe6avLOLxr+h8ur5wpg==",
"dependencies": {
"@smithy/types": "^4.2.0",
"tslib": "^2.6.2"
},
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/@aws-sdk/util-locate-window": {
"version": "3.804.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.804.0.tgz",
"integrity": "sha512-zVoRfpmBVPodYlnMjgVjfGoEZagyRF5IPn3Uo6ZvOZp24chnW/FRstH7ESDHDDRga4z3V+ElUQHKpFDXWyBW5A==",
"dependencies": {
"tslib": "^2.6.2"
},
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/@babel/code-frame": {
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
@@ -1502,52 +1449,6 @@
"win32"
]
},
"node_modules/@smithy/is-array-buffer": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz",
"integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==",
"dependencies": {
"tslib": "^2.6.2"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/@smithy/types": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.2.0.tgz",
"integrity": "sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg==",
"dependencies": {
"tslib": "^2.6.2"
},
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/@smithy/util-buffer-from": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz",
"integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==",
"dependencies": {
"@smithy/is-array-buffer": "^2.2.0",
"tslib": "^2.6.2"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/@smithy/util-utf8": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz",
"integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==",
"dependencies": {
"@smithy/util-buffer-from": "^2.2.0",
"tslib": "^2.6.2"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/@tailwindcss/node": {
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.5.tgz",
@@ -1921,6 +1822,12 @@
"@types/react": "^19.0.0"
}
},
"node_modules/@types/spark-md5": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/@types/spark-md5/-/spark-md5-3.0.5.tgz",
"integrity": "sha512-lWf05dnD42DLVKQJZrDHtWFidcLrHuip01CtnC2/S6AMhX4t9ZlEUj4iuRlAnts0PQk7KESOqKxeGE/b6sIPGg==",
"dev": true
},
"node_modules/@types/unist": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
@@ -5816,6 +5723,11 @@
"url": "https://github.com/sponsors/wooorm"
}
},
"node_modules/spark-md5": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/spark-md5/-/spark-md5-3.0.2.tgz",
"integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw=="
},
"node_modules/statuses": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
@@ -6007,7 +5919,8 @@
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
"license": "0BSD"
"license": "0BSD",
"optional": true
},
"node_modules/turbo-stream": {
"version": "2.4.0",

View File

@@ -10,7 +10,6 @@
"preview": "vite preview"
},
"dependencies": {
"@aws-crypto/sha1-browser": "^5.2.0",
"@marsidev/react-turnstile": "^1.1.0",
"@tailwindcss/vite": "^4.1.5",
"axios": "^1.9.0",
@@ -23,12 +22,14 @@
"react-icons": "^5.5.0",
"react-markdown": "^10.1.0",
"react-router": "^7.5.3",
"spark-md5": "^3.0.2",
"tailwindcss": "^4.1.5"
},
"devDependencies": {
"@eslint/js": "^9.22.0",
"@types/react": "^19.0.10",
"@types/react-dom": "^19.0.4",
"@types/spark-md5": "^3.0.5",
"@vitejs/plugin-react": "^4.3.4",
"daisyui": "^5.0.35",
"eslint": "^9.22.0",

View File

@@ -520,7 +520,7 @@ class Network {
}
async initFileUpload(filename: string, description: string, fileSize: number,
-resourceId: number, storageId: number, sha1: string): Promise<Response<UploadingFile>> {
+resourceId: number, storageId: number, md5: string): Promise<Response<UploadingFile>> {
try {
const response = await axios.post(`${this.apiBaseUrl}/files/upload/init`, {
filename,
@@ -528,7 +528,7 @@ class Network {
file_size: fileSize,
resource_id: resourceId,
storage_id: storageId,
-sha1
+md5
});
return response.data;
} catch (e: any) {
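
For reference, after this change the body sent to the /files/upload/init endpoint carries an md5 field instead of sha1. Below is a minimal TypeScript sketch of that contract, with field names taken from the axios call above and the server's JSON struct tags further down; the interface name itself is illustrative and not part of the codebase.

// Request body of POST /files/upload/init after the SHA-1 -> MD5 switch.
// Field names mirror the Go handler's `json:"..."` tags; the interface name is illustrative.
interface InitUploadRequest {
    filename: string;
    description: string;
    file_size: number;    // size of the original file in bytes
    resource_id: number;
    storage_id: number;
    md5: string;          // hex MD5 digest of the whole file (32 characters)
}

Both spark-md5's end() and Go's hex.EncodeToString emit lowercase hex, so the later digest comparison on the server needs no case normalisation.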

View File

@@ -1,4 +1,4 @@
-import { Sha1 } from "@aws-crypto/sha1-browser";
+import SparkMD5 from "spark-md5";
import { Response } from "./models.ts";
import { network } from "./network.ts";
@@ -150,27 +150,35 @@ class UploadingManager extends Listenable {
}
async addTask(file: File, resourceID: number, storageID: number, description: string, onFinished: () => void): Promise<Response<void>> {
-// Calculate SHA-1 hash of the file
-async function calculateSHA1(file: File): Promise<string> {
-    const hash = new Sha1();
+// Calculate hash of the file
+async function calculateMd5(file: File): Promise<string> {
+    return new Promise((resolve, reject) => {
+        const reader = new FileReader();
+        const spark = new SparkMD5.ArrayBuffer();
        const chunkSize = 4 * 1024 * 1024;
-    const totalChunks = Math.ceil(file.size / chunkSize);
-    for (let i = 0; i < totalChunks; i++) {
-        const start = i * chunkSize;
-        const end = Math.min(start + chunkSize, file.size);
-        const chunk = file.slice(start, end);
-        const arrayBuffer = await chunk.arrayBuffer();
-        hash.update(arrayBuffer);
+        let offset = 0;
+        reader.onload = (e) => {
+            spark.append(e.target!.result as ArrayBuffer);
+            offset += chunkSize;
+            if (offset < file.size) {
+                readSlice(offset);
+            } else {
+                resolve(spark.end());
+            }
-    const hashBuffer = await hash.digest();
-    const hashArray = new Uint8Array(hashBuffer);
-    const hashHex = Array.from(hashArray)
-        .map(byte => byte.toString(16).padStart(2, "0"))
-        .join("");
-    return hashHex;
+        };
+        reader.onerror = (e) => {
+            reject(e);
+        };
+        const readSlice = (o: number) => {
+            const end = o + chunkSize >= file.size ? file.size : o + chunkSize;
+            const slice = file.slice(o, end);
+            reader.readAsArrayBuffer(slice);
+        };
+        readSlice(0);
+    });
}
-const sha1 = await calculateSHA1(file);
+const md5 = await calculateMd5(file);
const res = await network.initFileUpload(
file.name,
@@ -178,7 +186,7 @@ class UploadingManager extends Listenable {
file.size,
resourceID,
storageID,
-sha1,
+md5,
)
if (!res.success) {
return {
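
A quick way to sanity-check the chunked FileReader/SparkMD5 logic above is to compare an incremental digest against a known MD5 test vector. A minimal sketch follows, assuming only that spark-md5 is installed; the test data and chunk boundaries are illustrative and not part of the commit.

import SparkMD5 from "spark-md5";

// Incremental hashing over chunks must equal hashing the whole input in one go.
const buf = new ArrayBuffer(3);
new Uint8Array(buf).set([0x61, 0x62, 0x63]); // the ASCII bytes of "abc"

const spark = new SparkMD5.ArrayBuffer();
spark.append(buf.slice(0, 2)); // feed "ab"
spark.append(buf.slice(2));    // then "c", mimicking chunked reads of a large File
const incremental = spark.end();

console.log(incremental === "900150983cd24fb0d6963f7d28e17f72"); // true: the well-known MD5 of "abc"
console.log(incremental === SparkMD5.hash("abc"));               // true: matches one-shot hashing

The same property is what lets the server below recompute the digest block by block and compare it with the value supplied at init time.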

View File

@@ -34,7 +34,7 @@ func initUpload(c fiber.Ctx) error {
FileSize int64 `json:"file_size"`
ResourceID uint `json:"resource_id"`
StorageID uint `json:"storage_id"`
-Sha1 string `json:"sha1"`
+Md5 string `json:"md5"`
}
var req InitUploadRequest
@@ -42,7 +42,7 @@ func initUpload(c fiber.Ctx) error {
return model.NewRequestError("Invalid request parameters")
}
-result, err := service.CreateUploadingFile(uid, req.Filename, req.Description, req.FileSize, req.ResourceID, req.StorageID, req.Sha1)
+result, err := service.CreateUploadingFile(uid, req.Filename, req.Description, req.FileSize, req.ResourceID, req.StorageID, req.Md5)
if err != nil {
return err
}

View File

@@ -21,7 +21,7 @@ type UploadingFile struct {
TempPath string
Resource Resource `gorm:"foreignKey:TargetResourceID"`
Storage Storage `gorm:"foreignKey:TargetStorageID"`
-Sha1 string
+Md5 string
}
func (uf *UploadingFile) BlocksCount() int {
@@ -86,7 +86,7 @@ type UploadingFileView struct {
BlocksCount int `json:"blocksCount"`
StorageID uint `json:"storageId"`
ResourceID uint `json:"resourceId"`
-Sha1 string `json:"sha1"`
+Md5 string `json:"md5"`
}
func (uf *UploadingFile) ToView() *UploadingFileView {
@@ -99,6 +99,6 @@ func (uf *UploadingFile) ToView() *UploadingFileView {
BlocksCount: uf.BlocksCount(),
StorageID: uf.TargetStorageID,
ResourceID: uf.TargetResourceID,
-Sha1: uf.Sha1,
+Md5: uf.Md5,
}
}

View File

@@ -1,7 +1,7 @@
package service
import (
"crypto/sha1"
"crypto/md5"
"encoding/hex"
"nysoure/server/config"
"nysoure/server/dao"
@@ -84,12 +84,12 @@ func init() {
}()
}
-func CreateUploadingFile(uid uint, filename string, description string, fileSize int64, resourceID, storageID uint, sha1Str string) (*model.UploadingFileView, error) {
+func CreateUploadingFile(uid uint, filename string, description string, fileSize int64, resourceID, storageID uint, md5Str string) (*model.UploadingFileView, error) {
if filename == "" {
return nil, model.NewRequestError("filename is empty")
}
if sha1Str == "" {
return nil, model.NewRequestError("sha1 is empty")
if md5Str == "" {
return nil, model.NewRequestError("md5 is empty")
}
if len([]rune(filename)) > 128 {
return nil, model.NewRequestError("filename is too long")
@@ -118,7 +118,7 @@ func CreateUploadingFile(uid uint, filename string, description string, fileSize
log.Error("failed to create temp dir: ", err)
return nil, model.NewInternalServerError("failed to create temp dir")
}
-uploadingFile, err := dao.CreateUploadingFile(filename, description, fileSize, blockSize, tempPath, resourceID, storageID, uid, sha1Str)
+uploadingFile, err := dao.CreateUploadingFile(filename, description, fileSize, blockSize, tempPath, resourceID, storageID, uid, md5Str)
if err != nil {
log.Error("failed to create uploading file: ", err)
_ = os.Remove(tempPath)
@@ -202,7 +202,7 @@ func FinishUploadingFile(uid uint, fid uint) (*model.FileView, error) {
return nil, model.NewInternalServerError("failed to finish uploading file. please re-upload")
}
-h := sha1.New()
+h := md5.New()
for i := 0; i < uploadingFile.BlocksCount(); i++ {
blockPath := filepath.Join(uploadingFile.TempPath, strconv.Itoa(i))
@@ -234,9 +234,9 @@ func FinishUploadingFile(uid uint, fid uint) (*model.FileView, error) {
sum := h.Sum(nil)
sumStr := hex.EncodeToString(sum)
-if sumStr != uploadingFile.Sha1 {
+if sumStr != uploadingFile.Md5 {
_ = os.Remove(resultFilePath)
return nil, model.NewRequestError("sha1 checksum is not correct")
return nil, model.NewRequestError("md5 checksum is not correct")
}
s, err := dao.GetStorage(uploadingFile.TargetStorageID)