feat: update resource upload and download
src/routes-simple/resources/chunk.ts (new file, 178 lines)
@@ -0,0 +1,178 @@
import { useFileStore } from '@kevisual/use-config/file-store';
import { checkAuth, error, router, writeEvents, getKey, getTaskId } from '../router.ts';
import { IncomingForm } from 'formidable';
import { app, minioClient } from '@/app.ts';

import { bucketName } from '@/modules/minio.ts';
import { getContentType } from '@/utils/get-content-type.ts';
import { User } from '@/models/user.ts';
import fs from 'fs';
import { ConfigModel } from '@/routes/config/models/model.ts';
import { validateDirectory } from './util.ts';

const cacheFilePath = useFileStore('cache-file', { needExists: true });

router.get('/api/s1/resources/upload/chunk', async (req, res) => {
  res.writeHead(200, { 'Content-Type': 'text/plain' });
  res.end('Upload API is ready');
});

// /api/s1/resources/upload/chunk
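// Chunked upload: each POST carries one chunk as multipart form data (field `file`)
// plus `chunkIndex`/`totalChunks` and the target `appKey`/`version` (optionally
// `username` and `directory`). Chunks are appended, in arrival order, to a per-task
// cache file named after the taskId; when the last chunk arrives the assembled file
// is uploaded to MinIO and the app is notified via `detect-version-list`.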
router.post('/api/s1/resources/upload/chunk', async (req, res) => {
  const { tokenUser, token } = await checkAuth(req, res);
  if (!tokenUser) return;
  const url = new URL(req.url || '', 'http://localhost');
  const share = !!url.searchParams.get('public');
  // Parse the multipart/form-data body with formidable
  const form = new IncomingForm({
    multiples: false, // single-file upload per request
    uploadDir: cacheFilePath, // directory where uploaded files are stored
    allowEmptyFiles: true, // allow empty files
    minFileSize: 0, // minimum file size
    createDirsFromUploads: false, // create directories from the uploaded folder structure
    keepExtensions: true, // keep file extensions
    hashAlgorithm: 'md5', // file hash algorithm
  });
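  // formidable writes each incoming chunk to a temporary file under cacheFilePath;
  // the chunk is then appended to a per-task file derived from the taskId and the
  // temporary file is removed.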
  const taskId = getTaskId(req);
  const finalFilePath = `${cacheFilePath}/${taskId}`; // all chunks of this task are appended here
  if (!taskId) {
    res.end(error('taskId is required'));
    return;
  }
  // Parse the uploaded chunk
  form.parse(req, async (err, fields, files) => {
    const file = Array.isArray(files.file) ? files.file[0] : files.file;
    const clearFiles = () => {
      // remove the temp chunk file and the partially assembled file, if present
      if (file && fs.existsSync(file.filepath)) {
        fs.unlinkSync(file.filepath);
      }
      if (fs.existsSync(finalFilePath)) {
        fs.unlinkSync(finalFilePath);
      }
    };

    if (err) {
      res.end(error(`Upload error: ${err.message}`));
      clearFiles();
      return;
    }

    // Handle chunked upload logic here
    let { chunkIndex, totalChunks, appKey, version, username, directory } = getKey(fields, [
      'chunkIndex',
      'totalChunks',
      'appKey',
      'version',
      'username',
      'directory',
    ]);
    if (!file || !chunkIndex || !totalChunks) {
      res.end(error('file, chunkIndex and totalChunks are required'));
      clearFiles();
      return;
    }
    const tempPath = file.filepath;
    const relativePath = file.originalFilename;
    // Append this chunk to the final file (chunks must arrive in order)

    const writeStream = fs.createWriteStream(finalFilePath, { flags: 'a' });
    const readStream = fs.createReadStream(tempPath);
    readStream.pipe(writeStream);

    writeStream.on('finish', async () => {
      fs.unlinkSync(tempPath); // delete the temporary chunk file

      // Write event for progress tracking
      const progress = ((parseInt(chunkIndex) + 1) / parseInt(totalChunks)) * 100;
      writeEvents(req, {
        progress,
        message: `Upload progress: ${progress}%`,
      });

      if (parseInt(chunkIndex) + 1 === parseInt(totalChunks)) {
        let uid = tokenUser.id;
        if (username) {
          const user = await User.getUserByToken(token);
          const has = await user.hasUser(username, true);
          if (!has) {
            res.end(error('username is not found'));
            clearFiles();
            return;
          }
          const _user = await User.findOne({ where: { username } });
          uid = _user?.id || '';
        }
        if (!appKey || !version) {
          // fall back to the user's stored upload config when appKey/version are not supplied
          const config = await ConfigModel.getUploadConfig({ uid });
          if (config) {
            appKey = config.config?.data?.key || '';
            version = config.config?.data?.version || '';
          }
        }
        if (!appKey || !version) {
          res.end(error('appKey or version is not found, please check the upload config.'));
          clearFiles();
          return;
        }
        const { code, message } = validateDirectory(directory);
        if (code !== 200) {
          res.end(error(message));
          clearFiles();
          return;
        }
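        // Object key layout: {username}/{appKey}/{version}[/{directory}]/{originalFilename};
        // the file is later reachable through the share route (downloadBase below).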
        const minioPath = `${username || tokenUser.username}/${appKey}/${version}${directory ? `/${directory}` : ''}/${relativePath}`;
        const metadata: any = {};
        if (share) {
          metadata.share = 'public';
        }
        // All chunks uploaded, now upload to MinIO
        await minioClient.fPutObject(bucketName, minioPath, finalFilePath, {
          'Content-Type': getContentType(relativePath),
          'app-source': 'user-app',
          'Cache-Control': relativePath.endsWith('.html') ? 'no-cache' : 'max-age=31536000, immutable',
          ...metadata,
        });

        // Clean up the final file
        fs.unlinkSync(finalFilePath);

        // Notify the app
        const r = await app.call({
          path: 'app',
          key: 'detect-version-list',
          payload: {
            token: token,
            data: {
              appKey,
              version,
              username,
            },
          },
        });
        const downloadBase = '/api/s1/share';
        const data: any = {
          code: r.code,
          data: {
            app: r.body,
            resource: `${downloadBase}/${minioPath}`,
          },
        };
        if (r.message) {
          data.message = r.message;
        }
        console.log('upload data', data);
        res.writeHead(200, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify(data));
      } else {
        res.writeHead(200, { 'Content-Type': 'application/json' });
        res.end(
          JSON.stringify({
            message: 'Chunk uploaded successfully',
            data: {
              chunkIndex,
              totalChunks,
            },
          }),
        );
      }
    });
  });
});
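For context, a minimal client-side sketch of how this endpoint might be driven (not part of the commit). The field names mirror the handler above, but the 5 MB chunk size, the Authorization header, and the `taskId` query parameter are assumptions; see checkAuth and getTaskId in router.ts for the actual transport. Chunks are sent sequentially because the server appends them in arrival order.

// Hypothetical client-side helper (TypeScript, browser); assumptions noted above.
async function uploadInChunks(file: File, opts: { token: string; taskId: string; appKey: string; version: string; directory?: string }) {
  const chunkSize = 5 * 1024 * 1024; // assumed chunk size: 5 MB
  const totalChunks = Math.ceil(file.size / chunkSize);
  for (let chunkIndex = 0; chunkIndex < totalChunks; chunkIndex++) {
    const chunk = file.slice(chunkIndex * chunkSize, (chunkIndex + 1) * chunkSize);
    const form = new FormData();
    form.append('file', chunk, file.name); // originalFilename becomes the object name
    form.append('chunkIndex', String(chunkIndex));
    form.append('totalChunks', String(totalChunks));
    form.append('appKey', opts.appKey);
    form.append('version', opts.version);
    if (opts.directory) form.append('directory', opts.directory);
    // taskId via query parameter is an assumption; getTaskId(req) defines the real contract
    const res = await fetch(`/api/s1/resources/upload/chunk?taskId=${opts.taskId}`, {
      method: 'POST',
      headers: { Authorization: `Bearer ${opts.token}` }, // token transport is an assumption
      body: form,
    });
    if (!res.ok) throw new Error(`chunk ${chunkIndex} upload failed: ${res.status}`);
  }
}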