import { Buffer } from 'buffer';
import * as fs from 'fs';
import * as stream from 'stream';

import * as mongodb from 'mongodb';
import * as crypto from 'crypto';
import * as _gm from 'gm';
import * as debug from 'debug';
import fileType = require('file-type');
const prominence = require('prominence');

import DriveFile, { IMetadata, getDriveFileBucket, IDriveFile } from '../../models/drive-file';
import DriveFolder from '../../models/drive-folder';
import { pack } from '../../models/drive-file';
import event, { publishDriveStream } from '../../stream';
import { isLocalUser, IUser, IRemoteUser } from '../../models/user';
import { getDriveFileThumbnailBucket } from '../../models/drive-file-thumbnail';
import genThumbnail from '../../drive/gen-thumbnail';
import delFile from './delete-file';
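
// Use the ImageMagick backend of gm (shells out to ImageMagick instead of GraphicsMagick).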
const gm = _gm.subClass({
	imageMagick: true
});

const log = debug('misskey:drive:add-file');
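
/**
 * Write the file content into the drive-file GridFS bucket.
 * Resolves when the upload stream emits 'finish' (the caller relies on the
 * resolved value being the stored file document).
 */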
const writeChunks = (name: string, readable: stream.Readable, type: string, metadata: any) =>
	getDriveFileBucket()
		.then(bucket => new Promise((resolve, reject) => {
			const writeStream = bucket.openUploadStream(name, { contentType: type, metadata });
			writeStream.once('finish', resolve);
			writeStream.on('error', reject);
			readable.pipe(writeStream);
		}));
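
/**
 * Write a JPEG thumbnail into the drive-file-thumbnail GridFS bucket,
 * tagging it with the id of the original drive file via metadata.originalId.
 */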
const writeThumbnailChunks = (name: string, readable: stream.Readable, originalId: mongodb.ObjectID) =>
	getDriveFileThumbnailBucket()
		.then(bucket => new Promise((resolve, reject) => {
			const writeStream = bucket.openUploadStream(name, {
				contentType: 'image/jpeg',
				metadata: {
					originalId
				}
			});
			writeStream.once('finish', resolve);
			writeStream.on('error', reject);
			readable.pipe(writeStream);
		}));
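
/**
 * Delete the user's oldest drive file (lowest _id, excluding the current avatar
 * and banner) to free up space when a remote user's drive is full.
 */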
async function deleteOldFile(user: IRemoteUser) {
	const oldFile = await DriveFile.findOne({
		// only consider this user's own files, excluding the current avatar and banner
		'metadata.userId': user._id,
		_id: {
			$nin: [user.avatarId, user.bannerId]
		}
	}, {
		sort: {
			_id: 1
		}
	});

	if (oldFile) {
		delFile(oldFile, true);
	}
}
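
// Illustrative usage from a caller (a sketch only — the import path, `uploader` (an IUser)
// and the temporary file path are assumptions, not defined in this module):
//
//     import addFile from './add-file';
//     const file = await addFile(uploader, '/tmp/upload.png', 'photo.png');
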
/**
 * Add file to drive
 *
 * @param user User who wishes to add the file
 * @param path File path
 * @param name File name
 * @param comment Comment
 * @param folderId Folder ID
 * @param force If set to true, forcibly upload the file even if there is a file with the same hash.
 * @param metaOnly If set to true, only the file metadata is stored; the file content itself is not uploaded.
 * @param url URL to store in the file metadata
 * @param uri URI to store in the file metadata
 * @return Created drive file
 */
export default async function(
	user: IUser,
	path: string,
	name: string = null,
	comment: string = null,
	folderId: mongodb.ObjectID = null,
	force: boolean = false,
	metaOnly: boolean = false,
	url: string = null,
	uri: string = null
): Promise<IDriveFile> {
	// Calc md5 hash
	const calcHash = new Promise<string>((res, rej) => {
		const readable = fs.createReadStream(path);
		const hash = crypto.createHash('md5');
		const chunks: Buffer[] = [];
		readable
			.on('error', rej)
			.pipe(hash)
			.on('error', rej)
			// the Hash object is also a transform stream: once the file has been piped
			// through it, it emits the digest as its readable output
			.on('data', chunk => chunks.push(chunk))
			.on('end', () => {
				const buffer = Buffer.concat(chunks);
				res(buffer.toString('hex'));
			});
	});

	// Detect content type
	const detectMime = new Promise<[string, string]>((res, rej) => {
		const readable = fs.createReadStream(path);
		readable
			.on('error', rej)
			.once('data', (buffer: Buffer) => {
				// the first chunk is enough for file-type to sniff the magic bytes
				readable.destroy();
				const type = fileType(buffer);
				if (type) {
					res([type.mime, type.ext]);
				} else {
					// fall back to application/octet-stream if the type could not be identified
					res(['application/octet-stream', null]);
				}
			});
	});

	// Get file size
	const getFileSize = new Promise<number>((res, rej) => {
		fs.stat(path, (err, stats) => {
			if (err) return rej(err);
			res(stats.size);
		});
	});

	const [hash, [mime, ext], size] = await Promise.all([calcHash, detectMime, getFileSize]);

	log(`hash: ${hash}, mime: ${mime}, ext: ${ext}, size: ${size}`);

	// Detect name
	const detectedName = name || (ext ? `untitled.${ext}` : 'untitled');

	if (!force) {
		// Check if there is a file with the same hash
		const match = await DriveFile.findOne({
			md5: hash,
			'metadata.userId': user._id,
			'metadata.deletedAt': { $exists: false }
		});

		if (match) {
			log(`file with the same hash found: ${match._id}`);
			return match;
		}
	}
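
	// Sum the stored length of the user's non-deleted files to get the current drive
	// usage, then either reject the upload (local users) or evict the oldest file
	// (remote users) when adding this file would exceed the drive capacity.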
	//#region Check drive usage
	if (!metaOnly) {
		const usage = await DriveFile
			.aggregate([{
				$match: {
					'metadata.userId': user._id,
					'metadata.deletedAt': { $exists: false }
				}
			}, {
				$project: {
					length: true
				}
			}, {
				$group: {
					_id: null,
					usage: { $sum: '$length' }
				}
			}])
			.then((aggregates: any[]) => {
				if (aggregates.length > 0) {
					return aggregates[0].usage;
				}
				return 0;
			});

		log(`drive usage is ${usage}`);

		// If usage limit exceeded
		if (usage + size > user.driveCapacity) {
			if (isLocalUser(user)) {
				throw 'no-free-space';
			} else {
				// delete the oldest file (excluding the avatar and banner) to free up space
				deleteOldFile(user);
			}
		}
	}
	//#endregion

	const fetchFolder = async () => {
		if (!folderId) {
			return null;
		}

		const driveFolder = await DriveFolder.findOne({
			_id: folderId,
			userId: user._id
		});

		if (driveFolder == null) throw 'folder-not-found';

		return driveFolder;
	};

	const properties: {[key: string]: any} = {};

	let propPromises: Array<Promise<void>> = [];

	const isImage = ['image/jpeg', 'image/gif', 'image/png'].includes(mime);

	if (isImage) {
		// Calc width and height
		const calcWh = async () => {
			log('calculating image width and height...');

			// Calculate width and height
			const g = gm(fs.createReadStream(path), name);
			const size = await prominence(g).size();

			log(`image width and height are calculated: ${size.width}, ${size.height}`);

			properties['width'] = size.width;
			properties['height'] = size.height;
		};

		// Calc average color
		const calcAvg = async () => {
			log('calculating average color...');

			const info = await prominence(gm(fs.createReadStream(path), name)).identify();
			const isTransparent = info ? info['Channel depth'].Alpha != null : false;

			const buffer = await prominence(gm(fs.createReadStream(path), name)
				.setFormat('ppm')
				.resize(1, 1)) // hack: resize the image to 1x1 px and read that single pixel as the average color
				.toBuffer();

			const r = buffer.readUInt8(buffer.length - 3);
			const g = buffer.readUInt8(buffer.length - 2);
			const b = buffer.readUInt8(buffer.length - 1);

			log(`average color is calculated: ${r}, ${g}, ${b}`);

			const value = isTransparent ? [r, g, b, 255] : [r, g, b];

			properties['avgColor'] = value;
		};

		propPromises = [calcWh(), calcAvg()];
	}

	const [folder] = await Promise.all([fetchFolder(), Promise.all(propPromises)]);

	const metadata = {
		userId: user._id,
		_user: {
			host: user.host
		},
		folderId: folder !== null ? folder._id : null,
		comment: comment,
		properties: properties,
		isMetaOnly: metaOnly
	} as IMetadata;

	if (url !== null) {
		metadata.url = url;
	}

	if (uri !== null) {
		metadata.uri = uri;
	}
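
	// For meta-only uploads, insert a stub document (length 0, no stored content);
	// otherwise stream the file content into GridFS via writeChunks.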
	const driveFile = metaOnly
		? await DriveFile.insert({
			length: 0,
			uploadDate: new Date(),
			md5: hash,
			filename: detectedName,
			metadata: metadata,
			contentType: mime
		})
		: await (writeChunks(detectedName, fs.createReadStream(path), mime, metadata) as Promise<IDriveFile>);

	log(`drive file has been created: ${driveFile._id}`);

	pack(driveFile).then(packedFile => {
		// Publish drive_file_created event
		event(user._id, 'drive_file_created', packedFile);
		publishDriveStream(user._id, 'file_created', packedFile);
	});
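
	// Generate and store a thumbnail for the file; thumbnails are best-effort,
	// so any failure here is swallowed and the upload still succeeds.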
	if (!metaOnly) {
		try {
			const thumb = await genThumbnail(driveFile);
			if (thumb) {
				await writeThumbnailChunks(detectedName, thumb, driveFile._id);
			}
		} catch (e) {
			// noop
		}
	}

	return driveFile;
}