Mirror of https://git.joinsharkey.org/Sharkey/Sharkey.git (synced 2024-11-30 17:03:09 +02:00)
support GridFS
This commit is contained in:
parent ac2a0f46cd
commit 7e81e0db6a
3 changed files with 67 additions and 20 deletions
@@ -4,14 +4,27 @@ import * as gm from 'gm';
 import * as debug from 'debug';
 import fileType = require('file-type');
 import prominence = require('prominence');
-import DriveFile from '../models/drive-file';
+import DriveFile, { getGridFSBucket } from '../models/drive-file';
 import DriveFolder from '../models/drive-folder';
 import serialize from '../serializers/drive-file';
 import event from '../event';
 import config from '../../conf';
+import { Duplex } from 'stream';

 const log = debug('misskey:register-drive-file');

+const addToGridFS = (name, binary, metadata): Promise<any> => new Promise(async (resolve, reject) => {
+	const dataStream = new Duplex()
+	dataStream.push(binary)
+	dataStream.push(null)
+
+	const bucket = await getGridFSBucket()
+	const writeStream = bucket.openUploadStream(name, { metadata })
+	writeStream.once('finish', (doc) => { resolve(doc) })
+	writeStream.on('error', reject)
+	dataStream.pipe(writeStream)
+})
+
 /**
  * Add file to drive
  *
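Note: the new addToGridFS helper pipes an in-memory Buffer into the driver's GridFSBucket.openUploadStream and resolves with the stored files-collection document that older driver versions pass to the 'finish' event. A minimal equivalent sketch, using a stream.Readable with an explicit no-op read() instead of a bare Duplex (names here are illustrative, not part of the commit):

import { Readable } from 'stream';
import * as mongodb from 'mongodb';

// Illustrative sketch only: wrap an in-memory Buffer in a one-shot Readable and
// pipe it into a GridFS upload stream, resolving with the resulting file document.
const uploadBuffer = (bucket: mongodb.GridFSBucket, name: string, binary: Buffer, metadata: any): Promise<any> =>
	new Promise((resolve, reject) => {
		const source = new Readable({ read() { /* all data is pushed up front */ } });
		source.push(binary);
		source.push(null); // EOF

		const sink = bucket.openUploadStream(name, { metadata });
		sink.once('finish', resolve); // older driver versions pass the stored file document here
		sink.on('error', reject);
		source.pipe(sink);
	});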
@@ -58,7 +71,7 @@ export default (

 	// Generate hash
 	const hash = crypto
-		.createHash('sha256')
+		.createHash('md5')
 		.update(data)
 		.digest('hex') as string;

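Presumably the switch from sha256 to md5 keeps the locally computed digest comparable with the md5 checksum that GridFS (in driver versions of this era) records for each stored file, which is what the duplicate check below queries. An illustrative sketch:

import * as crypto from 'crypto';

// Hex MD5 of an uploaded buffer, comparable to the `md5` field of a
// GridFS files-collection document (example data only).
const data = Buffer.from('example file contents');
const md5OfUpload = crypto.createHash('md5').update(data).digest('hex');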
@@ -67,8 +80,10 @@ export default (
 	if (!force) {
 		// Check if there is a file with the same hash
 		const much = await DriveFile.findOne({
-			user_id: user._id,
-			hash: hash
+			md5: hash,
+			metadata: {
+				user_id: user._id
+			}
 		});

 		if (much !== null) {
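Note: querying { metadata: { user_id: ... } } uses MongoDB's exact-match semantics on the embedded document, so it only matches files whose metadata contains nothing but user_id; since uploads store several metadata fields, dot notation is the usual way to match on a single embedded field. A sketch of that variant (not part of the commit):

// Dot-notation form of the duplicate check shown above.
const much = await DriveFile.findOne({
	md5: hash,
	'metadata.user_id': user._id
});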
@@ -82,13 +97,13 @@ export default (
 	// Calculate drive usage
 	const usage = ((await DriveFile
 		.aggregate([
-			{ $match: { user_id: user._id } },
+			{ $match: { metadata: { user_id: user._id } } },
 			{ $project: {
-				datasize: true
+				length: true
 			}},
 			{ $group: {
 				_id: null,
-				usage: { $sum: '$datasize' }
+				usage: { $sum: '$length' }
 			}}
 		]))[0] || {
 			usage: 0
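GridFS records each file's byte size in the `length` field of the files collection, so the usage total can sum that instead of the app-managed `datasize`. The `$match` stage has the same exact-match caveat as the findOne above; a dot-notation sketch (illustrative, not part of the commit):

// Match on the embedded field rather than the whole metadata document.
const usageAgg = await DriveFile.aggregate([
	{ $match: { 'metadata.user_id': user._id } },
	{ $project: { length: true } },
	{ $group: { _id: null, usage: { $sum: '$length' } } }
]);
const usage = (usageAgg[0] || { usage: 0 }).usage;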
@@ -131,21 +146,15 @@ export default (
 	}

 	// Create DriveFile document
-	const file = await DriveFile.insert({
-		created_at: new Date(),
+	const file = await addToGridFS(`${user._id}/${name}`, data, {
 		user_id: user._id,
 		folder_id: folder !== null ? folder._id : null,
-		data: data,
-		datasize: size,
 		type: mime,
 		name: name,
 		comment: comment,
-		hash: hash,
 		properties: properties
 	});

-	delete file.data;
-
 	log(`drive file has been created ${file._id}`);

 	resolve(file);
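Roughly, the document addToGridFS resolves with (and that GridFS keeps in the files collection) has the shape sketched below; created_at, datasize, hash and data drop out of the app-level insert because GridFS already records uploadDate, length, md5 and the chunked payload. Field values are illustrative:

// Illustrative shape of the stored GridFS files-collection document:
// {
//   _id: ObjectId('...'),
//   filename: '<user._id>/<name>',
//   length: 51200,               // byte size; summed for drive usage above
//   chunkSize: 261120,
//   uploadDate: ISODate('...'),
//   md5: '9e107d9d372bb6826bd81d3542a419d6',
//   metadata: { user_id, folder_id, type, name, comment, properties }
// }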
@@ -1,11 +1,22 @@
-import db from '../../db/mongodb';
+import * as mongodb from 'mongodb';
+import monkDb, { nativeDbConn } from '../../db/mongodb';

-const collection = db.get('drive_files');
+const collection = monkDb.get('drive_files.files');

 (collection as any).createIndex('hash'); // fuck type definition

 export default collection as any; // fuck type definition

+const getGridFSBucket = async (): Promise<mongodb.GridFSBucket> => {
+	const db = await nativeDbConn()
+	const bucket = new mongodb.GridFSBucket(db, {
+		bucketName: 'drive_files'
+	})
+	return bucket
+}
+
+export { getGridFSBucket }
+
 export function validateFileName(name: string): boolean {
 	return (
 		(name.trim().length > 0) &&
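With the bucket named 'drive_files', GridFS stores its data in the drive_files.files and drive_files.chunks collections, which is why the monk handle now points at 'drive_files.files'. For illustration, a stored file can later be streamed back out of the same bucket with openDownloadStream (not part of this commit; the import path and names are assumptions):

import * as mongodb from 'mongodb';
import { getGridFSBucket } from '../models/drive-file'; // path is an assumption

// Stream a stored drive file into any writable, e.g. an HTTP response.
const sendStoredFile = async (fileId: mongodb.ObjectID, out: NodeJS.WritableStream): Promise<void> => {
	const bucket = await getGridFSBucket();
	bucket.openDownloadStream(fileId).pipe(out);
};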
@@ -1,11 +1,38 @@
-import * as mongo from 'monk';
-
 import config from '../conf';

 const uri = config.mongodb.user && config.mongodb.pass
 	? `mongodb://${config.mongodb.user}:${config.mongodb.pass}@${config.mongodb.host}:${config.mongodb.port}/${config.mongodb.db}`
 	: `mongodb://${config.mongodb.host}:${config.mongodb.port}/${config.mongodb.db}`;

+/**
+ * monk
+ */
+import * as mongo from 'monk';
+
 const db = mongo(uri);

 export default db;
+
+/**
+ * MongoDB native module (officialy)
+ */
+import * as mongodb from 'mongodb'
+
+let mdb: mongodb.Db;
+
+const nativeDbConn = async (): Promise<mongodb.Db> => {
+	if (mdb) return mdb;
+
+	const db = await ((): Promise<mongodb.Db> => new Promise((resolve, reject) => {
+		mongodb.MongoClient.connect(uri, (e, db) => {
+			if (e) return reject(e)
+			resolve(db)
+		})
+	}))()
+
+	mdb = db
+
+	return db
+}
+
+export { nativeDbConn }
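nativeDbConn memoizes the native Db handle in mdb, so the first caller pays the MongoClient connection cost and later callers reuse the same connection. An illustrative consumer (import path and collection name are assumptions):

import { nativeDbConn } from '../db/mongodb'; // path is an assumption

// Count stored drive files through the native driver handle.
const countDriveFiles = async (): Promise<number> => {
	const db = await nativeDbConn();
	return db.collection('drive_files.files').count({});
};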