Mirror of https://git.joinsharkey.org/Sharkey/Sharkey.git, synced 2024-11-10 09:23:09 +02:00
fix lint (automattic)
commit d5cc4cc9c2
parent 04648db1c2
9 changed files with 48 additions and 49 deletions
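Every hunk below makes the same kind of change: adding the semicolons that a semicolon-enforcing lint rule expects. The diff does not show which linter or configuration the project uses; purely as an illustration, an ESLint-style rule enforcing this could look like the following (a hypothetical config, not taken from the repository):

// .eslintrc.cjs (hypothetical) — require a terminating semicolon on every statement
module.exports = {
  rules: {
    semi: ['error', 'always'],
  },
};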
@@ -14,16 +14,16 @@ import { Duplex } from 'stream';
 const log = debug('misskey:register-drive-file');

 const addToGridFS = (name, binary, metadata): Promise<any> => new Promise(async (resolve, reject) => {
-const dataStream = new Duplex()
+const dataStream = new Duplex();
-dataStream.push(binary)
+dataStream.push(binary);
-dataStream.push(null)
+dataStream.push(null);

-const bucket = await getGridFSBucket()
+const bucket = await getGridFSBucket();
-const writeStream = bucket.openUploadStream(name, { metadata })
+const writeStream = bucket.openUploadStream(name, { metadata });
-writeStream.once('finish', (doc) => { resolve(doc) })
+writeStream.once('finish', (doc) => { resolve(doc); });
-writeStream.on('error', reject)
+writeStream.on('error', reject);
-dataStream.pipe(writeStream)
+dataStream.pipe(writeStream);
-})
+});

 /**
  * Add file to drive
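For context on the hunk above: addToGridFS wraps a GridFS upload in a Promise by opening an upload stream on the bucket, piping the file's bytes into it, and resolving with the stored file document on 'finish'. A minimal sketch of the same pattern, assuming a modern mongodb driver and using Readable.from instead of a hand-built Duplex (names and types here are illustrative, not the project's own):

import { Readable } from 'stream';
import { GridFSBucket } from 'mongodb';

// Upload a Buffer to GridFS and resolve with the resulting file document.
// Note: older drivers passed the document to the 'finish' handler (as the
// diff above relies on); recent drivers expose it as writeStream.gridFSFile.
function uploadToGridFS(bucket: GridFSBucket, name: string, binary: Buffer, metadata: object): Promise<unknown> {
  return new Promise((resolve, reject) => {
    const writeStream = bucket.openUploadStream(name, { metadata });
    writeStream.on('error', reject);
    writeStream.on('finish', () => resolve(writeStream.gridFSFile));
    Readable.from([binary]).pipe(writeStream); // a single chunk: the whole buffer
  });
}

Resolving with the stored document (rather than nothing) lets the caller keep working with the file's _id and length without a second query.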
@@ -63,5 +63,5 @@ module.exports = async (params, user, app) => {

 // Serialize
 const _files = await Promise.all(files.map(file => serialize(file)));
-return _files
+return _files;
 };
@@ -33,5 +33,5 @@ module.exports = async (params, user) => {
 detail: true
 });

-return _file
+return _file;
 };
@@ -20,7 +20,6 @@ module.exports = (params, user) => new Promise(async (res, rej) => {
 const [fileId, fileIdErr] = $(params.file_id).id().$;
 if (fileIdErr) return rej('invalid file_id param');

-
 // Fetch file
 const file = await DriveFile
 .findOne({
@@ -32,7 +31,7 @@ module.exports = (params, user) => new Promise(async (res, rej) => {
 return rej('file-not-found');
 }

-const updateQuery: any = {}
+const updateQuery: any = {};

 // Get 'name' parameter
 const [name, nameErr] = $(params.name).optional.string().pipe(validateFileName).$;
@@ -92,6 +92,6 @@ module.exports = async (params, user, app) => {
 });

 // Serialize
-const _timeline = await Promise.all(timeline.map(post => serialize(post, user)))
+const _timeline = await Promise.all(timeline.map(post => serialize(post, user)));
-return _timeline
+return _timeline;
 };
@@ -8,14 +8,14 @@ const collection = monkDb.get('drive_files.files');
 export default collection as any; // fuck type definition

 const getGridFSBucket = async (): Promise<mongodb.GridFSBucket> => {
-const db = await nativeDbConn()
+const db = await nativeDbConn();
 const bucket = new mongodb.GridFSBucket(db, {
 bucketName: 'drive_files'
-})
+});
-return bucket
+return bucket;
-}
+};

-export { getGridFSBucket }
+export { getGridFSBucket };

 export function validateFileName(name: string): boolean {
 return (
@@ -40,13 +40,13 @@ export default (
 _file = deepcopy(file);
 }

-if (!_file) return reject('invalid file arg.')
+if (!_file) return reject('invalid file arg.');

 // rendered target
 let _target: any = {};

 _target.id = _file._id;
-_target.created_at = _file.uploadDate
+_target.created_at = _file.uploadDate;

 _target = Object.assign(_target, _file.metadata);

@@ -16,7 +16,7 @@ export default db;
 /**
  * MongoDB native module (officialy)
  */
-import * as mongodb from 'mongodb'
+import * as mongodb from 'mongodb';

 let mdb: mongodb.Db;

@@ -25,14 +25,14 @@ const nativeDbConn = async (): Promise<mongodb.Db> => {

 const db = await ((): Promise<mongodb.Db> => new Promise((resolve, reject) => {
 mongodb.MongoClient.connect(uri, (e, db) => {
-if (e) return reject(e)
+if (e) return reject(e);
-resolve(db)
+resolve(db);
-})
+});
-}))()
+}))();

-mdb = db
+mdb = db;

-return db
+return db;
-}
+};

-export { nativeDbConn }
+export { nativeDbConn };
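The hunk above is the lazy native-driver connection: nativeDbConn promisifies MongoClient.connect by hand and stashes the resulting Db in the module-level mdb declared earlier, presumably so later calls can reuse it. (In the older driver shown here, the connect callback receives the Db handle directly.) A minimal sketch of the same idea with a promise-returning driver; the uri and database name are placeholder assumptions, not the project's real config:

import { MongoClient, Db } from 'mongodb';

let cached: Db | undefined;

// Connect once, then hand back the cached Db on every later call.
// uri and dbName are hypothetical defaults; the real values come from config.
async function nativeDbConn(uri = 'mongodb://localhost:27017', dbName = 'misskey'): Promise<Db> {
  if (cached) return cached;
  const client = await MongoClient.connect(uri);
  cached = client.db(dbName);
  return cached;
}

export { nativeDbConn };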
@@ -97,7 +97,7 @@ app.get('/:id', async (req, res) => {
 return;
 }

-const fileId = new mongodb.ObjectID(req.params.id)
+const fileId = new mongodb.ObjectID(req.params.id);
 const file = await DriveFile.findOne({ _id: fileId });

 if (file == null) {
@@ -105,18 +105,18 @@ app.get('/:id', async (req, res) => {
 return;
 }

-const bucket = await getGridFSBucket()
+const bucket = await getGridFSBucket();

 const buffer = await ((id): Promise<Buffer> => new Promise((resolve, reject) => {
-const chunks = []
+const chunks = [];
-const readableStream = bucket.openDownloadStream(id)
+const readableStream = bucket.openDownloadStream(id);
 readableStream.on('data', chunk => {
 chunks.push(chunk);
-})
+});
 readableStream.on('end', () => {
-resolve(Buffer.concat(chunks))
+resolve(Buffer.concat(chunks));
-})
+});
-}))(fileId)
+}))(fileId);

 send(buffer, file.metadata.type, req, res);
 });
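The route above (and the near-identical '/:id/:name' route below) reads a GridFS file into memory by collecting 'data' chunks and concatenating them on 'end'. A minimal sketch of the same stream-to-Buffer pattern, with an 'error' listener added (the handler in the diff only listens for 'data' and 'end'); the helper name and the ObjectId spelling follow the current driver and are illustrative:

import { GridFSBucket, ObjectId } from 'mongodb';

// Read a whole GridFS file into a single Buffer.
function downloadToBuffer(bucket: GridFSBucket, id: ObjectId): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    const chunks: Buffer[] = [];
    bucket.openDownloadStream(id)
      .on('data', (chunk: Buffer) => chunks.push(chunk))
      .on('error', reject)
      .on('end', () => resolve(Buffer.concat(chunks)));
  });
}

Buffering the whole file is fine for small drive files; for large ones, piping the download stream straight into the HTTP response avoids holding the entire file in memory.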
@@ -128,7 +128,7 @@ app.get('/:id/:name', async (req, res) => {
 return;
 }

-const fileId = new mongodb.ObjectID(req.params.id)
+const fileId = new mongodb.ObjectID(req.params.id);
 const file = await DriveFile.findOne({ _id: fileId });

 if (file == null) {
@@ -136,18 +136,18 @@ app.get('/:id/:name', async (req, res) => {
 return;
 }

-const bucket = await getGridFSBucket()
+const bucket = await getGridFSBucket();

 const buffer = await ((id): Promise<Buffer> => new Promise((resolve, reject) => {
-const chunks = []
+const chunks = [];
-const readableStream = bucket.openDownloadStream(id)
+const readableStream = bucket.openDownloadStream(id);
 readableStream.on('data', chunk => {
 chunks.push(chunk);
-})
+});
 readableStream.on('end', () => {
-resolve(Buffer.concat(chunks))
+resolve(Buffer.concat(chunks));
-})
+});
-}))(fileId)
+}))(fileId);

 send(buffer, file.metadata.type, req, res);
 });