const path = require('path');
const chalk = require('chalk');

const { Observer } = require('../../../interfaces');
const { Util, Collection } = require('../../../../util/');
const CONSTANTS = {
    IMAGES: {
        PREMIUM_LIMIT: 2, // Minimum guild premium tier required before attachments are stored.
        UPLOAD_LIMIT: { // Largest attachment size (in MB) stored, per premium tier.
            '0': 8,
            '1': 8,
            '2': 50,
            '3': 100
        },
        MB_DIVIDER: 1024 * 1024, // Bytes per megabyte.
        PREMIUM_DELETE: { // Days stored messages/attachments are retained, per premium tier.
            '0': 0,
            '1': 0,
            '2': 2,
            '3': 4
        }
    },
    DAY: 24 * 60 * 60 // One day in seconds.
};
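
/**
 * Caches recently seen messages in memory and, for premium guilds with message
 * logging enabled, persists message metadata and attachment buffers to MongoDB
 * so deleted content can be recovered later. Both the in-memory cache and the
 * stored documents are swept on an hourly interval.
 */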
class MessageCache extends Observer {

    constructor(client) {
        super(client, {
            name: 'messageCache',
            priority: 0
        });

        this.client = client;

        this.hooks = [
            ['message', this.cache.bind(this)],
            ['messageDelete', this.cacheUpdate.bind(this)]
        ];

        this.messages = new Collection();

        setInterval(() => {
            this._sweepCache();
            this._sweepDatabase();
        }, 3600 * 1000); // 1 hour

    }
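
    /**
     * 'message' hook: builds the message data (including attachment data for
     * eligible premium guilds) and stores it in the in-memory cache.
     */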
    async cache(message) {
        if (!this.client._built
            || message.webhookID
            || message.author.bot
            || !message.guild
            || !message.guild.available) return undefined;
        await message.guild.settings();

        const data = await this._grabMessageData(message);
        this.messages.set(message.id, data);
    }
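
    /**
     * 'messageDelete' hook: flags the cached entry as deleted so it can still
     * be looked up after the original message is gone.
     */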
    async cacheUpdate(message) {
        const cachedMessage = this.messages.get(message.id);
        if (!cachedMessage) return undefined;
        cachedMessage.deleted = true;
        return this.messages.set(message.id, cachedMessage);
    }
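
    /**
     * Builds the metadata object for a message. For premium guilds (tier 2+)
     * with a message log channel configured, attachments within the guild's
     * upload limit are downloaded and stored in the 'attachments' collection,
     * and the metadata itself is stored in the 'messages' collection.
     */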
    async _grabMessageData(message) {

        const beforeTime = Date.now();
        const metadata = {
            guild: message.guild.id,
            message: message.id,
            author: message.author.id,
            channel: message.channel.id,
            content: message.content,
            id: message.id,
            timestamp: message.createdTimestamp,
            deleted: false,
            attachments: [],
            removeAt: Math.floor(Date.now() / 1000 + CONSTANTS.IMAGES.PREMIUM_DELETE[message.guild.premium] * CONSTANTS.DAY)
        };

        const { _settings: guildSettings } = message.guild;
        const { messageLog } = guildSettings;

        let ignoredRole = false;
        if (messageLog.enabled && messageLog.ignoredRoles.length > 0) {
            for (const role of message.member.roles.cache.keys()) {
                if (messageLog.ignoredRoles.includes(role)) ignoredRole = true;
            }
        }
        if (message.attachments.size > 0
            && message.guild.premium >= CONSTANTS.IMAGES.PREMIUM_LIMIT
            && messageLog.channel
            && !messageLog.ignoredChannels.includes(message.channel.id)
            && !ignoredRole) {

            let size = 0;
            for (const attachment of message.attachments.values()) {
                const data = {
                    size: attachment.size,
                    dimensions: { x: attachment.width, y: attachment.height },
                    extension: path.extname(attachment.name),
                    url: attachment.proxyURL || attachment.url,
                    name: attachment.name,
                    id: attachment.id
                };

                const fsize = data.size / CONSTANTS.IMAGES.MB_DIVIDER; // File size in MB.
                if (fsize > CONSTANTS.IMAGES.UPLOAD_LIMIT[message.guild.premium]) {
                    metadata.attachments.push(data);
                    continue; // Do not store images larger than the guild's upload limit (users with Nitro can upload more than that).
                }

                const buffer = await Util.downloadAsBuffer(attachment.proxyURL || attachment.url).catch((err) => {
                    this.client.logger.error(`Failed to download buffer for attachment ${data.name} (${data.url}):\n${err.stack || err}`);
                    return null;
                });
                if (buffer && fsize < 15) { // MongoDB will not save documents larger than 16MB; check against 15MB just in case.
                    try {
                        const result = await this.client.transactionHandler.send({
                            provider: 'mongodb',
                            request: {
                                type: 'insertOne',
                                collection: 'attachments',
                                data: {
                                    attachmentId: attachment.id,
                                    buffer
                                }
                            }
                        });
                        data.index = result?.insertedId;
                        // this.client.logger.debug(`Saved file ${data.name} (${fsize.toFixed(2)}mb), took ${Date.now() - beforeTime}ms.`);
                    } catch (err) {
                        this.client.logger.error(`Something went wrong while storing an image in the database:\n${err.stack || err}`);
                    }
                }

                metadata.attachments.push(data);
                size += fsize;

            }
            const afterTime = Date.now();
            this.client.logger.debug(`${chalk.bold('[IMAGE]')} User ${message.author.tag} in guild ${message.guild.name} (#${message.channel.name}) uploaded ${message.attachments.size} attachment${message.attachments.size === 1 ? '' : 's'} (${size.toFixed(2)}mb); saving took ${afterTime - beforeTime}ms.`);

            await this.client.transactionHandler.send({
                provider: 'mongodb',
                request: {
                    type: 'insertOne',
                    collection: 'messages',
                    data: metadata
                }
            });

        }

        return metadata; // NOTE: Not every attachment is guaranteed to have an index. If there is no index, it either failed to be pushed to the database or could not be saved.

    }
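
    /**
     * Removes expired documents: deletes stored attachment buffers and message
     * metadata whose removeAt timestamp has passed.
     */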
    async _sweepDatabase() {

        const messages = await this.client.transactionHandler.send({
            provider: 'mongodb',
            request: {
                collection: 'messages',
                type: 'find',
                query: {
                    removeAt: {
                        $lt: Date.now() / 1000
                    }
                }
            }
        });

        if (messages.length > 0) {

            const attachmentIds = messages
                .flatMap((m) => m.attachments)
                .map((a) => a.index)
                .filter((index) => index); // Attachments without an index were never stored, so there is nothing to delete.

            const deleteAttachments = await this.client.transactionHandler.send({
                provider: 'mongodb',
                request: {
                    collection: 'attachments',
                    type: 'deleteMany',
                    query: {
                        _id: { $in: attachmentIds }
                    }
                }
            });
            // deletedCount assumes the transaction handler returns the raw driver result, as insertedId above suggests.
            this.client.logger.log(`${chalk.bold('[IMAGE]')} Trashed ${deleteAttachments.deletedCount} items from the attachment database.`);

            const msgIds = messages.map((m) => m._id);
            const deleteMessages = await this.client.transactionHandler.send({
                provider: 'mongodb',
                request: {
                    collection: 'messages',
                    type: 'deleteMany',
                    query: {
                        _id: { $in: msgIds }
                    }
                }
            });
            this.client.logger.log(`${chalk.bold('[IMAGE]')} Trashed ${deleteMessages.deletedCount} items from the message database.`);

        }

    }
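
    /**
     * Drops cached entries older than one hour from the in-memory collection.
     */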
    _sweepCache() {

        const ms = 3600000; // 1 hour in milliseconds.
        const filtered = this.messages.filter((m) => {
            const time = Date.now() - m.timestamp;
            return time < ms;
        });

        const difference = this.messages.size - filtered.size;
        if (difference > 0) {
            this.client.logger.debug(`Trashed ${difference} items from the message cache.`);
            this.messages = filtered;
        }

        return filtered;

    }
}

module.exports = MessageCache;
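
// Usage sketch (hypothetical, assuming the framework's observer loader binds each
// [eventName, handler] pair in `hooks` to the matching client event):
//   const observer = new MessageCache(client);
//   for (const [event, handler] of observer.hooks) client.on(event, handler);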