Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
151 changes: 89 additions & 62 deletions dadi/lib/cache/index.js
Original file line number Diff line number Diff line change
@@ -1,37 +1,43 @@
var _ = require('underscore')
var crypto = require('crypto')
var path = require('path')
var pathToRegexp = require('path-to-regexp')
var url = require('url')

var config = require(path.join(__dirname, '/../../../config'))
var log = require('@dadi/logger')
var DadiCache = require('@dadi/cache')
var cache

var Cache = function (server) {
const crypto = require('crypto')
const path = require('path')
const pathToRegexp = require('path-to-regexp')
const url = require('url')

const config = require(path.join(__dirname, '/../../../config'))
const utils = require('../utils')
const log = require('@dadi/logger')
const DadiCache = require('@dadi/cache')
let cache

const Cache = function (server) {
this.cache = cache = new DadiCache(config.get('caching'))

this.server = server
this.enabled = config.get('caching.directory.enabled') || config.get('caching.redis.enabled')
this.encoding = 'utf8'
this.options = {}
}

var instance
let instance
module.exports = function (server) {
if (!instance) {
instance = new Cache(server)
}
return instance
}

/**
* Locate the endpoint relating to the current request in the loaded
* system components (collection models and custom endpoints) to determine
* if caching is enabled.
*
* @param {http.IncomingMessage} req - the current HTTP request
*/
Cache.prototype.cachingEnabled = function (req) {
var options = {}
var endpoints = this.server.components
var requestPath = url.parse(req.url, true).pathname
let options = {}
let endpoints = this.server.components
let requestPath = url.parse(req.url, true).pathname

var endpointKey = _.find(_.keys(endpoints), function (k) { return pathToRegexp(k).exec(requestPath) })
let endpointKey = Object.keys(endpoints).find(key => pathToRegexp(key).exec(requestPath))

if (!endpointKey) return false

Expand All @@ -42,94 +48,112 @@ Cache.prototype.cachingEnabled = function (req) {
return (this.enabled && (options.cache || false))
}

/**
* Return the content type for the current endpoint. Only two possible types: JavaScript or JSON.
*
* @param {http.IncomingMessage} req - the current HTTP request
*/
Cache.prototype.getEndpointContentType = function (req) {
// there are only two possible types javascript or json
var query = url.parse(req.url, true).query
let query = url.parse(req.url, true).query
return query.callback ? 'text/javascript' : 'application/json'
}

/**
* Adds the Cache middleware to the stack
*/
Cache.prototype.init = function () {
var self = this

this.server.app.use((req, res, next) => {
var enabled = self.cachingEnabled(req)
let enabled = this.cachingEnabled(req)
if (!enabled) return next()

// only cache GET requests
// Only cache GET requests.
if (req.method && req.method.toLowerCase() !== 'get') return next()

var query = url.parse(req.url, true).query
let query = url.parse(req.url, true).query

// allow query string param to bypass cache
var noCache = query.cache && query.cache.toString().toLowerCase() === 'false'
// Allow query string param to bypass cache.
let noCache = query.cache && query.cache.toString().toLowerCase() === 'false'
delete query.cache

// we build the filename with a hashed hex string so we can be unique
// and avoid using file system reserved characters in the name.
var modelDir = crypto.createHash('sha1').update(url.parse(req.url).pathname).digest('hex')
var filename = crypto.createHash('sha1').update(url.parse(req.url).pathname + JSON.stringify(query)).digest('hex')
// Build the filename with a hashed hex string so it is unique
// and avoids using file system reserved characters in the name.
let modelDir = crypto.createHash('sha1').update(url.parse(req.url).pathname).digest('hex')
let filename = crypto.createHash('sha1').update(url.parse(req.url).pathname + JSON.stringify(query)).digest('hex')

// Prepend the model's name/folder hierarchy to the filename so it can be used
// later to flush the cache for this model
var cacheKey = modelDir + '_' + filename
let cacheKey = `${modelDir}_${filename}`

// get contentType that current endpoint requires
var contentType = self.getEndpointContentType(req)
let acceptEncoding = req.headers['accept-encoding']

// attempt to get from the cache
cache.get(cacheKey).then((stream) => {
res.setHeader('X-Cache-Lookup', 'HIT')
if (acceptEncoding && acceptEncoding !== 'gzip, deflate' && /\bgzip\b/.test(acceptEncoding)) {
acceptEncoding = 'gzip'
cacheKey += '.gz'
}

if (noCache) {
res.setHeader('X-Cache', 'MISS')
return next()
}
// Get contentType that current endpoint requires.
let contentType = this.getEndpointContentType(req)

log.info({module: 'cache'}, 'Serving ' + req.url + ' from cache')
// Attempt to get from the cache.
cache.get(cacheKey).then(stream => {
cache.getMetadata(cacheKey).then(metadata => {
res.setHeader('X-Cache-Lookup', 'HIT')

res.statusCode = 200
res.setHeader('X-Cache', 'HIT')
res.setHeader('Content-Type', contentType)
// res.setHeader('Content-Length', stats.size)
let compressed = false
if (metadata && metadata.compression === 'gzip') {
compressed = true
}

stream.pipe(res)
if (noCache) {
res.setHeader('X-Cache', 'MISS')
return next()
}

log.info({module: 'cache'}, 'Serving ' + req.url + ' from cache')

res.statusCode = 200
res.setHeader('X-Cache', 'HIT')
res.setHeader('Content-Type', contentType)

return utils.pipeStream(stream, false, compressed, res)
})
}).catch(() => {
if (noCache) {
return next()
}

// not found in cache
res.setHeader('X-Cache', 'MISS')
res.setHeader('X-Cache-Lookup', 'MISS')

return cacheResponse()
})

/**
* cacheResponse
* Writes the current response body to either the filesystem or a Redis server,
* depending on the configuration settings
* Write the current response body to either the filesystem or a Redis server,
* depending on the configuration settings.
*/
function cacheResponse () {
// file is expired or does not exist, wrap res.end and res.write to save to cache
var _end = res.end
var _write = res.write

var data = ''
let _end = res.end
let _write = res.write

res.write = function (chunk) {
_write.apply(res, arguments)
}

res.end = function (chunk) {
// respond before attempting to cache
res.end = function (data) {
// Respond before attempting to cache.
_end.apply(res, arguments)

if (chunk) data += chunk

// if response is not 200 don't cache
if (res.statusCode !== 200) return

// cache the content
cache.set(cacheKey, data).then(() => {
// Cache the content.
cache.set(cacheKey, data, {
metadata: {
compression: !acceptEncoding ? 'none' : acceptEncoding
}
}).then(() => {

})
}
Expand All @@ -144,14 +168,17 @@ module.exports.reset = function () {
}

/**
* Passes the specified pattern to the cache module to delete
* cached items with matching keys.
*
* @param {string} pattern - the cache key pattern to match
*/
module.exports.delete = function (pattern, callback) {
if (!cache) return callback(null)

cache.flush(pattern).then(() => {
return callback(null)
}).catch((err) => {
}).catch(err => {
console.log(err)
return callback(null)
})
Expand Down
2 changes: 1 addition & 1 deletion dadi/lib/controller/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -256,7 +256,7 @@ Controller.prototype.get = function (req, res, next) {
options: queryOptions,
req
}).then(results => {
return done(null, results)
return done(null, results, req)
}).catch(error => {
return done(error)
})
Expand Down
26 changes: 18 additions & 8 deletions dadi/lib/help.js
Original file line number Diff line number Diff line change
@@ -1,14 +1,16 @@
const crypto = require('crypto')
const formatError = require('@dadi/format-error')
const path = require('path')
const Readable = require('stream').Readable
const utils = require(path.join(__dirname, '/utils'))

const cache = require(path.join(__dirname, '/cache'))
const config = require(path.join(__dirname, '/../../config'))
const log = require('@dadi/logger')

// helper that sends json response
module.exports.sendBackJSON = function (successCode, res, next) {
return function (err, results) {
return function (err, results, originalRequest) {
let body = results
let statusCode = successCode

Expand Down Expand Up @@ -40,17 +42,25 @@ module.exports.sendBackJSON = function (successCode, res, next) {
let resBody = JSON.stringify(body)

// log response if it's already been sent
if (res.finished) {
log.info({res: res}, 'Response already sent. Attempting to send results: ' + resBody)
return
// if (res.finished) {
// log.info({res: res}, 'Response already sent. Attempting to send results: ' + resBody)
// return
// }
let compress = false
let acceptEncoding = originalRequest && originalRequest.headers['accept-encoding'] ? originalRequest.headers['accept-encoding'] : ''

if (acceptEncoding !== 'gzip, deflate') {
compress = acceptEncoding.match(/\bgzip\b/)
}

res.setHeader('content-type', 'application/json')
res.setHeader('content-length', Buffer.byteLength(resBody))
let stream = new Readable()
stream.push(resBody)
stream.push(null)

res.setHeader('Content-Type', 'application/json')
res.statusCode = statusCode

res.end(resBody)
return utils.pipeStream(stream, compress, compress, res)
}
}

Expand Down Expand Up @@ -205,7 +215,7 @@ module.exports.validateCollectionSchema = function (obj) {
* Remove each file in the specified cache folder.
*/
module.exports.clearCache = function (pathname, callback) {
var pattern = ''
let pattern = ''

pattern = crypto.createHash('sha1').update(pathname).digest('hex')

Expand Down
34 changes: 34 additions & 0 deletions dadi/lib/utils.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
const concat = require('concat-stream')
const lengthStream = require('length-stream')
const zlib = require('zlib')

/**
 * Pipe a stream into an HTTP response, optionally gzipping it on the way,
 * and set the Content-Length / Content-Encoding headers to match what is
 * actually sent.
 *
 * @param {stream.Readable} stream - the source stream to send
 * @param {Boolean} compress - gzip the stream before sending it
 * @param {Boolean} isCompressed - the stream's content is already gzipped
 * @param {http.ServerResponse} res - the response to write to
 * @returns {stream.Writable} the tail of the pipeline (the concat stream)
 */
module.exports.pipeStream = function (stream, compress, isCompressed, res) {
  let contentLength = 0

  // Record the byte length of the (possibly compressed) payload as it
  // passes through, so Content-Length can be set before the buffered
  // response is written.
  function lengthListener (length) {
    contentLength = length
  }

  // Receive the fully concatenated buffer and send the response.
  function sendBuffer (buffer) {
    res.setHeader('Content-Length', contentLength)
    res.end(buffer)
  }

  // The payload is gzip-encoded either because it arrived compressed or
  // because we compress it below.
  if (compress || isCompressed) {
    res.setHeader('Content-Encoding', 'gzip')
  }

  let concatStream = concat(sendBuffer)

  if (compress) {
    stream = stream.pipe(zlib.createGzip())
  }

  stream = stream.pipe(lengthStream(lengthListener))

  return stream.pipe(concatStream)
}
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,9 @@
]
},
"dependencies": {
"@dadi/api-mongodb": "^4.1.0",
"@dadi/boot": "^1.0.0",
"@dadi/cache": "1.5.x",
"@dadi/cache": "~2.0.0",
"@dadi/et": "^2.0.0",
"@dadi/format-error": "^1.6.0",
"@dadi/logger": "^1.3.0",
Expand Down
Loading