'use strict';

var inspect = require('util').inspect;
var parse = require('url').parse;
var common = require('./common');
var intercept = require('./intercept');
var debug = require('debug')('nock.recorder');
var _ = require('lodash');

var Stream = require('stream');
var URL = require('url');

var SEPARATOR = '\n<<<<<<-- cut here -->>>>>>\n';
var recordingInProgress = false;
var outputs = [];

function getScope(options) {

  common.normalizeRequestOptions(options);

  var scope = [];
  if (options._https_) {
    scope.push('https://');
  } else {
    scope.push('http://');
  }

  scope.push(options.host);

  //  If a non-standard port wasn't specified in options.host, include it from options.port.
  if (options.host.indexOf(':') === -1 &&
      options.port &&
      ((options._https_ && options.port.toString() !== '443') ||
       (!options._https_ && options.port.toString() !== '80'))) {
    scope.push(':');
    scope.push(options.port);
  }

  return scope.join('');

}

function getMethod(options) {

  return (options.method || 'GET');

}

var getBodyFromChunks = function(chunks, headers) {

  //  If we have headers and there is content-encoding it means that
  //  the body shouldn't be merged but instead persisted as an array
  //  of hex strings so that the responses can be mocked one by one.
  if (common.isContentEncoded(headers)) {
    return _.map(chunks, function(chunk) {
      if (!Buffer.isBuffer(chunk)) {
        if (typeof chunk === 'string') {
          chunk = new Buffer(chunk);
        } else {
          throw new Error('content-encoded responses must all be binary buffers');
        }
      }

      return chunk.toString('hex');
    });
  }

  var mergedBuffer = common.mergeChunks(chunks);

  //  The merged buffer can be one of three things:
  //    1.  A binary buffer which then has to be recorded as a hex string.
  //    2.  A string buffer which represents a JSON object.
  //    3.  A string buffer which doesn't represent a JSON object.

  if (common.isBinaryBuffer(mergedBuffer)) {
    return mergedBuffer.toString('hex');
  } else {
    var maybeStringifiedJson = mergedBuffer.toString('utf8');
    try {
      return JSON.parse(maybeStringifiedJson);
    } catch (err) {
      return maybeStringifiedJson;
    }
  }

};
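/*
 *  Illustrative sketch (not part of the original module): how the three
 *  getBodyFromChunks() cases above play out. The buffers and header values
 *  here are made-up inputs, and the results assume the usual behaviour of
 *  common.isContentEncoded / common.isBinaryBuffer / common.mergeChunks.
 *
 *    // JSON text body -> parsed into an object
 *    getBodyFromChunks([new Buffer('{"ok":true}')], {});
 *    // => { ok: true }
 *
 *    // Binary body (invalid UTF-8) -> recorded as a single hex string
 *    getBodyFromChunks([new Buffer([0xff, 0x00])], {});
 *    // => 'ff00'
 *
 *    // Content-encoded body (e.g. gzip) -> chunks kept separate, each as hex
 *    getBodyFromChunks(gzippedChunks, { 'content-encoding': 'gzip' });
 *    // => ['1f8b08...', ...]
 */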
function generateRequestAndResponseObject(req, bodyChunks, options, res, dataChunks) {

  options.path = req.path;

  return {
    scope: getScope(options),
    method: getMethod(options),
    path: options.path,
    body: getBodyFromChunks(bodyChunks),
    status: res.statusCode,
    response: getBodyFromChunks(dataChunks, res.headers),
    headers: res.headers,
    reqheaders: req._headers
  };

}

function generateRequestAndResponse(req, bodyChunks, options, res, dataChunks) {

  var requestBody = getBodyFromChunks(bodyChunks);
  var responseBody = getBodyFromChunks(dataChunks, res.headers);

  //  Remove any query params from options.path so they can be added in the query() function
  var path = options.path;
  var queryIndex = 0;
  var queryObj = {};
  if ((queryIndex = req.path.indexOf('?')) !== -1) {
    path = path.substring(0, queryIndex);

    //  Create the query() object
    var queries = req.path.slice(queryIndex + 1).split('&');
    for (var i = 0; i < queries.length; i++) {
      var query = queries[i].split('=');
      queryObj[query[0]] = query[1];
    }
  }

  var ret = [];
  ret.push('\nnock(\'');
  ret.push(getScope(options));
  ret.push('\')\n');
  ret.push(' .');
  ret.push(getMethod(options).toLowerCase());
  ret.push('(\'');
  ret.push(path);
  ret.push("'");
  if (requestBody) {
    ret.push(', ');
    ret.push(JSON.stringify(requestBody));
  }
  ret.push(")\n");
  if (req.headers) {
    for (var k in req.headers) {
      ret.push(' .matchHeader(' + JSON.stringify(k) + ', ' + JSON.stringify(req.headers[k]) + ')\n');
    }
  }
  if (queryIndex !== -1) {
    ret.push(' .query(');
    ret.push(JSON.stringify(queryObj));
    ret.push(')\n');
  }

  ret.push(' .reply(');
  ret.push(res.statusCode.toString());
  ret.push(', ');
  ret.push(JSON.stringify(responseBody));
  if (res.headers) {
    ret.push(', ');
    ret.push(inspect(res.headers));
  }
  ret.push(');\n');

  return ret.join('');
}

//  This module variable is used to identify a unique recording ID in order to skip
//  spurious requests that sometimes happen. This problem has been, so far,
//  exclusively detected in nock's unit testing where 'checks if callback is specified'
//  interferes with other tests as its t.end() is invoked without waiting for request
//  to finish (which is the point of the test).
var currentRecordingId = 0;

function record(rec_options) {

  //  Set the new current recording ID and capture its value in this instance of record().
  currentRecordingId = currentRecordingId + 1;
  var thisRecordingId = currentRecordingId;

  debug('start recording', thisRecordingId, JSON.stringify(rec_options));

  //  Trying to start recording with recording already in progress implies an error
  //  in the recording configuration (double recording makes no sense and used to lead
  //  to duplicates in output)
  if (recordingInProgress) {
    throw new Error('Nock recording already in progress');
  }

  recordingInProgress = true;

  //  Originally the parameter was a dont_print boolean flag.
  //  To keep the existing code compatible we take that case into account.
  var optionsIsObject = typeof rec_options === 'object';
  var dont_print = (typeof rec_options === 'boolean' && rec_options) ||
    (optionsIsObject && rec_options.dont_print);
  var output_objects = optionsIsObject && rec_options.output_objects;
  var enable_reqheaders_recording = optionsIsObject && rec_options.enable_reqheaders_recording;
  var logging = (optionsIsObject && rec_options.logging) || console.log;
  var use_separator = true;
  if (optionsIsObject && _.has(rec_options, 'use_separator')) {
    use_separator = rec_options.use_separator;
  }

  debug(thisRecordingId, 'restoring overridden requests before new overrides');
  //  To preserve backward compatibility (starting recording wasn't throwing if nock was already active)
  //  we restore any requests that may have been overridden by other parts of nock (e.g. intercept)
  //  NOTE: This is hacky as hell but it keeps the backward compatibility *and* allows correct
  //  behavior in the face of other modules also overriding ClientRequest.
  common.restoreOverriddenRequests();
  //  We restore ClientRequest as it messes with recording of modules that also override ClientRequest (e.g. xhr2)
  intercept.restoreOverriddenClientRequest();

  //  We override the requests so that we can save information on them before executing.
  common.overrideRequests(function(proto, overriddenRequest, options, callback) {

    var bodyChunks = [];

    if (typeof options == 'string') {
      var url = URL.parse(options);

      options = {
        hostname: url.hostname,
        method: 'GET',
        port: url.port,
        path: url.path
      };
    }

    //  Node 0.11 https.request calls http.request -- don't want to record things twice.
    if (options._recording) {
      return overriddenRequest(options, callback);
    }
    options._recording = true;

    var req = overriddenRequest(options, function(res) {

      debug(thisRecordingId, 'intercepting', proto, 'request to record');

      if (typeof options === 'string') {
        options = parse(options);
      }

      //  We put our 'end' listener to the front of the listener array.
      res.once('end', function() {
        debug(thisRecordingId, proto, 'intercepted request ended');

        var out;
        if (output_objects) {
          out = generateRequestAndResponseObject(req, bodyChunks, options, res, dataChunks);
          if (out.reqheaders) {
            //  We never record user-agent headers as they are worse than useless -
            //  they actually make testing more difficult without providing any benefit (see README)
            common.deleteHeadersField(out.reqheaders, 'user-agent');

            //  Remove request headers completely unless it was explicitly enabled by the user (see README)
            if (!enable_reqheaders_recording) {
              delete out.reqheaders;
            }
          }
        } else {
          out = generateRequestAndResponse(req, bodyChunks, options, res, dataChunks);
        }

        debug('out:', out);

        //  Check that the request was made during the current recording.
        //  If it hasn't then skip it. There is no other simple way to handle
        //  this as it depends on the timing of requests and responses. Throwing
        //  will make some recordings/unit tests fail randomly depending on how
        //  fast/slow the response arrived.
        //  If you are seeing this error then you need to make sure that all
        //  the requests made during a single recording session finish before
        //  ending the same recording session.
        if (thisRecordingId !== currentRecordingId) {
          debug('skipping recording of an out-of-order request', out);
          return;
        }

        outputs.push(out);

        if (!dont_print) {
          if (use_separator) {
            logging(SEPARATOR + out + SEPARATOR);
          } else {
            logging(out);
          }
        }
      });

      var dataChunks = [];
      var encoding;

      //  We need to be aware of changes to the stream's encoding so that we
      //  don't accidentally mangle the data.
      var setEncoding = res.setEncoding;
      res.setEncoding = function (newEncoding) {
        encoding = newEncoding;
        return setEncoding.apply(this, arguments);
      };

      //  Give the actual client a chance to setup its listeners.
      //  We will use the listener information to figure out
      //  how we need to feed the intercepted data back to the client.
      if (callback) {
        callback(res, options, callback);
      }

      //  Handle clients that listen to 'readable' by intercepting them
      //  and feeding them the data manually.
      var readableListeners = res.listeners('readable');
      if (!_.isEmpty(readableListeners)) {
        debug('handle readable listeners');

        //  We will replace the client's listeners with our own and manually
        //  invoke them.
        _.each(readableListeners, function(listener) {
          res.removeListener('readable', listener);
        });

        //  Replace the actual Stream.Readable prototype 'read' function
        //  so that we can control what the client listener will be reading.
        var prototypeRead = Stream.Readable.prototype.read;
        var currentReadIndex = 0;
        res.read = function() {
          debug(thisRecordingId, 'client reading data on', proto, dataChunks.length);

          //  Feed the data to the client through from our collected data chunks.
          if (currentReadIndex < dataChunks.length) {
            var chunk = dataChunks[currentReadIndex];
            ++currentReadIndex;
            debug('chunk', chunk, 'read');
            return chunk;
          } else {
            debug('no more chunks to read');
            return null;
          }
        };

        //  Put our own listener instead of the removed client listener.
        var onReadable = function() {
          debug(thisRecordingId, 'new readable data on', proto);

          var chunk;
          //  Use the prototypeRead function to actually read the data.
          while (null !== (chunk = prototypeRead.call(res))) {
            debug('read', chunk);
            dataChunks.push(chunk);
          }

          //  Manually invoke the user listeners emulating 'readable' event.
          _.each(readableListeners, function(listener) {
            listener();
          });
        };
        res.on('readable', onReadable);
      } else {
        //  In all other cases we (for now at least) fall back on intercepting
        //  'data' events.
        debug('fall back on our original implementation');

        //  Since we gave client the chance to setup its listeners
        //  before us, we need to remove them and setup our own.
        var dataListeners = res.listeners('data');
        _.each(dataListeners, function(listener) {
          res.removeListener('data', listener);
        });

        var onData = function(data) {
          debug(thisRecordingId, 'new data chunk on', proto);
          if (encoding) {
            data = new Buffer(data, encoding);
          }
          dataChunks.push(data);

          //  Manually invoke the user listeners emulating 'data' event.
          _.each(dataListeners, function(listener) {
            listener(data);
          });
        };
        res.on('data', onData);
      }

      debug('finished setting up intercepting');

      if (proto === 'https') {
        options._https_ = true;
      }
    });

    var oldWrite = req.write;
    req.write = function(data, encoding) {
      if ('undefined' !== typeof(data)) {
        if (data) {
          debug(thisRecordingId, 'new', proto, 'body chunk');
          if (!Buffer.isBuffer(data)) {
            data = new Buffer(data, encoding);
          }
          bodyChunks.push(data);
        }
        oldWrite.call(req, data);
      }
    };
    return req;
  });
}

//  Restores *all* the overridden http/https modules' properties.
function restore() {
  debug(currentRecordingId, 'restoring all the overridden http/https properties');

  common.restoreOverriddenRequests();
  intercept.restoreOverriddenClientRequest();
  recordingInProgress = false;
}

function clear() {
  outputs = [];
}

exports.record = record;
exports.outputs = function() {
  return outputs;
};
exports.restore = restore;
exports.clear = clear;
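/*
 *  A minimal usage sketch (an assumption about how this module is driven;
 *  in nock itself these exports sit behind the public recorder API). The
 *  URL below is a placeholder.
 *
 *    var recorder = require('./recorder');
 *    var http = require('http');
 *
 *    // Start recording; output_objects makes outputs() return plain objects
 *    // instead of ready-to-paste nock() source strings.
 *    recorder.record({ dont_print: true, output_objects: true });
 *
 *    http.get('http://example.test/users?page=2', function(res) {
 *      res.on('data', function() {});
 *      res.on('end', function() {
 *        console.log(recorder.outputs());  // the recorded definitions
 *        recorder.restore();               // undo the http/https overrides
 *        recorder.clear();                 // drop the collected outputs
 *      });
 *    });
 */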