forked from benbria/uber-watchify
-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathindex.js
378 lines (340 loc) · 10.5 KB
/
index.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
var through = require('through2');
var path = require('path');
var chokidar = require('chokidar');
var crypto = require('crypto');
var xtend = require('xtend');
var anymatch = require('anymatch');
var mkdirp = require('mkdirp');
var fs = require('fs');
var writeJSONSync = require('jsonfile').writeFileSync;
var os = require('os');
var tmpdir = os.tmpdir();
module.exports = watchify;

/**
 * Default browserify constructor options for use with this plugin:
 * empty module cache, empty package cache, and watching enabled.
 */
module.exports.args = function () {
    var defaults = {
        cache: {},
        packageCache: {},
        watch: true
    };
    return defaults;
};
/**
 * Utility method to load our json cache file. Failover to an empty object.
 * The native browserify cache object includes the sources. However,
 * since parsing large JSON files is extremely slow, it is much more
 * performant to store the source content in individual cache files,
 * and then reconstruct the cache object when reading.
 *
 * @param cacheFile {String} - full path to the json cache file
 * @returns {Object} the reconstructed cache, or {} when the file does not exist
 * @throws any read/parse error other than a missing file (ENOENT)
 */
module.exports.getCache = function(cacheFile) {
    try {
        var f = JSON.parse(fs.readFileSync(cacheFile, 'utf8'));
        // Own keys only (the old `for (key in f)` also leaked `key` as an
        // implicit global and could pick up inherited properties).
        Object.keys(f).forEach(function(key) {
            // Bookkeeping keys hold metadata, not module entries.
            if (~['_files', '_time', '_transformDeps'].indexOf(key)) {
                return;
            }
            // On disk, `source` holds the path of a per-module source file;
            // inline the actual content so browserify sees a normal cache.
            if (f[key].source) {
                f[key].source = fs.readFileSync(f[key].source, 'utf8');
            }
        });
        return f;
    } catch (err) {
        if (err && err.code === 'ENOENT') {
            // No cache written yet — start fresh.
            return {};
        } else {
            throw err;
        }
    }
};
/**
 * Wrap a browserify instance with file watching and a persistent,
 * mtime-invalidated module cache.
 *
 * @param b - browserify instance (ideally created with module.exports.args())
 * @param opts - optional settings:
 *   watch: set up file watchers (default true)
 *   cacheFile: path used by b.write() to persist the cache
 *   delay: ms to let the disk settle before emitting 'update' (default 600)
 *   ignoreWatch: true to ignore node_modules, or an anymatch pattern
 *   poll: true, or a polling interval in ms (passed through to chokidar)
 *   checkShasum: array of files compared by content hash as well as mtime
 * @returns the same browserify instance, augmented in place
 */
function watchify (b, opts) {
var wrapStart = Date.now()  // NOTE(review): never read — presumably leftover timing code
if (!opts) opts = {};
var watch = typeof opts.watch !== 'undefined' ? opts.watch : module.exports.args().watch;
var cacheFile = opts.cacheFile;
// Reuse the cache object handed to browserify, adding bookkeeping maps:
// _files: id -> path, _time: path -> mtime (ms), _transformDeps: file ->
// extra files pulled in by its transforms.
var cache = b._options.cache || {};
if (!cache._files) cache._files = {};
if (!cache._time) cache._time = {};
if (!cache._transformDeps) cache._transformDeps = {};
var invalid = false;
var pkgcache = b._options.packageCache;
var delay = typeof opts.delay === 'number' ? opts.delay : 600;
var changingDeps = {};
var pending = false;
var updating = false;
if (opts.ignoreWatch) {
var ignored = opts.ignoreWatch !== true
? opts.ignoreWatch
: '**/node_modules/**';
}
var wopts = {persistent: true};
if (opts.poll || typeof opts.poll === 'number') {
wopts.usePolling = true;
wopts.interval = opts.poll !== true
? opts.poll
: undefined;
}
// Validate the loaded cache against the disk right away, and collect
// fresh entries from every pipeline run (including after 'reset').
if (cache) {
b.on('reset', collect);
update();
collect();
}
/**
 * Walk through the dependency cache. If any dependency's modification time has changed, or the file has been
 * removed, invalidate the cache and remove the entry.
 *
 * If the file implicitly requires other files due to a transform, invalidate them.
 * If the file is implicitly required as part of a transform, invalidate the files that require it.
 */
function update() {
    // Only the three bookkeeping keys present means there are no cached
    // modules at all: everything must be (re)bundled.
    if (Object.keys(cache).length === 3) {
        invalid = true;
        return;
    } else {
        invalid = false;
    }
    // Invert the transform-dependency map: dep -> [files transformed from it].
    var transformDepsInverted = {};
    Object.keys(cache._transformDeps).forEach(function(mfile) {
        cache._transformDeps[mfile].forEach(function(dep) {
            transformDepsInverted[dep] = transformDepsInverted[dep] || [];
            transformDepsInverted[dep].push(mfile);
        });
    });
    Object.keys(cache._time).forEach(function(file) {
        var stats;
        try {
            stats = fs.statSync(file);
        } catch (err) {}
        if (!stats || cache._time[file] !== stats.mtime.getTime()) {
            // checkShasum is an array of files that we should check based on
            // a hash of their contents as well as the mtime. This is useful
            // for files that are often overwritten with the same content
            // but are still part of the bundle (e.g generated view partials)
            if (stats && opts.checkShasum && ~opts.checkShasum.indexOf(file)) {
                // `var` added: these used to leak as implicit globals.
                var cachedSourceHash = shasum(fs.readFileSync(file, 'utf8'));
                var realSourceHash = shasum(cache[file].source);
                if (cachedSourceHash === realSourceHash) {
                    // Same content — just refresh the recorded mtime.
                    cache._time[file] = stats.mtime.getTime();
                    return;
                }
            }
            b.emit('log', 'Watchify cache: dep updated or removed: ' + file);
            cleanEntry(cache._files[file], file);
            // Also evict every file whose transform pulled this one in.
            if (transformDepsInverted[file]) {
                transformDepsInverted[file].forEach(function(mfile) {
                    cleanEntry(cache._files[mfile], mfile);
                });
            }
            invalid = true;
        }
    });
}
/**
 * Push a stream onto the 'deps' pipeline phase that records each module
 * row (source text + resolved deps) in the cache, along with the file's
 * current mtime for later invalidation checks.
 */
function collect () {
b.pipeline.get('deps').push(through.obj(function(row, enc, next) {
// Exposed modules are keyed by the file backing the exposed id.
var file = row.expose ? b._expose[row.id] : row.file;
cache[file] = {
source: row.source,
deps: xtend({}, row.deps)
};
try {
var stats = fs.statSync(file);
} catch (err) {}
if (stats) {
cache._files[file] = file;
cache._time[file] = stats.mtime.getTime();
}
this.push(row);
next();
}));
}
// Watch every file and package.json that browserify touches.
b.on('file', function (file) {
watchFile(file);
});
b.on('package', function (pkg) {
var file = path.join(pkg.__dirname, 'package.json');
watchFile(file);
if (pkgcache) pkgcache[file] = pkg;
});
b.on('reset', reset);
reset();
/**
 * Re-arm byte/time accounting for the next bundle run: record the time
 * when the 'record' phase ends, count bytes through the 'wrap' phase,
 * and emit 'time', 'bytes' and a summary log line when it finishes.
 */
function reset () {
var time = null;
var bytes = 0;
b.pipeline.get('record').on('end', function () {
time = Date.now();
});
b.pipeline.get('wrap').push(through(write, end));
function write (buf, enc, next) {
bytes += buf.length;
this.push(buf);
next();
}
function end () {
var delta = Date.now() - time;
b.emit('time', delta);
b.emit('bytes', bytes);
b.emit('log', bytes + ' bytes written ('
+ (delta / 1000).toFixed(2) + ' seconds)'
);
this.push(null);
}
}
// Per-file watcher bookkeeping: file -> [watcher instances],
// file -> [watched dep paths], and a memo of anymatch ignore results.
var fwatchers = {};
var fwatcherFiles = {};
var ignoredFiles = {};
// When a transform runs for `mfile`, drop its previously recorded
// transform deps, then record and watch every extra file the transform
// reads so a change there invalidates `mfile`.
b.on('transform', function (tr, mfile) {
cleanDependencies(mfile);
tr.on('file', function (dep) {
cache._transformDeps[mfile] = cache._transformDeps[mfile] || [];
cache._transformDeps[mfile].push(dep);
try {
var stats = fs.statSync(dep);
} catch (err) {}
if (stats) {
cache._files[dep] = dep;
cache._time[dep] = stats.mtime.getTime();
}
watchFile(mfile, dep);
});
});
// Track whether a bundle is in flight so invalidations are batched
// until it completes (see invalidate()).
b.on('bundle', function (bundle) {
updating = true;
bundle.on('error', onend);
bundle.on('end', onend);
function onend () { updating = false }
});
/**
 * Watch `dep` on behalf of `file` (dep defaults to file itself); a change
 * to the dep invalidates `file`. No-ops when watching is disabled, the
 * file matches the ignore pattern, or this (file, dep) pair is already
 * being watched.
 */
function watchFile (file, dep) {
if (!watch) return;
dep = dep || file;
if (ignored) {
// Memoize the anymatch result per file.
if (!ignoredFiles.hasOwnProperty(file)) {
ignoredFiles[file] = anymatch(ignored, file);
}
if (ignoredFiles[file]) return;
}
if (!fwatchers[file]) fwatchers[file] = [];
if (!fwatcherFiles[file]) fwatcherFiles[file] = [];
if (fwatcherFiles[file].indexOf(dep) >= 0) return;
var w = b._watcher(dep, wopts);
// Many files may share listeners; silence the leak warning.
w.setMaxListeners(0);
w.on('error', b.emit.bind(b, 'error'));
w.on('change', function () {
invalidate(file);
});
fwatchers[file].push(w);
fwatcherFiles[file].push(dep);
}
/**
 * Evict every transform dependency recorded for `file`, then forget the
 * transform-dependency list itself.
 */
function cleanDependencies(file) {
    var deps = cache._transformDeps[file];
    if (deps) {
        deps.forEach(function(dep) {
            cleanEntry(cache._files[dep], dep);
        });
    }
    delete cache._transformDeps[file];
}
/**
 * Remove one module entry from the cache: its bookkeeping records
 * (_files/_time keyed by path), the cached source/deps (keyed by id),
 * and anything its transforms pulled in.
 */
function cleanEntry(id, file) {
    delete cache._files[file];
    delete cache._time[file];
    delete cache[id];
    cleanDependencies(file);
}
/**
 * Invalidate the cache entry for `id` (a watched file path) and, once the
 * disk/editor quiets down, emit a single batched 'update' event with all
 * changed paths. While a bundle is in flight, changes are only recorded.
 */
function invalidate (id) {
    if (cache && cache[id]) {
        // NOTE(review): cache entries are {source, deps} with no `.file`
        // property, so the second argument here is `undefined`; the module
        // entry itself is still removed via `delete cache[id]` inside
        // cleanEntry, but _files/_time are not. Possibly intended:
        // cleanEntry(cache._files[id], id) — confirm before changing.
        cleanEntry(id, cache[id].file);
    }
    invalid = true;
    if (pkgcache) delete pkgcache[id];
    changingDeps[id] = true;
    if (updating) return;
    // Drop this file's watchers; they are re-armed on the next bundle.
    if (fwatchers[id]) {
        fwatchers[id].forEach(function (w) {
            w.close();
        });
        delete fwatchers[id];
        delete fwatcherFiles[id];
    }
    // wait for the disk/editor to quiet down first:
    if (!pending) setTimeout(function () {
        pending = false;
        // `var` added: this used to leak as an implicit global.
        var depsChanged = Object.keys(changingDeps).length > 0;
        if (!updating && depsChanged) {
            b.emit('update', Object.keys(changingDeps));
            changingDeps = {};
        }
    }, delay);
    pending = true;
}
/**
 * SHA-1 hex digest of a string or buffer; used to fingerprint module
 * source content.
 */
function shasum (value) {
    var hash = crypto.createHash('sha1');
    hash.update(value);
    return hash.digest('hex');
}
/**
 * Shut down every file watcher this wrapper created.
 */
b.close = function () {
    Object.keys(fwatchers).forEach(function (id) {
        fwatchers[id].forEach(function (w) {
            w.close();
        });
    });
};
/**
 * Watcher factory (overridable, e.g. for tests); defaults to chokidar.
 */
b._watcher = function (file, watchOpts) {
    return chokidar.watch(file, watchOpts);
};
/**
 * Write the internal dependency cache to a file on the file system.
 * Each module's source is spilled to its own file under the OS temp dir
 * (named by content sha1) and replaced in the JSON by that path, because
 * re-parsing one huge JSON blob is slow; getCache() re-inlines them.
 * Failures are logged via the 'log' event, never thrown.
 */
b.write = function() {
    try {
        if (!fs.existsSync(path.dirname(cacheFile))) {
            mkdirp.sync(path.dirname(cacheFile));
        }
        // Takes the source content and writes it to a file. Then
        // replaces the source content with the filepath of that file.
        // (`var` added: omitSources and hash used to leak as globals.)
        var omitSources = function(key, value) {
            if (key === 'source' && value) {
                var hash = shasum(value);
                var sourceCachePath = path.resolve(tmpdir, hash);
                fs.writeFileSync(sourceCachePath, value);
                return sourceCachePath;
            }
            return value;
        };
        writeJSONSync(cacheFile, cache, {replacer: omitSources});
    } catch (err) {
        b.emit('log', 'Error writing cache file ' + err.message);
    }
};
// Save the reference to the real `bundle`
var _bundle = b.bundle;
/**
 * Override the browserify `bundle` function. We need to intercept the call to see if any bundling is really
 * needed. When the cache is valid, we just return null. If `watch` is true, though, we setup the watchers on all
 * the files in the cache. Otherwise we do the bundling.
 *
 * @param `cb` {Function} - optional callback
 * @returns - either the stream from `_bundle` or `null` if the cache is valid.
 */
b.bundle = function(cb) {
if (invalid) {
invalid = false;
var args = 'function' === typeof(cb) ? [cb] : [];
return _bundle.apply(b, args);
} else {
if (watch) {
// The cache is valid, so no pipeline run will fire 'file' events;
// arm watchers from the cache contents (and transform deps) instead.
setImmediate(function() {
Object.keys(cache).forEach(function(key) {
if (key !== '_time' && key !== '_files' && key !== '_transformDeps') watchFile(key);
});
Object.keys(cache._transformDeps).forEach(function(mfile) {
cache._transformDeps[mfile].forEach(function(dep) {
watchFile(mfile, dep);
});
});
});
// set to true, because we didn't actually bundle anything yet, but want this
// set for the next `update`
b._bundled = true;
}
if ('function' === typeof(cb)) {
b.emit('log', 'Cache is still valid');
cb();
} else {
return null;
}
}
};
return b;
}