Diffstat (limited to 'node_modules/node-sass/scripts')
-rw-r--r--   node_modules/node-sass/scripts/build.js                   155
-rw-r--r--   node_modules/node-sass/scripts/coverage.js                 85
-rw-r--r--   node_modules/node-sass/scripts/install.js                 157
-rw-r--r--   node_modules/node-sass/scripts/prepublish.js               17
-rw-r--r--   node_modules/node-sass/scripts/util/downloadoptions.js     31
-rw-r--r--   node_modules/node-sass/scripts/util/proxy.js               22
-rw-r--r--   node_modules/node-sass/scripts/util/useragent.js           13
7 files changed, 480 insertions, 0 deletions
diff --git a/node_modules/node-sass/scripts/build.js b/node_modules/node-sass/scripts/build.js
new file mode 100644
index 0000000..7bbba5e
--- /dev/null
+++ b/node_modules/node-sass/scripts/build.js
@@ -0,0 +1,155 @@
+/*!
+ * node-sass: scripts/build.js
+ */
+
+var fs = require('fs'),
+ mkdir = require('mkdirp'),
+ path = require('path'),
+ spawn = require('cross-spawn'),
+ sass = require('../lib/extensions');
+
+/**
+ * After build
+ *
+ * @param {Object} options
+ * @api private
+ */
+
+function afterBuild(options) {
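+ // Find the freshly built binding.node (Debug or Release, depending on the
+ // gyp configuration) and move it to the install path the runtime expects.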
+ var install = sass.getBinaryPath();
+ var target = path.join(__dirname, '..', 'build',
+ options.debug ? 'Debug' :
+ process.config.target_defaults
+ ? process.config.target_defaults.default_configuration
+ : 'Release',
+ 'binding.node');
+
+ mkdir(path.dirname(install), function(err) {
+ if (err && err.code !== 'EEXIST') {
+ console.error(err.message);
+ return;
+ }
+
+ fs.stat(target, function(err) {
+ if (err) {
+ console.error('Build succeeded but target not found');
+ return;
+ }
+
+ fs.rename(target, install, function(err) {
+ if (err) {
+ console.error(err.message);
+ return;
+ }
+
+ console.log('Installed to', install);
+ });
+ });
+ });
+}
+
+/**
+ * Build
+ *
+ * @param {Object} options
+ * @api private
+ */
+
+function build(options) {
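+ // Run node-gyp rebuild, forwarding LIBSASS_EXT, LIBSASS_CFLAGS, LIBSASS_LDFLAGS
+ // and LIBSASS_LIBRARY from the environment as gyp variables, plus any CLI args.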
+ var args = [require.resolve(path.join('node-gyp', 'bin', 'node-gyp.js')), 'rebuild', '--verbose'].concat(
+ ['libsass_ext', 'libsass_cflags', 'libsass_ldflags', 'libsass_library'].map(function(subject) {
+ return ['--', subject, '=', process.env[subject.toUpperCase()] || ''].join('');
+ })).concat(options.args);
+
+ console.log('Building:', [process.execPath].concat(args).join(' '));
+
+ var proc = spawn(process.execPath, args, {
+ stdio: [0, 1, 2]
+ });
+
+ proc.on('exit', function(errorCode) {
+ if (!errorCode) {
+ afterBuild(options);
+ return;
+ }
+
+ if (errorCode === 127) {
+ console.error('node-gyp not found!');
+ } else {
+ console.error('Build failed with error code:', errorCode);
+ }
+
+ process.exit(1);
+ });
+}
+
+/**
+ * Parse arguments
+ *
+ * @param {Array} args
+ * @api private
+ */
+
+function parseArgs(args) {
+ var options = {
+ arch: process.arch,
+ platform: process.platform,
+ force: process.env.npm_config_force === 'true',
+ };
+
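+ // Strip the force flags from the argument list; everything else is recorded
+ // here and passed straight through to node-gyp.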
+ options.args = args.filter(function(arg) {
+ if (arg === '-f' || arg === '--force') {
+ options.force = true;
+ return false;
+ } else if (arg.substring(0, 13) === '--target_arch') {
+ options.arch = arg.substring(14);
+ } else if (arg === '-d' || arg === '--debug') {
+ options.debug = true;
+ } else if (arg.substring(0, 13) === '--libsass_ext' && arg.substring(14) !== 'no') {
+ options.libsassExt = true;
+ }
+
+ return true;
+ });
+
+ return options;
+}
+
+/**
+ * Test for pre-built library
+ *
+ * @param {Object} options
+ * @api private
+ */
+
+function testBinary(options) {
+ if (options.force || process.env.SASS_FORCE_BUILD) {
+ return build(options);
+ }
+
+ if (!sass.hasBinary(sass.getBinaryPath())) {
+ return build(options);
+ }
+
+ console.log('Binary found at', sass.getBinaryPath());
+ console.log('Testing binary');
+
+ try {
+ require('../').renderSync({
+ data: 's { a: ss }'
+ });
+
+ console.log('Binary is fine');
+ } catch (e) {
+ console.log('Binary has a problem:', e);
+ console.log('Building the binary locally');
+
+ return build(options);
+ }
+}
+
+/**
+ * Apply arguments and run
+ */
+
+testBinary(parseArgs(process.argv.slice(2)));
diff --git a/node_modules/node-sass/scripts/coverage.js b/node_modules/node-sass/scripts/coverage.js
new file mode 100644
index 0000000..33836e9
--- /dev/null
+++ b/node_modules/node-sass/scripts/coverage.js
@@ -0,0 +1,85 @@
+/*!
+ * node-sass: scripts/coverage.js
+ */
+
+var Mocha = require('mocha'),
+ fs = require('fs'),
+ path = require('path'),
+ mkdirp = require('mkdirp'),
+ coveralls = require('coveralls'),
+ istanbul = require('istanbul'),
+ sourcefiles = ['index.js', 'binding.js', 'extensions.js', 'render.js', 'errors.js'],
+ summary = istanbul.Report.create('text-summary'),
+ lcov = istanbul.Report.create('lcovonly', { dir: path.join('coverage') }),
+ html = istanbul.Report.create('html', { dir: path.join('coverage', 'html') });
+
+function coverage() {
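+ // Instrument the lib sources into lib-cov, run the mocha suites against them,
+ // then write text/html/lcov reports and hand the lcov data to coveralls.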
+ var mocha = new Mocha();
+ var rep = function(runner) {
+ runner.on('end', function(){
+ var cov = global.__coverage__,
+ collector = new istanbul.Collector();
+ if (cov) {
+ mkdirp(path.join('coverage', 'html'), function(err) {
+ if (err) { throw err; }
+ collector.add(cov);
+ summary.writeReport(collector, true);
+ html.writeReport(collector, true);
+ lcov.on('done', function() {
+ fs.readFile(path.join('coverage', 'lcov.info'), function(err, data) {
+ if (err) { console.error(err); }
+ coveralls.handleInput(data.toString(),
+ function (err) { if (err) { console.error(err); } });
+ });
+ });
+ lcov.writeReport(collector, true);
+ });
+ } else {
+ console.warn('No coverage');
+ }
+ });
+ };
+ var instrumenter = new istanbul.Instrumenter();
+ var instrumentedfiles = [];
+ var processfile = function(source) {
+ fs.readFile(path.join('lib', source), function(err, data) {
+ if (err) { throw err; }
+ mkdirp('lib-cov', function(err) {
+ if (err) { throw err; }
+ fs.writeFile(path.join('lib-cov', source),
+ instrumenter.instrumentSync(data.toString(),
+ path.join('lib', source)),
+ function(err) {
+ if (err) { throw err; }
+ instrumentedfiles.push(source);
+ if (instrumentedfiles.length === sourcefiles.length) {
+ fs.readdirSync('test').filter(function(file){
+ return file.substr(-6) === 'api.js' ||
+ file.substr(-11) === 'runtime.js' ||
+ file.substr(-7) === 'spec.js';
+ }).forEach(function(file){
+ mocha.addFile(
+ path.join('test', file)
+ );
+ });
+ process.env.NODESASS_COV = 1;
+ mocha.reporter(rep).run(function(failures) {
+ process.on('exit', function () {
+ process.exit(failures);
+ });
+ });
+ }
+ });
+ });
+ });
+ };
+ sourcefiles.forEach(function(source) {
+ processfile(source);
+ });
+}
+
+/**
+ * Run
+ */
+
+coverage();
diff --git a/node_modules/node-sass/scripts/install.js b/node_modules/node-sass/scripts/install.js
new file mode 100644
index 0000000..6febbe4
--- /dev/null
+++ b/node_modules/node-sass/scripts/install.js
@@ -0,0 +1,157 @@
+/*!
+ * node-sass: scripts/install.js
+ */
+
+var fs = require('fs'),
+ eol = require('os').EOL,
+ mkdir = require('mkdirp'),
+ path = require('path'),
+ sass = require('../lib/extensions'),
+ request = require('request'),
+ log = require('npmlog'),
+ downloadOptions = require('./util/downloadoptions');
+
+/**
+ * Download file, if succeeds save, if not delete
+ *
+ * @param {String} url
+ * @param {String} dest
+ * @param {Function} cb
+ * @api private
+ */
+
+function download(url, dest, cb) {
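+ // Fetch the binary into memory; on failure report a message that hints at
+ // proxy configuration, on success write the buffer to dest.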
+ var reportError = function(err) {
+ var timeoutMessage;
+
+ if (err.code === 'ETIMEDOUT') {
+ if (err.connect === true) {
+ // timeout is hit while your client is attempting to establish a connection to a remote machine
+ timeoutMessage = 'Timed out attempting to establish a remote connection';
+ } else {
+ timeoutMessage = 'Timed out whilst downloading the prebuilt binary';
+ // occurs any time the server is too slow to send back a part of the response
+ }
+
+ }
+ cb(['Cannot download "', url, '": ', eol, eol,
+ typeof err.message === 'string' ? err.message : err, eol, eol,
+ timeoutMessage ? timeoutMessage + eol + eol : timeoutMessage,
+ 'Hint: If github.com is not accessible in your location', eol,
+ ' try setting a proxy via HTTP_PROXY, e.g. ', eol, eol,
+ ' export HTTP_PROXY=http://example.com:1234',eol, eol,
+ 'or configure npm proxy via', eol, eol,
+ ' npm config set proxy http://example.com:8080'].join(''));
+ };
+
+ var successful = function(response) {
+ return response.statusCode >= 200 && response.statusCode < 300;
+ };
+
+ console.log('Downloading binary from', url);
+
+ try {
+ request(url, downloadOptions(), function(err, response, buffer) {
+ if (err) {
+ reportError(err);
+ } else if (!successful(response)) {
+ reportError(['HTTP error', response.statusCode, response.statusMessage].join(' '));
+ } else {
+ console.log('Download complete');
+
+ if (successful(response)) {
+ fs.createWriteStream(dest)
+ .on('error', cb)
+ .end(buffer, cb);
+ } else {
+ cb();
+ }
+ }
+ })
+ .on('response', function(response) {
+ var length = parseInt(response.headers['content-length'], 10);
+ var progress = log.newItem('', length);
+
+ // npm's `progress` option is true by default, but when it has not been
+ // explicitly set the npm_config_progress environment variable is `undefined`,
+ // so the progress bar is only enabled when npm reports it as 'true'.
+ if (process.env.npm_config_progress === 'true') {
+ log.enableProgress();
+
+ response.on('data', function(chunk) {
+ progress.completeWork(chunk.length);
+ })
+ .on('end', progress.finish);
+ }
+ });
+ } catch (err) {
+ cb(err);
+ }
+}
+
+/**
+ * Check and download binary
+ *
+ * @api private
+ */
+
+function checkAndDownloadBinary() {
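+ // Reuse an existing or cached binary when possible; otherwise download it
+ // and, if a binary cache path is configured, keep a copy there.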
+ if (process.env.SKIP_SASS_BINARY_DOWNLOAD_FOR_CI) {
+ console.log('Skipping downloading binaries on CI builds');
+ return;
+ }
+
+ var cachedBinary = sass.getCachedBinary(),
+ cachePath = sass.getBinaryCachePath(),
+ binaryPath = sass.getBinaryPath();
+
+ if (sass.hasBinary(binaryPath)) {
+ console.log('node-sass build', 'Binary found at', binaryPath);
+ return;
+ }
+
+ try {
+ mkdir.sync(path.dirname(binaryPath));
+ } catch (err) {
+ console.error('Unable to save binary', path.dirname(binaryPath), ':', err);
+ return;
+ }
+
+ if (cachedBinary) {
+ console.log('Cached binary found at', cachedBinary);
+ fs.createReadStream(cachedBinary).pipe(fs.createWriteStream(binaryPath));
+ return;
+ }
+
+ download(sass.getBinaryUrl(), binaryPath, function(err) {
+ if (err) {
+ console.error(err);
+ return;
+ }
+
+ console.log('Binary saved to', binaryPath);
+
+ cachedBinary = path.join(cachePath, sass.getBinaryName());
+
+ if (cachePath) {
+ console.log('Caching binary to', cachedBinary);
+
+ try {
+ mkdir.sync(path.dirname(cachedBinary));
+ fs.createReadStream(binaryPath)
+ .pipe(fs.createWriteStream(cachedBinary))
+ .on('error', function (err) {
+ console.log('Failed to cache binary:', err);
+ });
+ } catch (err) {
+ console.log('Failed to cache binary:', err);
+ }
+ }
+ });
+}
+
+/**
+ * If binary does not exist, download it
+ */
+
+checkAndDownloadBinary();
diff --git a/node_modules/node-sass/scripts/prepublish.js b/node_modules/node-sass/scripts/prepublish.js
new file mode 100644
index 0000000..b1befd4
--- /dev/null
+++ b/node_modules/node-sass/scripts/prepublish.js
@@ -0,0 +1,17 @@
+/*!
+ * node-sass: scripts/prepublish.js
+ */
+
+var path = require('path'),
+ rimraf = require('rimraf');
+
+function prepublish() {
+ var vendorPath = path.resolve(__dirname, '..', 'vendor');
+ rimraf.sync(vendorPath);
+}
+
+/**
+ * Run
+ */
+
+prepublish();
diff --git a/node_modules/node-sass/scripts/util/downloadoptions.js b/node_modules/node-sass/scripts/util/downloadoptions.js
new file mode 100644
index 0000000..2352971
--- /dev/null
+++ b/node_modules/node-sass/scripts/util/downloadoptions.js
@@ -0,0 +1,31 @@
+var proxy = require('./proxy'),
+ userAgent = require('./useragent');
+
+/**
+ * The options passed to request when downloading the binary.
+ *
+ * There is some nuance to how request handles options. Specifically,
+ * we've been caught out by its use of `hasOwnProperty` rather than
+ * falsy checks. By moving the options generation into a util helper
+ * we can test for regressions.
+ *
+ * @return {Object} an options object for request
+ * @api private
+ */
+module.exports = function() {
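+ // `encoding: null` makes request return the body as a raw Buffer; the proxy
+ // key is only added when one is configured (see the hasOwnProperty note above).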
+ var options = {
+ rejectUnauthorized: false,
+ timeout: 60000,
+ headers: {
+ 'User-Agent': userAgent(),
+ },
+ encoding: null,
+ };
+
+ var proxyConfig = proxy();
+ if (proxyConfig) {
+ options.proxy = proxyConfig;
+ }
+
+ return options;
+};
diff --git a/node_modules/node-sass/scripts/util/proxy.js b/node_modules/node-sass/scripts/util/proxy.js
new file mode 100644
index 0000000..e65eac5
--- /dev/null
+++ b/node_modules/node-sass/scripts/util/proxy.js
@@ -0,0 +1,22 @@
+
+/**
+ * Determine the proxy settings configured by npm
+ *
+ * It's possible to configure npm to use a proxy different
+ * from the system defined proxy. This can be done via the
+ * `npm config` CLI or the `.npmrc` config file.
+ *
+ * If a proxy has been configured in this way we must
+ * tell request explicitly to use it.
+ *
+ * Otherwise we can trust request to do the right thing.
+ *
+ * @return {String} the proxy configured by npm or an empty string
+ * @api private
+ */
+module.exports = function() {
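+ // Prefer the npm https proxy, then the generic proxy, then the http proxy.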
+ return process.env.npm_config_https_proxy ||
+ process.env.npm_config_proxy ||
+ process.env.npm_config_http_proxy ||
+ '';
+};
diff --git a/node_modules/node-sass/scripts/util/useragent.js b/node_modules/node-sass/scripts/util/useragent.js
new file mode 100644
index 0000000..2496eec
--- /dev/null
+++ b/node_modules/node-sass/scripts/util/useragent.js
@@ -0,0 +1,13 @@
+var pkg = require('../../package.json');
+
+/**
+ * A custom user agent used for binary downloads.
+ *
+ * @api private
+ */
+module.exports = function() {
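+ // Produces a string such as "node/v10.0.0 node-sass-installer/4.9.0"
+ // (process.version already includes the leading "v").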
+ return [
+ 'node/', process.version, ' ',
+ 'node-sass-installer/', pkg.version
+ ].join('');
+};