Skip to content
Permalink
Browse files

Switch to xo, add yarn, remove grunt

  • Loading branch information...
Rowno committed Aug 4, 2017
1 parent bec738b commit 6564463cf4b37948a112e9654b89fa7d6deec721
Showing with 2,354 additions and 166 deletions.
  1. +3 −3 .editorconfig
  2. +0 −6 .eslintrc
  3. +8 −6 .travis.yml
  4. +0 −24 Gruntfile.js
  5. +2 −0 README.md
  6. +43 −47 lib/cli.js
  7. +12 −13 lib/index.js
  8. +17 −8 package.json
  9. +41 −43 test/cli.js
  10. +15 −16 test/index.js
  11. +2,213 −0 yarn.lock
@@ -3,11 +3,11 @@ root = true

[*]
indent_style = space
indent_size = 4
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

[package.json]
indent_size = 2
[Makefile]
indent_style = tab

This file was deleted.

@@ -1,8 +1,10 @@
sudo: false
language: node_js
node_js:
- "5"
- "4"
- "0.12"
before_script:
- npm install -g grunt-cli
- "0.12"
- "4"
- "6"
- "8"
cache:
yarn: true
directories:
- node_modules

This file was deleted.

@@ -14,6 +14,8 @@ Getting Started
Install the Sitemap Urls command line tool:
```bash
npm install -g sitemap-urls
# or
yarn global add sitemap-urls
```

Run `sitemap-urls` on a file containing a sitemap:
@@ -1,59 +1,55 @@
#!/usr/bin/env node

/* eslint-disable no-sync, no-process-exit */
'use strict';
var path = require('path');
var fs = require('fs');
var meow = require('meow');
var stdin = require('get-stdin');
var updateNotifier = require('update-notifier');
var sitemapUrls = require('../');
var pkg = require('../package.json');

var HELP_FILE_PATH = path.join(__dirname, 'help.txt');
var cli;


updateNotifier({ pkg: pkg }).notify();

cli = meow({
pkg: pkg,
help: fs.readFileSync(HELP_FILE_PATH, { encoding: 'utf8' }).trim()
'use strict'
const path = require('path')
const fs = require('fs')
const meow = require('meow')
const stdin = require('get-stdin')
const updateNotifier = require('update-notifier')
const sitemapUrls = require('../')
const pkg = require('../package.json')

const HELP_FILE_PATH = path.join(__dirname, 'help.txt')

updateNotifier({pkg}).notify()

const cli = meow({
pkg,
help: fs.readFileSync(HELP_FILE_PATH, {encoding: 'utf8'}).trim()
}, {
alias: {
help: 'h',
version: 'v',
}
});

alias: {
help: 'h',
version: 'v'
}
})

stdin().then(function onStdin(stdinSitemap) {
var urls;
var filepath;
var sitemap;
stdin().then(stdinSitemap => {
let filepath
let sitemap

// Require stdin or file
if (!stdinSitemap && !cli.input[0]) {
cli.showHelp();
process.exit(1);
}
if (!stdinSitemap && !cli.input[0]) {
cli.showHelp()
process.exit(1)
}

// Try reading file if no stdin
if (stdinSitemap) {
sitemap = stdinSitemap;
} else {
filepath = path.resolve(cli.input[0]);
if (!fs.existsSync(filepath) || !fs.statSync(filepath).isFile()) {
console.error('File doesn\'t exist:', filepath);
process.exit(1);
}

sitemap = fs.readFileSync(filepath, { encoding: 'utf8' });
if (stdinSitemap) {
sitemap = stdinSitemap
} else {
filepath = path.resolve(cli.input[0])
if (!fs.existsSync(filepath) || !fs.statSync(filepath).isFile()) {
console.error('File doesn\'t exist:', filepath)
process.exit(1)
}

urls = sitemapUrls.extractUrls(sitemap);
sitemap = fs.readFileSync(filepath, {encoding: 'utf8'})
}

const urls = sitemapUrls.extractUrls(sitemap)

urls.forEach(function forEachUrl(url) {
console.log(url);
});
});
urls.forEach(url => {
console.log(url)
})
})
@@ -1,20 +1,19 @@
'use strict';
var cheerio = require('cheerio');

'use strict'
const cheerio = require('cheerio')

function extractUrls(xml) {
var urls = [];
var $ = cheerio.load(xml, { xmlMode: true });
const urls = []
const $ = cheerio.load(xml, {xmlMode: true})

$('loc').each(function forEachLoc() {
var url = $(this).text();
$('loc').each(function () {
const url = $(this).text()

if (urls.indexOf(url) < 0) {
urls.push(url);
}
});
if (urls.indexOf(url) < 0) {
urls.push(url)
}
})

return urls;
return urls
}

exports.extractUrls = extractUrls;
exports.extractUrls = extractUrls
@@ -16,7 +16,7 @@
"sitemap-urls": "lib/cli.js"
},
"scripts": {
"test": "grunt test"
"test": "mocha && xo"
},
"engines": {
"node": ">=0.12.0"
@@ -29,14 +29,23 @@
},
"devDependencies": {
"chai": "^3.2.0",
"eslint-config-rowno": "^2.1.0",
"grunt": "^0.4.5",
"grunt-eslint": "^17.3.1",
"grunt-mocha-cli": "^2.0.0",
"load-grunt-tasks": "^3.0.0",
"time-grunt": "^1.0.0"
"mocha": "^3.5.0",
"xo": "^0.18.2"
},
"files": [
"lib"
]
],
"xo": {
"space": true,
"semicolon": false,
"overrides": [
{
"files": "test/*.js",
"globals": [
"describe",
"it"
]
}
]
}
}
@@ -1,50 +1,48 @@
'use strict';
var path = require('path');
var fs = require('fs');
var exec = require('child_process').exec;
var expect = require('chai').expect;
var fixtureUrls = require('./fixtures/urls.json');

var CLI = path.resolve(require('../package.json').bin['sitemap-urls']);
var SITEMAP_FILE = path.join(__dirname, 'fixtures/sitemap.xml');
var FIXTURE_OUTPUT = fixtureUrls.join('\n') + '\n';


describe('cli', function () {
it('should extract urls from sitemap file', function (done) {

var child = exec(
'use strict'
const path = require('path')
const fs = require('fs')
const exec = require('child_process').exec
const expect = require('chai').expect
const CLI = path.resolve(require('../package.json').bin['sitemap-urls'])
const fixtureUrls = require('./fixtures/urls.json')

const SITEMAP_FILE = path.join(__dirname, 'fixtures/sitemap.xml')
const FIXTURE_OUTPUT = fixtureUrls.join('\n') + '\n'

describe('cli', () => {
it('should extract urls from sitemap file', done => {
const child = exec(
CLI + ' ' + SITEMAP_FILE,
{ cwd: __dirname },
function (error, stdout, stderr) {
if (error) {
return done(error);
}

expect(stdout, 'stdout').to.equal(FIXTURE_OUTPUT);
expect(stderr, 'stderr').to.equal('');
done();
{cwd: __dirname},
(error, stdout, stderr) => {
if (error) {
return done(error)
}

expect(stdout, 'stdout').to.equal(FIXTURE_OUTPUT)
expect(stderr, 'stderr').to.equal('')
done()
}
);
)

child.stdin.end();
});
child.stdin.end()
})

it('should extract urls from stdin', function (done) {
var child = exec(
it('should extract urls from stdin', done => {
const child = exec(
CLI,
{ cwd: __dirname },
function (error, stdout, stderr) {
if (error) {
return done(error);
}

expect(stdout, 'stdout').to.equal(FIXTURE_OUTPUT);
expect(stderr, 'stderr').to.equal('');
done();
{cwd: __dirname},
(error, stdout, stderr) => {
if (error) {
return done(error)
}

expect(stdout, 'stdout').to.equal(FIXTURE_OUTPUT)
expect(stderr, 'stderr').to.equal('')
done()
}
);
)

fs.createReadStream(SITEMAP_FILE, { encoding: 'utf8' }).pipe(child.stdin);
});
});
fs.createReadStream(SITEMAP_FILE, {encoding: 'utf8'}).pipe(child.stdin)
})
})
@@ -1,20 +1,19 @@
/* eslint-disable no-sync */
'use strict';
var fs = require('fs');
var path = require('path');
var expect = require('chai').expect;
var sitemapUrls = require('../');
var fixtureUrls = require('./fixtures/urls.json');
'use strict'
const fs = require('fs')
const path = require('path')
const expect = require('chai').expect
const sitemapUrls = require('../')
const fixtureUrls = require('./fixtures/urls.json')

var fixtureXml = fs.readFileSync(path.join(__dirname, 'fixtures/sitemap.xml'), 'utf8');
const fixtureXml = fs.readFileSync(path.join(__dirname, 'fixtures/sitemap.xml'), 'utf8')

describe('index', () => {
describe('#extractUrls', () => {
it('should extract urls', () => {
const urls = sitemapUrls.extractUrls(fixtureXml)

describe('index', function () {
describe('#extractUrls', function () {
it('should extract urls', function () {
var urls = sitemapUrls.extractUrls(fixtureXml);

expect(urls).to.have.members(fixtureUrls);
});
});
});
expect(urls).to.have.members(fixtureUrls)
})
})
})

0 comments on commit 6564463

Please sign in to comment.
You can’t perform that action at this time.