Skip to content

Commit a5d078a

Browse files
committed
Change examples and tests to pipeline-based async iteration
This avoids recommending a pattern that exposes a footgun - pipe() needs to have independent error-handling to avoid possibly triggering an unhandled error.
1 parent 6dfb130 commit a5d078a

2 files changed

Lines changed: 64 additions & 22 deletions

File tree

README.md

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -65,21 +65,26 @@ feedparser.on('readable', function () {
6565
You can also consume feeds using async iteration:
6666

6767
```js
68-
6968
var FeedParser = require('feedparser');
7069
var fetch = require('node-fetch');
70+
var pipeline = require('stream/promises').pipeline;
7171

7272
async function main() {
73-
var res = await fetch('http://somefeedurl.xml');
73+
var res = await fetch('http://someurl.site/rss.xml');
7474
if (res.status !== 200) throw new Error('Bad status code');
7575

7676
var feedparser = new FeedParser([options]);
77-
res.body.pipe(feedparser);
7877

7978
try {
80-
for await (var item of feedparser) {
81-
console.log(item.title);
82-
}
79+
await pipeline(
80+
res.body,
81+
feedparser,
82+
async function (feedparserIterable) {
83+
for await (var item of feedparserIterable) {
84+
console.log(item.title);
85+
}
86+
}
87+
)
8388
} catch (err) {
8489
console.error(err);
8590
}

test/async-iterator.js

Lines changed: 53 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,28 +1,31 @@
11
var FeedParser = require('..');
2+
var PassThrough = require('stream').PassThrough;
3+
// We're using this form so we can run tests on older Node versions that don't have stream.promises.pipeline
4+
var pipeline = require('util').promisify(require('stream').pipeline);
25

36
describe('async iterator usage', function () {
47
it('should work as an async iterator', async function () {
58
var feedparser = new FeedParser();
69
var feed = __dirname + '/feeds/rss2sample.xml';
710
var items = [];
811

9-
fs.createReadStream(feed).pipe(feedparser);
10-
11-
for await (const item of feedparser) {
12-
items.push(item);
13-
}
12+
await pipeline(fs.createReadStream(feed), feedparser, async function (fpIterable) {
13+
for await (const item of fpIterable) {
14+
items.push(item);
15+
}
16+
});
1417

1518
assert.equal(items.length, 4);
1619
});
1720

1821
it('should surface errors via try/catch', async function () {
1922
var feedparser = new FeedParser();
2023
var feed = __dirname + '/feeds/notafeed.html';
21-
fs.createReadStream(feed).pipe(feedparser);
22-
2324
var caught = null;
2425
try {
25-
for await (const item of feedparser) {} // eslint-disable-line no-empty, no-unused-vars
26+
await pipeline(fs.createReadStream(feed), feedparser, async function (fpIterable) {
27+
for await (const item of fpIterable) {} // eslint-disable-line no-empty, no-unused-vars
28+
});
2629
} catch (err) {
2730
caught = err;
2831
}
@@ -31,32 +34,66 @@ describe('async iterator usage', function () {
3134
assert.equal(caught.message, 'Not a feed');
3235
});
3336

37+
it('should catch errors after a delayed iteration start', async function () {
38+
var feedparser = new FeedParser();
39+
var source = new PassThrough();
40+
var items = [];
41+
var caught = null;
42+
var uncaught = null;
43+
function onUncaught(err) {
44+
uncaught = err;
45+
}
46+
process.prependOnceListener('uncaughtException', onUncaught);
47+
48+
source.end('not a feed');
49+
50+
await new Promise(setImmediate);
51+
52+
try {
53+
await pipeline(source, feedparser, async function (fpIterable) {
54+
for await (const item of fpIterable) {
55+
items.push(item.title);
56+
}
57+
});
58+
} catch (err) {
59+
caught = err;
60+
} finally {
61+
process.removeListener('uncaughtException', onUncaught);
62+
assert.equal(uncaught, null);
63+
assert.ok(caught instanceof Error);
64+
assert.equal(caught.message, 'Not a feed');
65+
assert.equal(items.length, 0);
66+
}
67+
});
68+
3469
describe('resume_saxerror behavior', function () {
3570
var feed = __dirname + '/feeds/saxerror.xml';
3671

3772
it('should continue iterating past SAX errors by default (resume_saxerror: true)', async function () {
3873
var feedparser = new FeedParser({ strict: true });
39-
fs.createReadStream(feed).pipe(feedparser);
4074
var items = [];
4175

42-
for await (const item of feedparser) {
43-
items.push(item.title);
44-
}
76+
await pipeline(fs.createReadStream(feed), feedparser, async function (fpIterable) {
77+
for await (const item of fpIterable) {
78+
items.push(item.title);
79+
}
80+
});
4581

4682
assert.equal(items.length, 3);
4783
assert.deepEqual(items, ['Good Item', 'Bad Item', 'Item After Error']);
4884
});
4985

5086
it('should throw on SAX errors when (resume_saxerror: false)', async function () {
5187
var feedparser = new FeedParser({ strict: true, resume_saxerror: false });
52-
fs.createReadStream(feed).pipe(feedparser);
5388
var items = [];
5489

5590
var caught = null;
5691
try {
57-
for await (const item of feedparser) {
58-
items.push(item.title);
59-
}
92+
await pipeline(fs.createReadStream(feed), feedparser, async function (fpIterable) {
93+
for await (const item of fpIterable) {
94+
items.push(item.title);
95+
}
96+
});
6097
} catch (err) {
6198
caught = err;
6299
}

0 commit comments

Comments
 (0)