当大量请求非常快时,如何解决"Socket Hangup Error "



我有一个nodejs应用程序,可以聚合来自各种网站的内容。请求使用请求流异步从不同源获取源。发出请求时,我经常收到套接字挂断错误。

err in accessing the link { Error: socket hang up
    at createHangUpError (_http_client.js:331:15)
    at TLSSocket.socketOnEnd (_http_client.js:423:23)
    at emitNone (events.js:111:20)
    at TLSSocket.emit (events.js:208:7)
    at endReadableNT (_stream_readable.js:1064:12)
    at _combinedTickCallback (internal/process/next_tick.js:139:11)
    at process._tickDomainCallback (internal/process/next_tick.js:219:9) code: 'ECONNRESET' } https://arstechnica.com/?p=1488489 

环境详情:节点版本 - v8.12.0

尝试了相关SO帖子中给出的一些建议,但我仍然收到相同的错误。NodeJS - "套接字挂断"实际上是什么意思?

import request from 'request';
import FeedParser from 'feedparser';
const extractor = require('unfluff');
export const getFeedsFromSource = function (urlfeed, etag, LastModified, callback) {
  console.log(urlfeed, etag, LastModified);
  const req = request({
    method: 'GET',
    url: urlfeed,
    headers: {
      'If-None-Match': etag,
      'If-Modified-Since': LastModified,
      Connection: 'keep-alive',
      ciphers: 'DES-CBC3-SHA',
    },
  });
  const feedparser = new FeedParser();
  const metaData = {};
  const htmlData = {};
  const feedData = {};
  // const pList = null;
  req.on('response', function (response) {
    const stream = this;
    if (response.statusCode === 304) {
      console.log('Source not modified: ', urlfeed);
    }
    if (response.statusCode === 200) {
      metaData.etagin = response.headers.etag;
      metaData.LastModifiedin = response.headers['last-modified'];
      metaData.LastModifiedLocal = response.headers['last-modified'];
      stream.pipe(feedparser).end();
    }
  });
  req.on('error', (err) => {
    console.log(`getFeed: err.message == ${err.message}`);
    callback(err);
  });
  // req.end();
  feedparser.on('readable', function () {
    try {
      const item = this.read();
      if (item !== null) {
        request({
          method: 'GET',
          url: item.link,
        }, (err, info) => {
          if (!err) {
            htmlData.body = info.body;
            const parsedData = extractor(htmlData.body, 'en');
            feedData.author = [];
            feedData.videos = [];
            feedData.feedtitle = parsedData.title;
            feedData.feedmainpicture = parsedData.image;
            feedData.feedsummary = parsedData.description;
            feedData.feedmaincontent = parsedData.text;
            feedData.author.push(item.author);
            if (item.author === null) {
              feedData.author = parsedData.author;
            }
            feedData.feedurl = item.link;
            feedData.copyright = item.meta.copyright;
            // feedData.videos = parsedData.videos;
            feedData.publishedDate = item.pubdate;
            if (item.categories.length > 0) {
              feedData.categories = item.categories;
              feedData.feedtags = item.categories;
            } else if (parsedData.keywords !== undefined) {
              feedData.categories = parsedData.keywords.split(' ').join('').split(',');
              feedData.feedtags = parsedData.keywords.split(' ').join('').split(',');
            } else {
              feedData.categories = [];
              feedData.feedtags = [];
            }
            metaData.sourcename = item.meta.title;
            callback(undefined, feedData, metaData);
          } else {
            console.log('err in accessing the link', err, item.link);
          }
        });
      }
    } catch (err) {
      console.log(`getFeed: err.message == ${err.message}`);
    }
  });
  feedparser.on('error', (err) => {
    console.log(`getFeed: err.message == ${err.message}`);
  });
  feedparser.on('end', () => {
    console.log('onend');
  });
};

请帮我解决这个问题。

在生产应用中挂断/重置套接字的原因有很多。从您的描述中,我相信原因不是由于应用程序请求过载(除非您运行的机器非常慢(。IMO,最有可能的候选者是由于来自同一 ip 的连接过多而被远程服务器限制(chrome 最多打开 8 个连接到任何单个服务器的连接,您应该尽量不要超过此限制,尽管每个服务器都有不同的限制(,要解决此问题,您应该执行以下操作之一:

  • 增加主机请求池(基本设置Agent.maxSockets(
  • 使用代理服务(例如Luminati(在许多源IP上分发请求(与高并发要求更相关(

还有一件事要记住,请求可能由于"自然"网络原因(例如互联网连接不良\不稳定,服务器繁忙峰值(而失败,您应该始终在放弃之前至少重试一次请求。

最新更新