Python 3.4 PyQt4 (QWebPage) 异步 web 请求 (asyncio)



是否可以在 Pyqt4 (QwebPage) 下执行异步(如 asyncio)Web 请求?

例如,如何与以下代码并行调用多个 url:

#!/usr/bin/env python3.4
import sys
import signal
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtWebKit import QWebPage
class Crawler( QWebPage ):
    """Load a single URL in an off-screen QWebPage and print its HTML.

    Usage: Crawler(url).main() — starts the Qt event loop, loads the
    page, prints the HTML on completion and exits.
    """
    def __init__(self, url):
        QWebPage.__init__( self )
        self._url = url
        self.content = ''
    def crawl( self ):
        # Restore default Ctrl+C behaviour so the Qt loop can be interrupted.
        signal.signal( signal.SIGINT, signal.SIG_DFL )
        self.connect( self, SIGNAL( 'loadFinished(bool)' ), self._finished_loading )
        self.mainFrame().load( QUrl( self._url ) )
    def _finished_loading( self, result ):
        # Slot for loadFinished(bool): grab the rendered HTML and quit.
        self.content = self.mainFrame().toHtml()
        print(self.content)
        sys.exit( 0 )
    def main( self ):
        # BUG FIX: was `def main():` (missing `self`, so `crawl.main()`
        # raised TypeError) and passed a nonexistent `self._file` to the
        # one-argument Crawler constructor.
        app = QApplication( sys.argv )
        crawler = Crawler( self._url )
        crawler.crawl()
        sys.exit( app.exec_() )
if __name__ == '__main__':
    # Crawl a single page; main() starts the Qt event loop and never returns.
    crawler = Crawler('http://www.example.com')
    crawler.main()

谢谢

抱歉,你不能让 self.mainFrame().load(QUrl(self._url)) 通过 asyncio 工作——这个方法是由 Qt 本身实现的,并不是 asyncio 协程。

但是你可以安装 quamash 事件循环并异步调用 aiohttp.request coroutine 来获取网页。

不过,对 QWebPage 来说这种方法并不必要:请求本来就是异步完成的,因此你需要做的只是创建多个 QWebPage 实例。

下面是一个基于示例脚本的简单演示:

import sys, signal
from PyQt4 import QtCore, QtGui, QtWebKit
# Qt 4.8 QtWebKit documentation pages to fetch concurrently.
_DOC_BASE = 'http://qt-project.org/doc/qt-4.8/'
urls = [_DOC_BASE + page + '.html' for page in (
    'qwebelement',
    'qwebframe',
    'qwebinspector',
    'qwebpage',
    'qwebsettings',
    'qwebview',
)]
class Crawler(QtWebKit.QWebPage):
    """Off-screen page that loads one URL and prints a preview of its HTML.

    Each instance issues its own asynchronous load, so several Crawlers
    can run in parallel on a single Qt event loop.
    """
    def __init__(self, url, identifier):
        super(Crawler, self).__init__()
        self._url = url
        self._id = identifier
        self.content = ''
        # Qt emits loadFinished once the page and its resources are done.
        self.loadFinished.connect(self._finished_loading)

    def crawl(self):
        """Start the asynchronous page load; returns immediately."""
        self.mainFrame().load(QtCore.QUrl(self._url))

    def _finished_loading(self, ok):
        """loadFinished slot: capture the HTML and print a short snippet."""
        self.content = self.mainFrame().toHtml()
        snippet = self.content[:250].rstrip()
        print('[%d] %s' % (self._id, self._url))
        print(snippet, '...')
        print()
        # Let Qt dispose of this page once control returns to the loop.
        self.deleteLater()
if __name__ == '__main__':
    app = QtGui.QApplication( sys.argv )
    # Default SIGINT handling so Ctrl+C stops the event loop.
    signal.signal( signal.SIGINT, signal.SIG_DFL)
    crawlers = []
    for index, url in enumerate(urls):
        # Keep a reference alive; each page loads concurrently.
        crawler = Crawler(url, index)
        crawlers.append(crawler)
        crawler.crawl()
    sys.exit( app.exec_() )

最新更新