我正在抓取此页面 https://www.betexplorer.com/soccer/netherlands/eerste-divisie-2018-2019/results/ 。我的代码会提取所有比赛的 URL,并使用 for 循环
从每个 URL 中提取数据。(URL 示例:https://www.betexplorer.com/soccer/netherlands/eerste-divisie-2018-2019/den-bosch-g-a-eagles/YkOxU6sM/)这是我的代码:
def _bet365_odd(driver, idx):
    """Return the bet365 odd in column *idx* (1=home, 2=draw, 3=away).

    Mirrors the original per-column logic:
      - NoSuchElementException (no archived-odds marker / no bet365 row)
        -> the literal string 'no bet365 odd';
      - TypeError while building the oid-bid key -> fall back to the
        cell's raw ``data-odd`` attribute;
      - otherwise click the cell and read the bold value from the
        odds-history popup identified by ``data-oid``-``data-bid``.
    """
    cell_xpath = "(//td[a[.='bet365']]/following-sibling::td[@data-odd])[%d]" % idx
    try:
        # Probe for the archived-odds marker; raises NoSuchElementException
        # when the match has no bet365 odds at all.
        driver.find_element(By.XPATH, "//td[a[.='bet365']]/following-sibling::td[span]")
        WebDriverWait(driver, 2).until(
            EC.element_to_be_clickable((By.XPATH, cell_xpath))).click()
        cell = WebDriverWait(driver, 2).until(
            EC.visibility_of_element_located((By.XPATH, cell_xpath)))
        oid = cell.get_attribute("data-oid")
        bid = cell.get_attribute("data-bid")
        popup_key = oid + '-' + bid  # TypeError here if either attribute is missing
    except NoSuchElementException:
        return 'no bet365 odd'
    except TypeError:
        # No oid/bid pair: the visible cell value is the only odd available.
        return WebDriverWait(driver, 2).until(
            EC.visibility_of_element_located((By.XPATH, cell_xpath))).get_attribute("data-odd")
    # Success path: read the latest (last row) bold odd from the history popup.
    return WebDriverWait(driver, 2).until(
        EC.visibility_of_element_located(
            (By.XPATH, "//*[contains(@id,'%s')]/tr[last()]/td[@class='bold']" % popup_key))).text


for i in matches:
    driver.get(i)
    # Basic match metadata (breadcrumb country, league/season header, teams).
    Country = WebDriverWait(driver, 2).until(EC.visibility_of_element_located(
        (By.XPATH, "/html/body/div[4]/div[5]/div/div/div[1]/section/ul/li[3]/a"))).text
    leagueseason = WebDriverWait(driver, 2).until(EC.visibility_of_element_located(
        (By.XPATH, "/html/body/div[4]/div[5]/div/div/div[1]/section/header/h1/a"))).text
    Date = WebDriverWait(driver, 2).until(EC.visibility_of_element_located(
        (By.ID, 'match-date'))).text
    # Normalise e.g. "13.04.2019 - 20:00" style dates: dots -> slashes,
    # drop the dash and the first space.
    Date = Date.replace(".", "/").replace("-", "").replace(" ", "", 1)
    Home = WebDriverWait(driver, 2).until(EC.visibility_of_element_located(
        (By.XPATH, "/html/body/div[4]/div[5]/div/div/div[1]/section/ul[2]/li[1]/h2/a"))).text
    Away = WebDriverWait(driver, 2).until(EC.visibility_of_element_located(
        (By.XPATH, "/html/body/div[4]/div[5]/div/div/div[1]/section/ul[2]/li[3]/h2/a"))).text
    ft = WebDriverWait(driver, 2).until(EC.visibility_of_element_located(
        (By.ID, 'js-score'))).text
    Res = ""  # NOTE(review): unused here — presumably filled in later; confirm
    # Optional extra-time / penalties marker; absent for regular results.
    try:
        extrainfo = driver.find_element(By.XPATH, "//*[@id='js-eventstage']").text
    except NoSuchElementException:
        extrainfo = " "
    # bet365 1X2 odds: columns 1 (home), 2 (draw), 3 (away).
    B365H = _bet365_odd(driver, 1)
    B365D = _bet365_odd(driver, 2)
    B365A = _bet365_odd(driver, 3)
现在,当页面没有成功加载时,会抛出一个 TimeoutException。
我尝试了这种方式来处理:
# Attempted approach: wrap the whole scrape in try/except TimeoutException.
# Problem: the except branch only reloads the page and then falls through,
# so the data for this URL is never re-scraped — the loop moves on.
try:
#previous code
except TimeoutException:
driver.get(i)
这样,当我遇到超时错误时,代码会跳过该 URL 的抓取,继续处理下一个 URL。我该如何解决这个问题?我希望在超时的情况下,它会重新加载页面,并再次为该 URL 抓取数据。
您可以将它放入while循环中,一旦它成功完成,就会继续前进
# Retry the scrape for the current URL until it succeeds, instead of
# skipping to the next URL on a TimeoutException.
success = False
attempts = 0
MAX_ATTEMPTS = 5  # safety cap so a permanently broken page cannot loop forever
while not success and attempts < MAX_ATTEMPTS:
    try:
        # previous code (reload the page with driver.get(i) and scrape again)
        success = True  # only reached when no TimeoutException was raised
    except TimeoutException:
        attempts += 1
        print ('Will try again...')