如何使用python将多个抓取的数据保存到mysql中



我有一个从cars.com上刮来的数据。我正试图将它们保存到MySQL数据库,但我没能做到。这是我的完整代码:

#ScrapeData.py

import requests
from bs4 import BeautifulSoup

# Results page: first 100 certified-pre-owned listings, best-match order.
URL = "https://www.cars.com/shopping/results/?dealer_id=&keyword=&list_price_max=&list_price_min=&makes[]=&maximum_distance=all&mileage_max=&page=1&page_size=100&sort=best_match_desc&stock_type=cpo&year_max=&year_min=&zip="

r = requests.get(URL)
soup = BeautifulSoup(r.content, 'html.parser')
cars = soup.find_all('div', class_='vehicle-card')

# Parallel lists: element i of each list describes the same car.
# save_to_mysql.py imports these module-level names.
name = []
mileage = []
dealer_name = []
rating = []
rating_count = []
price = []


def _text(node, default="n/a"):
    """Return the text of a bs4 tag, or *default* when find() returned None.

    The original code only guarded the rating field with a bare except;
    any other missing element crashed with AttributeError on None.
    """
    return node.get_text() if node is not None else default


for car in cars:
    name.append(_text(car.find('h2')))
    mileage.append(_text(car.find('div', {'class': 'mileage'})))
    dealer_name.append(_text(car.find('div', {'class': 'dealer-name'})))
    # Not every card carries a rating badge; fall back to "n/a".
    rating.append(_text(car.find('span', {'class': 'sds-rating__count'})))
    rating_count.append(_text(car.find('span', {'class': 'sds-rating__link'})))
    price.append(_text(car.find('span', {'class': 'primary-price'})))

#save_to_mysql.py

import pymysql
import scrapeData
import mysql.connector
connection = pymysql.connect(
    host='localhost',
    user='root',
    password='',
    db='cars',
)

# Parallel lists built by scrapeData: element i of each describes car i.
name = scrapeData.name
mileage = scrapeData.mileage
dealer_name = scrapeData.dealer_name
rating = scrapeData.rating
rating_count = scrapeData.rating_count
price = scrapeData.price

try:
    mySql_insert_query = """INSERT INTO cars_details (name, mileage, dealer_name, rating, rating_count, price) 
VALUES (%s, %s, %s, %s, %s, %s) """
    # Bug fix: the original built ONE row whose six "columns" were whole
    # lists -- MySQL rejected that with error 1241 "Operand should contain
    # 1 column(s)".  zip() transposes the parallel lists into one
    # 6-value tuple per car, which is what executemany() expects.
    records_to_insert = list(zip(name, mileage, dealer_name, rating,
                                 rating_count, price))
    print(records_to_insert)
    cursor = connection.cursor()
    cursor.executemany(mySql_insert_query, records_to_insert)
    connection.commit()
    print(cursor.rowcount, "Record inserted successfully into cars_details table")
except pymysql.MySQLError as error:
    # pymysql raises its own exception hierarchy; the original caught
    # mysql.connector.Error, which a pymysql connection never raises.
    print("Failed to insert record into MySQL table {}".format(error))
finally:
    connection.close()

每当我运行此代码时,我都会收到以下错误消息:

Traceback (most recent call last):
File "c:\scraping\save_to_mysql.py", line 28, in <module>
cursor.executemany(mySql_insert_query, records_to_insert)
File "C:UsersPCAppDataLocalProgramsPythonPython310libsite-packagespymysqlcursors.py", line 173, in executemany     
return self._do_execute_many(
File "C:UsersPCAppDataLocalProgramsPythonPython310libsite-packagespymysqlcursors.py", line 211, in _do_execute_many    rows += self.execute(sql + postfix)
File "C:UsersPCAppDataLocalProgramsPythonPython310libsite-packagespymysqlcursors.py", line 148, in execute
result = self._query(query)
File "C:UsersPCAppDataLocalProgramsPythonPython310libsite-packagespymysqlcursors.py", line 310, in _query
conn.query(q)
File "C:UsersPCAppDataLocalProgramsPythonPython310libsite-packagespymysqlconnections.py", line 548, in query       
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "C:UsersPCAppDataLocalProgramsPythonPython310libsite-packagespymysqlconnections.py", line 775, in _read_query_result
result.read()
File "C:UsersPCAppDataLocalProgramsPythonPython310libsite-packagespymysqlconnections.py", line 1156, in read       
first_packet = self.connection._read_packet()
File "C:UsersPCAppDataLocalProgramsPythonPython310libsite-packagespymysqlconnections.py", line 725, in _read_packet    packet.raise_for_error()
File "C:UsersPCAppDataLocalProgramsPythonPython310libsite-packagespymysqlprotocol.py", line 221, in raise_for_error    err.raise_mysql_exception(self._data)
File "C:UsersPCAppDataLocalProgramsPythonPython310libsite-packagespymysqlerr.py", line 143, in raise_mysql_exception
raise errorclass(errno, errval)
pymysql.err.OperationalError: (1241, 'Operand should contain 1 column(s)')

有人知道怎么解决这个问题吗?我想一次在MySQL中插入多个刮取的数据。我会很高兴你的帮助

首先,我不会对所有数据使用单独的列表,而是使用一个列表,收集关于一辆车的所有信息。就像嵌套在里面一样。所以不是

mileage = []
dealer_name = []

我会创建一个名为汽车的列表:

cars = []

然后,我会为每辆车上抓取到的各项信息分别创建不同的变量:

#brand
brand = car.find('h2').get_text()
#mileage
mileage = car.find('div', {'class':'mileage'}).get_text()

然后,我会把这些变量组合成一个元组,并将其追加到 cars 列表中。

toAppend = brand, mileage, dealer_name, rating, rating_count, price
cars.append(toAppend)

那么输出将是:

[('2018 Mercedes-Benz CLA 250 Base', '21,326 mi.', '\nMercedes-Benz of South Bay\n', '4.6', '(1,020 reviews)', '$33,591'), ('2021 Toyota Highlander Hybrid XLE', '9,529 mi.', '\nToyota of Gastonia\n', '4.6', '(590 reviews)', '$47,869')]

我对mysql做了一个小改动。插入到一个函数中,然后将该函数作为参数输入到主脚本中。工作起来很有魅力。我知道这并不是一个关于事情为什么以及如何运作的详细答案,但它是一个解决方案。

import requests
from bs4 import BeautifulSoup
from scrapertestsql import insertScrapedCars
# Scrape the listings page, collect one tuple per car, then hand the
# whole batch to insertScrapedCars() for a single executemany() insert.
URL = "https://www.cars.com/shopping/results/?dealer_id=&keyword=&list_price_max=&list_price_min=&makes[]=&maximum_distance=all&mileage_max=&page=1&page_size=100&sort=best_match_desc&stock_type=cpo&year_max=&year_min=&zip="

r = requests.get(URL)
soup = BeautifulSoup(r.content, 'html.parser')
scrapedCars = soup.find_all('div', class_='vehicle-card')

# One (brand, mileage, dealer_name, rating, rating_count, price) tuple per car.
cars = []

for car in scrapedCars:
    brand = car.find('h2').get_text()
    mileage = car.find('div', {'class': 'mileage'}).get_text()
    dealer_name = car.find('div', {'class': 'dealer-name'}).get_text()
    # Not every listing shows a rating; a missing tag makes find() return
    # None and .get_text() raise AttributeError, so default to "n/a".
    try:
        rating = car.find('span', {'class': 'sds-rating__count'}).get_text()
    except AttributeError:
        rating = "n/a"
    rating_count = car.find('span', {'class': 'sds-rating__link'}).get_text()
    price = car.find('span', {'class': 'primary-price'}).get_text()
    toAppend = brand, mileage, dealer_name, rating, rating_count, price
    cars.append(toAppend)

# Insert all rows in one batch once the page has been fully scraped.
insertScrapedCars(cars)

print(cars)

接下来我会:

import pymysql
import mysql.connector
connection = pymysql.connect(
    host='127.0.0.1',
    user='test',
    password='123',
    db='cars',
    port=8889
)


def insertScrapedCars(CarsToInsert):
    """Bulk-insert scraped car rows into cars_details.

    CarsToInsert: iterable of 6-tuples
        (name, mileage, dealer_name, rating, rating_count, price),
    one tuple per car, matching the column list of the INSERT below.
    """
    try:
        mySql_insert_query = """INSERT INTO cars_details (name, mileage, dealer_name, rating, rating_count, price) 
VALUES (%s, %s, %s, %s, %s, %s) """
        cursor = connection.cursor()
        cursor.executemany(mySql_insert_query, CarsToInsert)
        connection.commit()
        print(cursor.rowcount, "Record inserted successfully into cars_details table")
    except pymysql.MySQLError as error:
        # pymysql raises pymysql.MySQLError subclasses; the original caught
        # mysql.connector.Error, which can never match a pymysql failure.
        print("Failed to insert record into MySQL table {}".format(error))
    finally:
        # NOTE: closing here makes the function single-use -- fine for this
        # one-shot script; move the close to the caller if it is ever reused.
        connection.close()

最新更新