I've read a lot here about the "too many connections" problem.
I'm trying to upload a small amount of CSV data (10,000 rows) to ElephantSQL (psql) in a loop.
I'm on ElephantSQL's free plan. I could upgrade to get more concurrent connections, but the real problem is that I don't know how to manage connections.
Here is my code:
First, I create the individual URLs to pass to axios in extractToRaw (which writes to the raw_data table in psql):
readCSV(async (list) => {
  // .map() is synchronous, so there is nothing to await here;
  // note: item is unused, domain/apiKey come from the surrounding scope
  const apiURLList = list.map((item) => {
    return `apiDomain=${domain}&api_key=${apiKey}`;
  });
  for (const url of apiURLList) {
    await extractToRaw(url);
  }
});
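readCSV itself is not shown in the question; purely for context, a minimal sketch of what it might look like, assuming the csv-parse library and a hypothetical domains.csv input file:

const fs = require("fs");
const { parse } = require("csv-parse");

// Hypothetical stand-in for the readCSV used above: streams a CSV file
// and hands the parsed rows to the supplied callback once parsing ends.
const readCSV = (callback) => {
  const list = [];
  fs.createReadStream("domains.csv") // assumed input file name
    .pipe(parse({ columns: true })) // one object per row, keyed by header
    .on("data", (row) => list.push(row))
    .on("end", () => callback(list));
};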
Then:
const extractToRaw = async (url) => {
  const records = [];
  try {
    // use plain await instead of mixing await with .then()
    const response = await axios({
      method: "get",
      url: url,
      params: {
        //things here
      },
    });
    const contactRecord = response.data.data;
    const emailData = response.data.data.emails;
    const metaData = response.data.meta;

    if (metaData.results === 0) {
      console.log(`no emails for ${contactRecord.domain}`);
      await upload_no_email(contactRecord.domain);
      return; // nothing to push to raw_data for this domain
    }

    for (const record of emailData) {
      console.log(`Writing ${record.value} record...`);
      records.push({
        firstname: record.first_name,
        lastname: record.last_name,
        position: record.position,
        seniority: record.seniority,
        email: record.value,
        website: record.value.split("@")[1],
        confidence: record.confidence,
      });
    }
    console.log(records);

    //upload to table
    await uploadToRaw(records);
  } catch (err) {
    console.log("name: ", err.name, "message: ", err.message);
  }
};
Finally - the upload to psql:
const uploadToRaw = async (records) => {
  console.log(`uploading from ${records[0].website}`);
  for (const record of records) {
    const valuesArr = [
      record.firstname,
      record.lastname,
      record.position,
      record.seniority,
      record.email,
      record.website,
      record.confidence,
    ];
    // await each insert so errors surface here instead of being
    // fire-and-forget promises piling up on the pool
    await pool.query(
      `INSERT INTO raw_data(firstname, lastname, position, seniority, email, website, confidence)
       VALUES($1, $2, $3, $4, $5, $6, $7)`,
      valuesArr
    );
  }
};
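One query per row also means one pool checkout per row. A single multi-row INSERT cuts this to one round trip per batch; a sketch using plain pg placeholders (uploadToRawBatched is a hypothetical name, not part of the original code):

// Hypothetical batched variant: builds ($1,...,$7), ($8,...,$14), ...
// placeholders so the whole batch goes through one pool.query call.
const uploadToRawBatched = async (records) => {
  const cols = ["firstname", "lastname", "position", "seniority", "email", "website", "confidence"];
  const values = [];
  const rows = records.map((r, i) => {
    values.push(r.firstname, r.lastname, r.position, r.seniority, r.email, r.website, r.confidence);
    const base = i * cols.length;
    return `(${cols.map((_, j) => `$${base + j + 1}`).join(", ")})`;
  });
  await pool.query(
    `INSERT INTO raw_data(${cols.join(", ")}) VALUES ${rows.join(", ")}`,
    values
  );
};

Keep in mind that Postgres caps a single statement at 65,535 bind parameters, so very large batches need to be chunked.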
Without fail, I get a "too many connections" error.
Am I using pool.query wrong?
UPDATE: adding the node-postgres initialization script:
const { Pool, Client } = require("pg");

const connectionString = "string here";

const pool = new Pool({
  connectionString,
});

const client = new Client({
  connectionString,
});
The call to create a new Pool accepts a parameter called max, which is what the maximum number of connections in the pool will be set to.
Care must be taken to keep this in line with the number of connections available on the database server.
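Based on that, a minimal sketch of capping the pool (max: 5 is an assumed value; set it below whatever your ElephantSQL plan actually allows):

const { Pool } = require("pg");

const pool = new Pool({
  connectionString,
  max: 5, // assumed cap; stay below your plan's connection limit
  idleTimeoutMillis: 30000, // close clients idle for 30 seconds
  connectionTimeoutMillis: 2000, // fail fast if no client is free
});

module.exports = pool;

Sharing this single pool across modules (via module.exports) also matters: creating a new Pool per file or per request multiplies the connection count.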