Then one more thing: 2 million rows take around 15 minutes, and that is only one day.
Is there any way to make this process faster? This is only a small tag historian for us.
My script right now is:
def runAction(self, event):
    import csv

    # Get the file path of the upload, then stream the CSV rows into the SQL table
    csvPath = "C:/test historyFri Jun 16 09 02 22 CEST 2023/sqlt_data_1_20230611.csv"

    query = """use mijn_database INSERT INTO sqlt_data_1_2023_06 (tagid, intvalue, floatvalue, stringvalue, datevalue, dataintegrity, t_stamp)
               VALUES (?,?,?,?,?,?,?)"""

    def nullIfEmpty(value):
        # Empty CSV fields become NULL in the nullable value columns
        return value if value != "" else None

    def insertRows(rows):
        # Insert the buffered rows into the SQL table, one statement per row
        for row in rows:
            args = [row[0],               # tagid
                    nullIfEmpty(row[1]),  # intvalue
                    nullIfEmpty(row[2]),  # floatvalue
                    nullIfEmpty(row[3]),  # stringvalue
                    nullIfEmpty(row[4]),  # datevalue
                    row[5],               # dataintegrity
                    row[6]]               # t_stamp
            system.db.runPrepUpdate(query, args)

    csvFile = open(csvPath, 'r')
    try:
        csvDataset = []
        for row in csv.reader(csvFile):
            csvDataset.append(row)
            # Flush the buffer every 1000 rows
            if len(csvDataset) >= 1000:
                insertRows(csvDataset)
                csvDataset = []
        # Flush whatever is left over at the end of the file
        if csvDataset:
            insertRows(csvDataset)
    finally:
        csvFile.close()
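
The biggest cost here is almost certainly the one-row-at-a-time system.db.runPrepUpdate calls: every call is its own round trip and its own transaction, so 2 million rows means 2 million statements. A common way to speed this up is to send many rows per statement with a multi-row INSERT. A minimal sketch, assuming the same table and a gateway database connection named "mijn_database" (the connection name is an assumption; passing it as the third argument to runPrepUpdate replaces the "use mijn_database" prefix):

def insertBatch(rows, batchSize=250):
    # One multi-row INSERT per batch instead of one INSERT per row.
    # Keep columns * batchSize under the database's parameter limit
    # (e.g. 2100 parameters on SQL Server, so at most 300 rows of 7 columns).
    for start in range(0, len(rows), batchSize):
        batch = rows[start:start + batchSize]
        placeholders = ",".join(["(?,?,?,?,?,?,?)"] * len(batch))
        query = ("INSERT INTO sqlt_data_1_2023_06 "
                 "(tagid, intvalue, floatvalue, stringvalue, datevalue, dataintegrity, t_stamp) "
                 "VALUES " + placeholders)
        args = []
        for row in batch:
            # Same empty-string-to-NULL mapping as the row-by-row version
            args += [row[0], row[1] or None, row[2] or None, row[3] or None,
                     row[4] or None, row[5], row[6]]
        # "mijn_database" is the assumed gateway connection name
        system.db.runPrepUpdate(query, args, "mijn_database")

Swapping insertRows for insertBatch in the script above cuts the statement count from one per row to one per 250 rows. On top of that, wrapping the whole import in a single transaction (system.db.beginTransaction / system.db.commitTransaction) avoids a commit per statement.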
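
If the database is SQL Server and the CSV file is readable from the database server itself, the fastest route is usually to let the server bulk-load the file and skip the Jython loop entirely. A sketch, assuming SQL Server, the KEEPNULLS option to keep empty fields as NULL, and that the path is visible to the SQL Server service rather than just the Ignition gateway (all assumptions; MySQL has LOAD DATA INFILE for the same job):

# Assumes SQL Server and that the path is readable by the database
# server process, not just by the Ignition gateway (assumptions).
bulkQuery = """BULK INSERT sqlt_data_1_2023_06
               FROM 'C:/test historyFri Jun 16 09 02 22 CEST 2023/sqlt_data_1_20230611.csv'
               WITH (FIELDTERMINATOR = ',', ROWTERMINATOR = '\\n', KEEPNULLS)"""
system.db.runUpdateQuery(bulkQuery, "mijn_database")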