Let's make this a bit simpler:
if currentValue.value == 1:
    # One batched read: a single gateway round trip for all three tags.
    tagpaths = [
        "[NP_HI_25L_WDE_ProcessProvider]NP/HI/25L/WDE/OrderBreakdown/PAS-X/WDEDOWN/MatFlow_MATERIAL_CHECK_MESSAGE/shopFloorOrderId",
        "[NP_HI_25L_WDE_ProcessProvider]NP/HI/25L/WDE/OrderBreakdown/PAS-X/WDEDOWN/Transaction_DOWNLOAD/targetMaterialNumber",
        "[NP_HI_25L_WDE_ProcessProvider]NP/HI/25L/WDE/OrderBreakdown/PAS-X/WDEDOWN/Transaction_DOWNLOAD/targetBatchSize"
    ]
    localOrderShopFloorOrderId, bomTargetMaterialNumber, bomTargetBatchSize = [
        qv.value for qv in system.tag.readBlocking(tagpaths)
    ]

    # One query pulls all three BOM columns instead of three separate queries.
    query_select_target_source_item = """
        SELECT
            [targetSourceItem],
            [materialNumber],
            [loQuantity]
        FROM [NP_HI_25L_WD_DB].[dbo].[tblBillOfMaterials]
        WHERE [targetMaterialNumber] = ? AND [targetBatchSize] = ?
    """
    data = system.db.runPrepQuery(
        query_select_target_source_item,
        [bomTargetMaterialNumber, bomTargetBatchSize]
    )
    # FIX: name must match the one used in the zip() below — the original
    # assigned `targetSourceItem` here but zipped `bomTargetSourceItem`,
    # which raised a NameError at runtime.
    bomTargetSourceItem = data.getColumnAsList(0)
    materialNumber = data.getColumnAsList(1)
    quantity = data.getColumnAsList(2)

    life_cycle_params = {
        'localOrderShopFloorOrderId': localOrderShopFloorOrderId,
        'lifeCycleName': 'LOCAL_ORDER',
        'lifeCycleStatus': 'RESERVED',
        'operatorComment': 'NONE',
        'createdBy': 'NVML',
        'archived': 0
    }

    # FIX: the original began a transaction and never used, committed, or
    # closed it, leaking the connection until the gateway times it out.
    # Run the named query inside the transaction and always close it.
    # NOTE(review): confirm the runNamedQuery overload in this scope accepts
    # the tx id as the fourth argument.
    tx_id = system.db.beginNamedQueryTransaction("MyProject", "default")
    try:
        system.db.runNamedQuery("WDE_System", "NP/HI/25L/WD/OrderBreakdown/Insert_life_cycle", life_cycle_params, tx_id)
        system.db.commitTransaction(tx_id)
    except Exception:
        system.db.rollbackTransaction(tx_id)
        raise
    finally:
        system.db.closeTransaction(tx_id)

    # One dict per reservation row, then a single bulk INSERT instead of
    # one runPrepUpdate per row.
    material_reservation_params = [
        {
            'loShopFloorOrderId': localOrderShopFloorOrderId,
            'bomTargetMaterialNumber': bomTargetMaterialNumber,
            'bomTargetBatchSize': bomTargetBatchSize,
            'bomTargetSourceItem': srcItem,
            'bomMaterialNumber': matNum,
            'reservedQuantity': qty,
            'unitOfMesurementId': 'kg ac.ing.',  # [sic] key matches the table's column name
            'createdBy': 'NVML',
            'archived': 0
        }
        for srcItem, matNum, qty in zip(bomTargetSourceItem, materialNumber, quantity)
    ]
    if material_reservation_params:
        q, v = build_insert_string('destination_table', material_reservation_params)
        system.db.runPrepUpdate(q, v, database='destination_db')
- You don't need 3 queries to pull 3 columns from the same table.
- Batched tag reads: one `readBlocking` call instead of several.
- Bulk insert instead of inserting row by row in a loop.
- `zip()` instead of index-based iteration — it's more Pythonic, though not always easier to read.

Here's the helper used to build the insert query:
def build_insert_string(table, data, columns=None):
    """
    Build the query string and the values list for a multi-row INSERT.

    Use the result with `system.db.runPrepUpdate`.

    params:
        table (string): The table to insert into.
        data (list of dicts): One dict per row to insert, where the keys
            are the column names.
        columns (list of strings): The columns to be inserted. If None,
            they're deduced from the first row's keys (insertion order).

    return:
        A tuple where:
        - the first element is the query string formatted for `runPrepUpdate`
          (with question marks as placeholders)
        - the second element is the flat, row-major list of values
        Returns (None, None) when `data` is empty.
    """
    if not data:
        return None, None
    if columns is None:
        # Materialize the keys view so `columns` is a plain, reusable list.
        columns = list(data[0].keys())
    # "(?,?,...)" for one row, then repeated once per row: "(?,?),(?,?),..."
    row_marks = "({})".format(','.join("?" for _ in columns))
    all_marks = ",".join(row_marks for _ in data)
    col_names = ','.join(columns)
    q = "insert into {} ({}) values {}".format(table, col_names, all_marks)
    # Flatten values row-major so they line up with the placeholders.
    values = [row[c] for row in data for c in columns]
    return q, values
It returns the query as a string and the values to pass as parameters. Use it with `system.db.runPrepUpdate`:

q, v = build_insert_string('some_table', data)
system.db.runPrepUpdate(q, v)