@@ -20,8 +20,14 @@ def _bulk_full_record_upsert(self, cursor, table_name, fields, db_values):
2020 # calculate and create equal sized chunks of data to insert incrementally
2121 num_of_rows_able_to_insert = calculate_max_sqlite_variables () // len (fields )
2222 num_of_values_able_to_insert = num_of_rows_able_to_insert * len (fields )
23- value_chunks = [db_values [x : x + num_of_values_able_to_insert ] for x in range (0 , len (db_values ), num_of_values_able_to_insert )]
24- placeholder_chunks = [placeholder_list [x : x + num_of_rows_able_to_insert ] for x in range (0 , len (placeholder_list ), num_of_rows_able_to_insert )]
23+ value_chunks = [
24+ db_values [x : x + num_of_values_able_to_insert ]
25+ for x in range (0 , len (db_values ), num_of_values_able_to_insert )
26+ ]
27+ placeholder_chunks = [
28+ placeholder_list [x : x + num_of_rows_able_to_insert ]
29+ for x in range (0 , len (placeholder_list ), num_of_rows_able_to_insert )
30+ ]
2531 # insert data chunks
2632 fields_str = str (tuple (str (f .attname ) for f in fields )).replace ("'" , "" )
2733 for values , params in zip (value_chunks , placeholder_chunks ):
@@ -40,9 +46,14 @@ def _bulk_full_record_upsert(self, cursor, table_name, fields, db_values):
def _bulk_insert(self, cursor, table_name, fields, db_values):
    """
    Insert ``db_values`` into ``table_name`` in batches, each sized so the
    flattened parameter list stays within SQLite's bound-variable limit.

    Delegates each batch to the parent class's ``_bulk_insert``; behavior is
    identical to inserting everything at once, just split into chunks.
    """
    field_count = len(fields)
    # rows per statement = variable limit / variables consumed per row
    rows_per_chunk = calculate_max_sqlite_variables() // field_count
    # db_values is a flat sequence, so a chunk spans rows * fields entries
    values_per_chunk = rows_per_chunk * field_count
    for start in range(0, len(db_values), values_per_chunk):
        chunk = db_values[start : start + values_per_chunk]
        super(SQLWrapper, self)._bulk_insert(cursor, table_name, fields, chunk)
4657
4758 def _bulk_update (self , cursor , table_name , fields , db_values ):
4859 """
@@ -53,9 +64,14 @@ def _bulk_update(self, cursor, table_name, fields, db_values):
5364 # calculate and create equal sized chunks of data to update incrementally
5465 # for every field we're updating, we'll require 3 parameters
5566 num_update_fields = len (fields ) - 1
56- num_of_rows_able_to_update = calculate_max_sqlite_variables () // num_update_fields // 3
67+ num_of_rows_able_to_update = (
68+ calculate_max_sqlite_variables () // num_update_fields // 3
69+ )
5770 num_of_values_able_to_update = num_of_rows_able_to_update * len (fields )
58- value_chunks = [db_values [x : x + num_of_values_able_to_update ] for x in range (0 , len (db_values ), num_of_values_able_to_update )]
71+ value_chunks = [
72+ db_values [x : x + num_of_values_able_to_update ]
73+ for x in range (0 , len (db_values ), num_of_values_able_to_update )
74+ ]
5975 pk = get_pk_field (fields )
6076
6177 # insert data chunks
@@ -66,7 +82,9 @@ def _bulk_update(self, cursor, table_name, fields, db_values):
6682 for field in fields :
6783 if field == pk :
6884 continue
69- set_field_sql = " {field} = (CASE {pk_field}" .format (field = field .column , pk_field = pk .column )
85+ set_field_sql = " {field} = (CASE {pk_field}" .format (
86+ field = field .column , pk_field = pk .column
87+ )
7088 for y in range (0 , len (values ), len (fields )):
7189 value_set = values [y : y + len (fields )]
7290 set_field_sql += " WHEN %s THEN %s"
@@ -82,7 +100,9 @@ def _bulk_update(self, cursor, table_name, fields, db_values):
82100 table_name = table_name ,
83101 set_sql = set_sql [:- 1 ],
84102 pk_field = pk .column ,
85- placeholder_str = "({})" .format ("," .join ("%s" for _ in range (len (pk_params )))),
103+ placeholder_str = "({})" .format (
104+ "," .join ("%s" for _ in range (len (pk_params )))
105+ ),
86106 )
87107 # use DB-APIs parameter substitution (2nd parameter expects a sequence)
88108 cursor .execute (update , params )
@@ -142,7 +162,9 @@ def _dequeuing_merge_conflict_buffer(self, cursor, current_id, transfersession_i
142162 )
143163 cursor .execute (merge_conflict_store )
144164
145- def _dequeuing_update_rmcs_last_saved_by (self , cursor , current_id , transfersession_id ):
165+ def _dequeuing_update_rmcs_last_saved_by (
166+ self , cursor , current_id , transfersession_id
167+ ):
146168 # update or create rmc for merge conflicts with local instance id
147169 merge_conflict_store = """REPLACE INTO {rmc} (instance_id, counter, store_model_id)
148170 SELECT '{current_instance_id}', {current_instance_counter}, store.id
0 commit comments