@@ -31,36 +31,19 @@ def errors = {}
     end
   end
 
-  def advisory_lock(key)
-    ::ActiveRecord::Base.connection.execute("SELECT pg_advisory_xact_lock(#{key})")
-    nil
-  end
-
-  def advisory_lock_try(key)
-    ::ActiveRecord::Base.connection.execute("SELECT pg_try_advisory_xact_lock(123)").first["pg_try_advisory_xact_lock"]
-  end
-
   def job_get_by_id(id)
     data_set = RiverJob.where(id: id)
     data_set.first ? to_job_row_from_model(data_set.first) : nil
   end
 
-  def job_get_by_kind_and_unique_properties(get_params)
-    data_set = RiverJob.where(kind: get_params.kind)
-    data_set = data_set.where("tstzrange(?, ?, '[)') @> created_at", get_params.created_at[0], get_params.created_at[1]) if get_params.created_at
-    data_set = data_set.where(args: get_params.encoded_args) if get_params.encoded_args
-    data_set = data_set.where(queue: get_params.queue) if get_params.queue
-    data_set = data_set.where(state: get_params.state) if get_params.state
-    data_set.first ? to_job_row_from_model(data_set.first) : nil
-  end
-
   def job_insert(insert_params)
-    to_job_row_from_model(RiverJob.create(insert_params_to_hash(insert_params)))
+    # job_insert_many maps each returned row through to_insert_result below
+    job_insert_many([insert_params]).first
   end
 
-  def job_insert_unique(insert_params, unique_key)
-    res = RiverJob.upsert(
-      insert_params_to_hash(insert_params).merge(unique_key: unique_key),
+  def job_insert_many(insert_params_many)
+    RiverJob.upsert_all(
+      insert_params_many.map { |param| insert_params_to_hash(param) },
       on_duplicate: Arel.sql("kind = EXCLUDED.kind"),
       returning: Arel.sql("*, (xmax != 0) AS unique_skipped_as_duplicate"),
 
@@ -69,15 +52,9 @@ def job_insert_unique(insert_params, unique_key)
       # ActiveRecord tries to look up a unique index instead of letting
       # Postgres handle that, and of course it doesn't support a `WHERE`
       # clause. The workaround is to target the index name instead of columns.
-      unique_by: "river_job_kind_unique_key_idx"
+      unique_by: "river_job_unique_idx"
     )
-
-    [to_job_row_from_raw(res), res.send(:hash_rows)[0]["unique_skipped_as_duplicate"]]
-  end
-
-  def job_insert_many(insert_params_many)
-    RiverJob.insert_all(insert_params_many.map { |p| insert_params_to_hash(p) })
-    insert_params_many.count
+      .map { |row| to_insert_result(row) }
   end
 
   def job_list
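
Note on the `unique_by:` change above: per the inline comment, the `WHERE` clause of `river_job_unique_idx` cannot be expressed as a column list, so `upsert_all` has to target the index by name. A minimal sketch of the same pattern under assumed names (`widgets`, the `Widget` model, and `widgets_unique_idx` are hypothetical stand-ins, not part of this repo):

    # Hypothetical migration: a partial unique index has to be referenced by
    # name in `unique_by:` because its WHERE clause can't be given as columns.
    class CreateWidgets < ActiveRecord::Migration[7.1]
      def change
        create_table :widgets do |t|
          t.string :kind, null: false
          t.string :unique_key
        end

        add_index :widgets, [:kind, :unique_key],
                  unique: true,
                  where: "unique_key IS NOT NULL",
                  name: "widgets_unique_idx"
      end
    end

    class Widget < ActiveRecord::Base; end

    # Upsert against the partial index by name. `(xmax != 0)` is true for rows
    # that took the ON CONFLICT update path (Postgres sets a non-zero xmax on
    # them), so it flags rows skipped as duplicates rather than freshly inserted.
    Widget.upsert_all(
      [{kind: "a", unique_key: "k1"}],
      on_duplicate: Arel.sql("kind = EXCLUDED.kind"),
      returning: Arel.sql("*, (xmax != 0) AS unique_skipped_as_duplicate"),
      unique_by: "widgets_unique_idx"
    )
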
@@ -104,10 +81,16 @@ def transaction(&)
       queue: insert_params.queue,
       state: insert_params.state,
       scheduled_at: insert_params.scheduled_at,
-      tags: insert_params.tags
+      tags: insert_params.tags,
+      unique_key: insert_params.unique_key,
+      unique_states: insert_params.unique_states
     }.compact
   end
 
+  private def to_insert_result(result)
+    [to_job_row_from_raw(result), result["unique_skipped_as_duplicate"]]
+  end
+
   private def to_job_row_from_model(river_job)
     # needs to be accessed through values because `errors` is shadowed by both
     # ActiveRecord and the patch above
@@ -139,7 +122,8 @@ def transaction(&)
       scheduled_at: river_job.scheduled_at.getutc,
       state: river_job.state,
       tags: river_job.tags,
-      unique_key: river_job.unique_key
+      unique_key: river_job.unique_key,
+      unique_states: river_job.unique_states
     )
   end
 
@@ -182,7 +166,8 @@ def transaction(&)
       scheduled_at: river_job["scheduled_at"].getutc,
       state: river_job["state"],
       tags: river_job["tags"],
-      unique_key: river_job["unique_key"]
+      unique_key: river_job["unique_key"],
+      unique_states: river_job["unique_states"]
     )
   end
 end
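
With this change, `job_insert` delegates to `job_insert_many`, and both return the job row together with the `unique_skipped_as_duplicate` flag surfaced by the `RETURNING` clause. A hedged usage sketch, calling the driver directly for illustration (`driver` and `insert_params` are assumed, already-constructed values, not part of this diff):

    # Each insert yields a [job_row, unique_skipped_as_duplicate] pair.
    job_row, skipped = driver.job_insert(insert_params)
    puts skipped ? "duplicate: reused existing job #{job_row.id}" : "inserted job #{job_row.id}"

    # job_insert_many returns one pair per input, in order.
    driver.job_insert_many([insert_params, insert_params]).each do |row, was_skipped|
      puts "id=#{row.id} skipped_as_duplicate=#{was_skipped}"
    end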