@@ -133,7 +133,7 @@ def show_invocation(self, invocation_id):
         return self._get(url=url)
 
     def rerun_invocation(self, invocation_id: str, remap: bool = False, inputs_update: Optional[dict] = None,
-                         history_id: Optional[str] = None,
+                         params_update: Optional[dict] = None, history_id: Optional[str] = None,
                          history_name: Optional[str] = None, import_inputs_to_history: bool = False,
                          replacement_params: Optional[dict] = None, allow_tool_state_corrections: bool = False,
                          inputs_by: Optional[str] = None, parameters_normalized: bool = False):
@@ -156,6 +156,12 @@ def rerun_invocation(self, invocation_id: str, remap: bool = False, inputs_updat
           invocation, this should contain a mapping of workflow inputs to the new
           datasets and dataset collections.
 
+        :type params_update: dict
+        :param params_update: If different non-dataset tool parameters should be
+          used than in the original invocation, this should contain a mapping of
+          the new parameter values. Runtime parameters should be specified
+          through ``inputs_update``.
+
         :type history_id: str
         :param history_id: The encoded history ID where to store the workflow
           outputs. Alternatively, ``history_name`` may be specified to create a
@@ -222,7 +228,8 @@ def rerun_invocation(self, invocation_id: str, remap: bool = False, inputs_updat
             for inp, input_value in inputs_update.items():
                 inputs[inp] = input_value
         payload = {'inputs': inputs}
-
+        if params_update:
+            payload['parameters'] = params_update
         if replacement_params:
             payload['replacement_params'] = replacement_params
         if history_id:
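
To illustrate how the new ``params_update`` argument would be used from client code, here is a minimal, hypothetical sketch. It assumes the method lives on the invocations client (``gi.invocations``), in line with the ``show_invocation(self, invocation_id)`` signature in the first hunk header; the Galaxy URL, API key, invocation ID, step ID, and parameter name are placeholders, and the ``{step: {param: value}}`` shape of ``params_update`` is an assumption, not something stated in this diff.

```python
from bioblend.galaxy import GalaxyInstance

# Placeholder connection details; substitute a real Galaxy URL and API key.
gi = GalaxyInstance(url="https://usegalaxy.example.org", key="<api-key>")

# Rerun an earlier workflow invocation, keeping its datasets but overriding a
# non-dataset tool parameter on one step. The mapping shape used here is an
# assumption based on Galaxy's run-workflow "parameters" payload.
rerun = gi.invocations.rerun_invocation(
    invocation_id="<invocation-id>",
    params_update={"<step-id>": {"<parameter-name>": "new value"}},
    history_name="Rerun with updated parameters",
)
```

Per the updated docstring, runtime parameters would still be passed through ``inputs_update`` rather than ``params_update``.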