@@ -43,13 +43,6 @@ class Tracker(object):
     end times are automatically set when using the with statement and the trial component is saved to
     SageMaker at the end of the block.
 
-    .. code-block:: python
-
-        with smexperiments.tracker.Tracker.create() as my_tracker:
-            my_tracker.log_parameter('learning_rate', 0.01)
-
-            # Perform data-science code within the with block.
-
     Attributes:
         trial_component (TrialComponent): The trial component tracked.
     """
@@ -79,6 +72,13 @@ def load(
     ):
         """Create a new ``Tracker`` by loading an existing trial component.
 
+        Examples:
+            .. code-block:: python
+
+                from smexperiments import tracker
+
+                my_tracker = tracker.Tracker.load(trial_component_name='xgboost')
+
         Args:
             trial_component_name: (str, optional). The name of the trial component to track. If specified, this
                 trial component must exist in SageMaker. If you invoke this method in a running SageMaker training
@@ -140,6 +140,13 @@ def create(
     ):
         """Create a new ``Tracker`` by creating a new trial component.
 
+        Examples:
+            .. code-block:: python
+
+                from smexperiments import tracker
+
+                my_tracker = tracker.Tracker.create()
+
         Args:
             display_name: (str, optional). The display name of the trial component to track.
             artifact_bucket: (str, optional) The name of the S3 bucket to store artifacts to.
@@ -175,6 +182,12 @@ def log_parameter(self, name, value):
 
         Overwrites any previous value recorded for the specified parameter name.
 
+        Examples:
+            .. code-block:: python
+
+                # log the learning rate hyperparameter
+                my_tracker.log_parameter('learning_rate', 0.01)
+
         Args:
             name (str): The name of the parameter
             value (str or numbers.Number): The value of the parameter
@@ -184,6 +197,12 @@ def log_parameter(self, name, value):
 
     def log_parameters(self, parameters):
         """Record a collection of parameter values for this trial component.
 
+        Examples:
+            .. code-block:: python
+
+                # log multiple hyperparameters used in training
+                my_tracker.log_parameters({"learning_rate": 1.0, "gamma": 0.9, "dropout": 0.5})
+
         Args:
             parameters (dict[str, str or numbers.Number]): The parameters to record.
         """
@@ -194,6 +213,12 @@ def log_input(self, name, value, media_type=None):
 
         Overwrites any previous value recorded for the specified input name.
 
+        Examples:
+            .. code-block:: python
+
+                # log the S3 location of the input dataset
+                my_tracker.log_input(name='input', value='s3://inputs/path')
+
         Args:
             name (str): The name of the input value.
             value (str): The value.
@@ -206,6 +231,12 @@ def log_output(self, name, value, media_type=None):
 
         Overwrites any previous value recorded for the specified output name.
 
+        Examples:
+            .. code-block:: python
+
+                # log the S3 location of the output predictions
+                my_tracker.log_output(name='prediction', value='s3://outputs/path')
+
         Args:
             name (str): The name of the output value.
             value (str): The value.
@@ -216,6 +247,12 @@ def log_output(self, name, value, media_type=None):
 
     def log_artifact(self, file_path, name=None, media_type=None):
         """Upload a local file to s3 and store it as an artifact in this trial component.
 
+        Examples:
+            .. code-block:: python
+
+                # log a local artifact file
+                my_tracker.log_artifact(file_path='/local/path/artifact.tar.gz')
+
         Args:
             file_path (str): The path of the local file to upload.
             name (str, optional): The name of the artifact.
@@ -232,6 +269,14 @@ def log_artifact(self, file_path, name=None, media_type=None):
 
     def log_metric(self, metric_name, value, timestamp=None, iteration_number=None):
         """Record a scalar metric value for this TrialComponent.
 
+        Examples:
+            .. code-block:: python
+
+                for epoch in range(epochs):
+                    # run your training logic and compute accuracy and loss for this epoch
+                    my_tracker.log_metric(metric_name='accuracy', value=0.9, iteration_number=epoch)
+                    my_tracker.log_metric(metric_name='loss', value=0.03, iteration_number=epoch)
+
         Args:
             metric_name (str): The name of the metric.
             value (number): The value of the metric.
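
Taken together, the examples added in this diff compose into a single workflow. The sketch below is a minimal illustration assembled from those examples, assuming the API shown above (``Tracker.create`` used as a context manager plus the ``log_*`` methods); the S3 paths, hyperparameter values, metric values, and artifact path are placeholders, not values taken from this change.

.. code-block:: python

    from smexperiments import tracker

    # create() returns a Tracker that works as a context manager; per the class
    # docstring, the trial component is saved to SageMaker when the block exits
    with tracker.Tracker.create() as my_tracker:
        # placeholder hyperparameter values
        my_tracker.log_parameters({"learning_rate": 0.01, "dropout": 0.5})
        my_tracker.log_input(name='training-data', value='s3://inputs/path')

        for epoch in range(2):
            # training code would go here; the accuracy and loss values are placeholders
            my_tracker.log_metric(metric_name='accuracy', value=0.9, iteration_number=epoch)
            my_tracker.log_metric(metric_name='loss', value=0.03, iteration_number=epoch)

        my_tracker.log_output(name='prediction', value='s3://outputs/path')
        my_tracker.log_artifact(file_path='/local/path/artifact.tar.gz')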