# Copyright 2020 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Module for collecting and uploading analytics metrics."""

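# Typical usage (a minimal sketch; the uuid, request tag and Buildbucket
# response below are illustrative placeholders):
#
#   task = ExecutionTask(task_id='some-uuid', request_tag='my-request-tag')
#   task.update_result(buildbucket_response)  # a build_pb2.Build message
#   task.upload()                             # stream one row into BigQuery
#
# ScheduleJobSection follows the same pattern: construct it from a
# config_reader.TaskInfo, record decisions via its add_* methods, then
# call upload().
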
import json
import logging
import re

import constants
import rest_client

from chromite.api.gen.test_platform.suite_scheduler import analytics_pb2
from chromite.api.gen.chromite.api import artifacts_pb2
from chromite.api.gen.chromiumos import branch_pb2
from chromite.api.gen.chromiumos import common_pb2

from google.protobuf import timestamp_pb2
from google.protobuf import json_format


class AnalyticsError(Exception):
  """Raised when there is a general error."""


class AnalyticsBase(object):
  """Base class to handle analytics data."""

  def __init__(self, table, message):
    """Initialize a base event.

    Args:
      table: string of the table name.
      message: protobuf object to upload.
    """
    self.bq_client = _gen_bq_client(table)
    self.message = message

  def upload(self):
    """Convert the protobuf message to JSON and insert it into BigQuery."""
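    # The wrapped BigQuery client receives a list of rows, each a dict with
    # the serialized message under the 'json' key. As an illustrative sketch
    # (field values are placeholders, not real data), an ExecutionTask row
    # could look like:
    #   [{'json': {'queued_task_id': 'some-uuid',
    #              'request_tag': 'my-tag',
    #              'request_sent': '2020-02-05T22:05:13Z'}}]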
    rows = [{
        'json':
            json.loads(
                json_format.MessageToJson(
                    self.message,
                    preserving_proto_field_name=True,
                    including_default_value_fields=True))
    }]
    return self.bq_client.insert(rows)


class ExecutionTask(AnalyticsBase):
  """Track the execution of the queued tasks."""

  def __init__(self, task_id, request_tag):
    """Initialize an ExecutionTask.

    Args:
      task_id: string of uuid, assigned by the event which created this task.
      request_tag: string of request tag to identify a single ScheduleJob in a
        CTP build.
    """
    self.task = analytics_pb2.ExecutionTask(
        queued_task_id=task_id,
        request_tag=request_tag,
        request_sent=timestamp_pb2.Timestamp())
    self.task.request_sent.GetCurrentTime()
    super(ExecutionTask, self).__init__(constants.Metrics.EXECUTION_TASK_TABLE,
                                        self.task)

  def update_result(self, resp):
    """Attach the buildbucket id to task_execution.

    Args:
      resp: build_pb2.Build object of Buildbucket response.
    """
    try:
      if resp.id > 0:
        self.task.response.ctp_build_id = str(resp.id)
        return
      # If id is absent, log resp as an error message.
      logging.warning('No build id in buildbucket response %s.', str(resp))
    except AttributeError as e:
      logging.warning('Failed to parse the buildbucket response, %s. '
                      'Error: %s', str(resp), str(e))
    self.task.error.error_message = str(resp)


class ScheduleJobSection(AnalyticsBase):
  """Record all the scheduling decisions in a job section."""

  def __init__(self, task_info):
    """Initialize a ScheduleJobSection.

    Args:
      task_info: a config_reader.TaskInfo object.
    """
    self.job_section = _job_section(task_info)
    super(ScheduleJobSection, self).__init__(
        constants.Metrics.TRIGGER_EVENT_TABLE,
        self.job_section)

  def add_board(self, board):
    """Add a board to job_section.

    Boards added are the targets on which we schedule the configured
    suite test.

    Args:
      board: a string of board name.
    """
    self.job_section.build_targets.add(name=board)

  def add_model(self, model):
    """Add a model to job_section.

    Models added are the targets on which we schedule the configured
    suite test. If not set in the config, all models under the boards in
    this section will be appended.

    Args:
      model: a string of model name.
    """
    self.job_section.models.add(value=model)

  def add_matched_build(self, board, build_type, milestone, manifest):
    """Add an eligible build for this job section.

    Args:
      board: a string of board name.
      build_type: a string of build type with the board, like 'release'.
      milestone: a string milestone, like '80'.
      manifest: a string of chromeos version, e.g. '12240.0.0'.
    """
    self.job_section.matched_builds.add(release_build=analytics_pb2.BuildInfo(
        build_target=common_pb2.BuildTarget(name=board),
        milestone=int(milestone),
        chrome_os_version=manifest,
        type=_branch_type(build_type)))

  def add_matched_relax_build(self, board, build_type, milestone, manifest):
    """Add an eligible relax build for this job section.

    Args:
      board: a string of board name.
      build_type: a string of build type with the board, like 'release'.
      milestone: a string milestone, like '80'.
      manifest: a string of chromeos version, e.g. '12240.0.0'.
    """
    self.job_section.matched_builds.add(relax_build=analytics_pb2.BuildInfo(
        build_target=common_pb2.BuildTarget(name=board),
        milestone=int(milestone),
        chrome_os_version=manifest,
        type=_branch_type(build_type)))

  def add_matched_fw_build(self, board, build_type, artifact, read_only=True):
    """Add an eligible firmware build for this job section.

    Args:
      board: a string of board name.
      build_type: a string of build type with the board, like 'release'.
      artifact: relative path to the artifact file, e.g.
        'firmware-board-12345.67.B-firmwarebranch/RFoo-1.0.0-b1e234567/board'.
      read_only: a boolean; True for a RO firmware build, False for a RW
        firmware build.
    """
    firmware_build = analytics_pb2.FirmwareBuildInfo(
        build_target=common_pb2.BuildTarget(name=board),
        artifact=artifacts_pb2.Artifact(path=artifact),
        type=_branch_type(build_type))
    if read_only:
      self.job_section.matched_builds.add(
          firmware_ro_build=firmware_build)
    else:
      self.job_section.matched_builds.add(
          firmware_rw_build=firmware_build)

  def add_schedule_job(self, board, model, msg=None, task_id=None):
    """Add a schedule job.

    Args:
      board: a string of board name.
      model: a string of model name.
      msg: a string of the reason why we drop this build.
      task_id: a string of uuid to track the task in the task queue.

    Raises:
      AnalyticsError: if both msg and task_id are specified, or neither is.
    """
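    # Exactly one of msg or task_id is expected. For example (board, model
    # and the other values below are illustrative):
    #   section.add_schedule_job('eve', 'eve', task_id='some-uuid')    # scheduled
    #   section.add_schedule_job('eve', None, msg='no matched build')  # dropped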
    if all([msg, task_id]):
      raise AnalyticsError('msg and task_id should not both be set; got '
                           'msg: %s, task_id: %s.' % (msg, task_id))
    if not any([msg, task_id]):
      raise AnalyticsError('At least one of msg and task_id should '
                           'be set; got msg: %s, task_id: %s.' % (msg, task_id))
    new_job = self.job_section.schedule_jobs.add(
        generated_time=timestamp_pb2.Timestamp())
    if model:
      new_job.model.value = model
    new_job.build_target.name = board
    if msg:
      new_job.justification = msg
    if task_id:
      new_job.queued_task_id = task_id
    new_job.generated_time.GetCurrentTime()


def _gen_bq_client(table):
  """A wrapper to generate a BigQuery REST client.

  Args:
    table: string of the table name.

  Returns:
    A REST client for the BigQuery API.
  """
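  # Rows go to the production BigQuery project only when both the running
  # environment and the App Engine application id report prod; any other
  # combination falls back to the staging project.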
  project = constants.Metrics.PROJECT_ID_STAGING
  if (constants.environment() == constants.RunningEnv.ENV_PROD and
      constants.application_id() == constants.AppID.PROD_APP):
    project = constants.Metrics.PROJECT_ID
  return rest_client.BigqueryRestClient(
      rest_client.BaseRestClient(
          constants.RestClient.BIGQUERY_CLIENT.scopes,
          constants.RestClient.BIGQUERY_CLIENT.service_name,
          constants.RestClient.BIGQUERY_CLIENT.service_version),
      project=project,
      dataset=constants.Metrics.DATASET,
      table=table)


def _parse_branch_spec(spec):
  """A wrapper to parse the branch spec into channel, operator and lag.

  Args:
    spec: string of branch spec, e.g. '>=tot-2', '==tot'.

  Returns:
    channel: analytics_pb2.BranchFilter channel for the builder type.
    operator: analytics_pb2.BranchFilter comparison operator from the spec.
    lag: number of minor versions behind tip-of-tree.

  Raises:
    ValueError: if the spec is not supported.
  """
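  # Examples of the mapping implemented below:
  #   '==tot'   -> (BranchFilter.MASTER, BranchFilter.EQ, 0)
  #   '>=tot-2' -> (BranchFilter.MASTER, BranchFilter.GE, 2)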
  if 'tot' not in spec:
    raise ValueError('Only branch specs relative to tot are supported, '
                     'e.g. >=tot-2; got %s' % spec)
  if spec[:2] not in ['<=', '>=', '==']:
    raise ValueError('Only "<=", ">=", "==" are supported, '
                     'e.g. >=tot-2; got %s' % spec)
  to_operator = {
      '==': analytics_pb2.BranchFilter.EQ,
      '>=': analytics_pb2.BranchFilter.GE,
      '<=': analytics_pb2.BranchFilter.LE}
  TOT_SPEC = r'.*tot-(?P<lag>.+)'
  match = re.match(TOT_SPEC, spec)
  if not match:
    return analytics_pb2.BranchFilter.MASTER, to_operator[spec[:2]], 0
  lag = match.group('lag')
  if lag.isdigit():
    return analytics_pb2.BranchFilter.MASTER, to_operator[spec[:2]], int(lag)
  raise ValueError('Failed to get the lag number from spec, %s' % spec)


def _job_section(task_info):
  """A wrapper to generate job_section from task_info.

  Args:
    task_info: a config_reader.TaskInfo object.

  Returns:
    job_section: an analytics_pb2.ScheduleJobSection object.
  """
  job_section = analytics_pb2.ScheduleJobSection(
      job_name=task_info.analytics_name,
      pool=task_info.pool,
      suite=task_info.suite,
  )
  if task_info.hour:
    job_section.schedule_job_trigger.nightly.hour = task_info.hour
  elif task_info.day:
    job_section.schedule_job_trigger.weekly.day = task_info.day
  else:
    job_section.schedule_job_trigger.interval.pause = 0
  build_filters = job_section.schedule_job_trigger.build_filters
  build_filters.only_successful_build_required = (
      task_info.only_successful_build_required is not None)
  if task_info.branch_specs:
    for spec in task_info.branch_specs:
      try:
        channel, op, lag = _parse_branch_spec(spec)
        build_filters.branch_filters.add(
            channel=channel, operator=op, lag=lag)
      except ValueError as e:
        logging.warning('Failed to parse the spec, %s', str(e))
  if task_info.firmware_rw_build_spec:
    build_filters.firmware_rw_build_spec = branch_pb2.Branch.FIRMWARE
  if task_info.firmware_ro_build_spec:
    build_filters.firmware_ro_build_spec = branch_pb2.Branch.FIRMWARE
  return job_section


def _branch_type(build_type):
  """Map a build type string to the corresponding branch_pb2.Branch type."""
  if build_type == 'release':
    return branch_pb2.Branch.RELEASE
  if build_type == 'firmware':
    return branch_pb2.Branch.FIRMWARE
  return branch_pb2.Branch.UNSPECIFIED