# Copyright (C) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import hashlib
import json
import math

from datetime import datetime
from datetime import timedelta
from google.appengine.ext import db
from google.appengine.api import memcache
from time import mktime


class NumericIdHolder(db.Model):
    owner = db.ReferenceProperty()
    # Dummy class whose sole purpose is to generate key().id()


def create_in_transaction_with_numeric_id_holder(callback):
    id_holder = NumericIdHolder()
    id_holder.put()
    id_holder = NumericIdHolder.get(id_holder.key())
    owner = db.run_in_transaction(callback, id_holder.key().id())
    if owner:
        id_holder.owner = owner
        id_holder.put()
    else:
        id_holder.delete()
    return owner


def delete_model_with_numeric_id_holder(model):
    id_holder = NumericIdHolder.get_by_id(model.id)
    db.delete(model)
    id_holder.delete()


def model_from_numeric_id(id, expected_kind):
    id_holder = NumericIdHolder.get_by_id(id)
    return id_holder.owner if id_holder and id_holder.owner and isinstance(id_holder.owner, expected_kind) else None
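

# A sketch of the intended round trip (hypothetical values): _create_if_possible(Branch, 'trunk', 'Trunk')
# below allocates a NumericIdHolder whose key().id() (say 42) becomes Branch.id, and
# model_from_numeric_id(42, Branch) later resolves that numeric id back to the owning Branch entity.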


def _create_if_possible(model, key, name):

    def execute(id):
        if model.get_by_key_name(key):
            return None
        branch = model(id=id, name=name, key_name=key)
        branch.put()
        return branch

    return create_in_transaction_with_numeric_id_holder(execute)


class Branch(db.Model):
    id = db.IntegerProperty(required=True)
    name = db.StringProperty(required=True)

    @staticmethod
    def create_if_possible(key, name):
        return _create_if_possible(Branch, key, name)


class Platform(db.Model):
    id = db.IntegerProperty(required=True)
    name = db.StringProperty(required=True)

    @staticmethod
    def create_if_possible(key, name):
        return _create_if_possible(Platform, key, name)


class Builder(db.Model):
    name = db.StringProperty(required=True)
    password = db.StringProperty(required=True)

    @staticmethod
    def create(name, raw_password):
        return Builder(name=name, password=Builder._hashed_password(raw_password), key_name=name).put()

    def update_password(self, raw_password):
        self.password = Builder._hashed_password(raw_password)
        self.put()

    def authenticate(self, raw_password):
        return self.password == hashlib.sha256(raw_password).hexdigest()

    @staticmethod
    def _hashed_password(raw_password):
        return hashlib.sha256(raw_password).hexdigest()
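
    # Usage sketch (hypothetical values): Builder.create('bot-1', 'secret') stores only the SHA-256
    # hex digest of the password, so Builder.get_by_key_name('bot-1').authenticate('secret') hashes
    # the submitted password again and compares digests; the raw password is never persisted.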


class Build(db.Model):
    branch = db.ReferenceProperty(Branch, required=True, collection_name='build_branch')
    platform = db.ReferenceProperty(Platform, required=True, collection_name='build_platform')
    builder = db.ReferenceProperty(Builder, required=True, collection_name='builder_key')
    buildNumber = db.IntegerProperty(required=True)
    revision = db.IntegerProperty(required=True)
    chromiumRevision = db.IntegerProperty()
    timestamp = db.DateTimeProperty(required=True)

    @staticmethod
    def get_or_insert_from_log(log):
        builder = log.builder()
        key_name = builder.name + ':' + str(int(mktime(log.timestamp().timetuple())))

        return Build.get_or_insert(key_name, branch=log.branch(), platform=log.platform(), builder=builder,
            buildNumber=log.build_number(), timestamp=log.timestamp(),
            revision=log.webkit_revision(), chromiumRevision=log.chromium_revision())
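
    # The key name pairs the builder with the report timestamp, e.g. (hypothetical values)
    # 'Chromium Mac Release (Perf):1332892800' for a log reported at Unix time 1332892800,
    # which is what makes get_or_insert idempotent for re-submitted reports.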


# Used to generate TestMap in the manifest efficiently
class Test(db.Model):
    id = db.IntegerProperty(required=True)
    name = db.StringProperty(required=True)
    # FIXME: Storing branches and platforms separately is flawed since a test may be available on
    # one platform but only on some branch and vice versa.
    branches = db.ListProperty(db.Key)
    platforms = db.ListProperty(db.Key)

    @staticmethod
    def update_or_insert(test_name, branch, platform):
        existing_test = [None]

        def execute(id):
            test = Test.get_by_key_name(test_name)
            if test:
                if branch.key() not in test.branches:
                    test.branches.append(branch.key())
                if platform.key() not in test.platforms:
                    test.platforms.append(platform.key())
                test.put()
                existing_test[0] = test
                return None

            test = Test(id=id, name=test_name, key_name=test_name, branches=[branch.key()], platforms=[platform.key()])
            test.put()
            return test

        return create_in_transaction_with_numeric_id_holder(execute) or existing_test[0]

    def merge(self, other):
        assert self.key() != other.key()

        merged_results = TestResult.all()
        merged_results.filter('name =', other.name)

        # FIXME: We should be doing this check in a transaction but only ancestor queries are allowed
        for result in merged_results:
            if TestResult.get_by_key_name(TestResult.key_name(result.build, self.name)):
                # A result for this build already exists under the surviving name; abort the merge.
                return None

        branches_and_platforms_to_update = set()
        for result in merged_results:
            branches_and_platforms_to_update.add((result.build.branch.id, result.build.platform.id))
            result.replace_to_change_test_name(self.name)

        delete_model_with_numeric_id_holder(other)

        return branches_and_platforms_to_update
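
    # Return value sketch: None signals that the merge was aborted because of a name collision;
    # otherwise the caller receives a set of (branch_id, platform_id) pairs, e.g. {(1, 2), (1, 3)}
    # (hypothetical ids), whose cached Runs need to be regenerated for the surviving test name.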


class TestResult(db.Model):
    name = db.StringProperty(required=True)
    build = db.ReferenceProperty(Build, required=True)
    value = db.FloatProperty(required=True)
    valueMedian = db.FloatProperty()
    valueStdev = db.FloatProperty()
    valueMin = db.FloatProperty()
    valueMax = db.FloatProperty()

    @staticmethod
    def key_name(build, test_name):
        return build.key().name() + ':' + test_name
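
    # For illustration (hypothetical values): a build keyed 'bot-1:1332892800' and a test named
    # 'Dromaeo/dom-query' yield the result key name 'bot-1:1332892800:Dromaeo/dom-query',
    # so each (build, test) pair maps to exactly one TestResult entity.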

    @classmethod
    def get_or_insert_from_parsed_json(cls, test_name, build, result):
        key_name = cls.key_name(build, test_name)

        def _float_or_none(dictionary, key):
            value = dictionary.get(key)
            if value:
                return float(value)
            return None

        if not isinstance(result, dict):
            return cls.get_or_insert(key_name, name=test_name, build=build, value=float(result))

        return cls.get_or_insert(key_name, name=test_name, build=build, value=float(result['avg']),
            valueMedian=_float_or_none(result, 'median'), valueStdev=_float_or_none(result, 'stdev'),
            valueMin=_float_or_none(result, 'min'), valueMax=_float_or_none(result, 'max'))

    def replace_to_change_test_name(self, new_name):
        clone = TestResult(key_name=TestResult.key_name(self.build, new_name), name=new_name, build=self.build,
            value=self.value, valueMedian=self.valueMedian, valueStdev=self.valueStdev, valueMin=self.valueMin, valueMax=self.valueMax)
        clone.put()
        self.delete()
        return clone


class ReportLog(db.Model):
    timestamp = db.DateTimeProperty(required=True)
    headers = db.TextProperty()
    payload = db.TextProperty()
    commit = db.BooleanProperty()

    def _parsed_payload(self):
        if self.__dict__.get('_parsed') == None:
            try:
                self._parsed = json.loads(self.payload)
            except ValueError:
                self._parsed = False
        return self._parsed

    def get_value(self, keyName):
        if not self._parsed_payload():
            return None
        return self._parsed.get(keyName)

    def results(self):
        return self.get_value('results')

    def builder(self):
        return self._model_by_key_name_in_payload(Builder, 'builder-name')

    def branch(self):
        return self._model_by_key_name_in_payload(Branch, 'branch')

    def platform(self):
        return self._model_by_key_name_in_payload(Platform, 'platform')

    def build_number(self):
        return self._integer_in_payload('build-number')

    def webkit_revision(self):
        return self._integer_in_payload('webkit-revision')

    def chromium_revision(self):
        return self._integer_in_payload('chromium-revision')

    def _model_by_key_name_in_payload(self, model, keyName):
        key = self.get_value(keyName)
        if not key:
            return None
        return model.get_by_key_name(key)

    def _integer_in_payload(self, keyName):
        try:
            return int(self.get_value(keyName))
        except (TypeError, ValueError):
            return None

    # FIXME: We also have timestamp as a member variable.
    def timestamp(self):
        try:
            return datetime.fromtimestamp(self._integer_in_payload('timestamp'))
        except (TypeError, ValueError):
            return None


class PersistentCache(db.Model):
    value = db.TextProperty(required=True)

    @staticmethod
    def set_cache(name, value):
        memcache.set(name, value)
        PersistentCache(key_name=name, value=value).put()

    @staticmethod
    def get_cache(name):
        value = memcache.get(name)
        if value:
            return value
        cache = PersistentCache.get_by_key_name(name)
        if not cache:
            return None
        memcache.set(name, cache.value)
        return cache.value
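
    # Design note: set_cache writes through to both memcache and the datastore, so get_cache can
    # serve from memcache on the fast path and fall back to the datastore (repopulating memcache)
    # after an eviction; a name missing from both stores yields None.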


class Runs(db.Model):
    branch = db.ReferenceProperty(Branch, required=True, collection_name='runs_branch')
    platform = db.ReferenceProperty(Platform, required=True, collection_name='runs_platform')
    test = db.ReferenceProperty(Test, required=True, collection_name='runs_test')
    json_runs = db.TextProperty()
    json_averages = db.TextProperty()
    json_min = db.FloatProperty()
    json_max = db.FloatProperty()

    @staticmethod
    def _generate_runs(branch, platform, test_name):
        builds = Build.all()
        builds.filter('branch =', branch)
        builds.filter('platform =', platform)

        for build in builds:
            results = TestResult.all()
            results.filter('name =', test_name)
            results.filter('build =', build)
            for result in results:
                yield build, result

    @staticmethod
    def _entry_from_build_and_result(build, result):
        builder_id = build.builder.key().id()
        timestamp = mktime(build.timestamp.timetuple())
        statistics = None
        supplementary_revisions = None

        if result.valueStdev != None and result.valueMin != None and result.valueMax != None:
            statistics = {'stdev': result.valueStdev, 'min': result.valueMin, 'max': result.valueMax}

        if build.chromiumRevision != None:
            supplementary_revisions = {'Chromium': build.chromiumRevision}

        return [result.key().id(),
            [build.key().id(), build.buildNumber, build.revision, supplementary_revisions],
            timestamp, result.value, 0,  # runNumber
            [],  # annotations
            builder_id, statistics]

    @staticmethod
    def _key_name(branch_id, platform_id, test_id):
        return 'runs:%d,%d,%d' % (test_id, branch_id, platform_id)
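
    # Note the ordering: the arguments arrive as (branch_id, platform_id, test_id) but the key
    # name lists the test first, e.g. _key_name(1, 2, 7) -> 'runs:7,1,2' (hypothetical ids).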

    @classmethod
    def update_or_insert(cls, branch, platform, test):
        key_name = cls._key_name(branch.id, platform.id, test.id)
        runs = Runs(key_name=key_name, branch=branch, platform=platform, test=test, json_runs='', json_averages='')

        for build, result in cls._generate_runs(branch, platform, test.name):
            runs.update_incrementally(build, result, check_duplicates_and_save=False)

        runs.put()
        memcache.set(key_name, runs.to_json())
        return runs

    def update_incrementally(self, build, result, check_duplicates_and_save=True):
        new_entry = Runs._entry_from_build_and_result(build, result)

        # Check for duplicate entries
        if check_duplicates_and_save:
            revision_is_in_runs = str(build.revision) in json.loads('{' + self.json_averages + '}')
            if revision_is_in_runs and new_entry[1] in [entry[1] for entry in json.loads('[' + self.json_runs + ']')]:
                return

        if self.json_runs:
            self.json_runs += ','

        if self.json_averages:
            self.json_averages += ','

        self.json_runs += json.dumps(new_entry)
        # FIXME: Calculate the average. In practice, we wouldn't have more than one value for a given revision.
        self.json_averages += '"%d": %f' % (build.revision, result.value)
        self.json_min = min(self.json_min, result.value) if self.json_min != None else result.value
        self.json_max = max(self.json_max, result.value)

        if check_duplicates_and_save:
            self.put()
            memcache.set(self.key().name(), self.to_json())
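
    # Accumulation sketch (hypothetical values): json_runs grows as comma-separated entries such as
    # '[1, [2, 3, 104856, null], 1332892800.0, 123.4, 0, [], 5, null]' and json_averages as pairs
    # such as '"104856": 123.400000'; to_json() below wraps them in '[...]' and '{...}' to form the
    # final JSON document.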

    @staticmethod
    def get_by_objects(branch, platform, test):
        return Runs.get_by_key_name(Runs._key_name(branch.id, platform.id, test.id))

    @classmethod
    def json_by_ids(cls, branch_id, platform_id, test_id):
        key_name = cls._key_name(branch_id, platform_id, test_id)
        runs_json = memcache.get(key_name)
        if not runs_json:
            runs = cls.get_by_key_name(key_name)
            if not runs:
                return None
            runs_json = runs.to_json()
            memcache.set(key_name, runs_json)
        return runs_json

    def to_json(self):
        # date_range is never used by common.js.
        return '{"test_runs": [%s], "averages": {%s}, "min": %s, "max": %s, "date_range": null, "stat": "ok"}' % (self.json_runs,
            self.json_averages, str(self.json_min) if self.json_min else 'null', str(self.json_max) if self.json_max else 'null')

    # FIXME: Use data in JSON to compute values to avoid iterating through the datastore.
    def chart_params(self, display_days, now=None):
        # Compute "now" at call time rather than as a default argument, which would be frozen at import time.
        if now is None:
            now = datetime.now().replace(hour=12, minute=0, second=0, microsecond=0)

        chart_data_x = []
        chart_data_y = []
        end_time = now
        start_timestamp = mktime((end_time - timedelta(display_days)).timetuple())
        end_timestamp = mktime(end_time.timetuple())

        for build, result in self._generate_runs(self.branch, self.platform, self.test.name):
            timestamp = mktime(build.timestamp.timetuple())
            if timestamp < start_timestamp or timestamp > end_timestamp:
                continue
            chart_data_x.append(timestamp)
            chart_data_y.append(result.value)

        dates = [end_time - timedelta(display_days / 7.0 * (7 - i)) for i in range(0, 8)]

        y_max = max(chart_data_y) * 1.1
        y_axis_label_step = int(y_max / 5 + 0.5)  # This won't work for decimal numbers

        return {
            'cht': 'lxy',  # Line chart with paired X and Y coordinates
            'chxt': 'x,y',  # Display both X and Y axes
            'chxl': '0:|' + '|'.join([date.strftime('%b %d') for date in dates]),  # X-axis labels
            'chxr': '1,0,%f,%f' % (int(y_max + 0.5), y_axis_label_step),  # Y-axis range: min=0, max, step
            'chds': '%f,%f,%f,%f' % (start_timestamp, end_timestamp, 0, y_max),  # X, Y data range
            'chxs': '1,676767,11.167,0,l,676767',  # Y-axis label: 1,color,font-size,centered on tick,axis line/no ticks,tick color
            'chs': '360x240',  # Image size: 360px by 240px
            'chco': 'ff0000',  # Plot line color
            'chg': '%f,20,0,0' % (100 / (len(dates) - 1)),  # X, Y grid line step sizes - max is 100.
            'chls': '3',  # Line thickness
            'chf': 'bg,s,eff6fd',  # Light blue background
            'chd': 't:' + ','.join([str(x) for x in chart_data_x]) + '|' + ','.join([str(y) for y in chart_data_y]),  # X, Y data
        }
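
    # These are Google Chart API query parameters; a sketch of how a caller might turn them into an
    # image URL (the endpoint is assumed here, not part of this module):
    #     import urllib
    #     url = 'http://chart.apis.google.com/chart?' + urllib.urlencode(runs.chart_params(30))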


class DashboardImage(db.Model):
    image = db.BlobProperty(required=True)
    createdAt = db.DateTimeProperty(required=True, auto_now=True)

    @staticmethod
    def create(branch_id, platform_id, test_id, display_days, image):
        key_name = DashboardImage.key_name(branch_id, platform_id, test_id, display_days)
        instance = DashboardImage(key_name=key_name, image=image)
        instance.put()
        memcache.set('dashboard-image:' + key_name, image)
        return instance

    @staticmethod
    def get_image(branch_id, platform_id, test_id, display_days):
        key_name = DashboardImage.key_name(branch_id, platform_id, test_id, display_days)
        image = memcache.get('dashboard-image:' + key_name)
        if not image:
            instance = DashboardImage.get_by_key_name(key_name)
            image = instance.image
            memcache.set('dashboard-image:' + key_name, image)
        return image

    @classmethod
    def needs_update(cls, branch_id, platform_id, test_id, display_days, now=None):
        # Compute "now" at call time rather than as a default argument, which would be frozen at import time.
        if now is None:
            now = datetime.now()
        if display_days < 10:
            return True
        image = DashboardImage.get_by_key_name(cls.key_name(branch_id, platform_id, test_id, display_days))
        duration = math.sqrt(display_days) / 10
        # e.g. 13 hours for 30 days, 23 hours for 90 days, and 46 hours for 365 days
        return not image or image.createdAt < now - timedelta(duration)
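
    # Checking the arithmetic behind the comment above: timedelta(duration) is in days, and
    # duration = sqrt(display_days) / 10 gives sqrt(30)/10 ~= 0.55 days (~13 h),
    # sqrt(90)/10 ~= 0.95 days (~23 h), and sqrt(365)/10 ~= 1.91 days (~46 h).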

    @staticmethod
    def key_name(branch_id, platform_id, test_id, display_days):
        return '%d:%d:%d:%d' % (branch_id, platform_id, test_id, display_days)