Commit ad9ede13 authored by Miquel Torres
Browse files

Merge branch 'add-flake-config'

parents 813ac711 42be6f1a
......@@ -2,13 +2,18 @@ language: python
python:
- 2.7
- 3.4
branches:
only:
- master
env:
- DJANGO_VERSION=1.8.8
- DJANGO_VERSION=1.6.11
- DJANGO_VERSION=1.8.17
- DJANGO_VERSION=1.9.12
install:
- pip install flake8
- pip install -q Django==$DJANGO_VERSION
- python setup.py install
before_script:
flake8 codespeed
script:
- python setup.py test
- python manage.py test codespeed
......@@ -6,73 +6,67 @@ from codespeed.models import (Project, Revision, Executable, Benchmark, Branch,
from django.contrib import admin
@admin.register(Project)
class ProjectAdmin(admin.ModelAdmin):
    """Admin options for Project: show repository details in the changelist."""
    list_display = ('name', 'repo_type', 'repo_path', 'track')
    # NOTE: the @admin.register decorator performs the registration; the old
    # admin.site.register(Project, ProjectAdmin) call was removed because
    # keeping both raises admin.sites.AlreadyRegistered at import time.
@admin.register(Branch)
class BranchAdmin(admin.ModelAdmin):
    """Admin options for Branch: list name and project, filter by project."""
    list_display = ('name', 'project')
    list_filter = ('project',)
    # Registration happens via the decorator; a trailing
    # admin.site.register(Branch, BranchAdmin) would raise AlreadyRegistered.
@admin.register(Revision)
class RevisionAdmin(admin.ModelAdmin):
    """Admin options for Revision: commit metadata columns plus search."""
    list_display = ('commitid', 'branch', 'tag', 'date')
    list_filter = ('branch__project', 'branch', 'tag', 'date')
    search_fields = ('commitid', 'tag')
    # Registered via the decorator; the redundant admin.site.register call
    # (which would raise AlreadyRegistered) was dropped.
@admin.register(Executable)
class ExecutableAdmin(admin.ModelAdmin):
    """Admin options for Executable: searchable, ordered by name."""
    list_display = ('name', 'description', 'id', 'project')
    list_filter = ('project',)
    ordering = ['name']
    search_fields = ('name', 'description', 'project__name')
    # Registered via the decorator; duplicate admin.site.register removed
    # (double registration raises AlreadyRegistered).
@admin.register(Benchmark)
class BenchmarkAdmin(admin.ModelAdmin):
    """Admin options for Benchmark: full metadata columns, ordered by name."""
    list_display = ('name', 'benchmark_type', 'data_type', 'description',
                    'units_title', 'units', 'lessisbetter',
                    'default_on_comparison')
    # Kept the single PEP 8-spaced assignment; the duplicate
    # list_filter = ('data_type','lessisbetter') line was stale and removed.
    list_filter = ('data_type', 'lessisbetter')
    ordering = ['name']
    search_fields = ('name', 'description')
    # Registered via the decorator; redundant admin.site.register removed
    # (double registration raises AlreadyRegistered).
@admin.register(Environment)
class EnvironmentAdmin(admin.ModelAdmin):
    """Admin options for Environment: hardware/OS columns, all searchable."""
    list_display = ('name', 'cpu', 'memory', 'os', 'kernel')
    ordering = ['name']
    search_fields = ('name', 'cpu', 'memory', 'os', 'kernel')
    # Registered via the decorator; redundant admin.site.register removed
    # (double registration raises AlreadyRegistered).
@admin.register(Result)
class ResultAdmin(admin.ModelAdmin):
    """Admin options for Result: show the full benchmark-run context."""
    list_display = ('revision', 'benchmark', 'executable', 'environment',
                    'value', 'date')
    list_filter = ('environment', 'executable', 'date', 'benchmark')
    # Registered via the decorator; redundant admin.site.register removed
    # (double registration raises AlreadyRegistered).
def recalculate_report(modeladmin, request, queryset):
    """Admin action: re-save every selected report.

    Re-saving presumably triggers the Report model's recalculation logic
    on save() — the action is labeled "Recalculate reports"; confirm
    against Report.save() if in doubt.
    """
    for report_obj in queryset:
        report_obj.save()


recalculate_report.short_description = "Recalculate reports"
@admin.register(Report)
class ReportAdmin(admin.ModelAdmin):
    """Admin options for Report, exposing the bulk 'recalculate' action."""
    list_display = ('revision', 'summary', 'colorcode')
    list_filter = ('environment', 'executable')
    ordering = ['-revision']
    actions = [recalculate_report]
    # Registered via the decorator; redundant admin.site.register removed
    # (double registration raises AlreadyRegistered).
from .logs import get_logs
from .logs import get_logs # noqa
......@@ -69,23 +69,29 @@ def getlogs(endrev, startrev):
tag = ""
cmd = ["git", "describe", "--tags", commit_id]
Popen(cmd, stdout=PIPE, stderr=PIPE, cwd=working_copy)
proc = Popen(cmd, stdout=PIPE, stderr=PIPE, cwd=working_copy)
try:
stdout, stderr = p.communicate()
stdout, stderr = proc.communicate()
except ValueError:
stdout = b''
stderr = b''
if p.returncode == 0:
if proc.returncode == 0:
tag = stdout
date = datetime.datetime.fromtimestamp(
int(date_t)).strftime("%Y-%m-%d %H:%M:%S")
logs.append({'date': date, 'message': subject, 'commitid': commit_id,
'author': author_name, 'author_email': author_email,
'body': body, 'short_commit_id': short_commit_id,
'tag': tag})
logs.append({
'date': date,
'message': subject,
'commitid': commit_id,
'author': author_name,
'author_email': author_email,
'body': body,
'short_commit_id': short_commit_id,
'tag': tag
})
return logs
......@@ -19,7 +19,7 @@ def updaterepo(project, update=True):
return
p = Popen(['hg', 'pull', '-u'], stdout=PIPE, stderr=PIPE,
cwd=project.working_copy)
cwd=project.working_copy)
stdout, stderr = p.communicate()
if p.returncode != 0 or stderr:
......@@ -32,7 +32,7 @@ def updaterepo(project, update=True):
cmd = ['hg', 'clone', project.repo_path, project.repo_name]
p = Popen(cmd, stdout=PIPE, stderr=PIPE,
cwd=settings.REPOSITORY_BASE_PATH)
cwd=settings.REPOSITORY_BASE_PATH)
logger.debug('Cloning Mercurial repo {0} for project {1}'.format(
project.repo_path, project))
stdout, stderr = p.communicate()
......@@ -48,9 +48,13 @@ def updaterepo(project, update=True):
def getlogs(endrev, startrev):
updaterepo(endrev.branch.project, update=False)
cmd = ["hg", "log",
"-r", "%s::%s" % (startrev.commitid, endrev.commitid),
"--template", "{rev}:{node|short}\n{node}\n{author|user}\n{author|email}\n{date}\n{tags}\n{desc}\n=newlog=\n"]
cmd = [
"hg", "log",
"-r", "%s::%s" % (startrev.commitid, endrev.commitid),
"--template",
("{rev}:{node|short}\n{node}\n{author|user}\n{author|email}"
"\n{date}\n{tags}\n{desc}\n=newlog=\n")
]
working_copy = endrev.branch.project.working_copy
p = Popen(cmd, stdout=PIPE, stderr=PIPE, cwd=working_copy)
......@@ -66,8 +70,8 @@ def getlogs(endrev, startrev):
elements = log.split('\n')[:-1]
if len(elements) < 7:
# "Malformed" log
logs.append(
{'date': '-', 'message': 'error parsing log', 'commitid': '-'})
logs.append({
'date': '-', 'message': 'error parsing log', 'commitid': '-'})
else:
short_commit_id = elements.pop(0)
commit_id = elements.pop(0)
......@@ -81,13 +85,19 @@ def getlogs(endrev, startrev):
# Parse date
date = date.split('-')[0]
date = datetime.datetime.fromtimestamp(float(date)).strftime("%Y-%m-%d %H:%M:%S")
date = datetime.datetime.fromtimestamp(
float(date)).strftime("%Y-%m-%d %H:%M:%S")
# Add changeset info
logs.append({
'date': date, 'author': author_name,
'author_email': author_email, 'message': message,
'short_commit_id': short_commit_id, 'commitid': commit_id, 'tag': tag})
'date': date,
'author': author_name,
'author_email': author_email,
'message': message,
'short_commit_id': short_commit_id,
'commitid': commit_id,
'tag': tag
})
# Remove last log here because mercurial saves the short hast as commitid now
if len(logs) > 1 and logs[-1].get('short_commit_id') == startrev.commitid:
logs.pop()
......
# -*- coding: utf-8 -*-
"""Subversion commit logs support"""
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from datetime import datetime
......@@ -32,7 +31,9 @@ def getlogs(newrev, startrev):
loglimit = 200
def get_login(realm, username, may_save):
return True, newrev.branch.project.repo_user, newrev.branch.project.repo_pass, False
repo_user = newrev.branch.project.repo_user
repo_pass = newrev.branch.project.repo_pass
return True, repo_user, repo_pass, False
client = pysvn.Client()
if newrev.branch.project.repo_user != "":
......@@ -43,7 +44,7 @@ def getlogs(newrev, startrev):
client.log(
newrev.branch.project.repo_path,
revision_start=pysvn.Revision(
pysvn.opt_revision_kind.number, startrev.commitid
pysvn.opt_revision_kind.number, startrev.commitid
),
revision_end=pysvn.Revision(
pysvn.opt_revision_kind.number, newrev.commitid
......
......@@ -47,7 +47,7 @@ def gen_image_from_results(result_data, width, height):
font_sizes[idx] = 8
elif value < 1000:
font_sizes[idx] = 12
if result_data['relative']:
font_sizes[0] -= 2
......
......@@ -122,7 +122,7 @@ class Revision(models.Model):
raise ValidationError("Invalid commit id %s" % self.commitid)
if self.branch.project.repo_type == "S":
try:
long(self.commitid)
int(self.commitid)
except ValueError:
raise ValidationError("Invalid SVN commit id %s" % self.commitid)
......
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for the codespeed app.

    forwards() creates the Project, Revision, Executable, Benchmark,
    Environment, Result and Report tables with their unique constraints;
    backwards() drops them again. Auto-generated boilerplate — do not
    hand-edit the frozen ORM definitions below.
    """

    def forwards(self, orm):
        """Create all codespeed tables and their unique constraints."""
        # Adding model 'Project'
        db.create_table('codespeed_project', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=30)),
            ('repo_type', self.gf('django.db.models.fields.CharField')(default='N', max_length=1)),
            ('repo_path', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
            ('repo_user', self.gf('django.db.models.fields.CharField')(max_length=100, blank=True)),
            ('repo_pass', self.gf('django.db.models.fields.CharField')(max_length=100, blank=True)),
            ('track', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ))
        db.send_create_signal('codespeed', ['Project'])

        # Adding model 'Revision'
        db.create_table('codespeed_revision', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('commitid', self.gf('django.db.models.fields.CharField')(max_length=42)),
            ('project', self.gf('django.db.models.fields.related.ForeignKey')(related_name='revisions', to=orm['codespeed.Project'])),
            ('tag', self.gf('django.db.models.fields.CharField')(max_length=20, blank=True)),
            ('date', self.gf('django.db.models.fields.DateTimeField')(null=True)),
            ('message', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('author', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
        ))
        db.send_create_signal('codespeed', ['Revision'])

        # Adding unique constraint on 'Revision', fields ['commitid', 'project']
        db.create_unique('codespeed_revision', ['commitid', 'project_id'])

        # Adding model 'Executable'
        db.create_table('codespeed_executable', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=30)),
            ('description', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
            ('project', self.gf('django.db.models.fields.related.ForeignKey')(related_name='executables', to=orm['codespeed.Project'])),
        ))
        db.send_create_signal('codespeed', ['Executable'])

        # Adding model 'Benchmark'
        db.create_table('codespeed_benchmark', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=30)),
            ('benchmark_type', self.gf('django.db.models.fields.CharField')(default='C', max_length=1)),
            ('description', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
            ('units_title', self.gf('django.db.models.fields.CharField')(default='Time', max_length=30)),
            ('units', self.gf('django.db.models.fields.CharField')(default='seconds', max_length=20)),
            ('lessisbetter', self.gf('django.db.models.fields.BooleanField')(default=True)),
        ))
        db.send_create_signal('codespeed', ['Benchmark'])

        # Adding model 'Environment'
        db.create_table('codespeed_environment', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=30)),
            ('cpu', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
            ('memory', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
            ('os', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
            ('kernel', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
        ))
        db.send_create_signal('codespeed', ['Environment'])

        # Adding model 'Result'
        db.create_table('codespeed_result', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('value', self.gf('django.db.models.fields.FloatField')()),
            ('std_dev', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
            ('val_min', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
            ('val_max', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
            ('date', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('revision', self.gf('django.db.models.fields.related.ForeignKey')(related_name='results', to=orm['codespeed.Revision'])),
            ('executable', self.gf('django.db.models.fields.related.ForeignKey')(related_name='results', to=orm['codespeed.Executable'])),
            ('benchmark', self.gf('django.db.models.fields.related.ForeignKey')(related_name='results', to=orm['codespeed.Benchmark'])),
            ('environment', self.gf('django.db.models.fields.related.ForeignKey')(related_name='results', to=orm['codespeed.Environment'])),
        ))
        db.send_create_signal('codespeed', ['Result'])

        # Adding unique constraint on 'Result', fields ['revision', 'executable', 'benchmark', 'environment']
        db.create_unique('codespeed_result', ['revision_id', 'executable_id', 'benchmark_id', 'environment_id'])

        # Adding model 'Report'
        db.create_table('codespeed_report', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('revision', self.gf('django.db.models.fields.related.ForeignKey')(related_name='reports', to=orm['codespeed.Revision'])),
            ('environment', self.gf('django.db.models.fields.related.ForeignKey')(related_name='reports', to=orm['codespeed.Environment'])),
            ('executable', self.gf('django.db.models.fields.related.ForeignKey')(related_name='reports', to=orm['codespeed.Executable'])),
            ('summary', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
            ('colorcode', self.gf('django.db.models.fields.CharField')(default='none', max_length=10)),
            ('_tablecache', self.gf('django.db.models.fields.TextField')(blank=True)),
        ))
        db.send_create_signal('codespeed', ['Report'])

        # Adding unique constraint on 'Report', fields ['revision', 'executable', 'environment']
        db.create_unique('codespeed_report', ['revision_id', 'executable_id', 'environment_id'])

    def backwards(self, orm):
        """Drop all codespeed tables; constraints are removed first."""
        # Removing unique constraint on 'Report', fields ['revision', 'executable', 'environment']
        db.delete_unique('codespeed_report', ['revision_id', 'executable_id', 'environment_id'])

        # Removing unique constraint on 'Result', fields ['revision', 'executable', 'benchmark', 'environment']
        db.delete_unique('codespeed_result', ['revision_id', 'executable_id', 'benchmark_id', 'environment_id'])

        # Removing unique constraint on 'Revision', fields ['commitid', 'project']
        db.delete_unique('codespeed_revision', ['commitid', 'project_id'])

        # Deleting model 'Project'
        db.delete_table('codespeed_project')

        # Deleting model 'Revision'
        db.delete_table('codespeed_revision')

        # Deleting model 'Executable'
        db.delete_table('codespeed_executable')

        # Deleting model 'Benchmark'
        db.delete_table('codespeed_benchmark')

        # Deleting model 'Environment'
        db.delete_table('codespeed_environment')

        # Deleting model 'Result'
        db.delete_table('codespeed_result')

        # Deleting model 'Report'
        db.delete_table('codespeed_report')

    # Frozen ORM snapshot used by South to build the fake ORM passed into
    # forwards()/backwards(); auto-generated, keep in sync with models.py.
    models = {
        'codespeed.benchmark': {
            'Meta': {'object_name': 'Benchmark'},
            'benchmark_type': ('django.db.models.fields.CharField', [], {'default': "'C'", 'max_length': '1'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lessisbetter': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
            'units': ('django.db.models.fields.CharField', [], {'default': "'seconds'", 'max_length': '20'}),
            'units_title': ('django.db.models.fields.CharField', [], {'default': "'Time'", 'max_length': '30'})
        },
        'codespeed.environment': {
            'Meta': {'object_name': 'Environment'},
            'cpu': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'kernel': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'memory': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
            'os': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'})
        },
        'codespeed.executable': {
            'Meta': {'object_name': 'Executable'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'executables'", 'to': "orm['codespeed.Project']"})
        },
        'codespeed.project': {
            'Meta': {'object_name': 'Project'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
            'repo_pass': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'repo_path': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'repo_type': ('django.db.models.fields.CharField', [], {'default': "'N'", 'max_length': '1'}),
            'repo_user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'track': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
        },
        'codespeed.report': {
            'Meta': {'unique_together': "(('revision', 'executable', 'environment'),)", 'object_name': 'Report'},
            '_tablecache': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'colorcode': ('django.db.models.fields.CharField', [], {'default': "'none'", 'max_length': '10'}),
            'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reports'", 'to': "orm['codespeed.Environment']"}),
            'executable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reports'", 'to': "orm['codespeed.Executable']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'revision': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reports'", 'to': "orm['codespeed.Revision']"}),
            'summary': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'})
        },
        'codespeed.result': {
            'Meta': {'unique_together': "(('revision', 'executable', 'benchmark', 'environment'),)", 'object_name': 'Result'},
            'benchmark': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'results'", 'to': "orm['codespeed.Benchmark']"}),
            'date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'results'", 'to': "orm['codespeed.Environment']"}),
            'executable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'results'", 'to': "orm['codespeed.Executable']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'revision': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'results'", 'to': "orm['codespeed.Revision']"}),
            'std_dev': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'val_max': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'val_min': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'value': ('django.db.models.fields.FloatField', [], {})
        },
        'codespeed.revision': {
            'Meta': {'unique_together': "(('commitid', 'project'),)", 'object_name': 'Revision'},
            'author': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'commitid': ('django.db.models.fields.CharField', [], {'max_length': '42'}),
            'date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'message': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['codespeed.Project']"}),
            'tag': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'})
        }
    }

    complete_apps = ['codespeed']
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Follow-up South migration: widen Report.summary from 30 to 64 chars.

    Auto-generated boilerplate; the frozen ORM snapshot below already
    reflects the widened field.
    """

    def forwards(self, orm):
        """Alter 'Report.summary' to max_length=64."""
        # Changing field 'Report.summary'
        db.alter_column('codespeed_report', 'summary', self.gf('django.db.models.fields.CharField')(max_length=64))

    def backwards(self, orm):
        """Restore 'Report.summary' to max_length=30 (values may truncate)."""
        # Changing field 'Report.summary'
        db.alter_column('codespeed_report', 'summary', self.gf('django.db.models.fields.CharField')(max_length=30))

    # Frozen ORM snapshot used by South; auto-generated, do not hand-edit.
    models = {
        'codespeed.benchmark': {
            'Meta': {'object_name': 'Benchmark'},
            'benchmark_type': ('django.db.models.fields.CharField', [], {'default': "'C'", 'max_length': '1'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lessisbetter': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
            'units': ('django.db.models.fields.CharField', [], {'default': "'seconds'", 'max_length': '20'}),
            'units_title': ('django.db.models.fields.CharField', [], {'default': "'Time'", 'max_length': '30'})
        },
        'codespeed.environment': {
            'Meta': {'object_name': 'Environment'},
            'cpu': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'kernel': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'memory': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
            'os': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'})
        },
        'codespeed.executable': {
            'Meta': {'object_name': 'Executable'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'executables'", 'to': "orm['codespeed.Project']"})
        },
        'codespeed.project': {
            'Meta': {'object_name': 'Project'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
            'repo_pass': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'repo_path': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'repo_type': ('django.db.models.fields.CharField', [], {'default': "'N'", 'max_length': '1'}),
            'repo_user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'track': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
        },
        'codespeed.report': {
            'Meta': {'unique_together': "(('revision', 'executable', 'environment'),)", 'object_name': 'Report'},
            '_tablecache': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'colorcode': ('django.db.models.fields.CharField', [], {'default': "'none'", 'max_length': '10'}),
            'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reports'", 'to': "orm['codespeed.Environment']"}),
            'executable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reports'", 'to': "orm['codespeed.Executable']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'revision': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reports'", 'to': "orm['codespeed.Revision']"}),
            'summary': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'})
        },
        'codespeed.result': {
            'Meta': {'unique_together': "(('revision', 'executable', 'benchmark', 'environment'),)", 'object_name': 'Result'},
            'benchmark': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'results'", 'to': "orm['codespeed.Benchmark']"}),
            'date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'results'", 'to': "orm['codespeed.Environment']"}),
            'executable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'results'", 'to': "orm['codespeed.Executable']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'revision': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'results'", 'to': "orm['codespeed.Revision']"}),
            'std_dev': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'val_max': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'val_min': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'value': ('django.db.models.fields.FloatField', [], {})
        },
        'codespeed.revision': {
            'Meta': {'unique_together': "(('commitid', 'project'),)", 'object_name': 'Revision'},
            'author': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'commitid': ('django.db.models.fields.CharField', [], {'max_length': '42'}),
            'date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'message': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['codespeed.Project']"}),
            'tag': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'})
        }
    }

    complete_apps = ['codespeed']
# encoding: utf-8
import datetime
from south.db import db