Fixed lots of datasource issues. Still not working with the jobs model, however.

This commit is contained in:
Cameron Dawson 2013-04-16 16:45:51 -07:00
Parent ce6c095690
Commit f94c0d2796
5 changed files: 15 additions and 10 deletions

View file

@@ -29,7 +29,7 @@ def pytest_sessionstart(session):
     # this sets up a clean test-only database
     session.django_db_config = session.django_runner.setup_databases()
     # store the name of the test project based on user custom settings
-    session.project_name = "{0}testproj".format(prefix)
+    session.project_name = "project"
     increment_cache_key_prefix()
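
The hunk above pins the test project name and bumps a cache key prefix so cached entries from one test session do not leak into the next. The helper's implementation is not part of this commit; a minimal sketch of what increment_cache_key_prefix() could look like, with the counter key name invented for illustration:

from django.core.cache import cache

def increment_cache_key_prefix():
    # Keep a counter in the cache itself and fold it into the key prefix so
    # entries written by an earlier test session are effectively ignored.
    counter_key = "test-suite-key-prefix-counter"  # hypothetical key name
    try:
        counter = cache.incr(counter_key)
    except ValueError:
        # incr() raises ValueError when the key does not exist yet
        cache.set(counter_key, 1)
        counter = 1
    cache.key_prefix = "t{0}".format(counter)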

View file

@@ -92,7 +92,7 @@ class JobsModel(TreeherderModelBase):
         Retrieve JSON blobs from the objectstore.

         Does not claim rows for processing; should not be used for actually
-        processing JSON blobs into perftest schema.
+        processing JSON blobs into jobs schema.

         Used only by the `transfer_data` management command.
@@ -274,8 +274,8 @@ class JobsModel(TreeherderModelBase):
         return job_id

     def _insert_data(self, statement, placeholders, executemany=False):
-        self.sources["perftest"].dhub.execute(
-            proc='perftest.inserts.' + statement,
+        self.sources[self.CT_JOBS].dhub.execute(
+            proc='jobs.inserts.' + statement,
             debug_show=self.DEBUG,
             placeholders=placeholders,
             executemany=executemany,
@@ -286,7 +286,7 @@ class JobsModel(TreeherderModelBase):
         self._insert_data(statement, placeholders)
         return self._get_last_insert_id()

-    def _get_last_insert_id(self, source="perftest"):
+    def _get_last_insert_id(self, source=CT_JOBS):
         """Return last-inserted ID."""
         return self.sources[source].dhub.execute(
             proc='generic.selects.get_last_insert_id',
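
The last two hunks above repoint _insert_data() and _get_last_insert_id() from the hard-coded "perftest" source to the jobs content type. A self-contained sketch of that pattern follows; CT_JOBS, FakeHub and FakeSource are invented here so it runs without a MySQL datasource, and only the shape of the dhub.execute() calls mirrors the diff:

class FakeHub(object):
    """Records execute() calls instead of talking to MySQL."""
    def __init__(self):
        self.calls = []

    def execute(self, **kwargs):
        self.calls.append(kwargs)
        # pretend every insert produced row id 1
        return [{"id": 1}]


class FakeSource(object):
    def __init__(self):
        self.dhub = FakeHub()


class JobsModelSketch(object):
    CT_JOBS = "jobs"
    DEBUG = False

    def __init__(self):
        # sources maps a content type name to an object exposing a .dhub
        self.sources = {self.CT_JOBS: FakeSource()}

    def _insert_data(self, statement, placeholders, executemany=False):
        self.sources[self.CT_JOBS].dhub.execute(
            proc='jobs.inserts.' + statement,
            debug_show=self.DEBUG,
            placeholders=placeholders,
            executemany=executemany,
        )

    def _insert_data_and_get_id(self, statement, placeholders):
        self._insert_data(statement, placeholders)
        return self._get_last_insert_id()

    def _get_last_insert_id(self, source=CT_JOBS):
        # A bare CT_JOBS default only resolves while the constant is defined
        # in the same class body, as it is here; pulling it from a base class
        # would need self.CT_JOBS inside the method instead.
        rows = self.sources[source].dhub.execute(
            proc='generic.selects.get_last_insert_id',
            debug_show=self.DEBUG,
        )
        return rows[0]["id"]


model = JobsModelSketch()
print(model._insert_data_and_get_id("set_job_data", ["job-guid-1"]))  # -> 1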

View file

@@ -38,10 +38,13 @@ Are you sure you want to do this?

     Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
         else:
             confirm = 'yes'

         if confirm == 'yes':
             for sql_file in ('treeherder.sql.tmpl',
-                             'treeherder_reference_1.sql.tmpl'):
+                             'treeherder_reference_1.sql.tmpl',
+                             'project_jobs_1.sql.tmpl',
+                             'project_objectstore_1.sql.tmpl',
+                             ):
                 sql = open(os.path.join(options['template_path'], sql_file)).read()
                 cursor = connection.cursor()
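
The added entries extend the list of schema templates this init command loads, bringing in the per-project jobs and objectstore schemas. As a rough sketch of how such templates can be applied with the open()/cursor pattern shown above; the statement splitting and the lack of any placeholder substitution are assumptions, not taken from this diff:

import os

def load_sql_templates(connection, template_path, template_names):
    # Read each schema template and run it against the given connection.
    for sql_file in template_names:
        sql = open(os.path.join(template_path, sql_file)).read()
        cursor = connection.cursor()
        try:
            # Naive split on ';' keeps the example working with drivers that
            # reject multi-statement strings; templates containing stored
            # procedures would need smarter parsing.
            for statement in sql.split(';'):
                if statement.strip():
                    cursor.execute(statement)
        finally:
            cursor.close()

A caller would pass something like options['template_path'] and the tuple of template names from the hunk above.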

View file

@@ -4,6 +4,8 @@ import uuid
 import subprocess
 import os

+from datasource.bases.BaseHub import BaseHub
+from datasource.hubs.MySQL import MySQL
 from django.conf import settings
 from django.core.cache import cache
 from django.db import models
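
The two new imports pull in the datasource library that the jobs model uses to reach its per-project MySQL databases. A hedged sketch of the usual pairing, registering a source with BaseHub and opening a MySQL hub; the key names in the data-source dict below are assumptions, not copied from this repository:

from datasource.bases.BaseHub import BaseHub
from datasource.hubs.MySQL import MySQL

def connect_hub(name, db_name, procs_file):
    # Register the connection details under `name`, then build a hub that can
    # run named procs such as 'jobs.inserts.<statement>'.
    BaseHub.add_data_source({
        name: {
            "hub": "MySQL",            # assumed key names throughout
            "master_host": {
                "host": "localhost",
                "user": "treeherder",
                "passwd": "",
            },
            "default_db": db_name,
            "procs": [procs_file],
        }
    })
    return MySQL(name)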

View file

@@ -68,7 +68,7 @@ class SQLDataSource(object):

     def _get_datasource(self):
         candidate_sources = []
-        for source in DataSource.objects.cached():
+        for source in Datasource.objects.cached():
             if (source.project == self.project and
                     source.contenttype == self.contenttype):
                 candidate_sources.append(source)
@@ -93,7 +93,7 @@ class SQLDataSource(object):
         The database for the new dataset will be located on the same host.

         """
-        dataset = DataSource.objects.filter(
+        dataset = Datasource.objects.filter(
             project=self.project,
             contenttype=self.contenttype
         ).order_by("-dataset")[0].dataset + 1
@@ -159,7 +159,7 @@ class SQLDataSource(object):
         oauth_consumer_key = uuid.uuid4()
         oauth_consumer_secret = uuid.uuid4()

-        ds = DataSource.objects.create(
+        ds = Datasource.objects.create(
             host=host,
             project=project,
             contenttype=contenttype,
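
Renaming DataSource to Datasource lines these call sites up with the Django model they reference; the middle hunk also shows how a new dataset number is chosen, by taking the highest existing dataset for the project and contenttype and adding one. The cached() manager method used in the first hunk is not shown in this commit; one plausible implementation, with the cache key and timeout invented for illustration:

from django.core.cache import cache
from django.db import models

class DatasourceManager(models.Manager):
    CACHE_KEY = "datasource-objects"   # hypothetical cache key

    def cached(self):
        # Keep the full set of datasource rows in the cache so the repeated
        # per-request lookups avoid hitting the database.
        sources = cache.get(self.CACHE_KEY)
        if sources is None:
            sources = list(self.all())
            cache.set(self.CACHE_KEY, sources, 60 * 60)
        return sources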